diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000000..29084e8a33 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000000..d60aca5ff1 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,30 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/spanner/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/spanner/$1/$2 + - source: /google/spanner/admin/instance/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/spanner_admin_instance/$1/$2 + - source: /google/spanner/admin/database/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/spanner_admin_database/$1/$2 + +begin-after-commit-hash: b154da710c5c9eedee127c07f74b6158c9c22382 + diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 0000000000..1dfef96e3d --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,2 @@ +assign_issues: + - larkee \ No newline at end of file diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index fc281c05bd..6fe78aa798 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/.gitignore b/.gitignore index 708cdcc9eb..b4243ced74 100644 --- a/.gitignore +++ b/.gitignore @@ -45,6 +45,7 @@ pip-log.txt # Built documentation docs/_build +bigquery/docs/generated docs.metadata # Virtual environment diff --git a/.kokoro/release.sh b/.kokoro/release.sh index d15be7e62c..6bdc59e4b5 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-spanner python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 47b6a1fba3..a09b99531d 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-spanner/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32302e4883..8912e9b5d7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/CHANGELOG.md b/CHANGELOG.md index 8714b709df..3015454ac5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.4.0](https://www.github.com/googleapis/python-spanner/compare/v3.3.0...v3.4.0) (2021-04-29) + + +### Features + +* add samples for CMEK support ([#275](https://www.github.com/googleapis/python-spanner/issues/275)) ([f8d9bd3](https://www.github.com/googleapis/python-spanner/commit/f8d9bd33e04675a8dca148c2fae4a9133beebbca)) +* added support for numeric field for python decimal value ([#316](https://www.github.com/googleapis/python-spanner/issues/316)) ([070a171](https://www.github.com/googleapis/python-spanner/commit/070a1712dc34afb68105194060bb2fe6177fbac5)) +* **dbapi:** remove string conversion for numeric fields ([#317](https://www.github.com/googleapis/python-spanner/issues/317)) ([772aa3c](https://www.github.com/googleapis/python-spanner/commit/772aa3c2ffbdf3f863c09db176697b3ad70adbcf)) + + +### Bug Fixes + +* correctly set resume token when restarting streams ([#314](https://www.github.com/googleapis/python-spanner/issues/314)) ([0fcfc23](https://www.github.com/googleapis/python-spanner/commit/0fcfc2301246d3f20b6fbffc1deae06f16721ec7)) +* support INSERT from SELECT clause with args ([#306](https://www.github.com/googleapis/python-spanner/issues/306)) ([0dcda5e](https://www.github.com/googleapis/python-spanner/commit/0dcda5e21f8fb30ee611fddf0829684d86ced0ef)) + + +### Performance Improvements + +* use protobuf for metadata to reduce type conversions ([#325](https://www.github.com/googleapis/python-spanner/issues/325)) ([5110b9b](https://www.github.com/googleapis/python-spanner/commit/5110b9bc31804db9777a23fca60360119840640c)) + ## [3.3.0](https://www.github.com/googleapis/python-spanner/compare/v3.2.0...v3.3.0) (2021-03-25) diff --git a/UPGRADING.md 
b/UPGRADING.md index e90f2141bf..1a0bdfe19a 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -14,13 +14,13 @@ limitations under the License. # 2.0.0 Migration Guide -The 2.0 release of the `google-cloud-spanner` client is a significant update based on a +The 2.0 release of the `google-cloud-spanner` client is a significant update based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python). -It drops support for Python versions below 3.6. +It drops support for Python versions below 3.6. The handwritten client surfaces have minor changes which may require minimal updates to existing user code. -The generated client surfaces have substantial interface changes. Existing user code which uses these surfaces directly +The generated client surfaces have substantial interface changes. Existing user code which uses these surfaces directly will require significant updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. @@ -89,7 +89,7 @@ for database_pb in instance.list_databases(): > **WARNING**: Breaking change The library now handles pages for the user. Previously, the library would return a page generator which required a user -to then iterate over each page to get the resource. Now, the library handles iterating over the pages and only returns +to then iterate over each page to get the resource. Now, the library handles iterating over the pages and only returns the resource protos. **Before:** @@ -176,14 +176,14 @@ for database_pb in instance.list_databases(): Methods expect request objects. We provide scripts that will convert most common use cases. -* Install the library +* Install the library with `libcst`. 
```py -python3 -m pip install google-cloud-spanner +python3 -m pip install google-cloud-spanner[libcst] ``` * The scripts `fixup_spanner_v1_keywords.py`, `fixup_spanner_admin_database_v1_keywords.py`, and -`fixup_spanner_admin_instance_v1_keywords.py` are shipped with the library. They expect an input directory (with the +`fixup_spanner_admin_instance_v1_keywords.py` are shipped with the library. They expect an input directory (with the code to convert) and an empty destination directory. ```sh @@ -194,10 +194,10 @@ $ fixup_spanner_v1_keywords.py --input-directory .samples/ --output-directory sa >the handwritten surfaces e.g. `client.list_instances()` #### More details - + In `google-cloud-spanner<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters. - + **Before:** ```py def list_instances( @@ -210,14 +210,14 @@ def list_instances( metadata=None, ): ``` - - In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a + + In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. - - Some methods have additional keyword only parameters. The available parameters depend on the + + Some methods have additional keyword only parameters. The available parameters depend on the [`google.api.method_signature` annotation](https://github.com/googleapis/googleapis/blob/master/google/spanner/admin/instance/v1/spanner_instance_admin.proto#L86) specified by the API producer. - - + + **After:** ```py def list_instances( @@ -230,13 +230,13 @@ def list_instances( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: ``` - + > **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive. > Passing both will result in an error. 
- - + + Both of these calls are valid: - + ```py response = client.list_instances( request={ @@ -244,16 +244,16 @@ def list_instances( } ) ``` - + ```py response = client.execute_sql( parent=project_name, ) ``` - + This call is invalid because it mixes `request` with a keyword argument `parent`. Executing this code will result in an error. - + ```py response = client.execute_sql( request={}, diff --git a/docs/_static/custom.css b/docs/_static/custom.css index bcd37bbd3c..b0a295464b 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/docs/conf.py b/docs/conf.py index ee774dd1c7..f45ea05991 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # google-cloud-spanner documentation build configuration file # diff --git a/google/cloud/spanner_admin_database_v1/proto/spanner_database_admin.proto b/google/cloud/spanner_admin_database_v1/proto/spanner_database_admin.proto index ac771bc061..f09cf073b2 100644 --- a/google/cloud/spanner_admin_database_v1/proto/spanner_database_admin.proto +++ b/google/cloud/spanner_admin_database_v1/proto/spanner_database_admin.proto @@ -736,7 +736,7 @@ message RestoreDatabaseRequest { // to. If this field is not specified, the restored database will use the same // encryption configuration as the backup by default, namely // [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - // = `USE_CONFIG_DEFAULT_OR_DATABASE_ENCRYPTION`. + // = `USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION`. RestoreDatabaseEncryptionConfig encryption_config = 4 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index 31b97af061..e40e0b1960 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -103,8 +103,36 @@ class DatabaseAdminAsyncClient: DatabaseAdminClient.parse_common_location_path ) - from_service_account_info = DatabaseAdminClient.from_service_account_info - from_service_account_file = DatabaseAdminClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. 
+ """ + return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. + """ + return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -236,6 +264,7 @@ async def list_databases( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -431,6 +460,7 @@ async def get_database( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -559,6 +589,7 @@ async def update_database_ddl( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -642,6 +673,7 @@ async def drop_database( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -726,6 +758,7 @@ async def get_database_ddl( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1007,6 +1040,7 @@ async def get_iam_policy( predicate=retries.if_exception_type( 
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -1302,6 +1336,7 @@ async def get_backup( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1402,6 +1437,7 @@ async def update_backup( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1481,6 +1517,7 @@ async def delete_backup( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1566,6 +1603,7 @@ async def list_backups( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1795,6 +1833,7 @@ async def list_database_operations( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1897,6 +1936,7 @@ async def list_backup_operations( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index 83cfeb248f..4dfb39e47b 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -1082,11 +1082,10 @@ def set_iam_policy( "the individual field arguments should be set." 
) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.SetIamPolicyRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.SetIamPolicyRequest() @@ -1219,11 +1218,10 @@ def get_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.GetIamPolicyRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.GetIamPolicyRequest() @@ -1311,11 +1309,10 @@ def test_iam_permissions( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.TestIamPermissionsRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.TestIamPermissionsRequest() diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index 779f02e840..0e9a7e50c7 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -79,10 +79,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -90,6 +90,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -99,20 +102,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -125,6 +125,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -141,6 +142,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -154,6 +156,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -167,6 +170,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -180,6 +184,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -196,6 +201,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -217,6 +223,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -230,6 +237,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -243,6 +251,7 @@ def _prep_wrapped_messages(self, client_info): 
predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -256,6 +265,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -272,6 +282,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -285,6 +296,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index 665ed4fc15..b695a5a113 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -118,7 +118,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -126,70 +129,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -197,18 +180,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -222,7 +195,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index 25229d58cd..cac4b1e2b6 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -73,7 +73,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -151,10 +151,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing your own client library. Raises: @@ -163,7 +163,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -171,70 +174,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -242,18 +225,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/spanner_admin_database_v1/types/__init__.py b/google/cloud/spanner_admin_database_v1/types/__init__.py index 9749add377..a1316e789a 100644 --- a/google/cloud/spanner_admin_database_v1/types/__init__.py +++ b/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -15,80 +15,80 @@ # limitations under the License. 
# -from .common import ( - OperationProgress, - EncryptionConfig, - EncryptionInfo, -) from .backup import ( Backup, - CreateBackupRequest, + BackupInfo, + CreateBackupEncryptionConfig, CreateBackupMetadata, - UpdateBackupRequest, - GetBackupRequest, + CreateBackupRequest, DeleteBackupRequest, - ListBackupsRequest, - ListBackupsResponse, + GetBackupRequest, ListBackupOperationsRequest, ListBackupOperationsResponse, - BackupInfo, - CreateBackupEncryptionConfig, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .common import ( + EncryptionConfig, + EncryptionInfo, + OperationProgress, ) from .spanner_database_admin import ( - RestoreInfo, - Database, - ListDatabasesRequest, - ListDatabasesResponse, - CreateDatabaseRequest, CreateDatabaseMetadata, - GetDatabaseRequest, - UpdateDatabaseDdlRequest, - UpdateDatabaseDdlMetadata, + CreateDatabaseRequest, + Database, DropDatabaseRequest, GetDatabaseDdlRequest, GetDatabaseDdlResponse, + GetDatabaseRequest, ListDatabaseOperationsRequest, ListDatabaseOperationsResponse, - RestoreDatabaseRequest, + ListDatabasesRequest, + ListDatabasesResponse, + OptimizeRestoredDatabaseMetadata, RestoreDatabaseEncryptionConfig, RestoreDatabaseMetadata, - OptimizeRestoredDatabaseMetadata, + RestoreDatabaseRequest, + RestoreInfo, + UpdateDatabaseDdlMetadata, + UpdateDatabaseDdlRequest, RestoreSourceType, ) __all__ = ( - "OperationProgress", - "EncryptionConfig", - "EncryptionInfo", "Backup", - "CreateBackupRequest", + "BackupInfo", + "CreateBackupEncryptionConfig", "CreateBackupMetadata", - "UpdateBackupRequest", - "GetBackupRequest", + "CreateBackupRequest", "DeleteBackupRequest", - "ListBackupsRequest", - "ListBackupsResponse", + "GetBackupRequest", "ListBackupOperationsRequest", "ListBackupOperationsResponse", - "BackupInfo", - "CreateBackupEncryptionConfig", - "RestoreInfo", - "Database", - "ListDatabasesRequest", - "ListDatabasesResponse", - "CreateDatabaseRequest", + "ListBackupsRequest", + 
"ListBackupsResponse", + "UpdateBackupRequest", + "EncryptionConfig", + "EncryptionInfo", + "OperationProgress", "CreateDatabaseMetadata", - "GetDatabaseRequest", - "UpdateDatabaseDdlRequest", - "UpdateDatabaseDdlMetadata", + "CreateDatabaseRequest", + "Database", "DropDatabaseRequest", "GetDatabaseDdlRequest", "GetDatabaseDdlResponse", + "GetDatabaseRequest", "ListDatabaseOperationsRequest", "ListDatabaseOperationsResponse", - "RestoreDatabaseRequest", + "ListDatabasesRequest", + "ListDatabasesResponse", + "OptimizeRestoredDatabaseMetadata", "RestoreDatabaseEncryptionConfig", "RestoreDatabaseMetadata", - "OptimizeRestoredDatabaseMetadata", + "RestoreDatabaseRequest", + "RestoreInfo", + "UpdateDatabaseDdlMetadata", + "UpdateDatabaseDdlRequest", "RestoreSourceType", ) diff --git a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py index c7309dbbde..278d5e6b95 100644 --- a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -540,7 +540,7 @@ class RestoreDatabaseRequest(proto.Message): not specified, the restored database will use the same encryption configuration as the backup by default, namely [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_DATABASE_ENCRYPTION``. + = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. 
""" parent = proto.Field(proto.STRING, number=1) diff --git a/google/cloud/spanner_admin_instance_v1/proto/spanner_instance_admin.proto b/google/cloud/spanner_admin_instance_v1/proto/spanner_instance_admin.proto index 54767bf263..69043c1b37 100644 --- a/google/cloud/spanner_admin_instance_v1/proto/spanner_instance_admin.proto +++ b/google/cloud/spanner_admin_instance_v1/proto/spanner_instance_admin.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -373,7 +373,7 @@ message Instance { // either omitted or set to `CREATING`. For // [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], the state must be // either omitted or set to `READY`. - State state = 6; + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Cloud Labels are a flexible and lightweight mechanism for organizing cloud // resources into groups that reflect a customer's organizational needs and diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index a83b1a2c1d..f2a9c36243 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -106,8 +106,36 @@ class InstanceAdminAsyncClient: InstanceAdminClient.parse_common_location_path ) - from_service_account_info = InstanceAdminClient.from_service_account_info - from_service_account_file = InstanceAdminClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -240,6 +268,7 @@ async def list_instance_configs( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -331,6 +360,7 @@ async def get_instance_config( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -416,6 +446,7 @@ async def list_instances( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -504,6 +535,7 @@ async def get_instance( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -883,6 +915,7 @@ async def delete_instance( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, 
client_info=DEFAULT_CLIENT_INFO, @@ -1155,6 +1188,7 @@ async def get_iam_policy( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index 2dc7b8e6c3..99cad77f03 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -1179,11 +1179,10 @@ def set_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.SetIamPolicyRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.SetIamPolicyRequest() @@ -1312,11 +1311,10 @@ def get_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.GetIamPolicyRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.GetIamPolicyRequest() @@ -1401,11 +1399,10 @@ def test_iam_permissions( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
request = iam_policy.TestIamPermissionsRequest(**request) - elif not request: # Null request, just make one. request = iam_policy.TestIamPermissionsRequest() diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index fa07b95eeb..e3b368c82a 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -77,10 +77,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -88,6 +88,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: @@ -97,20 +100,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -123,6 +123,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -136,6 +137,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -149,6 +151,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -162,6 +165,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -181,6 +185,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -197,6 +202,7 @@ def _prep_wrapped_messages(self, 
client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index e896249468..a3e3f39762 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -131,7 +131,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -139,70 +142,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -210,18 +193,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -235,7 +208,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index ca7f009071..e4a860874e 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -86,7 +86,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. 
credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -164,10 +164,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -176,7 +176,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -184,70 +187,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -255,18 +238,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/spanner_admin_instance_v1/types/__init__.py b/google/cloud/spanner_admin_instance_v1/types/__init__.py index 37b771feed..f5ebcd7d5c 100644 --- a/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -16,35 +16,35 @@ # from .spanner_instance_admin import ( - ReplicaInfo, - InstanceConfig, + CreateInstanceMetadata, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceConfigRequest, + GetInstanceRequest, Instance, + InstanceConfig, ListInstanceConfigsRequest, ListInstanceConfigsResponse, - GetInstanceConfigRequest, - GetInstanceRequest, - CreateInstanceRequest, ListInstancesRequest, ListInstancesResponse, - UpdateInstanceRequest, - DeleteInstanceRequest, - CreateInstanceMetadata, + ReplicaInfo, UpdateInstanceMetadata, + UpdateInstanceRequest, ) __all__ = ( - "ReplicaInfo", - "InstanceConfig", + "CreateInstanceMetadata", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceConfigRequest", + "GetInstanceRequest", "Instance", + "InstanceConfig", "ListInstanceConfigsRequest", "ListInstanceConfigsResponse", - "GetInstanceConfigRequest", - "GetInstanceRequest", - "CreateInstanceRequest", "ListInstancesRequest", "ListInstancesResponse", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "CreateInstanceMetadata", + "ReplicaInfo", "UpdateInstanceMetadata", + "UpdateInstanceRequest", ) diff --git a/google/cloud/spanner_dbapi/parse_utils.py b/google/cloud/spanner_dbapi/parse_utils.py index 082074251c..1744874764 100644 --- a/google/cloud/spanner_dbapi/parse_utils.py +++ b/google/cloud/spanner_dbapi/parse_utils.py @@ -37,6 +37,7 @@ datetime.date: spanner.param_types.DATE, DateStr: spanner.param_types.DATE, TimestampStr: spanner.param_types.TIMESTAMP, + decimal.Decimal: spanner.param_types.NUMERIC, } 
SPANNER_RESERVED_KEYWORDS = { @@ -224,11 +225,11 @@ def parse_insert(insert_sql, params): } Case b) - SQL: 'INSERT INTO T (s, c) SELECT st, zc FROM cus ORDER BY fn, ln', + SQL: 'INSERT INTO T (s, c) SELECT st, zc FROM cus WHERE col IN (%s, %s)', it produces: { 'sql_params_list': [ - ('INSERT INTO T (s, c) SELECT st, zc FROM cus ORDER BY fn, ln', None), + ('INSERT INTO T (s, c) SELECT st, zc FROM cus WHERE col IN (%s, %s)', ('a', 'b')), ] } @@ -276,7 +277,7 @@ def parse_insert(insert_sql, params): if not after_values_sql: # Case b) insert_sql = sanitize_literals_for_upload(insert_sql) - return {"sql_params_list": [(insert_sql, None)]} + return {"sql_params_list": [(insert_sql, params)]} if not params: # Case a) perhaps? @@ -508,25 +509,11 @@ def sql_pyformat_args_to_spanner(sql, params): resolved_value = pyfmt % params named_args[key] = resolved_value else: - named_args[key] = cast_for_spanner(params[i]) + named_args[key] = params[i] return sanitize_literals_for_upload(sql), named_args -def cast_for_spanner(value): - """Convert the param to its Cloud Spanner equivalent type. - - :type value: Any - :param value: The value to convert to a Cloud Spanner type. - - :rtype: Any - :returns: The value converted to a Cloud Spanner type. - """ - if isinstance(value, decimal.Decimal): - return str(value) - return value - - def get_param_types(params): """Determine Cloud Spanner types for the given parameters.
diff --git a/google/cloud/spanner_v1/_helpers.py b/google/cloud/spanner_v1/_helpers.py index bac1f68edb..a9ae36d0d6 100644 --- a/google/cloud/spanner_v1/_helpers.py +++ b/google/cloud/spanner_v1/_helpers.py @@ -205,7 +205,7 @@ def _parse_value_pb(value_pb, field_type): _parse_value_pb(item_pb, field_type.struct_type.fields[i].type_) for (i, item_pb) in enumerate(value_pb.list_value.values) ] - elif field_type.code == TypeCode.NUMERIC: + elif type_code == TypeCode.NUMERIC: return decimal.Decimal(value_pb.string_value) else: raise ValueError("Unknown type: %s" % (field_type,)) diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 1e76bf218f..5eb688d9c6 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -518,11 +518,11 @@ def execute_pdml(): param_types=param_types, query_options=query_options, ) - restart = functools.partial( - api.execute_streaming_sql, request=request, metadata=metadata, + method = functools.partial( + api.execute_streaming_sql, metadata=metadata, ) - iterator = _restart_on_unavailable(restart) + iterator = _restart_on_unavailable(method, request) result_set = StreamedResultSet(iterator) list(result_set) # consume all partials diff --git a/google/cloud/spanner_v1/proto/keys.proto b/google/cloud/spanner_v1/proto/keys.proto index 267df0d102..d8ce0d6774 100644 --- a/google/cloud/spanner_v1/proto/keys.proto +++ b/google/cloud/spanner_v1/proto/keys.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/google/cloud/spanner_v1/proto/query_plan.proto b/google/cloud/spanner_v1/proto/query_plan.proto index 974a70e6d1..35f8fe21c5 100644 --- a/google/cloud/spanner_v1/proto/query_plan.proto +++ b/google/cloud/spanner_v1/proto/query_plan.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/proto/result_set.proto b/google/cloud/spanner_v1/proto/result_set.proto index a87d741fdc..d6bb9a2831 100644 --- a/google/cloud/spanner_v1/proto/result_set.proto +++ b/google/cloud/spanner_v1/proto/result_set.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/proto/spanner.proto b/google/cloud/spanner_v1/proto/spanner.proto index 8f579e333d..c436227221 100644 --- a/google/cloud/spanner_v1/proto/spanner.proto +++ b/google/cloud/spanner_v1/proto/spanner.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -425,6 +425,63 @@ message DeleteSessionRequest { ]; } +// Common request options for various APIs. +message RequestOptions { + // The relative priority for requests. Note that priority is not applicable + // for [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + // + // The priority acts as a hint to the Cloud Spanner scheduler and does not + // guarantee priority or order of execution. For example: + // + // * Some parts of a write operation always execute at `PRIORITY_HIGH`, + // regardless of the specified priority. 
This may cause you to see an + // increase in high priority workload even when executing a low priority + // request. This can also potentially cause a priority inversion where a + // lower priority request will be fulfilled ahead of a higher priority + // request. + // * If a transaction contains multiple operations with different priorities, + // Cloud Spanner does not guarantee to process the higher priority + // operations first. There may be other constraints to satisfy, such as + // order of operations. + enum Priority { + // `PRIORITY_UNSPECIFIED` is equivalent to `PRIORITY_HIGH`. + PRIORITY_UNSPECIFIED = 0; + + // This specifies that the request is low priority. + PRIORITY_LOW = 1; + + // This specifies that the request is medium priority. + PRIORITY_MEDIUM = 2; + + // This specifies that the request is high priority. + PRIORITY_HIGH = 3; + } + + // Priority for the request. + Priority priority = 1; + + // A per-request tag which can be applied to queries or reads, used for + // statistics collection. + // Both request_tag and transaction_tag can be specified for a read or query + // that belongs to a transaction. + // This field is ignored for requests where it's not applicable (e.g. + // CommitRequest). + // `request_tag` must be a valid identifier of the form: + // `[a-zA-Z][a-zA-Z0-9_\-]` between 2 and 64 characters in length + string request_tag = 2; + + // A tag used for statistics collection about this transaction. + // Both request_tag and transaction_tag can be specified for a read or query + // that belongs to a transaction. + // The value of transaction_tag should be the same for all requests belonging + // to the same transaction. + // If this request doesn’t belong to any transaction, transaction_tag will be + // ignored. 
+ // `transaction_tag` must be a valid identifier of the format: + // `[a-zA-Z][a-zA-Z0-9_\-]{0,49}` + string transaction_tag = 3; +} + // The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and // [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. message ExecuteSqlRequest { @@ -435,21 +492,50 @@ message ExecuteSqlRequest { // This parameter allows individual queries to pick different query // optimizer versions. // - // Specifying "latest" as a value instructs Cloud Spanner to use the + // Specifying `latest` as a value instructs Cloud Spanner to use the // latest supported query optimizer version. If not specified, Cloud Spanner - // uses optimizer version set at the database level options. Any other + // uses the optimizer version set at the database level options. Any other // positive integer (from the list of supported optimizer versions) // overrides the default optimizer version for query execution. + // // The list of supported optimizer versions can be queried from - // SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. Executing a SQL statement - // with an invalid optimizer version will fail with a syntax error - // (`INVALID_ARGUMENT`) status. + // SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. + // + // Executing a SQL statement with an invalid optimizer version fails with + // an `INVALID_ARGUMENT` error. + // // See // https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer // for more information on managing the query optimizer. // // The `optimizer_version` statement hint has precedence over this setting. string optimizer_version = 1; + + // An option to control the selection of optimizer statistics package. + // + // This parameter allows individual queries to use a different query + // optimizer statistics package. + // + // Specifying `latest` as a value instructs Cloud Spanner to use the latest + // generated statistics package. 
If not specified, Cloud Spanner uses + // the statistics package set at the database level options, or the latest + // package if the database option is not set. + // + // The statistics package requested by the query has to be exempt from + // garbage collection. This can be achieved with the following DDL + // statement: + // + // ``` + // ALTER STATISTICS SET OPTIONS (allow_gc=false) + // ``` + // + // The list of available statistics packages can be queried from + // `INFORMATION_SCHEMA.SPANNER_STATISTICS`. + // + // Executing a SQL statement with an invalid optimizer statistics package + // or with a statistics package that allows garbage collection fails with + // an `INVALID_ARGUMENT` error. + string optimizer_statistics_package = 2; } // Mode in which the statement must be processed. @@ -547,6 +633,9 @@ message ExecuteSqlRequest { // Query optimizer configuration to use for the given query. QueryOptions query_options = 10; + + // Common options for this request. + RequestOptions request_options = 11; } // The request for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. @@ -613,6 +702,9 @@ message ExecuteBatchDmlRequest { // sequence number, the transaction may be aborted. Replays of previously // handled requests will yield the same response as the first execution. int64 seqno = 4 [(google.api.field_behavior) = REQUIRED]; + + // Common options for this request. + RequestOptions request_options = 5; } // The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list @@ -847,6 +939,9 @@ message ReadRequest { // match for the values of fields common to this message and the // PartitionReadRequest message used to create this partition_token. bytes partition_token = 10; + + // Common options for this request. + RequestOptions request_options = 11; } // The request for [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. @@ -861,6 +956,13 @@ message BeginTransactionRequest { // Required. 
Options for the new transaction. TransactionOptions options = 2 [(google.api.field_behavior) = REQUIRED]; + + // Common options for this request. + // Priority is ignored for this request. Setting the priority in this + // request_options struct will not do anything. To set the priority for a + // transaction, set it on the reads and writes that are part of this + // transaction instead. + RequestOptions request_options = 3; } // The request for [Commit][google.spanner.v1.Spanner.Commit]. @@ -899,6 +1001,9 @@ message CommitRequest { // the [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. Default value is // `false`. bool return_commit_stats = 5; + + // Common options for this request. + RequestOptions request_options = 6; } // The response for [Commit][google.spanner.v1.Spanner.Commit]. diff --git a/google/cloud/spanner_v1/proto/transaction.proto b/google/cloud/spanner_v1/proto/transaction.proto index 5c6f494474..7082c56258 100644 --- a/google/cloud/spanner_v1/proto/transaction.proto +++ b/google/cloud/spanner_v1/proto/transaction.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/proto/type.proto b/google/cloud/spanner_v1/proto/type.proto index 1b863c0fdf..4a5afd485d 100644 --- a/google/cloud/spanner_v1/proto/type.proto +++ b/google/cloud/spanner_v1/proto/type.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index a4a188bc97..d220c20f6e 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -79,8 +79,36 @@ class SpannerAsyncClient: common_location_path = staticmethod(SpannerClient.common_location_path) parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path) - from_service_account_info = SpannerClient.from_service_account_info - from_service_account_file = SpannerClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerAsyncClient: The constructed client. + """ + return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerAsyncClient: The constructed client. 
+ """ + return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -222,6 +250,7 @@ async def create_session( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -319,6 +348,7 @@ async def batch_create_sessions( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -398,6 +428,7 @@ async def get_session( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -480,6 +511,7 @@ async def list_sessions( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -561,6 +593,7 @@ async def delete_session( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -631,6 +664,7 @@ async def execute_sql( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -798,6 +832,7 @@ async def execute_batch_dml( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -870,6 +905,7 @@ async def read( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -1021,6 +1057,7 @@ async def begin_transaction( 
maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -1158,6 +1195,7 @@ async def commit( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, @@ -1250,6 +1288,7 @@ async def rollback( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -1320,6 +1359,7 @@ async def partition_query( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, @@ -1394,6 +1434,7 @@ async def partition_read( maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index 36e3c0cb52..f91b98d6fb 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -73,10 +73,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -84,6 +84,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -93,20 +96,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -117,6 +117,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -128,6 +129,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -139,6 +141,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -150,6 +153,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -161,6 +165,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -172,6 +177,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -188,6 +194,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -199,6 +206,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -213,6 +221,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, 
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -224,6 +233,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, @@ -235,6 +245,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -246,6 +257,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, @@ -257,6 +269,7 @@ def _prep_wrapped_messages(self, client_info): maximum=32.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=30.0, ), default_timeout=30.0, client_info=client_info, diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index 2ac10fc5b3..0a3ead94e5 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -111,7 +111,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -119,70 +121,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. 
credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -190,17 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -214,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 265f4bb30a..a7c83ef512 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -66,7 +66,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -144,10 +144,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: @@ -156,7 +156,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -164,70 +166,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -235,17 +217,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/spanner_v1/snapshot.py b/google/cloud/spanner_v1/snapshot.py index 1b3ae8097d..f926d7836d 100644 --- a/google/cloud/spanner_v1/snapshot.py +++ b/google/cloud/spanner_v1/snapshot.py @@ -41,16 +41,21 @@ ) -def _restart_on_unavailable(restart, trace_name=None, session=None, attributes=None): +def _restart_on_unavailable( + method, request, trace_name=None, session=None, attributes=None +): """Restart iteration after :exc:`.ServiceUnavailable`. - :type restart: callable - :param restart: curried function returning iterator + :type method: callable + :param method: function returning iterator + + :type request: proto + :param request: request proto to call the method with """ resume_token = b"" item_buffer = [] with trace_call(trace_name, session, attributes): - iterator = restart() + iterator = method(request=request) while True: try: for item in iterator: @@ -61,7 +66,8 @@ def _restart_on_unavailable(restart, trace_name=None, session=None, attributes=N except ServiceUnavailable: del item_buffer[:] with trace_call(trace_name, session, attributes): - iterator = restart(resume_token=resume_token) + request.resume_token = resume_token + iterator = method(request=request) continue except InternalServerError as exc: resumable_error = any( @@ -72,7 +78,8 @@ def _restart_on_unavailable(restart, trace_name=None, session=None, attributes=N raise del item_buffer[:] with trace_call(trace_name, session, attributes): - iterator = restart(resume_token=resume_token) + request.resume_token = resume_token + iterator = method(request=request) continue if len(item_buffer) == 0: @@ -189,7 +196,11 @@ def read( trace_attributes = {"table_id": table, "columns": columns} iterator = _restart_on_unavailable( - restart, "CloudSpanner.ReadOnlyTransaction", self._session, trace_attributes + restart, + request, + 
"CloudSpanner.ReadOnlyTransaction", + self._session, + trace_attributes, ) self._read_request_count += 1 @@ -302,6 +313,7 @@ def execute_sql( trace_attributes = {"db.statement": sql} iterator = _restart_on_unavailable( restart, + request, "CloudSpanner.ReadWriteTransaction", self._session, trace_attributes, diff --git a/google/cloud/spanner_v1/streamed.py b/google/cloud/spanner_v1/streamed.py index fbcca77795..e5f7e4984e 100644 --- a/google/cloud/spanner_v1/streamed.py +++ b/google/cloud/spanner_v1/streamed.py @@ -18,6 +18,7 @@ from google.protobuf.struct_pb2 import Value from google.cloud import exceptions from google.cloud.spanner_v1 import PartialResultSet +from google.cloud.spanner_v1 import ResultSetMetadata from google.cloud.spanner_v1 import TypeCode import six @@ -65,7 +66,9 @@ def metadata(self): :rtype: :class:`~google.cloud.spanner_v1.types.ResultSetMetadata` :returns: structure describing the results """ - return self._metadata + if self._metadata: + return ResultSetMetadata.wrap(self._metadata) + return None @property def stats(self): @@ -119,7 +122,7 @@ def _consume_next(self): response_pb = PartialResultSet.pb(response) if self._metadata is None: # first response - metadata = self._metadata = response.metadata + metadata = self._metadata = response_pb.metadata source = self._source if source is not None and source._transaction_id is None: diff --git a/google/cloud/spanner_v1/types/__init__.py b/google/cloud/spanner_v1/types/__init__.py index a71a15855c..7a7ac395e4 100644 --- a/google/cloud/spanner_v1/types/__init__.py +++ b/google/cloud/spanner_v1/types/__init__.py @@ -24,44 +24,45 @@ PlanNode, QueryPlan, ) -from .transaction import ( - TransactionOptions, - Transaction, - TransactionSelector, -) -from .type import ( - Type, - StructType, - TypeCode, -) from .result_set import ( - ResultSet, PartialResultSet, + ResultSet, ResultSetMetadata, ResultSetStats, ) from .spanner import ( - CreateSessionRequest, BatchCreateSessionsRequest, 
BatchCreateSessionsResponse, - Session, - GetSessionRequest, - ListSessionsRequest, - ListSessionsResponse, + BeginTransactionRequest, + CommitRequest, + CommitResponse, + CreateSessionRequest, DeleteSessionRequest, - ExecuteSqlRequest, ExecuteBatchDmlRequest, ExecuteBatchDmlResponse, + ExecuteSqlRequest, + GetSessionRequest, + ListSessionsRequest, + ListSessionsResponse, + Partition, PartitionOptions, PartitionQueryRequest, PartitionReadRequest, - Partition, PartitionResponse, ReadRequest, - BeginTransactionRequest, - CommitRequest, - CommitResponse, + RequestOptions, RollbackRequest, + Session, +) +from .transaction import ( + Transaction, + TransactionOptions, + TransactionSelector, +) +from .type import ( + StructType, + Type, + TypeCode, ) __all__ = ( @@ -70,35 +71,36 @@ "Mutation", "PlanNode", "QueryPlan", - "TransactionOptions", - "Transaction", - "TransactionSelector", - "Type", - "StructType", - "TypeCode", - "ResultSet", "PartialResultSet", + "ResultSet", "ResultSetMetadata", "ResultSetStats", - "CreateSessionRequest", "BatchCreateSessionsRequest", "BatchCreateSessionsResponse", - "Session", - "GetSessionRequest", - "ListSessionsRequest", - "ListSessionsResponse", + "BeginTransactionRequest", + "CommitRequest", + "CommitResponse", + "CreateSessionRequest", "DeleteSessionRequest", - "ExecuteSqlRequest", "ExecuteBatchDmlRequest", "ExecuteBatchDmlResponse", + "ExecuteSqlRequest", + "GetSessionRequest", + "ListSessionsRequest", + "ListSessionsResponse", + "Partition", "PartitionOptions", "PartitionQueryRequest", "PartitionReadRequest", - "Partition", "PartitionResponse", "ReadRequest", - "BeginTransactionRequest", - "CommitRequest", - "CommitResponse", + "RequestOptions", "RollbackRequest", + "Session", + "Transaction", + "TransactionOptions", + "TransactionSelector", + "StructType", + "Type", + "TypeCode", ) diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index 1dfd8451fe..acb32c8ff9 100644 --- 
a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -39,6 +39,7 @@ "ListSessionsRequest", "ListSessionsResponse", "DeleteSessionRequest", + "RequestOptions", "ExecuteSqlRequest", "ExecuteBatchDmlRequest", "ExecuteBatchDmlResponse", @@ -240,6 +241,63 @@ class DeleteSessionRequest(proto.Message): name = proto.Field(proto.STRING, number=1) +class RequestOptions(proto.Message): + r"""Common request options for various APIs. + + Attributes: + priority (google.cloud.spanner_v1.types.RequestOptions.Priority): + Priority for the request. + request_tag (str): + A per-request tag which can be applied to queries or reads, + used for statistics collection. Both request_tag and + transaction_tag can be specified for a read or query that + belongs to a transaction. This field is ignored for requests + where it's not applicable (e.g. CommitRequest). + ``request_tag`` must be a valid identifier of the form: + ``[a-zA-Z][a-zA-Z0-9_\-]`` between 2 and 64 characters in + length + transaction_tag (str): + A tag used for statistics collection about this transaction. + Both request_tag and transaction_tag can be specified for a + read or query that belongs to a transaction. The value of + transaction_tag should be the same for all requests + belonging to the same transaction. If this request doesn’t + belong to any transaction, transaction_tag will be ignored. + ``transaction_tag`` must be a valid identifier of the + format: ``[a-zA-Z][a-zA-Z0-9_\-]{0,49}`` + """ + + class Priority(proto.Enum): + r"""The relative priority for requests. Note that priority is not + applicable for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + The priority acts as a hint to the Cloud Spanner scheduler and does + not guarantee priority or order of execution. For example: + + - Some parts of a write operation always execute at + ``PRIORITY_HIGH``, regardless of the specified priority. 
This may + cause you to see an increase in high priority workload even when + executing a low priority request. This can also potentially cause + a priority inversion where a lower priority request will be + fulfilled ahead of a higher priority request. + - If a transaction contains multiple operations with different + priorities, Cloud Spanner does not guarantee to process the + higher priority operations first. There may be other constraints + to satisfy, such as order of operations. + """ + PRIORITY_UNSPECIFIED = 0 + PRIORITY_LOW = 1 + PRIORITY_MEDIUM = 2 + PRIORITY_HIGH = 3 + + priority = proto.Field(proto.ENUM, number=1, enum=Priority,) + + request_tag = proto.Field(proto.STRING, number=2) + + transaction_tag = proto.Field(proto.STRING, number=3) + + class ExecuteSqlRequest(proto.Message): r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and @@ -335,6 +393,8 @@ class ExecuteSqlRequest(proto.Message): query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): Query optimizer configuration to use for the given query. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. """ class QueryMode(proto.Enum): @@ -353,25 +413,58 @@ class QueryOptions(proto.Message): This parameter allows individual queries to pick different query optimizer versions. - Specifying "latest" as a value instructs Cloud Spanner to + Specifying ``latest`` as a value instructs Cloud Spanner to use the latest supported query optimizer version. If not - specified, Cloud Spanner uses optimizer version set at the - database level options. Any other positive integer (from the - list of supported optimizer versions) overrides the default - optimizer version for query execution. The list of supported - optimizer versions can be queried from - SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. Executing a SQL - statement with an invalid optimizer version will fail with a - syntax error (``INVALID_ARGUMENT``) status. 
See + specified, Cloud Spanner uses the optimizer version set at + the database level options. Any other positive integer (from + the list of supported optimizer versions) overrides the + default optimizer version for query execution. + + The list of supported optimizer versions can be queried from + SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. + + Executing a SQL statement with an invalid optimizer version + fails with an ``INVALID_ARGUMENT`` error. + + See https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer for more information on managing the query optimizer. The ``optimizer_version`` statement hint has precedence over this setting. + optimizer_statistics_package (str): + An option to control the selection of optimizer statistics + package. + + This parameter allows individual queries to use a different + query optimizer statistics package. + + Specifying ``latest`` as a value instructs Cloud Spanner to + use the latest generated statistics package. If not + specified, Cloud Spanner uses the statistics package set at + the database level options, or the latest package if the + database option is not set. + + The statistics package requested by the query has to be + exempt from garbage collection. This can be achieved with + the following DDL statement: + + :: + + ALTER STATISTICS SET OPTIONS (allow_gc=false) + + The list of available statistics packages can be queried + from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. + + Executing a SQL statement with an invalid optimizer + statistics package or with a statistics package that allows + garbage collection fails with an ``INVALID_ARGUMENT`` error. 
""" optimizer_version = proto.Field(proto.STRING, number=1) + optimizer_statistics_package = proto.Field(proto.STRING, number=2) + session = proto.Field(proto.STRING, number=1) transaction = proto.Field( @@ -396,6 +489,8 @@ class QueryOptions(proto.Message): query_options = proto.Field(proto.MESSAGE, number=10, message=QueryOptions,) + request_options = proto.Field(proto.MESSAGE, number=11, message="RequestOptions",) + class ExecuteBatchDmlRequest(proto.Message): r"""The request for @@ -434,6 +529,8 @@ class ExecuteBatchDmlRequest(proto.Message): sequence number, the transaction may be aborted. Replays of previously handled requests will yield the same response as the first execution. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. """ class Statement(proto.Message): @@ -491,6 +588,8 @@ class Statement(proto.Message): seqno = proto.Field(proto.INT64, number=4) + request_options = proto.Field(proto.MESSAGE, number=5, message="RequestOptions",) + class ExecuteBatchDmlResponse(proto.Message): r"""The response for @@ -835,6 +934,8 @@ class ReadRequest(proto.Message): must be an exact match for the values of fields common to this message and the PartitionReadRequest message used to create this partition_token. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. """ session = proto.Field(proto.STRING, number=1) @@ -857,6 +958,8 @@ class ReadRequest(proto.Message): partition_token = proto.Field(proto.BYTES, number=10) + request_options = proto.Field(proto.MESSAGE, number=11, message="RequestOptions",) + class BeginTransactionRequest(proto.Message): r"""The request for @@ -868,6 +971,12 @@ class BeginTransactionRequest(proto.Message): transaction runs. options (google.cloud.spanner_v1.types.TransactionOptions): Required. Options for the new transaction. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. 
Priority is ignored for + this request. Setting the priority in this request_options + struct will not do anything. To set the priority for a + transaction, set it on the reads and writes that are part of + this transaction instead. """ session = proto.Field(proto.STRING, number=1) @@ -876,6 +985,8 @@ class BeginTransactionRequest(proto.Message): proto.MESSAGE, number=2, message=gs_transaction.TransactionOptions, ) + request_options = proto.Field(proto.MESSAGE, number=3, message="RequestOptions",) + class CommitRequest(proto.Message): r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. @@ -906,6 +1017,8 @@ class CommitRequest(proto.Message): be included in the [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. Default value is ``false``. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. """ session = proto.Field(proto.STRING, number=1) @@ -923,6 +1036,8 @@ class CommitRequest(proto.Message): return_commit_stats = proto.Field(proto.BOOL, number=5) + request_options = proto.Field(proto.MESSAGE, number=6, message="RequestOptions",) + class CommitResponse(proto.Message): r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. 
diff --git a/noxfile.py b/noxfile.py index 1a6227824a..7f6991818e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -28,7 +29,23 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,10 +87,15 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") - session.install("-e", ".") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -90,7 +112,7 @@ def default(session): *session.posargs, ) - session.install("-e", ".[tracing]") + session.install("-e", ".[tracing]", "-c", constraints_path) # Run py.test against the unit tests with OpenTelemetry. 
session.run( @@ -117,6 +139,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -142,10 +167,8 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".[tracing]") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".[tracing]", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: @@ -194,9 +217,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".[tracing]") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000000..667e465d61 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,65 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize generated parts of this library.""" +import synthtool as s +from synthtool import gcp +from synthtool.languages import python + +common = gcp.CommonTemplates() + +spanner_default_version = "v1" +spanner_admin_instance_default_version = "v1" +spanner_admin_database_default_version = "v1" + +for library in s.get_staging_dirs(spanner_default_version): + if library.parent.absolute() == "spanner": + s.move(library, excludes=["google/cloud/spanner/**", "*.*", "docs/index.rst", "google/cloud/spanner_v1/__init__.py"]) + +s.remove_staging_dirs() + +for library in s.get_staging_dirs(spanner_admin_instance_default_version): + if library.parent.absolute() == "spanner_admin_instance": + s.move(library, excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst"]) + +s.remove_staging_dirs() + +for library in s.get_staging_dirs(spanner_admin_database_default_version): + if library.parent.absolute() == "spanner_admin_database": + s.move(library, excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst"]) + +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(microgenerator=True, samples=True) +s.move(templated_files, excludes=[".coveragerc", "noxfile.py"]) + +# Ensure CI runs on a new instance each time +s.replace( + ".kokoro/build.sh", + "# Remove old nox", + "# Set up creating a new instance for each system test run\n" + "export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true\n" + "\n\g<0>", +) + +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- + +python.py_samples() + 
+s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/renovate.json b/renovate.json index f08bc22c9a..c04895563e 100644 --- a/renovate.json +++ b/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/samples/samples/backup_sample.py b/samples/samples/backup_sample.py index f0d5ce363d..196cfbe04b 100644 --- a/samples/samples/backup_sample.py +++ b/samples/samples/backup_sample.py @@ -55,6 +55,42 @@ def create_backup(instance_id, database_id, backup_id, version_time): # [END spanner_create_backup] +# [START spanner_create_backup_with_encryption_key] +def create_backup_with_encryption_key(instance_id, database_id, backup_id, kms_key_name): + """Creates a backup for a database using a Customer Managed Encryption Key (CMEK).""" + from google.cloud.spanner_admin_database_v1 import CreateBackupEncryptionConfig + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + # Create a backup + expire_time = datetime.utcnow() + timedelta(days=14) + encryption_config = { + 'encryption_type': CreateBackupEncryptionConfig.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION, + 'kms_key_name': kms_key_name, + } + backup = instance.backup(backup_id, database=database, expire_time=expire_time, encryption_config=encryption_config) + operation = backup.create() + + # Wait for backup operation to complete. + operation.result(1200) + + # Verify that the backup is ready. + backup.reload() + assert backup.is_ready() is True + + # Get the name, create time, backup size and encryption key. 
+ backup.reload() + print( + "Backup {} of size {} bytes was created at {} using encryption key {}".format( + backup.name, backup.size_bytes, backup.create_time, kms_key_name + ) + ) + + +# [END spanner_create_backup_with_encryption_key] + # [START spanner_restore_backup] def restore_database(instance_id, new_database_id, backup_id): @@ -87,6 +123,42 @@ def restore_database(instance_id, new_database_id, backup_id): # [END spanner_restore_backup] +# [START spanner_restore_backup_with_encryption_key] +def restore_database_with_encryption_key(instance_id, new_database_id, backup_id, kms_key_name): + """Restores a database from a backup using a Customer Managed Encryption Key (CMEK).""" + from google.cloud.spanner_admin_database_v1 import RestoreDatabaseEncryptionConfig + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + # Start restoring an existing backup to a new database. + backup = instance.backup(backup_id) + encryption_config = { + 'encryption_type': RestoreDatabaseEncryptionConfig.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION, + 'kms_key_name': kms_key_name, + } + new_database = instance.database(new_database_id, encryption_config=encryption_config) + operation = new_database.restore(backup) + + # Wait for restore operation to complete. + operation.result(1600) + + # Newly created database has restore information. 
+ new_database.reload() + restore_info = new_database.restore_info + print( + "Database {} restored to {} from backup {} with using encryption key {}.".format( + restore_info.backup_info.source_database, + new_database_id, + restore_info.backup_info.backup, + new_database.encryption_config.kms_key_name, + ) + ) + + +# [END spanner_restore_backup_with_encryption_key] + + # [START spanner_cancel_backup_create] def cancel_backup(instance_id, database_id, backup_id): spanner_client = spanner.Client() diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 7118d98bed..8d1d95ff51 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -38,9 +38,11 @@ def unique_backup_id(): INSTANCE_ID = unique_instance_id() DATABASE_ID = unique_database_id() -RETENTION_DATABASE_ID = unique_database_id() RESTORE_DB_ID = unique_database_id() BACKUP_ID = unique_backup_id() +CMEK_RESTORE_DB_ID = unique_database_id() +CMEK_BACKUP_ID = unique_backup_id() +RETENTION_DATABASE_ID = unique_database_id() RETENTION_PERIOD = "7d" @@ -54,6 +56,12 @@ def spanner_instance(): op = instance.create() op.result(120) # block until completion yield instance + for database_pb in instance.list_databases(): + database = instance.database(database_pb.name.split("/")[-1]) + database.drop() + for backup_pb in instance.list_backups(): + backup = instance.backup(backup_pb.name.split("/")[-1]) + backup.delete() instance.delete() @@ -77,6 +85,16 @@ def test_create_backup(capsys, database): assert BACKUP_ID in out +def test_create_backup_with_encryption_key(capsys, spanner_instance, database): + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek" + ) + backup_sample.create_backup_with_encryption_key(INSTANCE_ID, DATABASE_ID, CMEK_BACKUP_ID, kms_key_name) + out, _ = capsys.readouterr() + assert CMEK_BACKUP_ID in out + 
assert kms_key_name in out + + # Depends on test_create_backup having run first @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database(capsys): @@ -87,6 +105,20 @@ def test_restore_database(capsys): assert BACKUP_ID in out +# Depends on test_create_backup having run first +@RetryErrors(exception=DeadlineExceeded, max_tries=2) +def test_restore_database_with_encryption_key(capsys, spanner_instance): + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek" + ) + backup_sample.restore_database_with_encryption_key(INSTANCE_ID, CMEK_RESTORE_DB_ID, CMEK_BACKUP_ID, kms_key_name) + out, _ = capsys.readouterr() + assert (DATABASE_ID + " restored to ") in out + assert (CMEK_RESTORE_DB_ID + " from backup ") in out + assert CMEK_BACKUP_ID in out + assert kms_key_name in out + + # Depends on test_create_backup having run first def test_list_backup_operations(capsys, spanner_instance): backup_sample.list_backup_operations(INSTANCE_ID, DATABASE_ID) diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py index 97bf7da80e..956cdf4f92 100644 --- a/samples/samples/noxfile.py +++ b/samples/samples/noxfile.py @@ -172,10 +172,16 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if 
INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt index 6e3d3ae986..f995caa5ab 100644 --- a/samples/samples/requirements.txt +++ b/samples/samples/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-spanner==3.2.0 +google-cloud-spanner==3.3.0 futures==3.3.0; python_version < "3" diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 9a94e85a9b..10fc6413c2 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -92,6 +92,43 @@ def create_database(instance_id, database_id): # [END spanner_create_database] +# [START spanner_create_database_with_encryption_key] +def create_database_with_encryption_key(instance_id, database_id, kms_key_name): + """Creates a database with tables using a Customer Managed Encryption Key (CMEK).""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database( + database_id, + ddl_statements=[ + """CREATE TABLE Singers ( + SingerId INT64 NOT NULL, + FirstName STRING(1024), + LastName STRING(1024), + SingerInfo BYTES(MAX) + ) PRIMARY KEY (SingerId)""", + """CREATE TABLE Albums ( + SingerId INT64 NOT NULL, + AlbumId INT64 NOT NULL, + AlbumTitle STRING(MAX) + ) PRIMARY KEY (SingerId, AlbumId), + INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", + ], + encryption_config={'kms_key_name': kms_key_name}, + ) + + operation = database.create() + + print("Waiting for operation to complete...") + operation.result(120) + + print("Database {} created with encryption key {}".format( + database.name, database.encryption_config.kms_key_name)) + + +# [END spanner_create_database_with_encryption_key] + + # [START spanner_insert_data] def insert_data(instance_id, database_id): """Inserts sample data into the given database. 
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index ee8c6ebe23..28d13fa330 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -33,6 +33,7 @@ def unique_database_id(): INSTANCE_ID = unique_instance_id() DATABASE_ID = unique_database_id() +CMEK_DATABASE_ID = unique_database_id() @pytest.fixture(scope="module") @@ -63,6 +64,16 @@ def test_create_database(database): database.reload() +def test_create_database_with_encryption_config(capsys, spanner_instance): + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek" + ) + snippets.create_database_with_encryption_key(INSTANCE_ID, CMEK_DATABASE_ID, kms_key_name) + out, _ = capsys.readouterr() + assert CMEK_DATABASE_ID in out + assert kms_key_name in out + + def test_insert_data(capsys): snippets.insert_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index 19e3c0185b..4faf734dcb 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -42,20 +42,20 @@ class spannerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'batch_create_sessions': ('database', 'session_count', 'session_template', ), - 'begin_transaction': ('session', 'options', ), - 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', ), + 'begin_transaction': ('session', 'options', 'request_options', ), + 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ), 'create_session': ('database', 'session', ), 'delete_session': ('name', ), - 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', ), - 'execute_sql': ('session', 
'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', ), + 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', ), + 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), + 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), 'get_session': ('name', ), 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), } diff --git a/setup.py b/setup.py index 4c0d844572..b12cd90f09 100644 --- a/setup.py +++ b/setup.py @@ -22,26 +22,26 @@ name = "google-cloud-spanner" description = "Cloud Spanner API client library" -version = "3.3.0" +version = "3.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status 
:: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - "libcst >= 0.2.5", "proto-plus >= 1.11.0", "sqlparse >= 0.3.0", ] extras = { "tracing": [ - "opentelemetry-api==0.11b0", - "opentelemetry-sdk==0.11b0", - "opentelemetry-instrumentation==0.11b0", - ] + "opentelemetry-api >= 0.11b0", + "opentelemetry-sdk >= 0.11b0", + "opentelemetry-instrumentation >= 0.11b0", + ], + "libcst": "libcst >= 0.2.5", } diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 72c4d0ff71..0000000000 --- a/synth.metadata +++ /dev/null @@ -1,211 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-spanner.git", - "sha": "5ca63407847ad615dc51beaaaa7f16640daf0e23" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f829b1334cce86aa3738f3c0698d814b56664445", - "internalRef": "358725120" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f5c5904fb0c6aa3b3730eadf4e5a4485afc65726" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f5c5904fb0c6aa3b3730eadf4e5a4485afc65726" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "spanner", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "spanner_admin_instance", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "spanner_admin_database", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".flake8", 
- ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - 
"docs/conf.py", - "docs/multiprocessing.rst", - "docs/spanner_admin_database_v1/database_admin.rst", - "docs/spanner_admin_database_v1/services.rst", - "docs/spanner_admin_database_v1/types.rst", - "docs/spanner_admin_instance_v1/instance_admin.rst", - "docs/spanner_admin_instance_v1/services.rst", - "docs/spanner_admin_instance_v1/types.rst", - "docs/spanner_v1/services.rst", - "docs/spanner_v1/spanner.rst", - "docs/spanner_v1/types.rst", - "google/cloud/spanner_admin_database_v1/__init__.py", - "google/cloud/spanner_admin_database_v1/proto/backup.proto", - "google/cloud/spanner_admin_database_v1/proto/common.proto", - "google/cloud/spanner_admin_database_v1/proto/spanner_database_admin.proto", - "google/cloud/spanner_admin_database_v1/py.typed", - "google/cloud/spanner_admin_database_v1/services/__init__.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/client.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py", - "google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py", - "google/cloud/spanner_admin_database_v1/types/__init__.py", - "google/cloud/spanner_admin_database_v1/types/backup.py", - "google/cloud/spanner_admin_database_v1/types/common.py", - "google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py", - "google/cloud/spanner_admin_instance_v1/__init__.py", - "google/cloud/spanner_admin_instance_v1/proto/spanner_instance_admin.proto", - "google/cloud/spanner_admin_instance_v1/py.typed", - 
"google/cloud/spanner_admin_instance_v1/services/__init__.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py", - "google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py", - "google/cloud/spanner_admin_instance_v1/types/__init__.py", - "google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py", - "google/cloud/spanner_v1/proto/keys.proto", - "google/cloud/spanner_v1/proto/mutation.proto", - "google/cloud/spanner_v1/proto/query_plan.proto", - "google/cloud/spanner_v1/proto/result_set.proto", - "google/cloud/spanner_v1/proto/spanner.proto", - "google/cloud/spanner_v1/proto/transaction.proto", - "google/cloud/spanner_v1/proto/type.proto", - "google/cloud/spanner_v1/py.typed", - "google/cloud/spanner_v1/services/__init__.py", - "google/cloud/spanner_v1/services/spanner/__init__.py", - "google/cloud/spanner_v1/services/spanner/async_client.py", - "google/cloud/spanner_v1/services/spanner/client.py", - "google/cloud/spanner_v1/services/spanner/pagers.py", - "google/cloud/spanner_v1/services/spanner/transports/__init__.py", - "google/cloud/spanner_v1/services/spanner/transports/base.py", - "google/cloud/spanner_v1/services/spanner/transports/grpc.py", - "google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py", - "google/cloud/spanner_v1/types/__init__.py", - "google/cloud/spanner_v1/types/keys.py", - "google/cloud/spanner_v1/types/mutation.py", - 
"google/cloud/spanner_v1/types/query_plan.py", - "google/cloud/spanner_v1/types/result_set.py", - "google/cloud/spanner_v1/types/spanner.py", - "google/cloud/spanner_v1/types/transaction.py", - "google/cloud/spanner_v1/types/type.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/samples/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_spanner_admin_database_v1_keywords.py", - "scripts/fixup_spanner_admin_instance_v1_keywords.py", - "scripts/fixup_spanner_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/spanner_admin_database_v1/__init__.py", - "tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py", - "tests/unit/gapic/spanner_admin_instance_v1/__init__.py", - "tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py", - "tests/unit/gapic/spanner_v1/__init__.py", - "tests/unit/gapic/spanner_v1/test_spanner.py" - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index d13ddb67a5..0000000000 --- a/synth.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Generate spanner GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="spanner", - version="v1", - bazel_target="//google/spanner/v1:spanner-v1-py", - include_protos=True, -) - -s.move(library, excludes=["google/cloud/spanner/**", "*.*", "docs/index.rst", "google/cloud/spanner_v1/__init__.py"]) - -# ---------------------------------------------------------------------------- -# Generate instance admin client -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="spanner_admin_instance", - version="v1", - bazel_target="//google/spanner/admin/instance/v1:admin-instance-v1-py", - include_protos=True, -) - -s.move(library, excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst"]) - -# ---------------------------------------------------------------------------- -# Generate database admin client -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="spanner_admin_database", - version="v1", - bazel_target="//google/spanner/admin/database/v1:admin-database-v1-py", - include_protos=True, -) - -s.move(library, excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst"]) - -# Fix formatting for bullet lists. 
-# See: https://github.com/googleapis/gapic-generator-python/issues/604 -s.replace( - "google/cloud/spanner_admin_database_v1/services/database_admin/*.py", - "``backup.expire_time``.", - "``backup.expire_time``.\n" -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(microgenerator=True, samples=True) -s.move(templated_files, excludes=[".coveragerc", "noxfile.py"]) - -# Ensure CI runs on a new instance each time -s.replace( - ".kokoro/build.sh", - "# Remove old nox", - "# Set up creating a new instance for each system test run\n" - "export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true\n" - "\n\g<0>", -) - -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- - -python.py_samples() - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 050e9c7a18..bfb81c38a2 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 +google-api-core==1.22.2 google-cloud-core==1.4.1 grpc-google-iam-v1==0.12.3 libcst==0.2.5 @@ -13,4 +13,4 @@ proto-plus==1.13.0 sqlparse==0.3.0 opentelemetry-api==0.11b0 opentelemetry-sdk==0.11b0 -opentelemetry-instrumentation==0.11b0 \ No newline at end of file +opentelemetry-instrumentation==0.11b0 diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 7a7630c0d9..2704e27b53 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1727,11 +1727,9 @@ def test_transaction_batch_update_wo_statements(self): with self.assertRaises(InvalidArgument): transaction.batch_update([]) + 
@unittest.skipUnless(HAS_OPENTELEMETRY_INSTALLED, "trace requires OpenTelemetry") def test_transaction_batch_update_w_parent_span(self): - try: - from opentelemetry import trace - except ImportError: - return + from opentelemetry import trace tracer = trace.get_tracer(__name__) diff --git a/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/tests/unit/gapic/spanner_admin_database_v1/__init__.py index 8b13789179..42ffdf2bc4 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/__init__.py +++ b/tests/unit/gapic/spanner_admin_database_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index 86eba5e283..1906328473 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -104,15 +104,19 @@ def test__get_default_mtls_endpoint(): ) -def test_database_admin_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [DatabaseAdminClient, DatabaseAdminAsyncClient,] +) +def test_database_admin_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = DatabaseAdminClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -128,9 +132,11 @@ def test_database_admin_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -493,6 +499,22 @@ def test_list_databases_from_dict(): test_list_databases(request_type=dict) +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.ListDatabasesRequest() + + @pytest.mark.asyncio async def test_list_databases_async( transport: str = "grpc_asyncio", @@ -842,6 +864,22 @@ def test_create_database_from_dict(): test_create_database(request_type=dict) +def test_create_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.CreateDatabaseRequest() + + @pytest.mark.asyncio async def test_create_database_async( transport: str = "grpc_asyncio", @@ -1052,6 +1090,22 @@ def test_get_database_from_dict(): test_get_database(request_type=dict) +def test_get_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_database), "__call__") as call: + client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.GetDatabaseRequest() + + @pytest.mark.asyncio async def test_get_database_async( transport: str = "grpc_asyncio", @@ -1252,6 +1306,24 @@ def test_update_database_ddl_from_dict(): test_update_database_ddl(request_type=dict) +def test_update_database_ddl_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), "__call__" + ) as call: + client.update_database_ddl() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() + + @pytest.mark.asyncio async def test_update_database_ddl_async( transport: str = "grpc_asyncio", @@ -1461,6 +1533,22 @@ def test_drop_database_from_dict(): test_drop_database(request_type=dict) +def test_drop_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.drop_database), "__call__") as call: + client.drop_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.DropDatabaseRequest() + + @pytest.mark.asyncio async def test_drop_database_async( transport: str = "grpc_asyncio", @@ -1647,6 +1735,22 @@ def test_get_database_ddl_from_dict(): test_get_database_ddl(request_type=dict) +def test_get_database_ddl_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database_ddl), "__call__") as call: + client.get_database_ddl() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() + + @pytest.mark.asyncio async def test_get_database_ddl_async( transport: str = "grpc_asyncio", @@ -1843,6 +1947,22 @@ def test_set_iam_policy_from_dict(): test_set_iam_policy(request_type=dict) +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest @@ -2050,6 +2170,22 @@ def test_get_iam_policy_from_dict(): test_get_iam_policy(request_type=dict) +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest @@ -2259,6 +2395,24 @@ def test_test_iam_permissions_from_dict(): test_test_iam_permissions(request_type=dict) +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest @@ -2487,6 +2641,22 @@ def test_create_backup_from_dict(): test_create_backup(request_type=dict) +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == gsad_backup.CreateBackupRequest() + + @pytest.mark.asyncio async def test_create_backup_async( transport: str = "grpc_asyncio", request_type=gsad_backup.CreateBackupRequest @@ -2710,6 +2880,22 @@ def test_get_backup_from_dict(): test_get_backup(request_type=dict) +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == backup.GetBackupRequest() + + @pytest.mark.asyncio async def test_get_backup_async( transport: str = "grpc_asyncio", request_type=backup.GetBackupRequest @@ -2925,6 +3111,22 @@ def test_update_backup_from_dict(): test_update_backup(request_type=dict) +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == gsad_backup.UpdateBackupRequest() + + @pytest.mark.asyncio async def test_update_backup_async( transport: str = "grpc_asyncio", request_type=gsad_backup.UpdateBackupRequest @@ -3137,6 +3339,22 @@ def test_delete_backup_from_dict(): test_delete_backup(request_type=dict) +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == backup.DeleteBackupRequest() + + @pytest.mark.asyncio async def test_delete_backup_async( transport: str = "grpc_asyncio", request_type=backup.DeleteBackupRequest @@ -3320,6 +3538,22 @@ def test_list_backups_from_dict(): test_list_backups(request_type=dict) +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == backup.ListBackupsRequest() + + @pytest.mark.asyncio async def test_list_backups_async( transport: str = "grpc_asyncio", request_type=backup.ListBackupsRequest @@ -3622,6 +3856,22 @@ def test_restore_database_from_dict(): test_restore_database(request_type=dict) +def test_restore_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_database), "__call__") as call: + client.restore_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.RestoreDatabaseRequest() + + @pytest.mark.asyncio async def test_restore_database_async( transport: str = "grpc_asyncio", @@ -3839,6 +4089,24 @@ def test_list_database_operations_from_dict(): test_list_database_operations(request_type=dict) +def test_list_database_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), "__call__" + ) as call: + client.list_database_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() + + @pytest.mark.asyncio async def test_list_database_operations_async( transport: str = "grpc_asyncio", @@ -4203,6 +4471,24 @@ def test_list_backup_operations_from_dict(): test_list_backup_operations(request_type=dict) +def test_list_backup_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_operations), "__call__" + ) as call: + client.list_backup_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == backup.ListBackupOperationsRequest() + + @pytest.mark.asyncio async def test_list_backup_operations_async( transport: str = "grpc_asyncio", request_type=backup.ListBackupOperationsRequest diff --git a/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/tests/unit/gapic/spanner_admin_instance_v1/__init__.py index 8b13789179..42ffdf2bc4 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/__init__.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index e2caceee98..b64c5eca33 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -98,15 +98,19 @@ def test__get_default_mtls_endpoint(): ) -def test_instance_admin_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [InstanceAdminClient, InstanceAdminAsyncClient,] +) +def test_instance_admin_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = InstanceAdminClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -122,9 +126,11 @@ def test_instance_admin_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -490,6 +496,24 @@ def test_list_instance_configs_from_dict(): test_list_instance_configs(request_type=dict) +def test_list_instance_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), "__call__" + ) as call: + client.list_instance_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest() + + @pytest.mark.asyncio async def test_list_instance_configs_async( transport: str = "grpc_asyncio", @@ -875,6 +899,24 @@ def test_get_instance_config_from_dict(): test_get_instance_config(request_type=dict) +def test_get_instance_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), "__call__" + ) as call: + client.get_instance_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() + + @pytest.mark.asyncio async def test_get_instance_config_async( transport: str = "grpc_asyncio", @@ -1083,6 +1125,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", @@ -1452,6 +1510,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", @@ -1658,6 +1732,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", @@ -1867,6 +1957,22 @@ def test_update_instance_from_dict(): test_update_instance(request_type=dict) +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.UpdateInstanceRequest() + + @pytest.mark.asyncio async def test_update_instance_async( transport: str = "grpc_asyncio", @@ -2072,6 +2178,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner_instance_admin.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", @@ -2258,6 +2380,22 @@ def test_set_iam_policy_from_dict(): test_set_iam_policy(request_type=dict) +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest @@ -2465,6 +2603,22 @@ def test_get_iam_policy_from_dict(): test_get_iam_policy(request_type=dict) +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest @@ -2674,6 +2828,24 @@ def test_test_iam_permissions_from_dict(): test_test_iam_permissions(request_type=dict) +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest diff --git a/tests/unit/gapic/spanner_v1/__init__.py b/tests/unit/gapic/spanner_v1/__init__.py index 8b13789179..42ffdf2bc4 100644 --- a/tests/unit/gapic/spanner_v1/__init__.py +++ b/tests/unit/gapic/spanner_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 56d3818009..37ca9c6deb 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -87,15 +87,17 @@ def test__get_default_mtls_endpoint(): assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_spanner_client_from_service_account_info(): +@pytest.mark.parametrize("client_class", [SpannerClient, SpannerAsyncClient,]) +def test_spanner_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = SpannerClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -109,9 +111,11 @@ def test_spanner_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "spanner.googleapis.com:443" @@ -448,6 +452,22 @@ def test_create_session_from_dict(): test_create_session(request_type=dict) +def test_create_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + client.create_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.CreateSessionRequest() + + @pytest.mark.asyncio async def test_create_session_async( transport: str = "grpc_asyncio", request_type=spanner.CreateSessionRequest @@ -635,6 +655,24 @@ def test_batch_create_sessions_from_dict(): test_batch_create_sessions(request_type=dict) +def test_batch_create_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), "__call__" + ) as call: + client.batch_create_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.BatchCreateSessionsRequest() + + @pytest.mark.asyncio async def test_batch_create_sessions_async( transport: str = "grpc_asyncio", request_type=spanner.BatchCreateSessionsRequest @@ -844,6 +882,22 @@ def test_get_session_from_dict(): test_get_session(request_type=dict) +def test_get_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_session), "__call__") as call: + client.get_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.GetSessionRequest() + + @pytest.mark.asyncio async def test_get_session_async( transport: str = "grpc_asyncio", request_type=spanner.GetSessionRequest @@ -1033,6 +1087,22 @@ def test_list_sessions_from_dict(): test_list_sessions(request_type=dict) +def test_list_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + client.list_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ListSessionsRequest() + + @pytest.mark.asyncio async def test_list_sessions_async( transport: str = "grpc_asyncio", request_type=spanner.ListSessionsRequest @@ -1343,6 +1413,22 @@ def test_delete_session_from_dict(): test_delete_session(request_type=dict) +def test_delete_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + client.delete_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.DeleteSessionRequest() + + @pytest.mark.asyncio async def test_delete_session_async( transport: str = "grpc_asyncio", request_type=spanner.DeleteSessionRequest @@ -1522,6 +1608,22 @@ def test_execute_sql_from_dict(): test_execute_sql(request_type=dict) +def test_execute_sql_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + client.execute_sql() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ExecuteSqlRequest() + + @pytest.mark.asyncio async def test_execute_sql_async( transport: str = "grpc_asyncio", request_type=spanner.ExecuteSqlRequest @@ -1644,6 +1746,24 @@ def test_execute_streaming_sql_from_dict(): test_execute_streaming_sql(request_type=dict) +def test_execute_streaming_sql_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_streaming_sql), "__call__" + ) as call: + client.execute_streaming_sql() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ExecuteSqlRequest() + + @pytest.mark.asyncio async def test_execute_streaming_sql_async( transport: str = "grpc_asyncio", request_type=spanner.ExecuteSqlRequest @@ -1775,6 +1895,24 @@ def test_execute_batch_dml_from_dict(): test_execute_batch_dml(request_type=dict) +def test_execute_batch_dml_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), "__call__" + ) as call: + client.execute_batch_dml() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ExecuteBatchDmlRequest() + + @pytest.mark.asyncio async def test_execute_batch_dml_async( transport: str = "grpc_asyncio", request_type=spanner.ExecuteBatchDmlRequest @@ -1899,6 +2037,22 @@ def test_read_from_dict(): test_read(request_type=dict) +def test_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.read), "__call__") as call: + client.read() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ReadRequest() + + @pytest.mark.asyncio async def test_read_async( transport: str = "grpc_asyncio", request_type=spanner.ReadRequest @@ -2017,6 +2171,22 @@ def test_streaming_read_from_dict(): test_streaming_read(request_type=dict) +def test_streaming_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.streaming_read), "__call__") as call: + client.streaming_read() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.ReadRequest() + + @pytest.mark.asyncio async def test_streaming_read_async( transport: str = "grpc_asyncio", request_type=spanner.ReadRequest @@ -2144,6 +2314,24 @@ def test_begin_transaction_from_dict(): test_begin_transaction(request_type=dict) +def test_begin_transaction_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.BeginTransactionRequest() + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=spanner.BeginTransactionRequest @@ -2355,6 +2543,22 @@ def test_commit_from_dict(): test_commit(request_type=dict) +def test_commit_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.CommitRequest() + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=spanner.CommitRequest @@ -2581,6 +2785,22 @@ def test_rollback_from_dict(): test_rollback(request_type=dict) +def test_rollback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.RollbackRequest() + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=spanner.RollbackRequest @@ -2774,6 +2994,22 @@ def test_partition_query_from_dict(): test_partition_query(request_type=dict) +def test_partition_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.partition_query), "__call__") as call: + client.partition_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.PartitionQueryRequest() + + @pytest.mark.asyncio async def test_partition_query_async( transport: str = "grpc_asyncio", request_type=spanner.PartitionQueryRequest @@ -2894,6 +3130,22 @@ def test_partition_read_from_dict(): test_partition_read(request_type=dict) +def test_partition_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.partition_read), "__call__") as call: + client.partition_read() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == spanner.PartitionReadRequest() + + @pytest.mark.asyncio async def test_partition_read_async( transport: str = "grpc_asyncio", request_type=spanner.PartitionReadRequest diff --git a/tests/unit/spanner_dbapi/test_parse_utils.py b/tests/unit/spanner_dbapi/test_parse_utils.py index 3713ac11a8..73277a7de3 100644 --- a/tests/unit/spanner_dbapi/test_parse_utils.py +++ b/tests/unit/spanner_dbapi/test_parse_utils.py @@ -307,7 +307,7 @@ def test_sql_pyformat_args_to_spanner(self): ), ( "SELECT (an.p + @a0) AS np FROM an WHERE (an.p + @a1) = @a2", - {"a0": 1, "a1": 1.0, "a2": str(31)}, + {"a0": 1, "a1": 1.0, "a2": decimal.Decimal("31")}, ), ), ] @@ -339,20 +339,10 @@ def test_sql_pyformat_args_to_spanner_invalid(self): lambda: sql_pyformat_args_to_spanner(sql, params), ) - def test_cast_for_spanner(self): - import decimal - - from google.cloud.spanner_dbapi.parse_utils import cast_for_spanner - - dec = 3 - value = decimal.Decimal(dec) - self.assertEqual(cast_for_spanner(value), str(dec)) - self.assertEqual(cast_for_spanner(5), 5) - self.assertEqual(cast_for_spanner("string"), "string") - @unittest.skipIf(skip_condition, skip_message) def test_get_param_types(self): import datetime + import decimal from google.cloud.spanner_dbapi.parse_utils import DateStr from google.cloud.spanner_dbapi.parse_utils import TimestampStr @@ -369,6 +359,7 @@ def test_get_param_types(self): "h1": datetime.date(2011, 9, 1), "i1": b"bytes", "j1": None, + "k1": decimal.Decimal("3.194387483193242e+19"), } want_types = { "a1": param_types.INT64, @@ -380,6 +371,7 @@ def test_get_param_types(self): "g1": param_types.TIMESTAMP, "h1": param_types.DATE, "i1": param_types.BYTES, + "k1": param_types.NUMERIC, } got_types = get_param_types(params) self.assertEqual(got_types, want_types) @@ -425,3 +417,19 @@ def 
test_escape_name(self): with self.subTest(name=name): got = escape_name(name) self.assertEqual(got, want) + + def test_insert_from_select(self): + """Check that INSERT from SELECT clause can be executed with arguments.""" + from google.cloud.spanner_dbapi.parse_utils import parse_insert + + SQL = """ +INSERT INTO tab_name (id, data) +SELECT tab_name.id + %s AS anon_1, tab_name.data +FROM tab_name +WHERE tab_name.data IN (%s, %s) +""" + ARGS = [5, "data2", "data3"] + + self.assertEqual( + parse_insert(SQL, ARGS), {"sql_params_list": [(SQL, ARGS)]}, + ) diff --git a/tests/unit/test_snapshot.py b/tests/unit/test_snapshot.py index cc9a67cb4d..24f87a30fc 100644 --- a/tests/unit/test_snapshot.py +++ b/tests/unit/test_snapshot.py @@ -47,10 +47,12 @@ class Test_restart_on_unavailable(OpenTelemetryBase): - def _call_fut(self, restart, span_name=None, session=None, attributes=None): + def _call_fut( + self, restart, request, span_name=None, session=None, attributes=None + ): from google.cloud.spanner_v1.snapshot import _restart_on_unavailable - return _restart_on_unavailable(restart, span_name, session, attributes) + return _restart_on_unavailable(restart, request, span_name, session, attributes) def _make_item(self, value, resume_token=b""): return mock.Mock( @@ -59,18 +61,21 @@ def _make_item(self, value, resume_token=b""): def test_iteration_w_empty_raw(self): raw = _MockIterator() + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], return_value=raw) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), []) + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_non_empty_raw(self): ITEMS = (self._make_item(0), self._make_item(1)) raw = _MockIterator(*ITEMS) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], return_value=raw) - resumable = self._call_fut(restart) + resumable = 
self._call_fut(restart, request) self.assertEqual(list(resumable), list(ITEMS)) - restart.assert_called_once_with() + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_raw_w_resume_tken(self): @@ -81,10 +86,11 @@ def test_iteration_w_raw_w_resume_tken(self): self._make_item(3), ) raw = _MockIterator(*ITEMS) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], return_value=raw) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(ITEMS)) - restart.assert_called_once_with() + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_raw_raising_unavailable_no_token(self): @@ -97,10 +103,12 @@ def test_iteration_w_raw_raising_unavailable_no_token(self): ) before = _MockIterator(fail_after=True, error=ServiceUnavailable("testing")) after = _MockIterator(*ITEMS) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(ITEMS)) - self.assertEqual(restart.mock_calls, [mock.call(), mock.call(resume_token=b"")]) + self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, b"") self.assertNoSpans() def test_iteration_w_raw_raising_retryable_internal_error_no_token(self): @@ -118,10 +126,12 @@ def test_iteration_w_raw_raising_retryable_internal_error_no_token(self): ), ) after = _MockIterator(*ITEMS) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(ITEMS)) - self.assertEqual(restart.mock_calls, [mock.call(), mock.call(resume_token=b"")]) + self.assertEqual(len(restart.mock_calls), 2) 
+ self.assertEqual(request.resume_token, b"") self.assertNoSpans() def test_iteration_w_raw_raising_non_retryable_internal_error_no_token(self): @@ -134,11 +144,12 @@ def test_iteration_w_raw_raising_non_retryable_internal_error_no_token(self): ) before = _MockIterator(fail_after=True, error=InternalServerError("testing")) after = _MockIterator(*ITEMS) + request = mock.Mock(spec=["resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) with self.assertRaises(InternalServerError): list(resumable) - self.assertEqual(restart.mock_calls, [mock.call()]) + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_raw_raising_unavailable(self): @@ -151,12 +162,12 @@ def test_iteration_w_raw_raising_unavailable(self): *(FIRST + SECOND), fail_after=True, error=ServiceUnavailable("testing") ) after = _MockIterator(*LAST) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(FIRST + LAST)) - self.assertEqual( - restart.mock_calls, [mock.call(), mock.call(resume_token=RESUME_TOKEN)] - ) + self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, RESUME_TOKEN) self.assertNoSpans() def test_iteration_w_raw_raising_retryable_internal_error(self): @@ -173,12 +184,12 @@ def test_iteration_w_raw_raising_retryable_internal_error(self): ) ) after = _MockIterator(*LAST) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(FIRST + LAST)) - self.assertEqual( - restart.mock_calls, [mock.call(), mock.call(resume_token=RESUME_TOKEN)] - ) + 
self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, RESUME_TOKEN) self.assertNoSpans() def test_iteration_w_raw_raising_non_retryable_internal_error(self): @@ -191,11 +202,12 @@ def test_iteration_w_raw_raising_non_retryable_internal_error(self): *(FIRST + SECOND), fail_after=True, error=InternalServerError("testing") ) after = _MockIterator(*LAST) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) with self.assertRaises(InternalServerError): list(resumable) - self.assertEqual(restart.mock_calls, [mock.call()]) + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_raw_raising_unavailable_after_token(self): @@ -207,12 +219,12 @@ def test_iteration_w_raw_raising_unavailable_after_token(self): *FIRST, fail_after=True, error=ServiceUnavailable("testing") ) after = _MockIterator(*SECOND) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(FIRST + SECOND)) - self.assertEqual( - restart.mock_calls, [mock.call(), mock.call(resume_token=RESUME_TOKEN)] - ) + self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, RESUME_TOKEN) self.assertNoSpans() def test_iteration_w_raw_raising_retryable_internal_error_after_token(self): @@ -228,12 +240,12 @@ def test_iteration_w_raw_raising_retryable_internal_error_after_token(self): ) ) after = _MockIterator(*SECOND) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) self.assertEqual(list(resumable), list(FIRST + SECOND)) - 
self.assertEqual( - restart.mock_calls, [mock.call(), mock.call(resume_token=RESUME_TOKEN)] - ) + self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, RESUME_TOKEN) self.assertNoSpans() def test_iteration_w_raw_raising_non_retryable_internal_error_after_token(self): @@ -245,19 +257,23 @@ def test_iteration_w_raw_raising_non_retryable_internal_error_after_token(self): *FIRST, fail_after=True, error=InternalServerError("testing") ) after = _MockIterator(*SECOND) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) - resumable = self._call_fut(restart) + resumable = self._call_fut(restart, request) with self.assertRaises(InternalServerError): list(resumable) - self.assertEqual(restart.mock_calls, [mock.call()]) + restart.assert_called_once_with(request=request) self.assertNoSpans() def test_iteration_w_span_creation(self): name = "TestSpan" extra_atts = {"test_att": 1} raw = _MockIterator() + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], return_value=raw) - resumable = self._call_fut(restart, name, _Session(_Database()), extra_atts) + resumable = self._call_fut( + restart, request, name, _Session(_Database()), extra_atts + ) self.assertEqual(list(resumable), []) self.assertSpanAttributes(name, attributes=dict(BASE_ATTRIBUTES, test_att=1)) @@ -272,13 +288,13 @@ def test_iteration_w_multiple_span_creation(self): *(FIRST + SECOND), fail_after=True, error=ServiceUnavailable("testing") ) after = _MockIterator(*LAST) + request = mock.Mock(test="test", spec=["test", "resume_token"]) restart = mock.Mock(spec=[], side_effect=[before, after]) name = "TestSpan" - resumable = self._call_fut(restart, name, _Session(_Database())) + resumable = self._call_fut(restart, request, name, _Session(_Database())) self.assertEqual(list(resumable), list(FIRST + LAST)) - self.assertEqual( - restart.mock_calls, [mock.call(), 
mock.call(resume_token=RESUME_TOKEN)] - ) + self.assertEqual(len(restart.mock_calls), 2) + self.assertEqual(request.resume_token, RESUME_TOKEN) span_list = self.memory_exporter.get_finished_spans() self.assertEqual(len(span_list), 2) diff --git a/tests/unit/test_streamed.py b/tests/unit/test_streamed.py index 7b12f6a94b..66d6f34e2e 100644 --- a/tests/unit/test_streamed.py +++ b/tests/unit/test_streamed.py @@ -147,7 +147,7 @@ def test_properties_set(self): metadata = streamed._metadata = self._make_result_set_metadata(FIELDS) stats = streamed._stats = self._make_result_set_stats() self.assertEqual(list(streamed.fields), FIELDS) - self.assertIs(streamed.metadata, metadata) + self.assertIs(streamed.metadata._pb, metadata) self.assertIs(streamed.stats, stats) def test__merge_chunk_bool(self):