From f6d2c5b8f3c75426881dfce90ab713535416950e Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 9 Dec 2020 10:25:34 -0600 Subject: [PATCH 01/20] docs: add sample for dataset copy (#76) * docs: add sample for dataset copy * add google-cloud-bigquery to test requirements * use relative imports to hopefully fix lint --- samples/snippets/__init__.py | 13 ++++ samples/snippets/copy_dataset.py | 54 +++++++++++++++ samples/snippets/copy_dataset_test.py | 95 ++++++++++++++++++++++++++ samples/snippets/quickstart_test.py | 6 +- samples/snippets/requirements-test.txt | 1 + 5 files changed, 166 insertions(+), 3 deletions(-) create mode 100644 samples/snippets/__init__.py create mode 100644 samples/snippets/copy_dataset.py create mode 100644 samples/snippets/copy_dataset_test.py diff --git a/samples/snippets/__init__.py b/samples/snippets/__init__.py new file mode 100644 index 00000000..c6334245 --- /dev/null +++ b/samples/snippets/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/samples/snippets/copy_dataset.py b/samples/snippets/copy_dataset.py new file mode 100644 index 00000000..084ab733 --- /dev/null +++ b/samples/snippets/copy_dataset.py @@ -0,0 +1,54 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def copy_dataset(override_values={}): + # [START bigquerydatatransfer_copy_dataset] + from google.cloud import bigquery_datatransfer + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + destination_project_id = "my-destination-project" + destination_dataset_id = "my_destination_dataset" + source_project_id = "my-source-project" + source_dataset_id = "my_source_dataset" + # [END bigquerydatatransfer_copy_dataset] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. 
+ destination_project_id = override_values.get( + "destination_project_id", destination_project_id + ) + destination_dataset_id = override_values.get( + "destination_dataset_id", destination_dataset_id + ) + source_project_id = override_values.get("source_project_id", source_project_id) + source_dataset_id = override_values.get("source_dataset_id", source_dataset_id) + # [START bigquerydatatransfer_copy_dataset] + transfer_config = bigquery_datatransfer.TransferConfig( + destination_dataset_id=destination_dataset_id, + display_name="Your Dataset Copy Name", + data_source_id="cross_region_copy", + params={ + "source_project_id": source_project_id, + "source_dataset_id": source_dataset_id, + }, + schedule="every 24 hours", + ) + transfer_config = transfer_client.create_transfer_config( + parent=transfer_client.common_project_path(destination_project_id), + transfer_config=transfer_config, + ) + print(f"Created transfer config: {transfer_config.name}") + # [END bigquerydatatransfer_copy_dataset] + return transfer_config diff --git a/samples/snippets/copy_dataset_test.py b/samples/snippets/copy_dataset_test.py new file mode 100644 index 00000000..6ff70237 --- /dev/null +++ b/samples/snippets/copy_dataset_test.py @@ -0,0 +1,95 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import uuid + +import google.api_core.exceptions +import google.auth +from google.cloud import bigquery +from google.cloud import bigquery_datatransfer +import pytest + +from . 
import copy_dataset + + +def temp_suffix(): + now = datetime.datetime.now() + return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}" + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + _, project_id = default_credentials + return project_id + + +@pytest.fixture(scope="session") +def bigquery_client(default_credentials): + credentials, project_id = default_credentials + return bigquery.Client(credentials=credentials, project=project_id) + + +@pytest.fixture(scope="session") +def transfer_client(default_credentials): + credentials, _ = default_credentials + return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials) + + +@pytest.fixture +def to_delete_configs(transfer_client): + to_delete = [] + yield to_delete + for config_name in to_delete: + try: + transfer_client.delete_transfer_config(name=config_name) + except google.api_core.exceptions.GoogleAPICallError: + pass + + +@pytest.fixture(scope="module") +def destination_dataset_id(bigquery_client, project_id): + dataset_id = f"bqdts_dest_{temp_suffix()}" + bigquery_client.create_dataset(f"{project_id}.{dataset_id}") + yield dataset_id + bigquery_client.delete_dataset(dataset_id, delete_contents=True) + + +@pytest.fixture(scope="module") +def source_dataset_id(bigquery_client, project_id): + dataset_id = f"bqdts_src_{temp_suffix()}" + bigquery_client.create_dataset(f"{project_id}.{dataset_id}") + yield dataset_id + bigquery_client.delete_dataset(dataset_id, delete_contents=True) + + +def test_copy_dataset( + capsys, project_id, destination_dataset_id, source_dataset_id, to_delete_configs +): + transfer_config = copy_dataset.copy_dataset( + { + "destination_project_id": project_id, + "destination_dataset_id": destination_dataset_id, + "source_project_id": project_id, + "source_dataset_id": source_dataset_id, + } + ) + to_delete_configs.append(transfer_config.name) + out, _ = capsys.readouterr() + assert transfer_config.name in out diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py index 387c2e8b..5b873c63 100644 --- a/samples/snippets/quickstart_test.py +++ b/samples/snippets/quickstart_test.py @@ -16,10 +16,10 @@ import pytest -import quickstart +from . 
import quickstart -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] @pytest.fixture @@ -32,4 +32,4 @@ def mock_project_id(): def test_quickstart(capsys, mock_project_id): quickstart.run_quickstart(mock_project_id) out, _ = capsys.readouterr() - assert 'Supported Data Sources:' in out + assert "Supported Data Sources:" in out diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 2466e250..fff09f56 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,3 @@ +google-cloud-bigquery==2.6.0 pytest==6.0.1 mock==4.0.2 From aa216bf0ce2f899d47a1a6e6d061497b15b8e800 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Dec 2020 18:12:02 +0100 Subject: [PATCH 02/20] chore(deps): update dependency google-cloud-bigquery-datatransfer to v3 (#81) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-bigquery-datatransfer](https://togithub.com/googleapis/python-bigquery-datatransfer) | major | `==2.1.0` -> `==3.0.0` | --- ### Release Notes
googleapis/python-bigquery-datatransfer ### [`v3.0.0`](https://togithub.com/googleapis/python-bigquery-datatransfer/blob/master/CHANGELOG.md#​300-httpswwwgithubcomgoogleapispython-bigquery-datatransfercomparev210v300-2020-12-09) [Compare Source](https://togithub.com/googleapis/python-bigquery-datatransfer/compare/v2.1.0...v3.0.0) ##### ⚠ BREAKING CHANGES - type is renamed to type\_ to avoid conflict with built-in functions (introduced in [googleapis/gapic-generator-python#​595](https://togithub.com/googleapis/gapic-generator-python/issues/595)) ##### Features - add common resource path helpers ([#​69](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/69)) ([e0bcedb](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/e0bcedb58109e38a58584d5b3087f03e1fa10835)) ##### Bug Fixes - avoid collision with built-in functions by renaming type property to type\_ ([#​53](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/53)) ([b954411](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/b95441140f7c86dd3e833aef0532badd6280ef48)), closes [/github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32](https://www.github.com/googleapis//github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py/issues/L27-L32) ##### Documentation - update intersphinx links ([#​78](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/78)) ([a78ba39](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/a78ba39bf1507cbc9e2a51fe4553d602da7f7601))
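For anyone updating dependent sample code alongside this pin bump, the practical impact of the breaking change is the `type` → `type_` field rename on the generated message types. A minimal sketch of what that looks like in client code (not part of this PR; the project ID and the loop below are illustrative assumptions):

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Assumed project ID, for illustration only.
parent = client.common_project_path("my-project")

for data_source in client.list_data_sources(parent=parent):
    for param in data_source.parameters:
        # google-cloud-bigquery-datatransfer==2.x exposed this field as `param.type`;
        # 3.0.0 renames it to `param.type_` to avoid shadowing the Python built-in.
        print(param.param_id, param.type_)
```

The samples in this repository do not read that field, so only the version pins change in this PR; code elsewhere that accesses a field literally named `type` on these message types needs the trailing underscore after upgrading.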
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-datatransfer). --- samples/requirements.txt | 2 +- samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/requirements.txt b/samples/requirements.txt index 1cd31695..e3f405f0 100644 --- a/samples/requirements.txt +++ b/samples/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-datatransfer==2.1.0 +google-cloud-bigquery-datatransfer==3.0.0 google-cloud-bigquery diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 00c87c7c..fca77e10 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-bigquery-datatransfer==2.1.0 +google-cloud-bigquery-datatransfer==3.0.0 From af0406eedac1dc8c663b5c8f67f56255caeea2fa Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 10 Dec 2020 10:56:58 -0600 Subject: [PATCH 03/20] docs: remove out-of-date sample from README (#80) See samples/ directory for maintained and tested samples --- README.rst | 28 +------------- samples/snippets/conftest.py | 53 +++++++++++++++++++++++++++ samples/snippets/copy_dataset_test.py | 38 ------------------- samples/snippets/quickstart.py | 27 +++++++++----- samples/snippets/quickstart_test.py | 18 +-------- 5 files changed, 73 insertions(+), 91 deletions(-) create mode 100644 samples/snippets/conftest.py diff --git a/README.rst b/README.rst index cdb6ce31..c0d4feeb 100644 --- a/README.rst +++ b/README.rst @@ -1,7 +1,7 @@ Python Client for BigQuery Data Transfer API ============================================ -|GA| |pypi| |versions| +|GA| |pypi| |versions| The `BigQuery Data Transfer API`_ allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis. @@ -79,32 +79,6 @@ Windows \Scripts\activate \Scripts\pip.exe install google-cloud-bigquery-datatransfer -Example Usage -~~~~~~~~~~~~~ - -DataTransferServiceClient -^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: py - - from google.cloud import bigquery_datatransfer_v1 - - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - parent = client.location_path('[PROJECT]', '[LOCATION]') - - - # Iterate over all results - for element in client.list_data_sources(parent): - # process element - pass - - # Or iterate over results one page at a time - for page in client.list_data_sources(parent).pages: - for element in page: - # process element - pass - Next Steps ~~~~~~~~~~ diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py new file mode 100644 index 00000000..44a8fb49 --- /dev/null +++ b/samples/snippets/conftest.py @@ -0,0 +1,53 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.exceptions +import google.auth +from google.cloud import bigquery +from google.cloud import bigquery_datatransfer +import pytest + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + _, project_id = default_credentials + return project_id + + +@pytest.fixture(scope="session") +def bigquery_client(default_credentials): + credentials, project_id = default_credentials + return bigquery.Client(credentials=credentials, project=project_id) + + +@pytest.fixture(scope="session") +def transfer_client(default_credentials): + credentials, _ = default_credentials + return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials) + + +@pytest.fixture +def to_delete_configs(transfer_client): + to_delete = [] + yield to_delete + for config_name in to_delete: + try: + transfer_client.delete_transfer_config(name=config_name) + except google.api_core.exceptions.GoogleAPICallError: + pass diff --git a/samples/snippets/copy_dataset_test.py b/samples/snippets/copy_dataset_test.py index 6ff70237..00a5e560 100644 --- a/samples/snippets/copy_dataset_test.py +++ b/samples/snippets/copy_dataset_test.py @@ -15,10 +15,6 @@ import datetime import uuid -import google.api_core.exceptions -import google.auth -from google.cloud import bigquery -from google.cloud import bigquery_datatransfer import pytest from . 
import copy_dataset @@ -29,40 +25,6 @@ def temp_suffix(): return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}" -@pytest.fixture(scope="session") -def default_credentials(): - return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) - - -@pytest.fixture(scope="session") -def project_id(default_credentials): - _, project_id = default_credentials - return project_id - - -@pytest.fixture(scope="session") -def bigquery_client(default_credentials): - credentials, project_id = default_credentials - return bigquery.Client(credentials=credentials, project=project_id) - - -@pytest.fixture(scope="session") -def transfer_client(default_credentials): - credentials, _ = default_credentials - return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials) - - -@pytest.fixture -def to_delete_configs(transfer_client): - to_delete = [] - yield to_delete - for config_name in to_delete: - try: - transfer_client.delete_transfer_config(name=config_name) - except google.api_core.exceptions.GoogleAPICallError: - pass - - @pytest.fixture(scope="module") def destination_dataset_id(bigquery_client, project_id): dataset_id = f"bqdts_dest_{temp_suffix()}" diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py index 042a6459..de8d05e5 100644 --- a/samples/snippets/quickstart.py +++ b/samples/snippets/quickstart.py @@ -14,29 +14,36 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys -def run_quickstart(project="my-project"): + +def run_quickstart(override_values={}): # [START bigquerydatatransfer_quickstart] from google.cloud import bigquery_datatransfer client = bigquery_datatransfer.DataTransferServiceClient() # TODO: Update to your project ID. - # project = "my-project" + project_id = "my-project" + # [END bigquerydatatransfer_quickstart] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + # [START bigquerydatatransfer_quickstart] # Get the full path to your project. - parent = f"projects/{project}" + parent = client.common_project_path(project_id) - print('Supported Data Sources:') + print("Supported Data Sources:") # Iterate over all possible data sources. for data_source in client.list_data_sources(parent=parent): - print('{}:'.format(data_source.display_name)) - print('\tID: {}'.format(data_source.data_source_id)) - print('\tFull path: {}'.format(data_source.name)) - print('\tDescription: {}'.format(data_source.description)) + print("{}:".format(data_source.display_name)) + print("\tID: {}".format(data_source.data_source_id)) + print("\tFull path: {}".format(data_source.name)) + print("\tDescription: {}".format(data_source.description)) # [END bigquerydatatransfer_quickstart] -if __name__ == '__main__': - run_quickstart() +if __name__ == "__main__": + run_quickstart(override_values={"project_id": sys.argv[1]}) diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py index 5b873c63..570d5181 100644 --- a/samples/snippets/quickstart_test.py +++ b/samples/snippets/quickstart_test.py @@ -12,24 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os - -import pytest - from . 
import quickstart -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] - - -@pytest.fixture -def mock_project_id(): - """Mock out project and replace with project from environment.""" - - return PROJECT - - -def test_quickstart(capsys, mock_project_id): - quickstart.run_quickstart(mock_project_id) +def test_quickstart(capsys, project_id): + quickstart.run_quickstart(override_values={"project_id": project_id}) out, _ = capsys.readouterr() assert "Supported Data Sources:" in out From 3e2bbef292ddda6a736be397be4e5a0fb213eeff Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 14 Dec 2020 13:59:37 -0800 Subject: [PATCH 04/20] fix: remove recv msg limit, add enums to `types` (#84) PiperOrigin-RevId: 347055288 --- .../data_transfer_service/transports/__init__.py | 1 - .../services/data_transfer_service/transports/grpc.py | 10 +++++++++- .../data_transfer_service/transports/grpc_asyncio.py | 8 ++++++++ .../cloud/bigquery_datatransfer_v1/types/__init__.py | 5 ++++- synth.metadata | 6 +++--- .../test_data_transfer_service.py | 8 ++++++++ 6 files changed, 32 insertions(+), 6 deletions(-) diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py index 2b71d0f3..097e5854 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py @@ -30,7 +30,6 @@ _transport_registry["grpc"] = DataTransferServiceGrpcTransport _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport - __all__ = ( "DataTransferServiceTransport", "DataTransferServiceGrpcTransport", diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py index 442cdd27..12ce7f93 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -151,6 +151,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -169,6 +173,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -195,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py index a65ac425..41eeb000 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -196,6 +196,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -214,6 +218,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/google/cloud/bigquery_datatransfer_v1/types/__init__.py index c4f07ee6..f793415b 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -21,6 +21,8 @@ TransferConfig, TransferRun, TransferMessage, + TransferType, + TransferState, ) from .datatransfer import ( DataSourceParameter, @@ -48,13 +50,14 @@ StartManualTransferRunsResponse, ) - __all__ = ( "EmailPreferences", "ScheduleOptions", "TransferConfig", "TransferRun", "TransferMessage", + "TransferType", + "TransferState", "DataSourceParameter", "DataSource", "GetDataSourceRequest", diff --git a/synth.metadata b/synth.metadata index b02e1f7f..5810770d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "3fb982cc0d4df052495b267f2a7bd3e1c3ea1683" + "sha": "af0406eedac1dc8c663b5c8f67f56255caeea2fa" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3f87da2ed1ddc3566ef0810c4fc06a2682cc9f5f", - "internalRef": "343022252" + "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", + "internalRef": "347055288" } }, { diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 1596cfad..fafbdc5d 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -4588,6 +4588,10 @@ def test_data_transfer_service_transport_channel_mtls_with_client_cert_source( scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -4629,6 +4633,10 @@ def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel From cd519709228cda3bbcf2fd978d37ccd04ef27c82 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 15 Dec 2020 14:04:45 -0600 Subject: [PATCH 05/20] docs: add scheduled query samples (#83) * docs: add scheduled query samples * test: opt-out of type annotations for now * test: use environment variable for project ID * set quota project * consolidate config creation to conserve quota --- samples/snippets/conftest.py | 75 +++++++- samples/snippets/copy_dataset_test.py | 11 +- samples/snippets/manage_transfer_configs.py | 171 ++++++++++++++++++ .../snippets/manage_transfer_configs_test.py | 70 +++++++ samples/snippets/noxfile_config.py | 38 ++++ samples/snippets/quickstart_test.py | 6 +- samples/snippets/scheduled_query.py | 80 ++++++++ .../scheduled_query_test.py} | 18 +- samples/tests/__init__.py | 0 samples/tests/conftest.py | 74 -------- samples/tests/test_update_transfer_config.py | 30 --- 11 files changed, 448 insertions(+), 125 deletions(-) create mode 100644 samples/snippets/manage_transfer_configs.py create mode 100644 samples/snippets/manage_transfer_configs_test.py create mode 100644 samples/snippets/noxfile_config.py create mode 100644 samples/snippets/scheduled_query.py rename samples/{tests/test_create_scheduled_query.py => snippets/scheduled_query_test.py} (57%) delete mode 100644 samples/tests/__init__.py delete mode 100644 samples/tests/conftest.py delete mode 100644 samples/tests/test_update_transfer_config.py diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py index 44a8fb49..998d5ea7 100644 --- a/samples/snippets/conftest.py +++ b/samples/snippets/conftest.py @@ -12,6 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime +import os +import uuid + +from google.api_core import client_options import google.api_core.exceptions import google.auth from google.cloud import bigquery @@ -19,27 +24,81 @@ import pytest +def temp_suffix(): + now = datetime.datetime.now() + return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}" + + +@pytest.fixture(scope="session") +def bigquery_client(default_credentials): + credentials, project_id = default_credentials + return bigquery.Client(credentials=credentials, project=project_id) + + +@pytest.fixture(scope="session") +def dataset_id(bigquery_client, project_id): + dataset_id = f"bqdts_{temp_suffix()}" + bigquery_client.create_dataset(f"{project_id}.{dataset_id}") + yield dataset_id + bigquery_client.delete_dataset(dataset_id, delete_contents=True) + + @pytest.fixture(scope="session") def default_credentials(): return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) @pytest.fixture(scope="session") -def project_id(default_credentials): - _, project_id = default_credentials - return project_id +def project_id(): + return os.environ["GOOGLE_CLOUD_PROJECT"] @pytest.fixture(scope="session") -def bigquery_client(default_credentials): - credentials, project_id = default_credentials - return bigquery.Client(credentials=credentials, project=project_id) +def service_account_name(default_credentials): + credentials, _ = default_credentials + # Note: this property is not available when running with user account + # credentials, but only service account credentials are used in our test + # infrastructure. 
+ return credentials.service_account_email @pytest.fixture(scope="session") -def transfer_client(default_credentials): +def transfer_client(default_credentials, project_id): credentials, _ = default_credentials - return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials) + options = client_options.ClientOptions(quota_project_id=project_id) + + transfer_client = bigquery_datatransfer.DataTransferServiceClient( + credentials=credentials, client_options=options + ) + + # Ensure quota is always attributed to the correct project. + bigquery_datatransfer.DataTransferServiceClient = lambda: transfer_client + + return transfer_client + + +@pytest.fixture(scope="session") +def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name): + from . import manage_transfer_configs, scheduled_query + + # Use the transfer_client fixture so we know quota is attributed to the + # correct project. + assert transfer_client is not None + + # To conserve limited BQ-DTS quota, this fixture creates only one transfer + # config for a whole session and is used to test the scheduled_query.py and + # the delete operation in manage_transfer_configs.py. + transfer_config = scheduled_query.create_scheduled_query( + { + "project_id": project_id, + "dataset_id": dataset_id, + "service_account_name": service_account_name, + } + ) + yield transfer_config.name + manage_transfer_configs.delete_config( + {"transfer_config_name": transfer_config.name} + ) @pytest.fixture diff --git a/samples/snippets/copy_dataset_test.py b/samples/snippets/copy_dataset_test.py index 00a5e560..349f05ce 100644 --- a/samples/snippets/copy_dataset_test.py +++ b/samples/snippets/copy_dataset_test.py @@ -42,8 +42,17 @@ def source_dataset_id(bigquery_client, project_id): def test_copy_dataset( - capsys, project_id, destination_dataset_id, source_dataset_id, to_delete_configs + capsys, + transfer_client, + project_id, + destination_dataset_id, + source_dataset_id, + to_delete_configs, ): + # Use the transfer_client fixture so we know quota is attributed to the + # correct project. + assert transfer_client is not None + transfer_config = copy_dataset.copy_dataset( { "destination_project_id": project_id, diff --git a/samples/snippets/manage_transfer_configs.py b/samples/snippets/manage_transfer_configs.py new file mode 100644 index 00000000..6b4abd78 --- /dev/null +++ b/samples/snippets/manage_transfer_configs.py @@ -0,0 +1,171 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def list_configs(override_values={}): + # [START bigquerydatatransfer_list_configs] + from google.cloud import bigquery_datatransfer + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + project_id = "my-project" + # [END bigquerydatatransfer_list_configs] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. 
+ project_id = override_values.get("project_id", project_id) + # [START bigquerydatatransfer_list_configs] + parent = transfer_client.common_project_path(project_id) + + configs = transfer_client.list_transfer_configs(parent=parent) + print("Got the following configs:") + for config in configs: + print(f"\tID: {config.name}, Schedule: {config.schedule}") + # [END bigquerydatatransfer_list_configs] + + +def update_config(override_values={}): + # [START bigquerydatatransfer_update_config] + from google.cloud import bigquery_datatransfer + from google.protobuf import field_mask_pb2 + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd" + new_display_name = "My Transfer Config" + # [END bigquerydatatransfer_update_config] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + new_display_name = override_values.get("new_display_name", new_display_name) + transfer_config_name = override_values.get( + "transfer_config_name", transfer_config_name + ) + # [START bigquerydatatransfer_update_config] + + transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name) + transfer_config.display_name = new_display_name + + transfer_config = transfer_client.update_transfer_config( + { + "transfer_config": transfer_config, + "update_mask": field_mask_pb2.FieldMask(paths=["display_name"]), + } + ) + + print(f"Updated config: '{transfer_config.name}'") + print(f"New display name: '{transfer_config.display_name}'") + # [END bigquerydatatransfer_update_config] + # Return the config name for testing purposes, so that it can be deleted. + return transfer_config + + +def update_credentials_with_service_account(override_values={}): + # [START bigquerydatatransfer_update_credentials] + from google.cloud import bigquery_datatransfer + from google.protobuf import field_mask_pb2 + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + service_account_name = "abcdef-test-sa@abcdef-test.iam.gserviceaccount.com" + transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd" + # [END bigquerydatatransfer_update_credentials] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + service_account_name = override_values.get( + "service_account_name", service_account_name + ) + transfer_config_name = override_values.get( + "transfer_config_name", transfer_config_name + ) + # [START bigquerydatatransfer_update_credentials] + + transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name) + + transfer_config = transfer_client.update_transfer_config( + { + "transfer_config": transfer_config, + "update_mask": field_mask_pb2.FieldMask(paths=["service_account_name"]), + "service_account_name": service_account_name, + } + ) + + print("Updated config: '{}'".format(transfer_config.name)) + # [END bigquerydatatransfer_update_credentials] + # Return the config name for testing purposes, so that it can be deleted. 
+ return transfer_config + + +def schedule_backfill(override_values={}): + # [START bigquerydatatransfer_schedule_backfill] + import datetime + + from google.cloud import bigquery_datatransfer + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd" + # [END bigquerydatatransfer_schedule_backfill] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + transfer_config_name = override_values.get( + "transfer_config_name", transfer_config_name + ) + # [START bigquerydatatransfer_schedule_backfill] + now = datetime.datetime.now(datetime.timezone.utc) + start_time = now - datetime.timedelta(days=5) + end_time = now - datetime.timedelta(days=2) + + # Some data sources, such as scheduled_query only support daily run. + # Truncate start_time and end_time to midnight time (00:00AM UTC). + start_time = datetime.datetime( + start_time.year, start_time.month, start_time.day, tzinfo=datetime.timezone.utc + ) + end_time = datetime.datetime( + end_time.year, end_time.month, end_time.day, tzinfo=datetime.timezone.utc + ) + + response = transfer_client.schedule_transfer_runs( + parent=transfer_config_name, + start_time=start_time, + end_time=end_time, + ) + + print("Started transfer runs:") + for run in response.runs: + print(f"backfill: {run.run_time} run: {run.name}") + # [END bigquerydatatransfer_schedule_backfill] + return response.runs + + +def delete_config(override_values={}): + # [START bigquerydatatransfer_delete_transfer] + import google.api_core.exceptions + from google.cloud import bigquery_datatransfer + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd" + # [END bigquerydatatransfer_delete_transfer] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + transfer_config_name = override_values.get( + "transfer_config_name", transfer_config_name + ) + # [START bigquerydatatransfer_delete_transfer] + try: + transfer_client.delete_transfer_config(name=transfer_config_name) + except google.api_core.exceptions.NotFound: + print("Transfer config not found.") + else: + print(f"Deleted transfer config: {transfer_config_name}") + # [END bigquerydatatransfer_delete_transfer] diff --git a/samples/snippets/manage_transfer_configs_test.py b/samples/snippets/manage_transfer_configs_test.py new file mode 100644 index 00000000..de31c713 --- /dev/null +++ b/samples/snippets/manage_transfer_configs_test.py @@ -0,0 +1,70 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . 
import manage_transfer_configs + + +def test_list_configs(capsys, project_id, transfer_config_name): + manage_transfer_configs.list_configs({"project_id": project_id}) + out, _ = capsys.readouterr() + assert "Got the following configs:" in out + assert transfer_config_name in out + + +def test_update_config(capsys, transfer_config_name): + manage_transfer_configs.update_config( + { + "new_display_name": "name from test_update_config", + "transfer_config_name": transfer_config_name, + } + ) + out, _ = capsys.readouterr() + assert "Updated config:" in out + assert transfer_config_name in out + assert "name from test_update_config" in out + + +def test_update_credentials_with_service_account( + capsys, project_id, service_account_name, transfer_config_name +): + manage_transfer_configs.update_credentials_with_service_account( + { + "project_id": project_id, + "service_account_name": service_account_name, + "transfer_config_name": transfer_config_name, + } + ) + out, _ = capsys.readouterr() + assert "Updated config:" in out + assert transfer_config_name in out + + +def test_schedule_backfill(capsys, transfer_config_name): + runs = manage_transfer_configs.schedule_backfill( + { + "transfer_config_name": transfer_config_name, + } + ) + out, _ = capsys.readouterr() + assert "Started transfer runs:" in out + # Run IDs should include the transfer name in their path. + assert transfer_config_name in out + # Check that there are runs for 5, 4, 3, and 2 days ago. + assert len(runs) == 4 + + +def test_delete_config(capsys, transfer_config_name): + # transfer_config_name fixture in conftest.py calls the delete config + # sample. To conserve limited BQ-DTS quota we only make basic checks. + assert len(transfer_config_name) != 0 diff --git a/samples/snippets/noxfile_config.py b/samples/snippets/noxfile_config.py new file mode 100644 index 00000000..57b25e58 --- /dev/null +++ b/samples/snippets/noxfile_config.py @@ -0,0 +1,38 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be inported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", + # A dictionary you want to inject into your test. 
Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py index 570d5181..46398b0f 100644 --- a/samples/snippets/quickstart_test.py +++ b/samples/snippets/quickstart_test.py @@ -15,7 +15,11 @@ from . import quickstart -def test_quickstart(capsys, project_id): +def test_quickstart(capsys, transfer_client, project_id): + # Use the transfer_client fixture so we know quota is attributed to the + # correct project. + assert transfer_client is not None + quickstart.run_quickstart(override_values={"project_id": project_id}) out, _ = capsys.readouterr() assert "Supported Data Sources:" in out diff --git a/samples/snippets/scheduled_query.py b/samples/snippets/scheduled_query.py new file mode 100644 index 00000000..ab85c515 --- /dev/null +++ b/samples/snippets/scheduled_query.py @@ -0,0 +1,80 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_scheduled_query(override_values={}): + # [START bigquerydatatransfer_create_scheduled_query] + # [START bigquerydatatransfer_create_scheduled_query_with_service_account] + from google.cloud import bigquery_datatransfer + + transfer_client = bigquery_datatransfer.DataTransferServiceClient() + + # The project where the query job runs is the same as the project + # containing the destination dataset. + project_id = "your-project-id" + dataset_id = "your_dataset_id" + + # This service account will be used to execute the scheduled queries. Omit + # this request parameter to run the query as the user with the credentials + # associated with this client. + service_account_name = "abcdef-test-sa@abcdef-test.iam.gserviceaccount.com" + # [END bigquerydatatransfer_create_scheduled_query_with_service_account] + # [END bigquerydatatransfer_create_scheduled_query] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + dataset_id = override_values.get("dataset_id", dataset_id) + service_account_name = override_values.get( + "service_account_name", service_account_name + ) + # [START bigquerydatatransfer_create_scheduled_query] + # [START bigquerydatatransfer_create_scheduled_query_with_service_account] + + # Use standard SQL syntax for the query. 
+ query_string = """ + SELECT + CURRENT_TIMESTAMP() as current_time, + @run_time as intended_run_time, + @run_date as intended_run_date, + 17 as some_integer + """ + + parent = transfer_client.common_project_path(project_id) + + transfer_config = bigquery_datatransfer.TransferConfig( + destination_dataset_id=dataset_id, + display_name="Your Scheduled Query Name", + data_source_id="scheduled_query", + params={ + "query": query_string, + "destination_table_name_template": "your_table_{run_date}", + "write_disposition": "WRITE_TRUNCATE", + "partitioning_field": "", + }, + schedule="every 24 hours", + ) + + transfer_config = transfer_client.create_transfer_config( + bigquery_datatransfer.CreateTransferConfigRequest( + parent=parent, + transfer_config=transfer_config, + service_account_name=service_account_name, + ) + ) + + print("Created scheduled query '{}'".format(transfer_config.name)) + # [END bigquerydatatransfer_create_scheduled_query_with_service_account] + # [END bigquerydatatransfer_create_scheduled_query] + # Return the config name for testing purposes, so that it can be deleted. + return transfer_config diff --git a/samples/tests/test_create_scheduled_query.py b/samples/snippets/scheduled_query_test.py similarity index 57% rename from samples/tests/test_create_scheduled_query.py rename to samples/snippets/scheduled_query_test.py index 9d885e3f..ef841824 100644 --- a/samples/tests/test_create_scheduled_query.py +++ b/samples/snippets/scheduled_query_test.py @@ -1,12 +1,10 @@ -# -*- coding: utf-8 -*- -# # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,13 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import create_scheduled_query +def test_create_scheduled_query(transfer_config_name): + from . import scheduled_query -def test_sample(project_id, dataset_id, capsys, to_delete): - config_name = create_scheduled_query.sample_create_transfer_config( - project_id, dataset_id - ) - to_delete.append(config_name) - out, err = capsys.readouterr() - assert config_name in out + # transfer_config_name fixture in conftest.py calls the scheduled query + # sample. To conserve limited BQ-DTS quota we only make basic checks. + assert hasattr(scheduled_query, "create_scheduled_query") + assert len(transfer_config_name) != 0 diff --git a/samples/tests/__init__.py b/samples/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/samples/tests/conftest.py b/samples/tests/conftest.py deleted file mode 100644 index 90589e8b..00000000 --- a/samples/tests/conftest.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import os -import uuid - -import google.auth -import google.cloud.bigquery -import pytest - - -@pytest.fixture -def project_id(): - return os.environ["GOOGLE_CLOUD_PROJECT"] - - -@pytest.fixture(scope="module") -def credentials(): - # If using a service account, the BQ DTS robot associated with your project - # requires the roles/iam.serviceAccountShortTermTokenMinter permission to - # act on behalf of the account. - creds, _ = google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) - return creds - - -@pytest.fixture(scope="module") -def bqdts_client(credentials): - from google.cloud import bigquery_datatransfer - - return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials) - - -@pytest.fixture(scope="module") -def bigquery_client(credentials): - return google.cloud.bigquery.Client(credentials=credentials) - - -@pytest.fixture(scope="module") -def dataset_id(bigquery_client): - # Ensure the test account has owner permissions on the dataset by creating - # one from scratch. - now = datetime.datetime.now() - temp_ds_id = "bqdts_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - bigquery_client.create_dataset(temp_ds_id) - yield temp_ds_id - bigquery_client.delete_dataset(temp_ds_id) - - -@pytest.fixture -def to_delete(bqdts_client): - doomed = [] - yield doomed - - for resource_name in doomed: - try: - bqdts_client.delete_transfer_config(name=resource_name) - except Exception: - pass diff --git a/samples/tests/test_update_transfer_config.py b/samples/tests/test_update_transfer_config.py deleted file mode 100644 index 827d8023..00000000 --- a/samples/tests/test_update_transfer_config.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. 
import create_scheduled_query, update_transfer_config - - -def test_update_config_sample(project_id, dataset_id, capsys, to_delete): - config_name = create_scheduled_query.sample_create_transfer_config( - project_id, dataset_id - ) - - display_name = "Transfer config updated" - config = update_transfer_config.sample_update_transfer_config(config_name, display_name) - to_delete.append(config.name) - out, err = capsys.readouterr() - assert config.name in out - assert config.display_name == display_name From 093e407c60b117a00d2cdf8d225f22d61bc221c4 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 21 Dec 2020 08:45:49 -0600 Subject: [PATCH 06/20] docs: remove redundant samples (#86) --- samples/__init__.py | 0 samples/create_scheduled_query.py | 97 ------------ samples/noxfile.py | 247 ------------------------------ samples/requirements-test.txt | 4 - samples/requirements.txt | 2 - samples/update_transfer_config.py | 56 ------- 6 files changed, 406 deletions(-) delete mode 100644 samples/__init__.py delete mode 100644 samples/create_scheduled_query.py delete mode 100644 samples/noxfile.py delete mode 100644 samples/requirements-test.txt delete mode 100644 samples/requirements.txt delete mode 100644 samples/update_transfer_config.py diff --git a/samples/__init__.py b/samples/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/samples/create_scheduled_query.py b/samples/create_scheduled_query.py deleted file mode 100644 index 297e1f73..00000000 --- a/samples/create_scheduled_query.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# To install the latest published package dependency, execute the following: -# pip install google-cloud-bigquery-datatransfer - - -def sample_create_transfer_config(project_id, dataset_id, authorization_code=""): - # [START bigquerydatatransfer_create_scheduled_query] - from google.cloud import bigquery_datatransfer - - client = bigquery_datatransfer.DataTransferServiceClient() - - # TODO(developer): Set the project_id to the project that contains the - # destination dataset. - # project_id = "your-project-id" - - # TODO(developer): Set the destination dataset. The authorized user must - # have owner permissions on the dataset. 
- # dataset_id = "your_dataset_id" - - # TODO(developer): The first time you run this sample, set the - # authorization code to a value from the URL: - # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=433065040935-hav5fqnc9p9cht3rqneus9115ias2kn1.apps.googleusercontent.com&scope=https://www.googleapis.com/auth/bigquery%20https://www.googleapis.com/auth/drive&redirect_uri=urn:ietf:wg:oauth:2.0:oob - # - # authorization_code = "_4/ABCD-EFGHIJKLMNOP-QRSTUVWXYZ" - # - # You can use an empty string for authorization_code in subsequent runs of - # this code sample with the same credentials. - # - # authorization_code = "" - - # Use standard SQL syntax for the query. - query_string = """ - SELECT - CURRENT_TIMESTAMP() as current_time, - @run_time as intended_run_time, - @run_date as intended_run_date, - 17 as some_integer - """ - - parent = f"projects/{project_id}" - - transfer_config = bigquery_datatransfer.TransferConfig( - destination_dataset_id=dataset_id, - display_name="Your Scheduled Query Name", - data_source_id="scheduled_query", - params={ - "query": query_string, - "destination_table_name_template": "your_table_{run_date}", - "write_disposition": "WRITE_TRUNCATE", - "partitioning_field": "", - }, - schedule="every 24 hours", - ) - - response = client.create_transfer_config( - request={ - "parent": parent, - "transfer_config": transfer_config, - "authorization_code": authorization_code, - } - ) - - print("Created scheduled query '{}'".format(response.name)) - # [END bigquerydatatransfer_create_scheduled_query] - # Return the config name for testing purposes, so that it can be deleted. - return response.name - - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--project_id", type=str, default="your-project-id") - parser.add_argument("--dataset_id", type=str, default="your_dataset_id") - parser.add_argument("--authorization_code", type=str, default="") - args = parser.parse_args() - - sample_create_transfer_config(args.project_id, args.dataset_id, args.authorization_code) - - -if __name__ == "__main__": - main() diff --git a/samples/noxfile.py b/samples/noxfile.py deleted file mode 100644 index bca0522e..00000000 --- a/samples/noxfile.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -# Copy `noxfile_config.py` to your directory and modify it instead. 
- - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - 'enforce_type_hints': False, - - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - 'envs': {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] - # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. 
-# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - "." - ] - session.run("flake8", *args) -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - session.install("black") - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars() - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/requirements-test.txt b/samples/requirements-test.txt deleted file mode 100644 index cadf5ccd..00000000 --- a/samples/requirements-test.txt +++ /dev/null @@ -1,4 +0,0 @@ -pytest==6.0.1 -mock==4.0.2 - - diff --git a/samples/requirements.txt b/samples/requirements.txt deleted file mode 100644 index e3f405f0..00000000 --- a/samples/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -google-cloud-bigquery-datatransfer==3.0.0 -google-cloud-bigquery diff --git a/samples/update_transfer_config.py b/samples/update_transfer_config.py deleted file mode 100644 index 3e6ed1e8..00000000 --- a/samples/update_transfer_config.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# To install the latest published package dependency, execute the following: -# pip install google-cloud-bigquery-datatransfer - - -def sample_update_transfer_config(config_name, display_name): - # [START bigquerydatatransfer_update_config] - from google.cloud import bigquery_datatransfer - - client = bigquery_datatransfer.DataTransferServiceClient() - # TODO(developer): Set the config_name which user wants to update. - # config_name = "your-created-transfer-config-name" - - # TODO(developer): Set the display_name of transfer_config. - # config_name = "your-created-transfer-config-name" - - transfer_config = client.get_transfer_config(name=config_name) - transfer_config.display_name = display_name - field_mask = {"paths": ["display_name"]} - response = client.update_transfer_config( - transfer_config=transfer_config, update_mask=field_mask - ) - - print("Transfer config updated for '{}'".format(response.name)) - # [END bigquerydatatransfer_update_config] - # Return the config name for testing purposes, so that it can be deleted. 
- return response - - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--transfer_config_name", type=str, default="your-created-transfer-config-name") - args = parser.parse_args() - - sample_update_transfer_config(args.transfer_config_name) - - -if __name__ == "__main__": - main() From 67591d947ae0addfe93d37c4c2733fd770f73f67 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 21 Dec 2020 06:55:42 -0800 Subject: [PATCH 07/20] test: add system tests for mTLS testing (#88) * test: add system tests for mTLS testing * Update tests/system.py --- tests/system.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/system.py diff --git a/tests/system.py b/tests/system.py new file mode 100644 index 00000000..bce6257e --- /dev/null +++ b/tests/system.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import pytest + +from google.cloud import bigquery_datatransfer + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +def test_list_data_sources(project_id): + client = bigquery_datatransfer.DataTransferServiceClient() + + parent = client.common_project_path(project_id) + data_sources = list(client.list_data_sources(parent=parent)) + + assert len(data_sources) >= 0 From c0ec0191ddc6f17b063685beb437b81590fee5cc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 21 Dec 2020 07:03:20 -0800 Subject: [PATCH 08/20] chore(deps): update precommit hook pre-commit/pre-commit-hooks to v3.4.0 (#87) Co-authored-by: Tres Seaver Source-Author: WhiteSource Renovate Source-Date: Wed Dec 16 18:13:24 2020 +0100 Source-Repo: googleapis/synthtool Source-Sha: aa255b15d52b6d8950cca48cfdf58f7d27a60c8a Source-Link: https://github.com/googleapis/synthtool/commit/aa255b15d52b6d8950cca48cfdf58f7d27a60c8a --- .pre-commit-config.yaml | 2 +- synth.metadata | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ad83346..a9024b15 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/synth.metadata b/synth.metadata index 5810770d..824a183e 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "af0406eedac1dc8c663b5c8f67f56255caeea2fa" + "sha": "cd519709228cda3bbcf2fd978d37ccd04ef27c82" } }, { @@ -19,14 +19,14 @@ 
"git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "aa255b15d52b6d8950cca48cfdf58f7d27a60c8a" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "aa255b15d52b6d8950cca48cfdf58f7d27a60c8a" } } ], From dd8459291a3ac0f98606b61ae566cb264ce96825 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 28 Dec 2020 12:00:53 -0800 Subject: [PATCH 09/20] docs(python): document adding Python 3.9 support, dropping 3.5 support (#89) Closes #787 Source-Author: Tres Seaver Source-Date: Thu Dec 17 16:08:02 2020 -0500 Source-Repo: googleapis/synthtool Source-Sha: b670a77a454f415d247907908e8ee7943e06d718 Source-Link: https://github.com/googleapis/synthtool/commit/b670a77a454f415d247907908e8ee7943e06d718 --- CONTRIBUTING.rst | 11 +++++------ synth.metadata | 7 +++---- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7a6ff4a5..07dc14cb 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-bigquery-datatransfer/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. 
Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/synth.metadata b/synth.metadata index 824a183e..9cdc114e 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "cd519709228cda3bbcf2fd978d37ccd04ef27c82" + "sha": "c0ec0191ddc6f17b063685beb437b81590fee5cc" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "aa255b15d52b6d8950cca48cfdf58f7d27a60c8a" + "sha": "b670a77a454f415d247907908e8ee7943e06d718" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "aa255b15d52b6d8950cca48cfdf58f7d27a60c8a" + "sha": "b670a77a454f415d247907908e8ee7943e06d718" } } ], @@ -120,7 +120,6 @@ "renovate.json", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", - "samples/noxfile.py", "samples/snippets/noxfile.py", "scripts/decrypt-secrets.sh", "scripts/fixup_bigquery_datatransfer_v1_keywords.py", From 85f6c97fc8c396e3587521775759677d1b9b4cb3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 29 Dec 2020 08:01:40 -0800 Subject: [PATCH 10/20] chore: exclude `.nox` directories from linting (#91) The samples tests create `.nox` directories with all dependencies installed. These directories should be excluded from linting. I've tested this change locally, and it significantly speeds up linting on my machine. Source-Author: Tim Swast Source-Date: Tue Dec 22 13:04:04 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: 373861061648b5fe5e0ac4f8a38b32d639ee93e4 Source-Link: https://github.com/googleapis/synthtool/commit/373861061648b5fe5e0ac4f8a38b32d639ee93e4 --- .flake8 | 1 + synth.metadata | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.flake8 b/.flake8 index ed931638..29227d4c 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/synth.metadata b/synth.metadata index 9cdc114e..a8907449 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "c0ec0191ddc6f17b063685beb437b81590fee5cc" + "sha": "dd8459291a3ac0f98606b61ae566cb264ce96825" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b670a77a454f415d247907908e8ee7943e06d718" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b670a77a454f415d247907908e8ee7943e06d718" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } } ], From 50d8d0b88f634bc3f9093e407f320299d8b7db55 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 6 Jan 2021 16:14:37 -0700 Subject: [PATCH 11/20] chore: add constraints file (#92) * chore: add comnstraints file * chore: add comnstraints file * chore: add comnstraints file * chore: add comnstraints file * chore: add comnstraints file * chore: add comnstraints file --- testing/constraints-3.10.txt | 0 testing/constraints-3.11.txt | 0 testing/constraints-3.6.txt | 10 ++++++++++ testing/constraints-3.7.txt | 0 testing/constraints-3.8.txt | 0 testing/constraints-3.9.txt | 0 6 files changed, 10 insertions(+) create mode 100644 testing/constraints-3.10.txt create mode 100644 testing/constraints-3.11.txt create mode 100644 testing/constraints-3.6.txt create mode 100644 testing/constraints-3.7.txt create mode 100644 testing/constraints-3.8.txt create mode 100644 testing/constraints-3.9.txt diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 00000000..d071c726 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.4.0 +libcst==0.2.5 \ No newline at end of file diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 00000000..e69de29b From 5c8d7c1e860d1c50d892bfabc7ec936aaa40e714 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 14 Jan 2021 08:04:51 -0800 Subject: [PATCH 12/20] docs: ensure minimum width for 'Parameters' / 'Returns' column (#95) * chore(python): fix column sizing issue in docs Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 11:58:32 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: f15b57ccfd71106c2299e9b89835fe6e55015662 Source-Link: https://github.com/googleapis/synthtool/commit/f15b57ccfd71106c2299e9b89835fe6e55015662 * chore(python): use 'http' in LICENSE Co-authored-by: Tim Swast Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 13:05:12 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 41a4e56982620d3edcf110d76f4fcdfdec471ac8 Source-Link: https://github.com/googleapis/synthtool/commit/41a4e56982620d3edcf110d76f4fcdfdec471ac8 --- LICENSE | 7 ++++--- docs/_static/custom.css | 7 ++++++- synth.metadata | 6 +++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/LICENSE b/LICENSE index a8ee855d..d6456956 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229..bcd37bbd 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/synth.metadata b/synth.metadata index a8907449..84f637d7 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "dd8459291a3ac0f98606b61ae566cb264ce96825" + "sha": "50d8d0b88f634bc3f9093e407f320299d8b7db55" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" } } ], From 96b6ec162351441078581d566a529ea1ba5d3eda Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Fri, 29 Jan 2021 17:02:03 -0800 Subject: [PATCH 13/20] build: migrate to flakybot (#100) --- .kokoro/test-samples.sh | 8 ++++---- .kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 449266b8..85bae51c 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 719bcd5b..4af6cdc2 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) From af63f9e5605efbbf62e7e5cc12cbb02155ec262a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Feb 2021 09:01:18 -0800 Subject: [PATCH 14/20] chore: only build docfx in docs-presubmit session (#97) * chore(python): skip docfx in main presubmit * chore(python): skip docfx in main presubmit * fix: properly template the repo name Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Jan 8 10:32:13 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483 Source-Link: https://github.com/googleapis/synthtool/commit/fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483 * chore: add missing quotation mark Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Mon Jan 11 09:43:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 16ec872dd898d7de6e1822badfac32484b5d9031 Source-Link: https://github.com/googleapis/synthtool/commit/16ec872dd898d7de6e1822badfac32484b5d9031 * NOX_SESSION is optional Co-authored-by: Tim Swast --- .kokoro/build.sh | 16 ++++++++++------ .kokoro/docs/docs-presubmit.cfg | 11 +++++++++++ .trampolinerc | 1 + noxfile.py | 11 +++++++++++ synth.metadata | 6 +++--- 5 files changed, 36 insertions(+), 9 deletions(-) diff --git a/.kokoro/build.sh b/.kokoro/build.sh index bf3d0a68..ab066b75 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-bigquery-datatransfer +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-bigquery-datatransfer" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 11181078..1d174432 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-datatransfer/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.trampolinerc b/.trampolinerc index 995ee291..383b6ec8 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/noxfile.py b/noxfile.py index a4884a08..32a8bcfa 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): diff --git a/synth.metadata b/synth.metadata index 84f637d7..be4f26dc 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "50d8d0b88f634bc3f9093e407f320299d8b7db55" + "sha": "5c8d7c1e860d1c50d892bfabc7ec936aaa40e714" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } } ], From 4fa57bb5744eb8f75abc8d665437fe4af2db3cd6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 8 Feb 2021 09:21:36 -0800 Subject: [PATCH 15/20] chore: update templates (#104) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore: add 3.9 to noxfile template Since the python-docs-samples noxfile-template doesn't sync with this, I wanted to make sure the noxfile template matched the most recent change [here](https://github.com/GoogleCloudPlatform/python-docs-samples/pull/4968/files) cc @tmatsuo Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Fri Jan 15 17:24:05 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f Source-Link: https://github.com/googleapis/synthtool/commit/56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f * build(python): make `NOX_SESSION` optional I added this accidentally in #889. `NOX_SESSION` should be passed down if it is set but not marked required. 
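As a rough illustration of the noxfile-template changes this autosynth run pulls in (Python 3.9 support plus installing the library itself before running tests), the sketch below shows what such a session could look like. The session name, version list, and test path are illustrative placeholders, not this repository's actual configuration:

    # Illustrative nox session: install test dependencies and the package
    # itself in editable mode, then run pytest. Names and paths below are
    # placeholders for demonstration only.
    import nox

    @nox.session(python=["3.6", "3.7", "3.8", "3.9"])
    def unit(session):
        session.install("mock", "pytest", "pytest-cov")
        session.install("-e", ".")
        session.run("pytest", "tests/unit", *session.posargs)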
Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Jan 19 09:38:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: ba960d730416fe05c50547e975ce79fcee52c671 Source-Link: https://github.com/googleapis/synthtool/commit/ba960d730416fe05c50547e975ce79fcee52c671 * chore: Add header checker config to python library synth Now that we have it working in [python-docs-samples](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/.github/header-checker-lint.yml) we should consider adding it to the 🐍 libraries :) Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Mon Jan 25 13:24:08 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 573f7655311b553a937f9123bee17bf78497db95 Source-Link: https://github.com/googleapis/synthtool/commit/573f7655311b553a937f9123bee17bf78497db95 * chore: add noxfile parameters for extra dependencies Also, add tests for some noxfile parameters for assurance that the template generates valid Python. Co-authored-by: Jeffrey Rennie Source-Author: Tim Swast Source-Date: Tue Jan 26 12:26:57 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 778d8beae28d6d87eb01fdc839a4b4d966ed2ebe Source-Link: https://github.com/googleapis/synthtool/commit/778d8beae28d6d87eb01fdc839a4b4d966ed2ebe * build: migrate to flakybot Source-Author: Justin Beckwith Source-Date: Thu Jan 28 22:22:38 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: d1bb9173100f62c0cfc8f3138b62241e7f47ca6a Source-Link: https://github.com/googleapis/synthtool/commit/d1bb9173100f62c0cfc8f3138b62241e7f47ca6a --- .github/header-checker-lint.yml | 15 +++++++++++++++ noxfile.py | 1 + samples/snippets/noxfile.py | 2 +- synth.metadata | 7 ++++--- 4 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 .github/header-checker-lint.yml diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 00000000..fc281c05 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/noxfile.py b/noxfile.py index 32a8bcfa..2c4b84df 100644 --- a/noxfile.py +++ b/noxfile.py @@ -86,6 +86,7 @@ def default(session): session.install( "mock", "pytest", "pytest-cov", ) + session.install("-e", ".") # Run py.test against the unit tests. diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index bca0522e..97bf7da8 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/synth.metadata b/synth.metadata index be4f26dc..0f97572a 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "5c8d7c1e860d1c50d892bfabc7ec936aaa40e714" + "sha": "af63f9e5605efbbf62e7e5cc12cbb02155ec262a" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" + "sha": "d1bb9173100f62c0cfc8f3138b62241e7f47ca6a" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" + "sha": "d1bb9173100f62c0cfc8f3138b62241e7f47ca6a" } } ], @@ -48,6 +48,7 @@ ".github/ISSUE_TEMPLATE/feature_request.md", ".github/ISSUE_TEMPLATE/support_request.md", ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", ".github/release-please.yml", ".github/snippet-bot.yml", ".gitignore", From 678c3355e1b2e8525005ad337048d60a51400fc0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 19 Feb 2021 10:11:32 -0800 Subject: [PATCH 16/20] docs: update contributing guide to Python 3.8 (#105) * chore(python): include py.typed files in release A py.typed file must be included in the released package for it to be considered typed by type checkers. https://www.python.org/dev/peps/pep-0561/#packaging-type-information. See https://github.com/googleapis/python-secret-manager/issues/79 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Feb 5 17:32:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 33366574ffb9e11737b3547eb6f020ecae0536e8 Source-Link: https://github.com/googleapis/synthtool/commit/33366574ffb9e11737b3547eb6f020ecae0536e8 * docs: update python contributing guide Adds details about blacken, updates version for system tests, and shows how to pass through pytest arguments. Source-Author: Chris Cotter Source-Date: Mon Feb 8 17:13:36 2021 -0500 Source-Repo: googleapis/synthtool Source-Sha: 4679e7e415221f03ff2a71e3ffad75b9ec41d87e Source-Link: https://github.com/googleapis/synthtool/commit/4679e7e415221f03ff2a71e3ffad75b9ec41d87e --- CONTRIBUTING.rst | 22 ++++++++++++++++++---- MANIFEST.in | 4 ++-- synth.metadata | 6 +++--- 3 files changed, 23 insertions(+), 9 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 07dc14cb..88e59522 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d12..e783f4c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/synth.metadata b/synth.metadata index 0f97572a..68b15b5a 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "af63f9e5605efbbf62e7e5cc12cbb02155ec262a" + "sha": "4fa57bb5744eb8f75abc8d665437fe4af2db3cd6" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d1bb9173100f62c0cfc8f3138b62241e7f47ca6a" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d1bb9173100f62c0cfc8f3138b62241e7f47ca6a" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } } ], From 794df75c6f92a84e518375ef994e056dae1d9dd9 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 19 Feb 2021 10:18:03 -0800 Subject: [PATCH 17/20] chore: upgrade gapic-generator-python to 0.39.1 (#96) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5e826317-fa5c-433f-b9a3-f421ae6c6cee/targets - [ ] To automatically regenerate this PR, check this box. 
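Among the regenerated surface changes in this patch is a new from_service_account_info classmethod on the generated clients. A minimal sketch of how that entry point could be used, assuming a service account key has been downloaded locally (the file name here is a placeholder, and a real key is required at runtime):

    # Illustrative only: build a client from in-memory service account key
    # info (a dict) via the classmethod added in this regeneration. The key
    # file path below is a placeholder.
    import json

    from google.cloud import bigquery_datatransfer

    with open("service-account-key.json") as fp:
        info = json.load(fp)

    client = bigquery_datatransfer.DataTransferServiceClient.from_service_account_info(info)
    print(client.common_project_path("my-project"))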
PiperOrigin-RevId: 350246057 Source-Link: https://github.com/googleapis/googleapis/commit/520682435235d9c503983a360a2090025aa47cd1 --- .coveragerc | 31 +--- .../data_transfer_service.rst | 11 ++ docs/bigquery_datatransfer_v1/services.rst | 6 +- docs/bigquery_datatransfer_v1/types.rst | 1 + .../data_transfer_service/async_client.py | 125 +++++++------ .../services/data_transfer_service/client.py | 170 +++++++++++------- .../services/data_transfer_service/pagers.py | 64 +++---- .../types/datatransfer.py | 54 +++--- .../types/transfer.py | 38 ++-- synth.metadata | 8 +- .../test_data_transfer_service.py | 28 ++- 11 files changed, 303 insertions(+), 233 deletions(-) create mode 100644 docs/bigquery_datatransfer_v1/data_transfer_service.rst diff --git a/.coveragerc b/.coveragerc index dd39c854..38dd96f7 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,35 +1,18 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True +omit = + google/cloud/bigquery_datatransfer/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/docs/bigquery_datatransfer_v1/data_transfer_service.rst b/docs/bigquery_datatransfer_v1/data_transfer_service.rst new file mode 100644 index 00000000..58f85396 --- /dev/null +++ b/docs/bigquery_datatransfer_v1/data_transfer_service.rst @@ -0,0 +1,11 @@ +DataTransferService +------------------------------------- + +.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers + :members: + :inherited-members: diff --git a/docs/bigquery_datatransfer_v1/services.rst b/docs/bigquery_datatransfer_v1/services.rst index 8dddd817..37a71a43 100644 --- a/docs/bigquery_datatransfer_v1/services.rst +++ b/docs/bigquery_datatransfer_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Bigquery Datatransfer v1 API ====================================================== +.. toctree:: + :maxdepth: 2 -.. 
automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service - :members: - :inherited-members: + data_transfer_service diff --git a/docs/bigquery_datatransfer_v1/types.rst b/docs/bigquery_datatransfer_v1/types.rst index ccda83a5..d46636eb 100644 --- a/docs/bigquery_datatransfer_v1/types.rst +++ b/docs/bigquery_datatransfer_v1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Bigquery Datatransfer v1 API .. automodule:: google.cloud.bigquery_datatransfer_v1.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 278ebbc5..5c3afda2 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -94,6 +94,7 @@ class DataTransferServiceAsyncClient: DataTransferServiceClient.parse_common_location_path ) + from_service_account_info = DataTransferServiceClient.from_service_account_info from_service_account_file = DataTransferServiceClient.from_service_account_file from_service_account_json = from_service_account_file @@ -172,7 +173,7 @@ async def get_data_source( settings, which can be used for UI rendering. Args: - request (:class:`~.datatransfer.GetDataSourceRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest`): The request object. A request to get data source info. name (:class:`str`): Required. The field will contain name of the resource @@ -180,6 +181,7 @@ async def get_data_source( ``projects/{project_id}/dataSources/{data_source_id}`` or ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -191,7 +193,7 @@ async def get_data_source( sent along with the request as metadata. Returns: - ~.datatransfer.DataSource: + google.cloud.bigquery_datatransfer_v1.types.DataSource: Represents data source metadata. Metadata is sufficient to render UI and request proper OAuth tokens. @@ -256,7 +258,7 @@ async def list_data_sources( settings, which can be used for UI rendering. Args: - request (:class:`~.datatransfer.ListDataSourcesRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest`): The request object. Request to list supported data sources and their data transfer settings. parent (:class:`str`): @@ -264,6 +266,7 @@ async def list_data_sources( should be returned. Must be in the form: ``projects/{project_id}`` or \`projects/{project_id}/locations/{location_id} + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -275,7 +278,7 @@ async def list_data_sources( sent along with the request as metadata. Returns: - ~.pagers.ListDataSourcesAsyncPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesAsyncPager: Returns list of supported data sources and their metadata. Iterating over this object will yield @@ -348,7 +351,7 @@ async def create_transfer_config( r"""Creates a new data transfer configuration. Args: - request (:class:`~.datatransfer.CreateTransferConfigRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest`): The request object. 
A request to create a data transfer configuration. If new credentials are needed for this transfer configuration, an authorization code must be @@ -364,12 +367,14 @@ async def create_transfer_config( projects/{project_id}. If specified location and location of the destination bigquery dataset do not match - the request will fail. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - transfer_config (:class:`~.transfer.TransferConfig`): + transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): Required. Data transfer configuration to create. + This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -381,14 +386,15 @@ async def create_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -445,20 +451,22 @@ async def update_transfer_config( All fields must be set, even if they are not updated. Args: - request (:class:`~.datatransfer.UpdateTransferConfigRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest`): The request object. A request to update a transfer configuration. To update the user id of the transfer configuration, an authorization code needs to be provided. - transfer_config (:class:`~.transfer.TransferConfig`): + transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): Required. Data transfer configuration to create. + This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Required list of fields to be updated in this request. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -470,14 +478,15 @@ async def update_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. 
A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -535,7 +544,7 @@ async def delete_transfer_config( including any associated transfer runs and logs. Args: - request (:class:`~.datatransfer.DeleteTransferConfigRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest`): The request object. A request to delete data transfer information. All associated transfer runs and log messages will be deleted as well. @@ -544,6 +553,7 @@ async def delete_transfer_config( requested, for example: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -611,7 +621,7 @@ async def get_transfer_config( r"""Returns information about a data transfer config. Args: - request (:class:`~.datatransfer.GetTransferConfigRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest`): The request object. A request to get data transfer information. name (:class:`str`): @@ -619,6 +629,7 @@ async def get_transfer_config( requested, for example: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -630,14 +641,15 @@ async def get_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -699,13 +711,14 @@ async def list_transfer_configs( project. Args: - request (:class:`~.datatransfer.ListTransferConfigsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest`): The request object. A request to list data transfers configured for a BigQuery project. parent (:class:`str`): Required. The BigQuery project id for which data sources should be returned: ``projects/{project_id}`` or ``projects/{project_id}/locations/{location_id}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -717,7 +730,7 @@ async def list_transfer_configs( sent along with the request as metadata. 
Returns: - ~.pagers.ListTransferConfigsAsyncPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsAsyncPager: The returned list of pipelines in the project. Iterating over this object will yield @@ -795,25 +808,28 @@ async def schedule_transfer_runs( StartManualTransferRuns instead. Args: - request (:class:`~.datatransfer.ScheduleTransferRunsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest`): The request object. A request to schedule transfer runs for a time range. parent (:class:`str`): Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - start_time (:class:`~.timestamp.Timestamp`): + start_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): Required. Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. + This corresponds to the ``start_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - end_time (:class:`~.timestamp.Timestamp`): + end_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. + This corresponds to the ``end_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -825,7 +841,7 @@ async def schedule_transfer_runs( sent along with the request as metadata. Returns: - ~.datatransfer.ScheduleTransferRunsResponse: + google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: A response to schedule transfer runs for a time range. @@ -886,7 +902,7 @@ async def start_manual_transfer_runs( and end_time (exclusive), or for a specific run_time. Args: - request (:class:`~.datatransfer.StartManualTransferRunsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest`): The request object. A request to start manual transfer runs. @@ -897,7 +913,7 @@ async def start_manual_transfer_runs( sent along with the request as metadata. Returns: - ~.datatransfer.StartManualTransferRunsResponse: + google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: A response to start manual transfer runs. @@ -939,7 +955,7 @@ async def get_transfer_run( run. Args: - request (:class:`~.datatransfer.GetTransferRunRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest`): The request object. A request to get data transfer run information. name (:class:`str`): @@ -948,6 +964,7 @@ async def get_transfer_run( ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -959,7 +976,7 @@ async def get_transfer_run( sent along with the request as metadata. Returns: - ~.transfer.TransferRun: + google.cloud.bigquery_datatransfer_v1.types.TransferRun: Represents a data transfer run. """ # Create or coerce a protobuf request object. @@ -1020,7 +1037,7 @@ async def delete_transfer_run( r"""Deletes the specified transfer run. 
Args: - request (:class:`~.datatransfer.DeleteTransferRunRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest`): The request object. A request to delete data transfer run information. name (:class:`str`): @@ -1029,6 +1046,7 @@ async def delete_transfer_run( ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1096,7 +1114,7 @@ async def list_transfer_runs( r"""Returns information about running and completed jobs. Args: - request (:class:`~.datatransfer.ListTransferRunsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest`): The request object. A request to list data transfer runs. UI can use this method to show/filter specific data transfer runs. The data source can use this method @@ -1107,6 +1125,7 @@ async def list_transfer_runs( configuration resource name is: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1118,7 +1137,7 @@ async def list_transfer_runs( sent along with the request as metadata. Returns: - ~.pagers.ListTransferRunsAsyncPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsAsyncPager: The returned list of pipelines in the project. Iterating over this object will yield @@ -1191,7 +1210,7 @@ async def list_transfer_logs( transfer run. Args: - request (:class:`~.datatransfer.ListTransferLogsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest`): The request object. A request to get user facing log messages associated with data transfer run. parent (:class:`str`): @@ -1199,6 +1218,7 @@ async def list_transfer_logs( ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1210,7 +1230,7 @@ async def list_transfer_logs( sent along with the request as metadata. Returns: - ~.pagers.ListTransferLogsAsyncPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsAsyncPager: The returned list transfer run messages. Iterating over this object will yield @@ -1288,7 +1308,7 @@ async def check_valid_creds( can create a transfer config. Args: - request (:class:`~.datatransfer.CheckValidCredsRequest`): + request (:class:`google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest`): The request object. A request to determine whether the user has valid credentials. This method is used to limit the number of OAuth popups in the user interface. The @@ -1302,6 +1322,7 @@ async def check_valid_creds( ``projects/{project_id}/dataSources/{data_source_id}`` or ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1313,7 +1334,7 @@ async def check_valid_creds( sent along with the request as metadata. 
Returns: - ~.datatransfer.CheckValidCredsResponse: + google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse: A response indicating whether the credentials exist and are valid. diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 38d47596..8bf606e4 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -122,6 +122,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTransferServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -134,7 +150,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + DataTransferServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -273,10 +289,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DataTransferServiceTransport]): The + transport (Union[str, DataTransferServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -387,14 +403,15 @@ def get_data_source( settings, which can be used for UI rendering. Args: - request (:class:`~.datatransfer.GetDataSourceRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest): The request object. A request to get data source info. - name (:class:`str`): + name (str): Required. The field will contain name of the resource requested, for example: ``projects/{project_id}/dataSources/{data_source_id}`` or ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -406,7 +423,7 @@ def get_data_source( sent along with the request as metadata. Returns: - ~.datatransfer.DataSource: + google.cloud.bigquery_datatransfer_v1.types.DataSource: Represents data source metadata. Metadata is sufficient to render UI and request proper OAuth tokens. @@ -464,14 +481,15 @@ def list_data_sources( settings, which can be used for UI rendering. 
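A brief sketch of how the new ``from_service_account_info`` classmethod can be used; the key file path is a placeholder and the JSON is assumed to hold a service account private key:

    import json

    from google.cloud import bigquery_datatransfer_v1

    # Placeholder path to a downloaded service account key file.
    with open("service-account.json") as handle:
        info = json.load(handle)

    client = bigquery_datatransfer_v1.DataTransferServiceClient.from_service_account_info(info)

    # from_service_account_file is the equivalent shortcut when the key stays on disk:
    # client = bigquery_datatransfer_v1.DataTransferServiceClient.from_service_account_file(
    #     "service-account.json"
    # )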
Args: - request (:class:`~.datatransfer.ListDataSourcesRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): The request object. Request to list supported data sources and their data transfer settings. - parent (:class:`str`): + parent (str): Required. The BigQuery project id for which data sources should be returned. Must be in the form: ``projects/{project_id}`` or \`projects/{project_id}/locations/{location_id} + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -483,7 +501,7 @@ def list_data_sources( sent along with the request as metadata. Returns: - ~.pagers.ListDataSourcesPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesPager: Returns list of supported data sources and their metadata. Iterating over this object will yield @@ -549,7 +567,7 @@ def create_transfer_config( r"""Creates a new data transfer configuration. Args: - request (:class:`~.datatransfer.CreateTransferConfigRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest): The request object. A request to create a data transfer configuration. If new credentials are needed for this transfer configuration, an authorization code must be @@ -558,19 +576,21 @@ def create_transfer_config( id corresponding to the authorization code. Otherwise, the transfer configuration will be associated with the calling user. - parent (:class:`str`): + parent (str): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format projects/{project_id}/locations/{location_id} or projects/{project_id}. If specified location and location of the destination bigquery dataset do not match - the request will fail. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - transfer_config (:class:`~.transfer.TransferConfig`): + transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. + This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -582,14 +602,15 @@ def create_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -647,20 +668,22 @@ def update_transfer_config( All fields must be set, even if they are not updated. Args: - request (:class:`~.datatransfer.UpdateTransferConfigRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest): The request object. 
A request to update a transfer configuration. To update the user id of the transfer configuration, an authorization code needs to be provided. - transfer_config (:class:`~.transfer.TransferConfig`): + transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. + This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Required list of fields to be updated in this request. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -672,14 +695,15 @@ def update_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -738,15 +762,16 @@ def delete_transfer_config( including any associated transfer runs and logs. Args: - request (:class:`~.datatransfer.DeleteTransferConfigRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest): The request object. A request to delete data transfer information. All associated transfer runs and log messages will be deleted as well. - name (:class:`str`): + name (str): Required. The field will contain name of the resource requested, for example: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -807,14 +832,15 @@ def get_transfer_config( r"""Returns information about a data transfer config. Args: - request (:class:`~.datatransfer.GetTransferConfigRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest): The request object. A request to get data transfer information. - name (:class:`str`): + name (str): Required. The field will contain name of the resource requested, for example: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -826,14 +852,15 @@ def get_transfer_config( sent along with the request as metadata. Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. 
For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. """ # Create or coerce a protobuf request object. @@ -888,13 +915,14 @@ def list_transfer_configs( project. Args: - request (:class:`~.datatransfer.ListTransferConfigsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): The request object. A request to list data transfers configured for a BigQuery project. - parent (:class:`str`): + parent (str): Required. The BigQuery project id for which data sources should be returned: ``projects/{project_id}`` or ``projects/{project_id}/locations/{location_id}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -906,7 +934,7 @@ def list_transfer_configs( sent along with the request as metadata. Returns: - ~.pagers.ListTransferConfigsPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsPager: The returned list of pipelines in the project. Iterating over this object will yield @@ -977,25 +1005,28 @@ def schedule_transfer_runs( StartManualTransferRuns instead. Args: - request (:class:`~.datatransfer.ScheduleTransferRunsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest): The request object. A request to schedule transfer runs for a time range. - parent (:class:`str`): + parent (str): Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - start_time (:class:`~.timestamp.Timestamp`): + start_time (google.protobuf.timestamp_pb2.Timestamp): Required. Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. + This corresponds to the ``start_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - end_time (:class:`~.timestamp.Timestamp`): + end_time (google.protobuf.timestamp_pb2.Timestamp): Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. + This corresponds to the ``end_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1007,7 +1038,7 @@ def schedule_transfer_runs( sent along with the request as metadata. Returns: - ~.datatransfer.ScheduleTransferRunsResponse: + google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: A response to schedule transfer runs for a time range. @@ -1069,7 +1100,7 @@ def start_manual_transfer_runs( and end_time (exclusive), or for a specific run_time. 
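Since ``start_time`` and ``end_time`` are protobuf ``Timestamp`` messages rather than strings, a sketch of building them for ``schedule_transfer_runs`` follows; the transfer config name is a placeholder, and the docstring above recommends ``StartManualTransferRuns`` for new code:

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import timestamp_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    start_time = timestamp_pb2.Timestamp()
    start_time.FromJsonString("2017-05-25T00:00:00+00:00")
    end_time = timestamp_pb2.Timestamp()
    end_time.FromJsonString("2017-05-30T00:00:00+00:00")

    response = client.schedule_transfer_runs(
        parent="projects/my-project/transferConfigs/my-config",  # placeholder
        start_time=start_time,
        end_time=end_time,
    )
    for run in response.runs:
        print(run.name, run.schedule_time)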
Args: - request (:class:`~.datatransfer.StartManualTransferRunsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest): The request object. A request to start manual transfer runs. @@ -1080,7 +1111,7 @@ def start_manual_transfer_runs( sent along with the request as metadata. Returns: - ~.datatransfer.StartManualTransferRunsResponse: + google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: A response to start manual transfer runs. @@ -1125,15 +1156,16 @@ def get_transfer_run( run. Args: - request (:class:`~.datatransfer.GetTransferRunRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest): The request object. A request to get data transfer run information. - name (:class:`str`): + name (str): Required. The field will contain name of the resource requested, for example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1145,7 +1177,7 @@ def get_transfer_run( sent along with the request as metadata. Returns: - ~.transfer.TransferRun: + google.cloud.bigquery_datatransfer_v1.types.TransferRun: Represents a data transfer run. """ # Create or coerce a protobuf request object. @@ -1199,15 +1231,16 @@ def delete_transfer_run( r"""Deletes the specified transfer run. Args: - request (:class:`~.datatransfer.DeleteTransferRunRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest): The request object. A request to delete data transfer run information. - name (:class:`str`): + name (str): Required. The field will contain name of the resource requested, for example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1268,17 +1301,18 @@ def list_transfer_runs( r"""Returns information about running and completed jobs. Args: - request (:class:`~.datatransfer.ListTransferRunsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): The request object. A request to list data transfer runs. UI can use this method to show/filter specific data transfer runs. The data source can use this method to request all scheduled transfer runs. - parent (:class:`str`): + parent (str): Required. Name of transfer configuration for which transfer runs should be retrieved. Format of transfer configuration resource name is: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1290,7 +1324,7 @@ def list_transfer_runs( sent along with the request as metadata. Returns: - ~.pagers.ListTransferRunsPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsPager: The returned list of pipelines in the project. Iterating over this object will yield @@ -1356,14 +1390,15 @@ def list_transfer_logs( transfer run. 
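Putting the run-level calls documented above together, a sketch that starts a manual run and then walks runs and their log messages; the config name is a placeholder, and the request type is assumed to be exported from the versioned package:

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import timestamp_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    config_name = "projects/my-project/transferConfigs/my-config"  # placeholder

    # Start a manual run for "now"; requested_run_time must not be in the future.
    run_time = timestamp_pb2.Timestamp()
    run_time.GetCurrentTime()
    response = client.start_manual_transfer_runs(
        request=bigquery_datatransfer_v1.StartManualTransferRunsRequest(
            parent=config_name,
            requested_run_time=run_time,
        )
    )
    for run in response.runs:
        print("started:", run.name)

    # List runs for the config and fetch the user-facing log messages of each one.
    for run in client.list_transfer_runs(parent=config_name):
        for message in client.list_transfer_logs(parent=run.name):
            print(run.name, message.severity, message.message_text)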
Args: - request (:class:`~.datatransfer.ListTransferLogsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): The request object. A request to get user facing log messages associated with data transfer run. - parent (:class:`str`): + parent (str): Required. Transfer run name in the form: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1375,7 +1410,7 @@ def list_transfer_logs( sent along with the request as metadata. Returns: - ~.pagers.ListTransferLogsPager: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsPager: The returned list transfer run messages. Iterating over this object will yield @@ -1446,7 +1481,7 @@ def check_valid_creds( can create a transfer config. Args: - request (:class:`~.datatransfer.CheckValidCredsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest): The request object. A request to determine whether the user has valid credentials. This method is used to limit the number of OAuth popups in the user interface. The @@ -1455,11 +1490,12 @@ def check_valid_creds( this method returns false, as it cannot be determined whether the credentials are already valid merely based on the user id. - name (:class:`str`): + name (str): Required. The data source in the form: ``projects/{project_id}/dataSources/{data_source_id}`` or ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1471,7 +1507,7 @@ def check_valid_creds( sent along with the request as metadata. Returns: - ~.datatransfer.CheckValidCredsResponse: + google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse: A response indicating whether the credentials exist and are valid. diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py index d1bb2a72..39e831ba 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py @@ -25,7 +25,7 @@ class ListDataSourcesPager: """A pager for iterating through ``list_data_sources`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListDataSourcesResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and provides an ``__iter__`` method to iterate through its ``data_sources`` field. @@ -34,7 +34,7 @@ class ListDataSourcesPager: through the ``data_sources`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListDataSourcesResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -52,9 +52,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. 
- request (:class:`~.datatransfer.ListDataSourcesRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): The initial request object. - response (:class:`~.datatransfer.ListDataSourcesResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -87,7 +87,7 @@ class ListDataSourcesAsyncPager: """A pager for iterating through ``list_data_sources`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListDataSourcesResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and provides an ``__aiter__`` method to iterate through its ``data_sources`` field. @@ -96,7 +96,7 @@ class ListDataSourcesAsyncPager: through the ``data_sources`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListDataSourcesResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -114,9 +114,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListDataSourcesRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): The initial request object. - response (:class:`~.datatransfer.ListDataSourcesResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -153,7 +153,7 @@ class ListTransferConfigsPager: """A pager for iterating through ``list_transfer_configs`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListTransferConfigsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and provides an ``__iter__`` method to iterate through its ``transfer_configs`` field. @@ -162,7 +162,7 @@ class ListTransferConfigsPager: through the ``transfer_configs`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferConfigsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -180,9 +180,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferConfigsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferConfigsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -215,7 +215,7 @@ class ListTransferConfigsAsyncPager: """A pager for iterating through ``list_transfer_configs`` requests. 
This class thinly wraps an initial - :class:`~.datatransfer.ListTransferConfigsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and provides an ``__aiter__`` method to iterate through its ``transfer_configs`` field. @@ -224,7 +224,7 @@ class ListTransferConfigsAsyncPager: through the ``transfer_configs`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferConfigsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -242,9 +242,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferConfigsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferConfigsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -281,7 +281,7 @@ class ListTransferRunsPager: """A pager for iterating through ``list_transfer_runs`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListTransferRunsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and provides an ``__iter__`` method to iterate through its ``transfer_runs`` field. @@ -290,7 +290,7 @@ class ListTransferRunsPager: through the ``transfer_runs`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferRunsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -308,9 +308,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferRunsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferRunsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -343,7 +343,7 @@ class ListTransferRunsAsyncPager: """A pager for iterating through ``list_transfer_runs`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListTransferRunsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and provides an ``__aiter__`` method to iterate through its ``transfer_runs`` field. @@ -352,7 +352,7 @@ class ListTransferRunsAsyncPager: through the ``transfer_runs`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferRunsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" @@ -370,9 +370,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferRunsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferRunsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -409,7 +409,7 @@ class ListTransferLogsPager: """A pager for iterating through ``list_transfer_logs`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListTransferLogsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and provides an ``__iter__`` method to iterate through its ``transfer_messages`` field. @@ -418,7 +418,7 @@ class ListTransferLogsPager: through the ``transfer_messages`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferLogsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -436,9 +436,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferLogsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferLogsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -471,7 +471,7 @@ class ListTransferLogsAsyncPager: """A pager for iterating through ``list_transfer_logs`` requests. This class thinly wraps an initial - :class:`~.datatransfer.ListTransferLogsResponse` object, and + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and provides an ``__aiter__`` method to iterate through its ``transfer_messages`` field. @@ -480,7 +480,7 @@ class ListTransferLogsAsyncPager: through the ``transfer_messages`` field on the corresponding responses. - All the usual :class:`~.datatransfer.ListTransferLogsResponse` + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -498,9 +498,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datatransfer.ListTransferLogsRequest`): + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): The initial request object. - response (:class:`~.datatransfer.ListTransferLogsResponse`): + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index a78d7e41..9b92f388 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -70,7 +70,7 @@ class DataSourceParameter(proto.Message): Parameter display name in the user interface. description (str): Parameter description. - type_ (~.datatransfer.DataSourceParameter.Type): + type_ (google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter.Type): Parameter type. required (bool): Is parameter required. @@ -81,13 +81,13 @@ class DataSourceParameter(proto.Message): parameter validation. allowed_values (Sequence[str]): All possible values for the parameter. - min_value (~.wrappers.DoubleValue): + min_value (google.protobuf.wrappers_pb2.DoubleValue): For integer and double values specifies minimum allowed value. - max_value (~.wrappers.DoubleValue): + max_value (google.protobuf.wrappers_pb2.DoubleValue): For integer and double values specifies maxminum allowed value. - fields (Sequence[~.datatransfer.DataSourceParameter]): + fields (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): Deprecated. This field has no effect. validation_description (str): Description of the requirements for this @@ -173,7 +173,7 @@ class DataSource(proto.Message): data source to prepare data and ingest them into BigQuery, e.g., https://www.googleapis.com/auth/bigquery - transfer_type (~.transfer.TransferType): + transfer_type (google.cloud.bigquery_datatransfer_v1.types.TransferType): Deprecated. This field has no effect. supports_multiple_transfers (bool): Deprecated. This field has no effect. @@ -190,14 +190,14 @@ class DataSource(proto.Message): Specifies whether the data source supports a user defined schedule, or operates on the default schedule. When set to ``true``, user can override default schedule. - parameters (Sequence[~.datatransfer.DataSourceParameter]): + parameters (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): Data source parameters. help_url (str): Url for the help document for this data source. - authorization_type (~.datatransfer.DataSource.AuthorizationType): + authorization_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.AuthorizationType): Indicates the type of authorization. - data_refresh_type (~.datatransfer.DataSource.DataRefreshType): + data_refresh_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.DataRefreshType): Specifies whether the data source supports automatic data refresh for the past few days, and how it's supported. For some data sources, @@ -210,7 +210,7 @@ class DataSource(proto.Message): manual_runs_disabled (bool): Disables backfilling and manual run scheduling for the data source. - minimum_schedule_interval (~.duration.Duration): + minimum_schedule_interval (google.protobuf.duration_pb2.Duration): The minimum interval for scheduler to schedule runs. """ @@ -315,7 +315,7 @@ class ListDataSourcesResponse(proto.Message): r"""Returns list of supported data sources and their metadata. Attributes: - data_sources (Sequence[~.datatransfer.DataSource]): + data_sources (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]): List of supported data sources and their transfer settings. next_page_token (str): @@ -350,7 +350,7 @@ class CreateTransferConfigRequest(proto.Message): projects/{project_id}. 
If specified location and location of the destination bigquery dataset do not match - the request will fail. - transfer_config (~.transfer.TransferConfig): + transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. authorization_code (str): @@ -408,7 +408,7 @@ class UpdateTransferConfigRequest(proto.Message): needs to be provided. Attributes: - transfer_config (~.transfer.TransferConfig): + transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. authorization_code (str): @@ -431,7 +431,7 @@ class UpdateTransferConfigRequest(proto.Message): should be returned in the title bar of the browser, with the page text prompting the user to copy the code and paste it in the application. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Required list of fields to be updated in this request. version_info (str): @@ -558,7 +558,7 @@ class ListTransferConfigsResponse(proto.Message): r"""The returned list of pipelines in the project. Attributes: - transfer_configs (Sequence[~.transfer.TransferConfig]): + transfer_configs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Output only. The stored pipeline transfer configurations. next_page_token (str): @@ -591,7 +591,7 @@ class ListTransferRunsRequest(proto.Message): resource name is: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - states (Sequence[~.transfer.TransferState]): + states (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. page_token (str): @@ -604,7 +604,7 @@ class ListTransferRunsRequest(proto.Message): page_size (int): Page size. The default page size is the maximum value of 1000 results. - run_attempt (~.datatransfer.ListTransferRunsRequest.RunAttempt): + run_attempt (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest.RunAttempt): Indicates how run attempts are to be pulled. """ @@ -628,7 +628,7 @@ class ListTransferRunsResponse(proto.Message): r"""The returned list of pipelines in the project. Attributes: - transfer_runs (Sequence[~.transfer.TransferRun]): + transfer_runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): Output only. The stored pipeline transfer runs. next_page_token (str): @@ -669,7 +669,7 @@ class ListTransferLogsRequest(proto.Message): page_size (int): Page size. The default page size is the maximum value of 1000 results. - message_types (Sequence[~.transfer.TransferMessage.MessageSeverity]): + message_types (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity]): Message types to return. If not populated - INFO, WARNING and ERROR messages are returned. """ @@ -689,7 +689,7 @@ class ListTransferLogsResponse(proto.Message): r"""The returned list transfer run messages. Attributes: - transfer_messages (Sequence[~.transfer.TransferMessage]): + transfer_messages (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]): Output only. The stored pipeline transfer messages. next_page_token (str): @@ -749,10 +749,10 @@ class ScheduleTransferRunsRequest(proto.Message): Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. 
- start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Required. Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. """ @@ -768,7 +768,7 @@ class ScheduleTransferRunsResponse(proto.Message): r"""A response to schedule transfer runs for a time range. Attributes: - runs (Sequence[~.transfer.TransferRun]): + runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): The transfer runs that were scheduled. """ @@ -783,10 +783,10 @@ class StartManualTransferRunsRequest(proto.Message): Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - requested_time_range (~.datatransfer.StartManualTransferRunsRequest.TimeRange): + requested_time_range (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest.TimeRange): Time range for the transfer runs that should be started. - requested_run_time (~.timestamp.Timestamp): + requested_run_time (google.protobuf.timestamp_pb2.Timestamp): Specific run_time for a transfer run to be started. The requested_run_time must not be in the future. """ @@ -797,13 +797,13 @@ class TimeRange(proto.Message): (exclusive). Attributes: - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. The start_time must be strictly less than the end_time. Creates transfer runs where run_time is in the range betwen start_time (inclusive) and end_time (exlusive). - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in the future. Creates transfer runs where run_time is in the @@ -829,7 +829,7 @@ class StartManualTransferRunsResponse(proto.Message): r"""A response to start manual transfer runs. Attributes: - runs (Sequence[~.transfer.TransferRun]): + runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): The transfer runs that were created. """ diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py index e3d0d9ea..58e7a5e6 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -78,7 +78,7 @@ class ScheduleOptions(proto.Message): basis using StartManualTransferRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence @@ -86,7 +86,7 @@ class ScheduleOptions(proto.Message): start time can be changed at any moment. The time when a data transfer can be trigerred manually is not limited by this option. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. 
The end time can be changed @@ -128,7 +128,7 @@ class TransferConfig(proto.Message): data_source_id (str): Data source id. Cannot be changed once data transfer is created. - params (~.struct.Struct): + params (google.protobuf.struct_pb2.Struct): Data transfer specific parameters. schedule (str): Data transfer schedule. If the data source does not support @@ -142,7 +142,7 @@ class TransferConfig(proto.Message): https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: the granularity should be at least 8 hours, or less frequent. - schedule_options (~.transfer.ScheduleOptions): + schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. data_refresh_window_days (int): @@ -155,13 +155,13 @@ class TransferConfig(proto.Message): disabled (bool): Is this config disabled. When set to true, no runs are scheduled for a given transfer. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Data transfer modification time. Ignored by server on input. - next_run_time (~.timestamp.Timestamp): + next_run_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Next time when data transfer will run. - state (~.transfer.TransferState): + state (google.cloud.bigquery_datatransfer_v1.types.TransferState): Output only. State of the most recently updated transfer run. user_id (int): @@ -174,7 +174,7 @@ class TransferConfig(proto.Message): Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish. - email_preferences (~.transfer.EmailPreferences): + email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. @@ -224,32 +224,32 @@ class TransferRun(proto.Message): have the form ``projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}``. The name is ignored when creating a transfer run. - schedule_time (~.timestamp.Timestamp): + schedule_time (google.protobuf.timestamp_pb2.Timestamp): Minimum time after which a transfer run can be started. - run_time (~.timestamp.Timestamp): + run_time (google.protobuf.timestamp_pb2.Timestamp): For batch transfer runs, specifies the date and time of the data should be ingested. - error_status (~.status.Status): + error_status (google.rpc.status_pb2.Status): Status of the transfer run. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when transfer run was started. Parameter ignored by server for input requests. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when transfer run ended. Parameter ignored by server for input requests. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Last time the data transfer run state was updated. - params (~.struct.Struct): + params (google.protobuf.struct_pb2.Struct): Output only. Data transfer specific parameters. destination_dataset_id (str): Output only. The BigQuery target dataset id. data_source_id (str): Output only. Data source id. - state (~.transfer.TransferState): + state (google.cloud.bigquery_datatransfer_v1.types.TransferState): Data transfer run state. Ignored for input requests. 
user_id (int): @@ -266,7 +266,7 @@ class TransferRun(proto.Message): Output only. Pub/Sub topic where a notification will be sent after this transfer run finishes - email_preferences (~.transfer.EmailPreferences): + email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): Output only. Email notifications will be sent according to these preferences to the email address of the user who owns the transfer config @@ -311,9 +311,9 @@ class TransferMessage(proto.Message): transfer run. Attributes: - message_time (~.timestamp.Timestamp): + message_time (google.protobuf.timestamp_pb2.Timestamp): Time when message was logged. - severity (~.transfer.TransferMessage.MessageSeverity): + severity (google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity): Message severity. message_text (str): Message text. diff --git a/synth.metadata b/synth.metadata index 68b15b5a..eb847e38 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "4fa57bb5744eb8f75abc8d665437fe4af2db3cd6" + "sha": "5c8d7c1e860d1c50d892bfabc7ec936aaa40e714" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", - "internalRef": "347055288" + "sha": "520682435235d9c503983a360a2090025aa47cd1", + "internalRef": "350246057" } }, { @@ -42,6 +42,7 @@ } ], "generatedFiles": [ + ".coveragerc", ".flake8", ".github/CONTRIBUTING.md", ".github/ISSUE_TEMPLATE/bug_report.md", @@ -94,6 +95,7 @@ "MANIFEST.in", "docs/_static/custom.css", "docs/_templates/layout.html", + "docs/bigquery_datatransfer_v1/data_transfer_service.rst", "docs/bigquery_datatransfer_v1/services.rst", "docs/bigquery_datatransfer_v1/types.rst", "docs/conf.py", diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index fafbdc5d..d21e3acc 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -97,8 +97,21 @@ def test__get_default_mtls_endpoint(): ) +def test_data_transfer_service_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = DataTransferServiceClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "bigquerydatatransfer.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient] + "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,] ) def test_data_transfer_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -117,7 +130,10 @@ def test_data_transfer_service_client_from_service_account_file(client_class): def test_data_transfer_service_client_get_transport_class(): transport = DataTransferServiceClient.get_transport_class() - assert transport == transports.DataTransferServiceGrpcTransport + available_transports = [ + transports.DataTransferServiceGrpcTransport, + ] + assert transport in available_transports transport = 
DataTransferServiceClient.get_transport_class("grpc") assert transport == transports.DataTransferServiceGrpcTransport @@ -4522,7 +4538,7 @@ def test_data_transfer_service_host_with_port(): def test_data_transfer_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DataTransferServiceGrpcTransport( @@ -4534,7 +4550,7 @@ def test_data_transfer_service_grpc_transport_channel(): def test_data_transfer_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DataTransferServiceGrpcAsyncIOTransport( @@ -4559,7 +4575,7 @@ def test_data_transfer_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -4612,7 +4628,7 @@ def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel From 3c0cb6c1cb602c325d8d0befdb739c16a9bf23e0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 22 Feb 2021 15:03:32 -0800 Subject: [PATCH 18/20] build(python): enable flakybot on library unit and system tests (#107) Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Feb 17 14:10:46 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: d17674372e27fb8f23013935e794aa37502071aa Source-Link: https://github.com/googleapis/synthtool/commit/d17674372e27fb8f23013935e794aa37502071aa --- .gitignore | 4 +++- .kokoro/build.sh | 10 ++++++++++ noxfile.py | 17 +++++++++++++++-- synth.metadata | 6 +++--- 4 files changed, 31 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index b9daa52f..b4243ced 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index ab066b75..d45fa521 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/noxfile.py b/noxfile.py index 2c4b84df..5bf628be 100644 --- a/noxfile.py +++ b/noxfile.py @@ -93,6 +93,7 @@ def default(session): session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -141,9 +142,21 @@ def system(session): # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/synth.metadata b/synth.metadata index eb847e38..caa565cf 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "5c8d7c1e860d1c50d892bfabc7ec936aaa40e714" + "sha": "794df75c6f92a84e518375ef994e056dae1d9dd9" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "d17674372e27fb8f23013935e794aa37502071aa" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "d17674372e27fb8f23013935e794aa37502071aa" } } ], From 465c4832c35f8a3ea82efa0f99dd318256cd04d2 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Mar 2021 10:14:13 -0800 Subject: [PATCH 19/20] test: install pyopenssl for mtls testing (#110) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/f169e545-1f46-4498-92b8-ef79af1ecedc/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/0780323da96d5a53925fe0547757181fe76e8f1e --- noxfile.py | 3 +++ synth.metadata | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index 5bf628be..71ef3083 100644 --- a/noxfile.py +++ b/noxfile.py @@ -123,6 +123,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) diff --git a/synth.metadata b/synth.metadata index caa565cf..d91567b6 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "794df75c6f92a84e518375ef994e056dae1d9dd9" + "sha": "3c0cb6c1cb602c325d8d0befdb739c16a9bf23e0" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d17674372e27fb8f23013935e794aa37502071aa" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d17674372e27fb8f23013935e794aa37502071aa" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } } ], From 2e189b6f48a726f4746d67e7f25e5d63b86df7f6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Mar 2021 14:01:01 -0600 Subject: [PATCH 20/20] chore: release 3.0.1 (#79) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 18 ++++++++++++++++++ setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01ff2728..74748f1b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +### [3.0.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.0.0...v3.0.1) (2021-03-03) + + +### Bug Fixes + +* remove recv msg limit, add enums to `types` ([#84](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/84)) ([3e2bbef](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/3e2bbef292ddda6a736be397be4e5a0fb213eeff)) + + +### Documentation + +* add sample for dataset copy ([#76](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/76)) ([f6d2c5b](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/f6d2c5b8f3c75426881dfce90ab713535416950e)) +* add scheduled query samples ([#83](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/83)) ([cd51970](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/cd519709228cda3bbcf2fd978d37ccd04ef27c82)) +* ensure minimum width for 'Parameters' / 'Returns' column ([#95](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/95)) ([5c8d7c1](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/5c8d7c1e860d1c50d892bfabc7ec936aaa40e714)) +* **python:** document adding Python 3.9 support, dropping 3.5 support ([#89](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/89)) 
([dd84592](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/dd8459291a3ac0f98606b61ae566cb264ce96825)), closes [#787](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/787) +* remove out-of-date sample from README ([#80](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/80)) ([af0406e](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/af0406eedac1dc8c663b5c8f67f56255caeea2fa)) +* remove redundant samples ([#86](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/86)) ([093e407](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/093e407c60b117a00d2cdf8d225f22d61bc221c4)) +* update contributing guide to Python 3.8 ([#105](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/105)) ([678c335](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/678c3355e1b2e8525005ad337048d60a51400fc0)) + ## [3.0.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v2.1.0...v3.0.0) (2020-12-09) diff --git a/setup.py b/setup.py index 26c8ae7e..854d6c58 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-bigquery-datatransfer" description = "BigQuery Data Transfer API client library" -version = "3.0.0" +version = "3.0.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
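Taken together, the testing changes in patches 18 and 19 work as follows: pytest writes JUnit XML report files whose names end in sponge_log.xml (the new *sponge_log.xml entry in .gitignore keeps them out of the tree), .kokoro/build.sh traps the build's exit on continuous runs and hands those reports to the FlakyBot binary, and pyopenssl is installed only when GOOGLE_API_USE_CLIENT_CERTIFICATE=true so the mTLS system tests can run. The snippet below is a minimal, stand-alone nox session illustrating that convention; it is not the project's actual noxfile.py, and the tests/system path and Python version are assumptions made for the example.

import os

import nox


@nox.session(python="3.8")
def system(session):
    # Skip entirely when no service-account credentials are configured,
    # mirroring the sanity check in the patched noxfile above.
    if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
        session.skip("Credentials must be set via environment variable")

    # pyopenssl is only needed when client-certificate (mTLS) testing is enabled.
    if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
        session.install("pyopenssl")

    session.install("pytest")
    session.install("-e", ".")

    session.run(
        "pytest",
        "--quiet",
        # The *sponge_log.xml suffix matches the new .gitignore entry and is the
        # report FlakyBot reads when .kokoro/build.sh invokes it on continuous builds.
        f"--junitxml=system_{session.python}_sponge_log.xml",
        "tests/system",  # assumed test directory for this sketch
        *session.posargs,
    )

Running nox -s system with GOOGLE_API_USE_CLIENT_CERTIFICATE=true would exercise the mTLS path and leave a system_3.8_sponge_log.xml file behind for FlakyBot to pick up on a continuous build.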