Skip to content
This repository was archived by the owner on Nov 29, 2023. It is now read-only.

Commit b998958

Browse files
committed
Adjust samples
1 parent 0a9bc32 commit b998958

5 files changed

Lines changed: 28 additions & 27 deletions

File tree

samples/create_scheduled_query.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,10 @@
2020

2121
def sample_create_transfer_config(project_id, dataset_id, authorization_code=""):
2222
# [START bigquerydatatransfer_create_scheduled_query]
23-
from google.cloud import bigquery_datatransfer_v1
23+
from google.cloud.bigquery import datatransfer_v1
2424
import google.protobuf.json_format
2525

26-
client = bigquery_datatransfer_v1.DataTransferServiceClient()
26+
client = datatransfer_v1.DataTransferServiceClient()
2727

2828
# TODO(developer): Set the project_id to the project that contains the
2929
# destination dataset.
@@ -53,7 +53,7 @@ def sample_create_transfer_config(project_id, dataset_id, authorization_code="")
5353
17 as some_integer
5454
"""
5555

56-
parent = client.project_path(project_id)
56+
parent = f"projects/{project_id}"
5757

5858
transfer_config = google.protobuf.json_format.ParseDict(
5959
{
@@ -68,11 +68,15 @@ def sample_create_transfer_config(project_id, dataset_id, authorization_code="")
6868
},
6969
"schedule": "every 24 hours",
7070
},
71-
bigquery_datatransfer_v1.types.TransferConfig(),
71+
datatransfer_v1.types.TransferConfig.pb()(),
7272
)
7373

7474
response = client.create_transfer_config(
75-
parent, transfer_config, authorization_code=authorization_code
75+
request={
76+
"parent": parent,
77+
"transfer_config": transfer_config,
78+
"authorization_code": authorization_code,
79+
}
7680
)
7781

7882
print("Created scheduled query '{}'".format(response.name))

samples/snippets/quickstart.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,21 +15,22 @@
1515
# limitations under the License.
1616

1717

18-
def run_quickstart():
18+
def run_quickstart(project="my-project"):
1919
# [START bigquerydatatransfer_quickstart]
20-
from google.cloud import bigquery_datatransfer
20+
from google.cloud.bigquery import datatransfer
2121

22-
client = bigquery_datatransfer.DataTransferServiceClient()
22+
client = datatransfer.DataTransferServiceClient()
2323

24-
project = 'my-project' # TODO: Update to your project ID.
24+
# TODO: Update to your project ID.
25+
# project = "my-project"
2526

2627
# Get the full path to your project.
27-
parent = client.project_path(project)
28+
parent = f"projects/{project}"
2829

2930
print('Supported Data Sources:')
3031

3132
# Iterate over all possible data sources.
32-
for data_source in client.list_data_sources(parent):
33+
for data_source in client.list_data_sources(parent=parent):
3334
print('{}:'.format(data_source.display_name))
3435
print('\tID: {}'.format(data_source.data_source_id))
3536
print('\tFull path: {}'.format(data_source.name))

samples/snippets/quickstart_test.py

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414

1515
import os
1616

17-
import mock
1817
import pytest
1918

2019
import quickstart
@@ -24,18 +23,13 @@
2423

2524

2625
@pytest.fixture
27-
def mock_project_path():
26+
def mock_project_id():
2827
"""Mock out project and replace with project from environment."""
29-
project_patch = mock.patch(
30-
'google.cloud.bigquery_datatransfer.DataTransferServiceClient.'
31-
'project_path')
3228

33-
with project_patch as project_mock:
34-
project_mock.return_value = 'projects/{}'.format(PROJECT)
35-
yield project_mock
29+
return PROJECT
3630

3731

38-
def test_quickstart(capsys, mock_project_path):
39-
quickstart.run_quickstart()
32+
def test_quickstart(capsys, mock_project_id):
33+
quickstart.run_quickstart(mock_project_id)
4034
out, _ = capsys.readouterr()
4135
assert 'Supported Data Sources:' in out

samples/tests/conftest.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,9 @@ def credentials():
3939

4040
@pytest.fixture(scope="module")
4141
def bqdts_client(credentials):
42-
from google.cloud import bigquery_datatransfer_v1
42+
from google.cloud.bigquery import datatransfer_v1
4343

44-
return bigquery_datatransfer_v1.DataTransferServiceClient(credentials=credentials)
44+
return datatransfer_v1.DataTransferServiceClient(credentials=credentials)
4545

4646

4747
@pytest.fixture(scope="module")
@@ -69,6 +69,6 @@ def to_delete(bqdts_client):
6969

7070
for resource_name in doomed:
7171
try:
72-
bqdts_client.delete_transfer_config(resource_name)
72+
bqdts_client.delete_transfer_config(name=resource_name)
7373
except Exception:
7474
pass

samples/update_transfer_config.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@
2020

2121
def sample_update_transfer_config(config_name, display_name):
2222
# [START bigquerydatatransfer_update_transfer_config]
23-
from google.cloud import bigquery_datatransfer_v1
23+
from google.cloud.bigquery import datatransfer_v1
2424

25-
client = bigquery_datatransfer_v1.DataTransferServiceClient()
25+
client = datatransfer_v1.DataTransferServiceClient()
2626
# TODO(developer): Set the config_name which user wants to update.
2727
# config_name = "your-created-transfer-config-name"
2828

@@ -32,7 +32,9 @@ def sample_update_transfer_config(config_name, display_name):
3232
transfer_config = client.get_transfer_config(name=config_name)
3333
transfer_config.display_name = display_name
3434
field_mask = {"paths": ["display_name"]}
35-
response = client.update_transfer_config(transfer_config, field_mask)
35+
response = client.update_transfer_config(
36+
transfer_config=transfer_config, update_mask=field_mask
37+
)
3638

3739
print("Transfer config updated for '{}'".format(response.name))
3840
# [END bigquerydatatransfer_update_transfer_config]

0 commit comments

Comments (0)