Skip to content
This repository was archived by the owner on Nov 29, 2023. It is now read-only.

chore: transition the library to microgenerator #56

Merged
merged 10 commits into from
Sep 21, 2020
Merged
Prev Previous commit
Next Next commit
Simplify TransferConfig instantiation in sample
  • Loading branch information
plamut committed Sep 18, 2020
commit e4ec2bedb3ebb616e0f9157cf8d13456fb6e8e02
24 changes: 10 additions & 14 deletions samples/create_scheduled_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
def sample_create_transfer_config(project_id, dataset_id, authorization_code=""):
# [START bigquerydatatransfer_create_scheduled_query]
from google.cloud.bigquery import datatransfer_v1
import google.protobuf.json_format

client = datatransfer_v1.DataTransferServiceClient()

Expand Down Expand Up @@ -55,20 +54,17 @@ def sample_create_transfer_config(project_id, dataset_id, authorization_code="")

parent = f"projects/{project_id}"

transfer_config = google.protobuf.json_format.ParseDict(
{
"destination_dataset_id": dataset_id,
"display_name": "Your Scheduled Query Name",
"data_source_id": "scheduled_query",
"params": {
"query": query_string,
"destination_table_name_template": "your_table_{run_date}",
"write_disposition": "WRITE_TRUNCATE",
"partitioning_field": "",
},
"schedule": "every 24 hours",
transfer_config = datatransfer_v1.types.TransferConfig(
destination_dataset_id=dataset_id,
display_name="Your Scheduled Query Name",
data_source_id="scheduled_query",
params={
"query": query_string,
"destination_table_name_template": "your_table_{run_date}",
"write_disposition": "WRITE_TRUNCATE",
"partitioning_field": "",
        },
        schedule="every 24 hours",
)

response = client.create_transfer_config(
Expand Down