Skip to content

Commit a58d06d

Browse files
Shun Fan and Jon Wayne Parrott
Shun Fan
authored and
Jon Wayne Parrott
committed
Update samples to be run from command line without modification
Switch to dictionary for JSON string. Update description strings.
1 parent 03251f8 commit a58d06d

File tree

4 files changed

+142
-75
lines changed

4 files changed

+142
-75
lines changed

storage/transfer_service/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ This app creates two types of transfers using the Transfer Service tool.
1111
1. Select Add credentials > Service account > JSON key.
1212
1. Set the environment variable GOOGLE_APPLICATION_CREDENTIALS to point to your JSON key.
1313
1. Add the Storage Transfer service account as an editor of your project
14-
storage-transfer-5031963314028297433@partnercontent.gserviceaccount.com
14+
storage-transfer-<accountId>@partnercontent.gserviceaccount.com
1515
1. Set up gcloud for application default credentials.
1616
1. `gcloud components update`
1717
1. `gcloud auth login`

storage/transfer_service/aws_request.py

+63-31
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
# limitations under the License.
1313
#
1414
# [START all]
15+
import argparse
16+
import datetime
1517
import json
1618
import logging
1719

@@ -22,54 +24,84 @@
2224
logging.basicConfig(level=logging.DEBUG)
2325

2426

25-
def main():
26-
"""Create a one-off transfer from Amazon S3 to GCS."""
27+
# [START main]
28+
def main(description, project_id, day, month, year, hours, minutes,
29+
source_bucket, access_key, secret_access_key, sink_bucket):
30+
"""Create a one-off transfer from Amazon S3 to Google Cloud Storage."""
2731
credentials = GoogleCredentials.get_application_default()
2832
storagetransfer = discovery.build(
2933
'storagetransfer', 'v1', credentials=credentials)
3034

3135
# Edit this template with desired parameters.
3236
# Specify times below using US Pacific Time Zone.
33-
transfer_job = '''
34-
{
35-
"description": "YOUR DESCRIPTION",
36-
"status": "ENABLED",
37-
"projectId": "YOUR_PROJECT_ID",
38-
"schedule": {
39-
"scheduleStartDate": {
40-
"day": 1,
41-
"month": 1,
42-
"year": 2015
37+
transfer_job = {
38+
'description': description,
39+
'status': 'ENABLED',
40+
'projectId': project_id,
41+
'schedule': {
42+
'scheduleStartDate': {
43+
'day': day,
44+
'month': month,
45+
'year': year
4346
},
44-
"scheduleEndDate": {
45-
"day": 1,
46-
"month": 1,
47-
"year": 2015
47+
'scheduleEndDate': {
48+
'day': day,
49+
'month': month,
50+
'year': year
4851
},
49-
"startTimeOfDay": {
50-
"hours": 0,
51-
"minutes": 0
52+
'startTimeOfDay': {
53+
'hours': hours,
54+
'minutes': minutes
5255
}
5356
},
54-
"transferSpec": {
55-
"awsS3DataSource": {
56-
"bucketName": "YOUR_SOURCE_BUCKET",
57-
"awsAccessKey": {
58-
"accessKeyId": "YOUR_ACCESS_KEY_ID",
59-
"secretAccessKey": "YOUR_SECRET_ACCESS_KEY"
57+
'transferSpec': {
58+
'awsS3DataSource': {
59+
'bucketName': source_bucket,
60+
'awsAccessKey': {
61+
'accessKeyId': access_key,
62+
'secretAccessKey': secret_access_key
6063
}
6164
},
62-
"gcsDataSink": {
63-
"bucketName": "YOUR_SINK_BUCKET"
65+
'gcsDataSink': {
66+
'bucketName': sink_bucket
6467
}
6568
}
6669
}
67-
'''
6870

69-
result = storagetransfer.transferJobs().create(body=json.loads(
70-
transfer_job)).execute()
71+
print(transfer_job)
72+
result = storagetransfer.transferJobs().create(body=transfer_job).execute()
7173
logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
74+
# [END main]
7275

7376
if __name__ == '__main__':
74-
main()
77+
parser = argparse.ArgumentParser(
78+
description='Create a one-off transfer from Amazon S3 to Google Cloud '
79+
'Storage.')
80+
parser.add_argument('description', help='Transfer description.')
81+
parser.add_argument('project_id', help='Your Google Cloud project ID.')
82+
parser.add_argument('date', help='Date YYYY/MM/DD.')
83+
parser.add_argument('time', help='Time (24hr) HH:MM.')
84+
parser.add_argument('source_bucket', help='Source bucket name.')
85+
parser.add_argument('access_key', help='Your AWS access key id.')
86+
parser.add_argument('secret_access_key', help='Your AWS secret access '
87+
'key.')
88+
parser.add_argument('sink_bucket', help='Sink bucket name.')
89+
90+
args = parser.parse_args()
91+
date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
92+
time = datetime.datetime.strptime(args.time, '%H:%M')
93+
94+
main(
95+
args.description,
96+
args.project_id,
97+
date.year,
98+
date.month,
99+
date.day,
100+
time.hour,
101+
time.minute,
102+
args.source_bucket,
103+
args.access_key,
104+
args.secret_access_key,
105+
args.sink_bucket)
106+
75107
# [END all]

storage/transfer_service/nearline_request.py

+56-28
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
# limitations under the License.
1313
#
1414
# [START all]
15+
import argparse
16+
import datetime
1517
import json
1618
import logging
1719

@@ -22,50 +24,76 @@
2224
logging.basicConfig(level=logging.DEBUG)
2325

2426

25-
def main():
26-
"""Transfer from standard Cloud Storage to Cloud Storage Nearline."""
27+
# [START main]
28+
def main(description, project_id, day, month, year, hours, minutes,
29+
source_bucket, sink_bucket):
30+
"""Create a transfer from the Google Cloud Storage Standard class to the
31+
Nearline Storage class."""
2732
credentials = GoogleCredentials.get_application_default()
2833
storagetransfer = discovery.build(
2934
'storagetransfer', 'v1', credentials=credentials)
3035

3136
# Edit this template with desired parameters.
3237
# Specify times below using US Pacific Time Zone.
33-
transfer_job = '''
34-
{
35-
"description": "YOUR DESCRIPTION",
36-
"status": "ENABLED",
37-
"projectId": "YOUR_PROJECT_ID",
38-
"schedule": {
39-
"scheduleStartDate": {
40-
"day": 1,
41-
"month": 1,
42-
"year": 2015
38+
transfer_job = {
39+
'description': description,
40+
'status': 'ENABLED',
41+
'projectId': project_id,
42+
'schedule': {
43+
'scheduleStartDate': {
44+
'day': day,
45+
'month': month,
46+
'year': year
4347
},
44-
"startTimeOfDay": {
45-
"hours": 1,
46-
"minutes": 1
48+
'startTimeOfDay': {
49+
'hours': hours,
50+
'minutes': minutes
4751
}
4852
},
49-
"transferSpec": {
50-
"gcsDataSource": {
51-
"bucketName": "YOUR_SOURCE_BUCKET"
53+
'transferSpec': {
54+
'gcsDataSource': {
55+
'bucketName': source_bucket
5256
},
53-
"gcsDataSink": {
54-
"bucketName": "YOUR_SINK_BUCKET"
57+
'gcsDataSink': {
58+
'bucketName': sink_bucket
5559
},
56-
"objectConditions": {
57-
"minTimeElapsedSinceLastModification": "2592000s"
60+
'objectConditions': {
61+
'minTimeElapsedSinceLastModification': '2592000s'
5862
},
59-
"transferOptions": {
60-
"deleteObjectsFromSourceAfterTransfer": true
63+
'transferOptions': {
64+
'deleteObjectsFromSourceAfterTransfer': True
6165
}
6266
}
6367
}
64-
'''
65-
result = storagetransfer.transferJobs().create(body=json.loads(
66-
transfer_job)).execute()
68+
69+
result = storagetransfer.transferJobs().create(body=transfer_job).execute()
6770
logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
71+
# [END main]
6872

6973
if __name__ == '__main__':
70-
main()
74+
parser = argparse.ArgumentParser(
75+
description='Create a transfer from the Google Cloud Storage Standard '
76+
'class to the Nearline Storage class.')
77+
parser.add_argument('description', help='Transfer description.')
78+
parser.add_argument('project_id', help='Your Google Cloud project ID.')
79+
parser.add_argument('date', help='Date YYYY/MM/DD.')
80+
parser.add_argument('time', help='Time (24hr) HH:MM.')
81+
parser.add_argument('source_bucket', help='Source bucket name.')
82+
parser.add_argument('sink_bucket', help='Sink bucket name.')
83+
84+
args = parser.parse_args()
85+
date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
86+
time = datetime.datetime.strptime(args.time, '%H:%M')
87+
88+
main(
89+
args.description,
90+
args.project_id,
91+
date.day,
92+
date.month,
93+
date.year,
94+
time.hour,
95+
time.minute,
96+
args.source_bucket,
97+
args.sink_bucket)
98+
7199
# [END all]

storage/transfer_service/transfer_check.py

+22-15
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
# limitations under the License.
1313
#
1414
# [START all]
15+
import argparse
1516
import json
1617
import logging
1718

@@ -21,31 +22,37 @@
2122

2223
logging.basicConfig(level=logging.DEBUG)
2324

24-
# Edit these values with desired parameters.
25-
PROJECT_ID = 'YOUR_PROJECT_ID'
26-
JOB_NAME = 'YOUR_JOB_NAME'
2725

28-
29-
def check_operation(storagetransfer, project_id, job_name):
26+
# [START main]
27+
def main(project_id, job_name):
3028
"""Review the transfer operations associated with a transfer job."""
29+
credentials = GoogleCredentials.get_application_default()
30+
storagetransfer = discovery.build(
31+
'storagetransfer', 'v1', credentials=credentials)
32+
3133
filterString = (
3234
'{{"project_id": "{project_id}", '
3335
'"job_names": ["{job_name}"]}}'
3436
).format(project_id=project_id, job_name=job_name)
35-
return storagetransfer.transferOperations().list(
37+
38+
result = storagetransfer.transferOperations().list(
3639
name="transferOperations",
3740
filter=filterString).execute()
38-
39-
40-
def main():
41-
credentials = GoogleCredentials.get_application_default()
42-
storagetransfer = discovery.build(
43-
'storagetransfer', 'v1', credentials=credentials)
44-
45-
result = check_operation(storagetransfer, PROJECT_ID, JOB_NAME)
4641
logging.info('Result of transferOperations/list: %s',
4742
json.dumps(result, indent=4, sort_keys=True))
43+
# [END main]
4844

4945
if __name__ == '__main__':
50-
main()
46+
parser = argparse.ArgumentParser(
47+
description='Review the transfer operations associated with a transfer'
48+
' job.')
49+
parser.add_argument('project_id', help='Your Google Cloud project ID.')
50+
parser.add_argument('job_name', help='Your job name.')
51+
52+
args = parser.parse_args()
53+
54+
main(
55+
args.project_id,
56+
args.job_name)
57+
5158
# [END all]

0 commit comments

Comments
 (0)