From e9063ccf199e9938f7ef563e3683d25375072df3 Mon Sep 17 00:00:00 2001
From: cclauss
Date: Thu, 21 Jun 2018 00:18:20 +0200
Subject: print() is a function in Python 3 (#4754)

---
 benchmarks/python/py_benchmark.py  | 17 +++++++++--------
 benchmarks/util/big_query_utils.py | 27 ++++++++++++++-------------
 benchmarks/util/run_and_upload.py  | 14 ++++++++------
 3 files changed, 31 insertions(+), 27 deletions(-)

diff --git a/benchmarks/python/py_benchmark.py b/benchmarks/python/py_benchmark.py
index 6942d208..e86b61e7 100755
--- a/benchmarks/python/py_benchmark.py
+++ b/benchmarks/python/py_benchmark.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import os
 import timeit
@@ -138,15 +139,15 @@ if __name__ == "__main__":
     results.append(run_one_test(file))
 
   if args.json != "no":
-    print json.dumps(results)
+    print(json.dumps(results))
   else:
     for result in results:
-      print "Message %s of dataset file %s" % \
-          (result["message_name"], result["filename"])
-      print "Average time for parse_from_benchmark: %.2f ns" % \
+      print("Message %s of dataset file %s" % \
+          (result["message_name"], result["filename"]))
+      print("Average time for parse_from_benchmark: %.2f ns" % \
           (result["benchmarks"][ \
-              args.behavior_prefix + "_parse_from_benchmark"])
-      print "Average time for serialize_to_benchmark: %.2f ns" % \
+              args.behavior_prefix + "_parse_from_benchmark"]))
+      print("Average time for serialize_to_benchmark: %.2f ns" % \
           (result["benchmarks"][ \
-              args.behavior_prefix + "_serialize_to_benchmark"])
-      print ""
+              args.behavior_prefix + "_serialize_to_benchmark"]))
+      print("")
diff --git a/benchmarks/util/big_query_utils.py b/benchmarks/util/big_query_utils.py
index 14105aa6..aea55bbd 100755
--- a/benchmarks/util/big_query_utils.py
+++ b/benchmarks/util/big_query_utils.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python2.7
 
+from __future__ import print_function
 import argparse
 import json
 import uuid
@@ -37,11 +38,11 @@ def create_dataset(biq_query, project_id, dataset_id):
     dataset_req.execute(num_retries=NUM_RETRIES)
   except HttpError as http_error:
     if http_error.resp.status == 409:
-      print 'Warning: The dataset %s already exists' % dataset_id
+      print('Warning: The dataset %s already exists' % dataset_id)
     else:
       # Note: For more debugging info, print "http_error.content"
-      print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
-                                                        http_error)
+      print('Error in creating dataset: %s. Err: %s' % (dataset_id,
+                                                        http_error))
       is_success = False
   return is_success
 
@@ -109,13 +110,13 @@ def create_table2(big_query,
     table_req = big_query.tables().insert(
         projectId=project_id, datasetId=dataset_id, body=body)
     res = table_req.execute(num_retries=NUM_RETRIES)
-    print 'Successfully created %s "%s"' % (res['kind'], res['id'])
+    print('Successfully created %s "%s"' % (res['kind'], res['id']))
   except HttpError as http_error:
     if http_error.resp.status == 409:
-      print 'Warning: Table %s already exists' % table_id
+      print('Warning: Table %s already exists' % table_id)
     else:
-      print 'Error in creating table: %s. Err: %s' % (table_id,
-                                                      http_error)
+      print('Error in creating table: %s. Err: %s' % (table_id,
+                                                      http_error))
       is_success = False
   return is_success
 
@@ -141,9 +142,9 @@ def patch_table(big_query, project_id, dataset_id, table_id, fields_schema):
         tableId=table_id,
         body=body)
     res = table_req.execute(num_retries=NUM_RETRIES)
-    print 'Successfully patched %s "%s"' % (res['kind'], res['id'])
+    print('Successfully patched %s "%s"' % (res['kind'], res['id']))
   except HttpError as http_error:
-    print 'Error in creating table: %s. Err: %s' % (table_id, http_error)
+    print('Error in creating table: %s. Err: %s' % (table_id, http_error))
     is_success = False
   return is_success
 
@@ -159,10 +160,10 @@ def insert_rows(big_query, project_id, dataset_id, table_id, rows_list):
         body=body)
     res = insert_req.execute(num_retries=NUM_RETRIES)
     if res.get('insertErrors', None):
-      print 'Error inserting rows! Response: %s' % res
+      print('Error inserting rows! Response: %s' % res)
       is_success = False
   except HttpError as http_error:
-    print 'Error inserting rows to the table %s' % table_id
+    print('Error inserting rows to the table %s' % table_id)
     is_success = False
   return is_success
 
@@ -176,8 +177,8 @@ def sync_query_job(big_query, project_id, query, timeout=5000):
         projectId=project_id,
         body=query_data).execute(num_retries=NUM_RETRIES)
   except HttpError as http_error:
-    print 'Query execute job failed with error: %s' % http_error
-    print http_error.content
+    print('Query execute job failed with error: %s' % http_error)
+    print(http_error.content)
   return query_job
 
 
diff --git a/benchmarks/util/run_and_upload.py b/benchmarks/util/run_and_upload.py
index ae22a668..43c9fa2d 100755
--- a/benchmarks/util/run_and_upload.py
+++ b/benchmarks/util/run_and_upload.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+from __future__ import absolute_import
 import argparse
 import os
 import re
@@ -5,7 +7,7 @@ import copy
 import uuid
 import calendar
 import time
-import big_query_utils
+from . import big_query_utils
 import datetime
 import json
 # This import depends on the automake rule protoc_middleman, please make sure
@@ -255,7 +257,7 @@ def upload_result(result_list, metadata):
     if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
                                        _TABLE + "$" + _NOW,
                                        [row]):
-      print 'Error when uploading result', new_result
+      print('Error when uploading result', new_result)
 
 
 if __name__ == "__main__":
@@ -280,11 +282,11 @@ if __name__ == "__main__":
     parse_go_result(args.go_input_file)
 
   metadata = get_metadata()
-  print "uploading cpp results..."
+  print("uploading cpp results...")
   upload_result(cpp_result, metadata)
-  print "uploading java results..."
+  print("uploading java results...")
   upload_result(java_result, metadata)
-  print "uploading python results..."
+  print("uploading python results...")
   upload_result(python_result, metadata)
-  print "uploading go results..."
+  print("uploading go results...")
   upload_result(go_result, metadata)
--
cgit v1.2.3
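
A standalone sketch of the semantics this patch relies on; the snippet below is
illustrative only, not part of the patch, and the sample arguments are made up.
With the `from __future__ import print_function` line that each file adds at the
top, multi-argument print behaves identically on Python 2 and Python 3:

    from __future__ import print_function  # no-op on Python 3; on Python 2 it
                                            # replaces the print statement with
                                            # the print() function in this module

    # With the import (or on any Python 3), print joins its arguments with
    # spaces, matching the patched call in run_and_upload.py:
    print('Error when uploading result', {'id': 42})
    # -> Error when uploading result {'id': 42}

    # Without the import, Python 2 would parse print(...) as a print statement
    # applied to a tuple and display:
    # ('Error when uploading result', {'id': 42})

A `__future__` import must precede all other statements in the module (only a
docstring and comments may come before it), which is why the patch places it
above the existing imports in every file.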