blob: 0f53ba5d02d9166fb99a6ade3b7e12d4d97a1a14 [file] [log] [blame]
Jan Tattermusch6d7fa552016-04-14 17:42:54 -07001#!/usr/bin/env python2.7
Jan Tattermuschefd98032016-04-14 16:29:24 -07002# Copyright 2016, Google Inc.
3# All rights reserved.
4#
5# Redistribution and use in source and binary forms, with or without
6# modification, are permitted provided that the following conditions are
7# met:
8#
9# * Redistributions of source code must retain the above copyright
10# notice, this list of conditions and the following disclaimer.
11# * Redistributions in binary form must reproduce the above
12# copyright notice, this list of conditions and the following disclaimer
13# in the documentation and/or other materials provided with the
14# distribution.
15# * Neither the name of Google Inc. nor the names of its
16# contributors may be used to endorse or promote products derived from
17# this software without specific prior written permission.
18#
19# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070031# Uploads performance benchmark result file to bigquery.
Jan Tattermuschefd98032016-04-14 16:29:24 -070032
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070033import argparse
Jan Tattermuschefd98032016-04-14 16:29:24 -070034import json
35import os
36import sys
37import uuid
38
39
40gcp_utils_dir = os.path.abspath(os.path.join(
41 os.path.dirname(__file__), '../../gcp/utils'))
42sys.path.append(gcp_utils_dir)
43import big_query_utils
44
45
46_PROJECT_ID='grpc-testing'
Jan Tattermuschefd98032016-04-14 16:29:24 -070047
48
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070049def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
Jan Tattermuschefd98032016-04-14 16:29:24 -070050 bq = big_query_utils.create_big_query()
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070051 _create_results_table(bq, dataset_id, table_id)
Jan Tattermuschefd98032016-04-14 16:29:24 -070052
53 with open(result_file, 'r') as f:
54 scenario_result = json.loads(f.read())
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070055
56 if not _insert_result(bq, dataset_id, table_id, scenario_result):
57 print 'Error uploading result to bigquery.'
58 sys.exit(1)
Jan Tattermuschefd98032016-04-14 16:29:24 -070059
60
def _insert_result(bq, dataset_id, table_id, scenario_result):
  """Flattens the scenario result in place and inserts it as one row.

  Returns whatever big_query_utils.insert_rows returns (truthy on success).
  """
  _flatten_result_inplace(scenario_result)
  new_row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
  return big_query_utils.insert_rows(
      bq, _PROJECT_ID, dataset_id, table_id, [new_row])
69
70
def _create_results_table(bq, dataset_id, table_id):
  """Creates the bigquery results table from the bundled JSON schema file.

  Returns whatever big_query_utils.create_table2 returns (truthy on success).
  """
  # Build the schema path with os.path.join rather than string
  # concatenation so the separator is always correct.
  schema_path = os.path.join(os.path.dirname(__file__),
                             'scenario_result_schema.json')
  with open(schema_path, 'r') as f:
    table_schema = json.load(f)
  desc = 'Results of performance benchmarks.'
  return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id,
                                       table_id, table_schema, desc)
Jan Tattermuschefd98032016-04-14 16:29:24 -070077
78
79def _flatten_result_inplace(scenario_result):
80 """Bigquery is not really great for handling deeply nested data
81 and repeated fields. To maintain values of some fields while keeping
82 the schema relatively simple, we artificially leave some of the fields
83 as JSON strings.
84 """
85 scenario_result['scenario']['clientConfig'] = json.dumps(scenario_result['scenario']['clientConfig'])
86 scenario_result['scenario']['serverConfig'] = json.dumps(scenario_result['scenario']['serverConfig'])
87 scenario_result['latencies'] = json.dumps(scenario_result['latencies'])
88 for stats in scenario_result['clientStats']:
89 stats['latencies'] = json.dumps(stats['latencies'])
Jan Tattermusch88cc4e22016-04-14 16:58:50 -070090 scenario_result['serverCores'] = json.dumps(scenario_result['serverCores'])
Jan Tattermusch6d7fa552016-04-14 17:42:54 -070091
92
93argp = argparse.ArgumentParser(description='Upload result to big query.')
94argp.add_argument('--bq_result_table', required=True, default=None, type=str,
95 help='Bigquery "dataset.table" to upload results to.')
96argp.add_argument('--file_to_upload', default='scenario_result.json', type=str,
97 help='Report file to upload.')
98
99args = argp.parse_args()
100
101dataset_id, table_id = args.bq_result_table.split('.', 2)
102_upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload)
103print 'Successfully uploaded %s to BigQuery.\n' % args.file_to_upload