#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to upload Jenkins/Kokoro test results to BigQuery."""

from __future__ import print_function

import os
import six
import sys
import time
import uuid

gcp_utils_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils

_DATASET_ID = 'jenkins_test_results'
_DESCRIPTION = 'Test results from master job run on Jenkins'
# 90 days in milliseconds
_EXPIRATION_MS = 90 * 24 * 60 * 60 * 1000
_PARTITION_TYPE = 'DAY'
_PROJECT_ID = 'grpc-testing'
_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins job'),
    ('build_url', 'STRING', 'URL of Jenkins job'),
    ('test_name', 'STRING', 'Individual test name'),
    ('language', 'STRING', 'Language of test'),
    ('platform', 'STRING', 'Platform used for test'),
    ('config', 'STRING', 'Config used for test'),
    ('compiler', 'STRING', 'Compiler used for test'),
    ('iomgr_platform', 'STRING', 'Iomgr used for test'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
    ('cpu_estimated', 'FLOAT', 'Estimated CPU usage of test'),
    ('cpu_measured', 'FLOAT', 'Actual CPU usage of test'),
    ('return_code', 'INTEGER', 'Exit code of test'),
]
_INTEROP_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins/Kokoro job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins/Kokoro job'),
    ('build_url', 'STRING', 'URL of Jenkins/Kokoro job'),
    ('test_name', 'STRING',
     'Unique test name combining client, server, and test_name'),
    ('suite', 'STRING',
     'Test suite: cloud_to_cloud, cloud_to_prod, or cloud_to_prod_auth'),
    ('client', 'STRING', 'Client language'),
    ('server', 'STRING', 'Server host name'),
    ('test_case', 'STRING', 'Name of test case'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
]


def _get_build_metadata(test_results):
    """Add Jenkins/Kokoro build metadata to test_results based on environment
    variables set by Jenkins/Kokoro.
    """
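    # BUILD_ID, BUILD_URL and JOB_BASE_NAME are set by Jenkins;
    # the KOKORO_* equivalents are set by Kokoro.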
    build_id = os.getenv('BUILD_ID') or os.getenv('KOKORO_BUILD_NUMBER')
    build_url = os.getenv('BUILD_URL')
    if os.getenv('KOKORO_BUILD_ID'):
        build_url = 'https://sponge.corp.google.com/invocation?id=%s' % os.getenv(
            'KOKORO_BUILD_ID')
    job_name = os.getenv('JOB_BASE_NAME') or os.getenv('KOKORO_JOB_NAME')

    if build_id:
        test_results['build_id'] = build_id
    if build_url:
        test_results['build_url'] = build_url
    if job_name:
        test_results['job_name'] = job_name


def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
      args: args in run_tests.py, generated by argparse
      platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
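    # Create the day-partitioned destination table before inserting rows.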
    big_query_utils.create_partitioned_table(
        bq,
        _PROJECT_ID,
        _DATASET_ID,
        bq_table,
        _RESULTS_SCHEMA,
        _DESCRIPTION,
        partition_type=_PARTITION_TYPE,
        expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but it will always have exactly one
            # element in the contexts where this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')

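            # make_row pairs the values with a unique row id (a random UUID),
            # which BigQuery streaming inserts can use to de-duplicate retries.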
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)

            # TODO(jtattermusch): rows are inserted one by one, very inefficient
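            # insert_rows reports success with a truthy return value; retry a
            # few times before giving up on the row.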
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print(
                            'Error uploading result to bigquery, all attempts failed.'
                        )
                        sys.exit(1)


def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
      args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(
        bq,
        _PROJECT_ID,
        _DATASET_ID,
        bq_table,
        _INTEROP_RESULTS_SCHEMA,
        _DESCRIPTION,
        partition_type=_PARTITION_TYPE,
        expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
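            # Interop shortnames are assumed to have the form
            # '<suite>:<client>:<server>:<test_case>'.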
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            # TODO(jtattermusch): rows are inserted one by one, very inefficient
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print(
                            'Error uploading result to bigquery, all attempts failed.'
                        )
                        sys.exit(1)
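

# A minimal usage sketch (the table names and platform below are hypothetical;
# in practice run_tests.py and run_interop_tests.py pass in their own argparse
# args and the resultset produced by jobset.run):
#
#   upload_results_to_bq(resultset, 'test_results', args, 'linux')
#   upload_interop_results_to_bq(resultset, 'interop_results', args)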