Add support for running the netperf scenario
diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py
index ebd28f7..fbccf3b 100755
--- a/tools/run_tests/performance/bq_upload_result.py
+++ b/tools/run_tests/performance/bq_upload_result.py
@@ -48,20 +48,47 @@
 _PROJECT_ID='grpc-testing'
 
 
-def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
+def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):
+  with open(result_file, 'r') as f:
+    (col1, col2, col3) = f.read().split(',')
+    latency50 = float(col1.strip()) * 1000
+    latency90 = float(col2.strip()) * 1000
+    latency99 = float(col3.strip()) * 1000
+
+    scenario_result = {
+        'scenario': {
+          'name': 'netperf_tcp_rr'
+        },
+        'summary': {
+          'latency50': latency50,
+          'latency90': latency90,
+          'latency99': latency99
+        }
+    }
+
   bq = big_query_utils.create_big_query()
   _create_results_table(bq, dataset_id, table_id)
 
+  if not _insert_result(bq, dataset_id, table_id, scenario_result, flatten=False):
+    print 'Error uploading result to bigquery.'
+    sys.exit(1)
+
+
+def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
   with open(result_file, 'r') as f:
     scenario_result = json.loads(f.read())
 
+  bq = big_query_utils.create_big_query()
+  _create_results_table(bq, dataset_id, table_id)
+
   if not _insert_result(bq, dataset_id, table_id, scenario_result):
     print 'Error uploading result to bigquery.'
     sys.exit(1)
 
 
-def _insert_result(bq, dataset_id, table_id, scenario_result):
-  _flatten_result_inplace(scenario_result)
+def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
+  if flatten:
+    _flatten_result_inplace(scenario_result)
   _populate_metadata_inplace(scenario_result)
   row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
   return big_query_utils.insert_rows(bq,
@@ -127,9 +154,17 @@
                   help='Bigquery "dataset.table" to upload results to.')
 argp.add_argument('--file_to_upload', default='scenario_result.json', type=str,
                   help='Report file to upload.')
+argp.add_argument('--file_format',
+                  choices=['scenario_result','netperf_latency_csv'],
+                  default='scenario_result',
+                  help='Format of the file to upload.')
 
 args = argp.parse_args()
 
 dataset_id, table_id = args.bq_result_table.split('.', 2)
-_upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload)
+
+if args.file_format == 'netperf_latency_csv':
+  _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, args.file_to_upload)
+else:
+  _upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload)
 print 'Successfully uploaded %s to BigQuery.\n' % args.file_to_upload
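
Note (not part of the patch): based on the parsing added above, the netperf input appears to be a single CSV line carrying the 50th/90th/99th percentile latencies, which the uploader scales by 1000 before writing them into the summary. A minimal sketch of that expectation, with a hypothetical file name and values:

  # Illustrative sketch only; file name and numbers are made up.
  # The '* 1000' mirrors the unit conversion performed by the patch.
  with open('netperf_latency.csv', 'r') as f:      # hypothetical file
    (col1, col2, col3) = f.read().split(',')       # e.g. "55.1, 77.2, 93.0"
    summary = {
      'latency50': float(col1.strip()) * 1000,
      'latency90': float(col2.strip()) * 1000,
      'latency99': float(col3.strip()) * 1000,
    }

With such a file in place, the uploader would presumably be invoked along these lines (dataset/table and file names hypothetical):

  python tools/run_tests/performance/bq_upload_result.py \
      --bq_result_table=netperf_dataset.netperf_table \
      --file_to_upload=netperf_latency.csv \
      --file_format=netperf_latency_csv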