Merge pull request #6192 from jtattermusch/populate_summary_result

Populate the ScenarioResultSummary in the QPS driver.
diff --git a/test/cpp/qps/driver.cc b/test/cpp/qps/driver.cc
index 9c0649c..2583ceb 100644
--- a/test/cpp/qps/driver.cc
+++ b/test/cpp/qps/driver.cc
@@ -52,6 +52,7 @@
 #include "test/cpp/qps/driver.h"
 #include "test/cpp/qps/histogram.h"
 #include "test/cpp/qps/qps_worker.h"
+#include "test/cpp/qps/stats.h"
 
 using std::list;
 using std::thread;
@@ -115,6 +116,47 @@
   }
 }
 
+// helpers for postprocess_scenario_result
+static double WallTime(ClientStats s) { return s.time_elapsed(); }
+static double SystemTime(ClientStats s) { return s.time_system(); }
+static double UserTime(ClientStats s) { return s.time_user(); }
+static double ServerWallTime(ServerStats s) { return s.time_elapsed(); }
+static double ServerSystemTime(ServerStats s) { return s.time_system(); }
+static double ServerUserTime(ServerStats s) { return s.time_user(); }
+static int Cores(int n) { return n; }
+
+// Postprocess ScenarioResult and populate result summary.
+static void postprocess_scenario_result(ScenarioResult* result) {
+  Histogram histogram;
+  histogram.MergeProto(result->latencies());
+
+  auto qps = histogram.Count() / average(result->client_stats(), WallTime);
+  auto qps_per_server_core = qps / sum(result->server_cores(), Cores);
+
+  result->mutable_summary()->set_qps(qps);
+  result->mutable_summary()->set_qps_per_server_core(qps_per_server_core);
+  result->mutable_summary()->set_latency_50(histogram.Percentile(50));
+  result->mutable_summary()->set_latency_90(histogram.Percentile(90));
+  result->mutable_summary()->set_latency_95(histogram.Percentile(95));
+  result->mutable_summary()->set_latency_99(histogram.Percentile(99));
+  result->mutable_summary()->set_latency_999(histogram.Percentile(99.9));
+
+  auto server_system_time = 100.0 *
+                            sum(result->server_stats(), ServerSystemTime) /
+                            sum(result->server_stats(), ServerWallTime);
+  auto server_user_time = 100.0 * sum(result->server_stats(), ServerUserTime) /
+                          sum(result->server_stats(), ServerWallTime);
+  auto client_system_time = 100.0 * sum(result->client_stats(), SystemTime) /
+                            sum(result->client_stats(), WallTime);
+  auto client_user_time = 100.0 * sum(result->client_stats(), UserTime) /
+                          sum(result->client_stats(), WallTime);
+
+  result->mutable_summary()->set_server_system_time(server_system_time);
+  result->mutable_summary()->set_server_user_time(server_user_time);
+  result->mutable_summary()->set_client_system_time(client_system_time);
+  result->mutable_summary()->set_client_user_time(client_user_time);
+}
+
 // Namespace for classes and functions used only in RunScenario
 // Using this rather than local definitions to workaround gcc-4.4 limitations
 // regarding using templates without linkage
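Note on the new stats.h include: postprocess_scenario_result leans on the sum() and average() helpers from test/cpp/qps/stats.h, applied to the accessors defined above (WallTime, Cores, and so on). As a rough illustrative sketch (an assumed shape, not the verbatim header), those helpers are small templates that fold an accessor over a repeated stats field:

// Illustrative sketch of the stats.h helpers used by postprocess_scenario_result.
// sum() folds an accessor (e.g. WallTime, Cores) over any iterable container,
// such as a RepeatedPtrField of ClientStats; average() divides by the count.
template <class Container, class Accessor>
double sum(const Container& container, Accessor accessor) {
  double total = 0.0;
  for (const auto& elem : container) {
    total += accessor(elem);
  }
  return total;
}

template <class Container, class Accessor>
double average(const Container& container, Accessor accessor) {
  return sum(container, accessor) / container.size();
}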
@@ -380,6 +422,8 @@
   }
 
   delete[] servers;
+
+  postprocess_scenario_result(result.get());
   return result;
 }
 
diff --git a/test/cpp/qps/report.cc b/test/cpp/qps/report.cc
index 07ab0a8..3ae4139 100644
--- a/test/cpp/qps/report.cc
+++ b/test/cpp/qps/report.cc
@@ -45,14 +45,6 @@
 namespace grpc {
 namespace testing {
 
-static double WallTime(ClientStats s) { return s.time_elapsed(); }
-static double SystemTime(ClientStats s) { return s.time_system(); }
-static double UserTime(ClientStats s) { return s.time_user(); }
-static double ServerWallTime(ServerStats s) { return s.time_elapsed(); }
-static double ServerSystemTime(ServerStats s) { return s.time_system(); }
-static double ServerUserTime(ServerStats s) { return s.time_user(); }
-static int Cores(int n) { return n; }
-
 void CompositeReporter::add(std::unique_ptr<Reporter> reporter) {
   reporters_.emplace_back(std::move(reporter));
 }
@@ -82,44 +74,33 @@
 }
 
 void GprLogReporter::ReportQPS(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
-  gpr_log(GPR_INFO, "QPS: %.1f",
-          histogram.Count() / average(result.client_stats(), WallTime));
+  gpr_log(GPR_INFO, "QPS: %.1f", result.summary().qps());
 }
 
 void GprLogReporter::ReportQPSPerCore(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
-  auto qps = histogram.Count() / average(result.client_stats(), WallTime);
-
-  gpr_log(GPR_INFO, "QPS: %.1f (%.1f/server core)", qps,
-          qps / sum(result.server_cores(), Cores));
+  gpr_log(GPR_INFO, "QPS: %.1f (%.1f/server core)", result.summary().qps(),
+          result.summary().qps_per_server_core());
 }
 
 void GprLogReporter::ReportLatency(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
   gpr_log(GPR_INFO,
           "Latencies (50/90/95/99/99.9%%-ile): %.1f/%.1f/%.1f/%.1f/%.1f us",
-          histogram.Percentile(50) / 1000, histogram.Percentile(90) / 1000,
-          histogram.Percentile(95) / 1000, histogram.Percentile(99) / 1000,
-          histogram.Percentile(99.9) / 1000);
+          result.summary().latency_50() / 1000,
+          result.summary().latency_90() / 1000,
+          result.summary().latency_95() / 1000,
+          result.summary().latency_99() / 1000,
+          result.summary().latency_999() / 1000);
 }
 
 void GprLogReporter::ReportTimes(const ScenarioResult& result) {
   gpr_log(GPR_INFO, "Server system time: %.2f%%",
-          100.0 * sum(result.server_stats(), ServerSystemTime) /
-              sum(result.server_stats(), ServerWallTime));
+          result.summary().server_system_time());
   gpr_log(GPR_INFO, "Server user time:   %.2f%%",
-          100.0 * sum(result.server_stats(), ServerUserTime) /
-              sum(result.server_stats(), ServerWallTime));
+          result.summary().server_user_time());
   gpr_log(GPR_INFO, "Client system time: %.2f%%",
-          100.0 * sum(result.client_stats(), SystemTime) /
-              sum(result.client_stats(), WallTime));
+          result.summary().client_system_time());
   gpr_log(GPR_INFO, "Client user time:   %.2f%%",
-          100.0 * sum(result.client_stats(), UserTime) /
-              sum(result.client_stats(), WallTime));
+          result.summary().client_user_time());
 }
 
 void JsonReporter::ReportQPS(const ScenarioResult& result) {
diff --git a/tools/gcp/utils/big_query_utils.py b/tools/gcp/utils/big_query_utils.py
index 913afd0..9dbc69c 100755
--- a/tools/gcp/utils/big_query_utils.py
+++ b/tools/gcp/utils/big_query_utils.py
@@ -119,9 +119,13 @@
                                                  tableId=table_id,
                                                  body=body)
     res = insert_req.execute(num_retries=NUM_RETRIES)
+    if res.get('insertErrors', None):
+      print 'Error inserting rows! Response: %s' % res
+      is_success = False
   except HttpError as http_error:
-    print 'Error in inserting rows in the table %s' % table_id
+    print 'Error inserting rows to the table %s' % table_id
     is_success = False
+
   return is_success
 
 
diff --git a/tools/run_tests/performance/scenario_result_schema.json b/tools/run_tests/performance/scenario_result_schema.json
index 10d24a2..0325414 100644
--- a/tools/run_tests/performance/scenario_result_schema.json
+++ b/tools/run_tests/performance/scenario_result_schema.json
@@ -148,52 +148,52 @@
         "mode": "NULLABLE"
       },
       {
-        "name": "qps_per_server_core",
+        "name": "qpsPerServerCore",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "server_system_time",
+        "name": "serverSystemTime",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "server_user_time",
+        "name": "serverUserTime",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "client_system_time",
+        "name": "clientSystemTime",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "client_user_time",
+        "name": "clientUserTime",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "latency_50",
+        "name": "latency50",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "latency_90",
+        "name": "latency90",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "latency_95",
+        "name": "latency95",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "latency_99",
+        "name": "latency99",
         "type": "FLOAT",
         "mode": "NULLABLE"
       },
       {
-        "name": "latency_999",
+        "name": "latency999",
         "type": "FLOAT",
         "mode": "NULLABLE"
       }
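The column renames above (qps_per_server_core -> qpsPerServerCore, latency_50 -> latency50, and so on) presumably track the proto3 JSON mapping: when the populated summary message is converted to JSON for the BigQuery upload, protobuf's JSON printer emits lowerCamelCase field names by default, so the schema columns have to match those names rather than the snake_case proto field names. A minimal illustrative sketch of that conversion follows; the include path for the generated proto header and the exact upload plumbing are assumptions, not taken from this change:

#include <string>

#include <google/protobuf/util/json_util.h>

#include "src/proto/grpc/testing/control.pb.h"  // assumed path of the generated ScenarioResultSummary header

// Illustrative only: proto-to-JSON conversion lowerCamelCases the field names,
// e.g. qps_per_server_core becomes "qpsPerServerCore" and latency_50 becomes
// "latency50", matching the renamed BigQuery columns above.
std::string SummaryToJson(const grpc::testing::ScenarioResultSummary& summary) {
  std::string json;
  // MessageToJsonString returns a Status; errors are ignored in this sketch.
  google::protobuf::util::MessageToJsonString(summary, &json);
  return json;  // e.g. {"qps":...,"qpsPerServerCore":...,"latency50":...}
}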