Enable uploading server CPU usage data from performance tests to BigQuery
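
Passing a non-zero --server_cpu_load (for example --server_cpu_load 70; the value is only illustrative) makes the QPS driver search over offered_load until server CPU usage reaches the requested target, and the per-scenario timeout is raised from 3 to 12 minutes to leave room for that search. Leaving the flag at its default of 0 keeps the previous fixed-load behavior.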
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index b7b742d..d6eed3f 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -113,7 +113,7 @@
 
 
 def create_scenario_jobspec(scenario_json, workers, remote_host=None,
-                            bq_result_table=None):
+                            bq_result_table=None, server_cpu_load=0):
   """Runs one scenario using QPS driver."""
   # setting QPS_WORKERS env variable here makes sure it works with SSH too.
   cmd = 'QPS_WORKERS="%s" ' % ','.join(workers)
@@ -121,7 +121,9 @@
     cmd += 'BQ_RESULT_TABLE="%s" ' % bq_result_table
   cmd += 'tools/run_tests/performance/run_qps_driver.sh '
   cmd += '--scenarios_json=%s ' % pipes.quote(json.dumps({'scenarios': [scenario_json]}))
-  cmd += '--scenario_result_file=scenario_result.json'
+  cmd += '--scenario_result_file=scenario_result.json '
+  if server_cpu_load != 0:
+    cmd += '--search_param=offered_load --initial_search_value=1000 --targeted_cpu_load=%d --stride=500 --error_tolerance=0.01' % server_cpu_load
   if remote_host:
     user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
     cmd = 'ssh %s "cd ~/performance_workspace/grpc/ && "%s' % (user_at_host, pipes.quote(cmd))
@@ -129,7 +131,7 @@
   return jobset.JobSpec(
       cmdline=[cmd],
       shortname='qps_json_driver.%s' % scenario_json['name'],
-      timeout_seconds=3*60,
+      timeout_seconds=12*60,
       shell=True,
       verbose_success=True)
 
@@ -318,7 +320,7 @@
 
 def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
                      category='all', bq_result_table=None,
-                     netperf=False, netperf_hosts=[]):
+                     netperf=False, netperf_hosts=[], server_cpu_load=0):
   """Create jobspecs for scenarios to run."""
   all_workers = [worker
                  for workers in workers_by_lang.values()
@@ -379,7 +381,8 @@
               create_scenario_jobspec(scenario_json,
                                       [w.host_and_port for w in workers],
                                       remote_host=remote_host,
-                                      bq_result_table=bq_result_table),
+                                      bq_result_table=bq_result_table,
+                                      server_cpu_load=server_cpu_load),
               workers,
               scenario_json['name'])
           scenarios.append(scenario)
@@ -461,6 +464,9 @@
                   action='store_const',
                   const=True,
                   help='Run netperf benchmark as one of the scenarios.')
+argp.add_argument('--server_cpu_load',
+                  default=0, type=int,
+                  help='Target server CPU load to aim for; 0 means this flag is ignored.')
 argp.add_argument('-x', '--xml_report', default='report.xml', type=str,
                   help='Name of XML report file to generate.')
 argp.add_argument('--perf_args',
@@ -490,7 +496,6 @@
                         'May be useful if "perf_args" arguments do not make sense for '
                         'generating flamegraphs (e.g., "--perf_args=stat ...")'))
 
-
 args = argp.parse_args()
 
 languages = set(scenario_config.LANGUAGES[l]
@@ -540,7 +545,8 @@
                            category=args.category,
                            bq_result_table=args.bq_result_table,
                            netperf=args.netperf,
-                           netperf_hosts=args.remote_worker_host)
+                           netperf_hosts=args.remote_worker_host,
+                           server_cpu_load=args.server_cpu_load)
 
 if not scenarios:
   raise Exception('No scenarios to run')
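
For illustration only, not part of the patch: a minimal standalone sketch of the driver command line that the patched create_scenario_jobspec produces when a non-zero server_cpu_load is requested. The scenario name, worker addresses and CPU target used in the example call are hypothetical.

# Minimal sketch (assumed names/values) of the command string that the patched
# create_scenario_jobspec builds when a non-zero server_cpu_load is requested.
import json
import pipes


def build_qps_driver_cmd(scenario_json, workers, bq_result_table=None,
                         server_cpu_load=0):
  # QPS_WORKERS is set inline so the command also works when run over SSH.
  cmd = 'QPS_WORKERS="%s" ' % ','.join(workers)
  if bq_result_table:
    cmd += 'BQ_RESULT_TABLE="%s" ' % bq_result_table
  cmd += 'tools/run_tests/performance/run_qps_driver.sh '
  cmd += '--scenarios_json=%s ' % pipes.quote(
      json.dumps({'scenarios': [scenario_json]}))
  cmd += '--scenario_result_file=scenario_result.json '
  if server_cpu_load != 0:
    # Ask the driver to sweep offered_load (starting at 1000, stride 500)
    # toward the targeted server CPU load, within the given error tolerance.
    cmd += ('--search_param=offered_load --initial_search_value=1000 '
            '--targeted_cpu_load=%d --stride=500 '
            '--error_tolerance=0.01' % server_cpu_load)
  return cmd


# Hypothetical scenario name, worker addresses and CPU target.
print(build_qps_driver_cmd({'name': 'cpp_protobuf_async_streaming_qps'},
                           ['localhost:10000', 'localhost:10010'],
                           server_cpu_load=70))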