Merge pull request #12647 from nicolasnoble/pull_request_interval_script

Adding a script to generate a pull request list from an interval.
diff --git a/include/grpc/grpc.h b/include/grpc/grpc.h
index fab7d43..1de289f 100644
--- a/include/grpc/grpc.h
+++ b/include/grpc/grpc.h
@@ -313,7 +313,7 @@
                                                      void *reserved);
 
 /** Ref a call.
-    THREAD SAFETY: grpc_call_unref is thread-compatible */
+    THREAD SAFETY: grpc_call_ref is thread-compatible */
 GRPCAPI void grpc_call_ref(grpc_call *call);
 
 /** Unref a call.
diff --git a/src/cpp/client/secure_credentials.h b/src/cpp/client/secure_credentials.h
index fa1e319..ed9afb3 100644
--- a/src/cpp/client/secure_credentials.h
+++ b/src/cpp/client/secure_credentials.h
@@ -73,7 +73,7 @@
       grpc_auth_metadata_context context,
       grpc_credentials_plugin_metadata_cb cb, void* user_data,
       grpc_metadata creds_md[GRPC_METADATA_CREDENTIALS_PLUGIN_SYNC_MAX],
-      size_t* num_creds_md, grpc_status_code* status,
+      size_t* num_creds_md, grpc_status_code* status_code,
       const char** error_details);
   std::unique_ptr<ThreadPoolInterface> thread_pool_;
   std::unique_ptr<MetadataCredentialsPlugin> plugin_;
diff --git a/tools/interop_matrix/README.md b/tools/interop_matrix/README.md
index f92dc69..c2f3543 100644
--- a/tools/interop_matrix/README.md
+++ b/tools/interop_matrix/README.md
@@ -5,6 +5,21 @@
 The setup builds gRPC docker images for each language/runtime and uploads them to Google Container Registry (GCR). These images, encapsulating the gRPC stack
 from specific releases/tags, are used to test version compatibility between gRPC release versions.
 
+## Step-by-step instructions for adding a new release to compatibility test
+We have a continuous nightly test setup to test gRPC backward compatibility between old clients and the latest server.  When a gRPC developer creates a new gRPC release, they are also responsible for adding the just-released gRPC client to the nightly test.  The steps are:
+- Add (or update) an entry in the `./client_matrix.py` file to reference the GitHub tag for the release.
+- Build the new client docker image(s).  For example, for the java release `v1.9.9`, run
+  - `tools/interop_matrix/create_matrix_images.py --git_checkout --release=v1.9.9 --language=java`
+- Verify that the new docker image was built successfully and uploaded to GCR.  For example,
+  - `gcloud beta container images list-tags gcr.io/grpc-testing/grpc_interop_java_oracle8`
+  - should show an image entry with tag `v1.9.9`.
+- Verify that the just-created docker client image passes the backward compatibility test (it should).  For example,
+  - `gcloud docker -- pull gcr.io/grpc-testing/grpc_interop_java_oracle8:v1.9.9` followed by
+  - `docker_image=gcr.io/grpc-testing/grpc_interop_java_oracle8:v1.9.9 ./testcases/java__master`
+- git commit the change and merge it into upstream/master.
+- (Optional) Clean up the tmp directory where the grpc source is cloned: `/export/hda3/tmp/grpc_matrix/`.
+For more details on each step, refer to the sections below.  A combined example of the commands is sketched next.
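+
+Putting the steps above together, a session for the java `v1.9.9` example might look like the following sketch (the release tag and language are illustrative; substitute your own):
+
+```sh
+# Build and upload the client image for the new release (after updating ./client_matrix.py)
+tools/interop_matrix/create_matrix_images.py --git_checkout --release=v1.9.9 --language=java
+
+# Confirm the image was uploaded to GCR with the expected tag
+gcloud beta container images list-tags gcr.io/grpc-testing/grpc_interop_java_oracle8
+
+# Pull the image and run the backward compatibility testcases against it
+gcloud docker -- pull gcr.io/grpc-testing/grpc_interop_java_oracle8:v1.9.9
+docker_image=gcr.io/grpc-testing/grpc_interop_java_oracle8:v1.9.9 ./testcases/java__master
+```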
+
 ## Instructions for creating GCR images
 - Edit  `./client_matrix.py` to include desired gRPC release.
 - Run `tools/interop_matrix/create_matrix_images.py`.  Useful options:
@@ -45,3 +60,4 @@
 
 Note:
 - File paths starting with `tools/` or `template/` are relative to the grpc repo root dir.  File paths starting with `./` are relative to the current directory (`tools/interop_matrix`).
+- Creating and referencing images in GCR requires read and write permission to the Google Container Registry path `gcr.io/grpc-testing`.
diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_main.py b/tools/profiling/microbenchmarks/bm_diff/bm_main.py
index 5aa11ac..516d110 100755
--- a/tools/profiling/microbenchmarks/bm_diff/bm_main.py
+++ b/tools/profiling/microbenchmarks/bm_diff/bm_main.py
@@ -23,6 +23,7 @@
 
 import sys
 import os
+import random
 import argparse
 import multiprocessing
 import subprocess
@@ -32,6 +33,12 @@
     os.path.dirname(sys.argv[0]), '..', '..', 'run_tests', 'python_utils'))
 import comment_on_pr
 
+sys.path.append(
+  os.path.join(
+    os.path.dirname(sys.argv[0]), '..', '..', '..', 'run_tests',
+    'python_utils'))
+import jobset
+
 
 def _args():
   argp = argparse.ArgumentParser(
@@ -125,8 +132,13 @@
       subprocess.check_call(['git', 'checkout', where_am_i])
       subprocess.check_call(['git', 'submodule', 'update'])
 
-  bm_run.run('new', args.benchmarks, args.jobs, args.loops, args.regex, args.counters)
-  bm_run.run(old, args.benchmarks, args.jobs, args.loops, args.regex, args.counters)
+  jobs_list = []
+  jobs_list += bm_run.create_jobs('new', args.benchmarks, args.loops, args.regex, args.counters)
+  jobs_list += bm_run.create_jobs(old, args.benchmarks, args.loops, args.regex, args.counters)
+
+  # shuffle all jobs to eliminate noise from GCE CPU drift
+  random.shuffle(jobs_list, random.SystemRandom().random)
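+  # run the interleaved old/new jobs, with parallelism bounded by --jobs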
+  jobset.run(jobs_list, maxjobs=args.jobs)
 
   diff, note = bm_diff.diff(args.benchmarks, args.loops, args.regex, args.track, old,
                 'new', args.counters)
diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_run.py b/tools/profiling/microbenchmarks/bm_diff/bm_run.py
index 206f7c5..81db5a2 100755
--- a/tools/profiling/microbenchmarks/bm_diff/bm_run.py
+++ b/tools/profiling/microbenchmarks/bm_diff/bm_run.py
@@ -95,11 +95,12 @@
         shortname='%s %s %s %s %d/%d' % (bm, line, cfg, name, idx + 1,
                          loops),
         verbose_success=True,
+        cpu_cost=2,
         timeout_seconds=60 * 60)) # one hour
   return jobs_list
 
 
-def run(name, benchmarks, jobs, loops, regex, counters):
+def create_jobs(name, benchmarks, loops, regex, counters):
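+  """Collect the benchmark jobs for a given build name, without running them."""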
   jobs_list = []
   for loop in range(0, loops):
     for bm in benchmarks:
@@ -108,9 +109,11 @@
         jobs_list += _collect_bm_data(bm, 'counters', name, regex, loop,
                         loops)
   random.shuffle(jobs_list, random.SystemRandom().random)
-  jobset.run(jobs_list, maxjobs=jobs)
+  return jobs_list
 
 
 if __name__ == '__main__':
   args = _args()
-  run(args.name, args.benchmarks, args.jobs, args.loops, args.regex, args.counters)
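+  # collect the benchmark jobs for this run, then execute them with bounded parallelism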
+  jobs_list = create_jobs(args.name, args.benchmarks, args.loops,
+                          args.regex, args.counters)
+  jobset.run(jobs_list, maxjobs=args.jobs)