Merge pull request #13798 from apolcyn/min_max_rpc_version_constants

Define RPC protocol version constants
diff --git a/WORKSPACE b/WORKSPACE
index bf09aa7..adce809 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -1,127 +1,4 @@
-bind(
-    name = "nanopb",
-    actual = "//third_party/nanopb",
-)
+workspace(name = "com_github_grpc_grpc")
 
-bind(
-    name = "libssl",
-    actual = "@boringssl//:ssl",
-)
-
-bind(
-    name = "zlib",
-    actual = "@com_github_madler_zlib//:z",
-)
-
-bind(
-    name = "protobuf",
-    actual = "@com_google_protobuf//:protobuf",
-)
-
-bind(
-    name = "protobuf_clib",
-    actual = "@com_google_protobuf//:protoc_lib",
-)
-
-bind(
-    name = "protobuf_headers",
-    actual = "@com_google_protobuf//:protobuf_headers",
-)
-
-bind(
-    name = "protocol_compiler",
-    actual = "@com_google_protobuf//:protoc",
-)
-
-bind(
-    name = "cares",
-    actual = "@com_github_cares_cares//:ares",
-)
-
-bind(
-    name = "gtest",
-    actual = "@com_github_google_googletest//:gtest",
-)
-
-bind(
-    name = "gmock",
-    actual = "@com_github_google_googletest//:gmock",
-)
-
-bind(
-    name = "benchmark",
-    actual = "@com_github_google_benchmark//:benchmark",
-)
-
-bind(
-    name = "gflags",
-    actual = "@com_github_gflags_gflags//:gflags",
-)
-
-http_archive(
-    name = "boringssl",
-    # on the master-with-bazel branch
-    url = "https://boringssl.googlesource.com/boringssl/+archive/886e7d75368e3f4fab3f4d0d3584e4abfc557755.tar.gz",
-)
-
-new_http_archive(
-    name = "com_github_madler_zlib",
-    build_file = "third_party/zlib.BUILD",
-    strip_prefix = "zlib-cacf7f1d4e3d44d871b605da3b647f07d718623f",
-    url = "https://github.com/madler/zlib/archive/cacf7f1d4e3d44d871b605da3b647f07d718623f.tar.gz",
-)
-
-http_archive(
-    name = "com_google_protobuf",
-    strip_prefix = "protobuf-2761122b810fe8861004ae785cc3ab39f384d342",
-    url = "https://github.com/google/protobuf/archive/2761122b810fe8861004ae785cc3ab39f384d342.tar.gz",
-)
-
-new_http_archive(
-    name = "com_github_google_googletest",
-    build_file = "third_party/gtest.BUILD",
-    strip_prefix = "googletest-ec44c6c1675c25b9827aacd08c02433cccde7780",
-    url = "https://github.com/google/googletest/archive/ec44c6c1675c25b9827aacd08c02433cccde7780.tar.gz",
-)
-
-http_archive(
-    name = "com_github_gflags_gflags",
-    strip_prefix = "gflags-30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e",
-    url = "https://github.com/gflags/gflags/archive/30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e.tar.gz",
-)
-
-new_http_archive(
-    name = "com_github_google_benchmark",
-    build_file = "third_party/benchmark.BUILD",
-    strip_prefix = "benchmark-5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8",
-    url = "https://github.com/google/benchmark/archive/5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8.tar.gz",
-)
-
-new_local_repository(
-    name = "cares_local_files",
-    build_file = "third_party/cares/cares_local_files.BUILD",
-    path = "third_party/cares",
-)
-
-new_http_archive(
-    name = "com_github_cares_cares",
-    build_file = "third_party/cares/cares.BUILD",
-    strip_prefix = "c-ares-3be1924221e1326df520f8498d704a5c4c8d0cce",
-    url = "https://github.com/c-ares/c-ares/archive/3be1924221e1326df520f8498d704a5c4c8d0cce.tar.gz",
-)
-
-http_archive(
-    name = "com_google_absl",
-    strip_prefix = "abseil-cpp-cc4bed2d74f7c8717e31f9579214ab52a9c9c610",
-    url = "https://github.com/abseil/abseil-cpp/archive/cc4bed2d74f7c8717e31f9579214ab52a9c9c610.tar.gz",
-)
-
-http_archive(
-    name = "bazel_toolchains",
-    urls = [
-        "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
-        "https://github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
-    ],
-    strip_prefix = "bazel-toolchains-af4681c3d19f063f090222ec3d04108c4e0ca255",
-    sha256 = "d58bb2d6c8603f600d522b6104d6192a65339aa26cbba9f11ff5c4b36dedb928",
-)
+load("//bazel:grpc_deps.bzl", "grpc_deps")
+grpc_deps()
diff --git a/bazel/grpc_build_system.bzl b/bazel/grpc_build_system.bzl
index 234d129..d146ca9 100644
--- a/bazel/grpc_build_system.bzl
+++ b/bazel/grpc_build_system.bzl
@@ -26,6 +26,27 @@
 # The set of pollers to test against if a test exercises polling
 POLLERS = ['epollex', 'epollsig', 'epoll1', 'poll', 'poll-cv']
 
+def _get_external_deps(external_deps):
+  ret = []
+  for dep in external_deps:
+    if dep == "nanopb":
+      ret.append("//third_party/nanopb")
+    else:
+      ret.append("//external:" + dep)
+  return ret
+
+def _maybe_update_cc_library_hdrs(hdrs):
+  ret = []
+  hdrs_to_update = {
+      "third_party/objective_c/Cronet/bidirectional_stream_c.h": "//third_party:objective_c/Cronet/bidirectional_stream_c.h",
+  }
+  for h in hdrs:
+    if h in hdrs_to_update.keys():
+      ret.append(hdrs_to_update[h])
+    else:
+      ret.append(h)
+  return ret
+
 def grpc_cc_library(name, srcs = [], public_hdrs = [], hdrs = [],
                     external_deps = [], deps = [], standalone = False,
                     language = "C++", testonly = False, visibility = None,
@@ -40,8 +61,8 @@
                       "//conditions:default": [],}) +
               select({"//:remote_execution":  ["GRPC_PORT_ISOLATED_RUNTIME=1"],
                       "//conditions:default": [],}),
-    hdrs = hdrs + public_hdrs,
-    deps = deps + ["//external:" + dep for dep in external_deps],
+    hdrs = _maybe_update_cc_library_hdrs(hdrs + public_hdrs),
+    deps = deps + _get_external_deps(external_deps),
     copts = copts,
     visibility = visibility,
     testonly = testonly,
@@ -82,7 +103,7 @@
     'srcs': srcs,
     'args': args,
     'data': data,
-    'deps': deps + ["//external:" + dep for dep in external_deps],
+    'deps': deps + _get_external_deps(external_deps),
     'copts': copts,
     'linkopts': ["-pthread"],
   }
@@ -114,7 +135,7 @@
     data = data,
     testonly = testonly,
     linkshared = linkshared,
-    deps = deps + ["//external:" + dep for dep in external_deps],
+    deps = deps + _get_external_deps(external_deps),
     copts = copts,
     linkopts = ["-pthread"] + linkopts,
   )
diff --git a/bazel/grpc_deps.bzl b/bazel/grpc_deps.bzl
new file mode 100644
index 0000000..e465312
--- /dev/null
+++ b/bazel/grpc_deps.bzl
@@ -0,0 +1,129 @@
+"""Load dependencies needed to compile and test the grpc library as a 3rd-party consumer."""
+
+def grpc_deps():
+    """Loads dependencies needed to compile and test the grpc library."""
+    native.bind(
+        name = "libssl",
+        actual = "@boringssl//:ssl",
+    )
+
+    native.bind(
+        name = "zlib",
+        actual = "@com_github_madler_zlib//:z",
+    )
+
+    native.bind(
+        name = "protobuf",
+        actual = "@com_google_protobuf//:protobuf",
+    )
+
+    native.bind(
+        name = "protobuf_clib",
+        actual = "@com_google_protobuf//:protoc_lib",
+    )
+
+    native.bind(
+        name = "protobuf_headers",
+        actual = "@com_google_protobuf//:protobuf_headers",
+    )
+
+    native.bind(
+        name = "protocol_compiler",
+        actual = "@com_google_protobuf//:protoc",
+    )
+
+    native.bind(
+        name = "cares",
+        actual = "@com_github_cares_cares//:ares",
+    )
+
+    native.bind(
+        name = "gtest",
+        actual = "@com_github_google_googletest//:gtest",
+    )
+
+    native.bind(
+        name = "gmock",
+        actual = "@com_github_google_googletest//:gmock",
+    )
+
+    native.bind(
+        name = "benchmark",
+        actual = "@com_github_google_benchmark//:benchmark",
+    )
+
+    native.bind(
+        name = "gflags",
+        actual = "@com_github_gflags_gflags//:gflags",
+    )
+
+    if "boringssl" not in native.existing_rules():
+        native.http_archive(
+            name = "boringssl",
+            # on the master-with-bazel branch
+            url = "https://boringssl.googlesource.com/boringssl/+archive/886e7d75368e3f4fab3f4d0d3584e4abfc557755.tar.gz",
+        )
+
+    if "com_github_madler_zlib" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_madler_zlib",
+            build_file = "@com_github_grpc_grpc//third_party:zlib.BUILD",
+            strip_prefix = "zlib-cacf7f1d4e3d44d871b605da3b647f07d718623f",
+            url = "https://github.com/madler/zlib/archive/cacf7f1d4e3d44d871b605da3b647f07d718623f.tar.gz",
+        )
+
+    if "com_google_protobuf" not in native.existing_rules():
+        native.http_archive(
+            name = "com_google_protobuf",
+            strip_prefix = "protobuf-2761122b810fe8861004ae785cc3ab39f384d342",
+            url = "https://github.com/google/protobuf/archive/2761122b810fe8861004ae785cc3ab39f384d342.tar.gz",
+        )
+
+    if "com_github_google_googletest" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_google_googletest",
+            build_file = "@com_github_grpc_grpc//third_party:gtest.BUILD",
+            strip_prefix = "googletest-ec44c6c1675c25b9827aacd08c02433cccde7780",
+            url = "https://github.com/google/googletest/archive/ec44c6c1675c25b9827aacd08c02433cccde7780.tar.gz",
+        )
+
+    if "com_github_gflags_gflags" not in native.existing_rules():
+        native.http_archive(
+            name = "com_github_gflags_gflags",
+            strip_prefix = "gflags-30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e",
+            url = "https://github.com/gflags/gflags/archive/30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e.tar.gz",
+        )
+
+    if "com_github_google_benchmark" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_google_benchmark",
+            build_file = "@com_github_grpc_grpc//third_party:benchmark.BUILD",
+            strip_prefix = "benchmark-5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8",
+            url = "https://github.com/google/benchmark/archive/5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8.tar.gz",
+        )
+
+    if "com_github_cares_cares" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_cares_cares",
+            build_file = "@com_github_grpc_grpc//third_party:cares/cares.BUILD",
+            strip_prefix = "c-ares-3be1924221e1326df520f8498d704a5c4c8d0cce",
+            url = "https://github.com/c-ares/c-ares/archive/3be1924221e1326df520f8498d704a5c4c8d0cce.tar.gz",
+        )
+
+    if "com_google_absl" not in native.existing_rules():
+        native.http_archive(
+            name = "com_google_absl",
+            strip_prefix = "abseil-cpp-cc4bed2d74f7c8717e31f9579214ab52a9c9c610",
+            url = "https://github.com/abseil/abseil-cpp/archive/cc4bed2d74f7c8717e31f9579214ab52a9c9c610.tar.gz",
+        )
+
+    if "com_github_bazelbuild_bazeltoolchains" not in native.existing_rules():
+        native.http_archive(
+            name = "com_github_bazelbuild_bazeltoolchains",
+            strip_prefix = "bazel-toolchains-af4681c3d19f063f090222ec3d04108c4e0ca255",
+            urls = [
+                "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
+                "https://github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
+            ],
+            sha256 = "d58bb2d6c8603f600d522b6104d6192a65339aa26cbba9f11ff5c4b36dedb928",
+        )
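
The point of the new `grpc_deps()` macro is that Bazel consumers of gRPC can now pull in its transitive dependencies with a single call instead of copying the old `WORKSPACE` boilerplate. Because every archive is guarded by `native.existing_rules()`, a consumer can pin its own version of any dependency simply by declaring it before calling the macro. A sketch of such a consumer `WORKSPACE` (Starlark; the archive URLs and commits are placeholders, not versions pinned by this PR):

```python
workspace(name = "my_project")

# Placeholder commit; pin a real gRPC revision in practice.
http_archive(
    name = "com_github_grpc_grpc",
    strip_prefix = "grpc-<commit>",
    url = "https://github.com/grpc/grpc/archive/<commit>.tar.gz",
)

# Declared before grpc_deps(), so grpc_deps() skips its own protobuf:
# it only registers archives missing from native.existing_rules().
http_archive(
    name = "com_google_protobuf",
    strip_prefix = "protobuf-<commit>",
    url = "https://github.com/google/protobuf/archive/<commit>.tar.gz",
)

load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")

grpc_deps()
```
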
diff --git a/src/core/lib/iomgr/executor.cc b/src/core/lib/iomgr/executor.cc
index b7288d5..67a0412 100644
--- a/src/core/lib/iomgr/executor.cc
+++ b/src/core/lib/iomgr/executor.cc
@@ -242,7 +242,7 @@
         }
         continue;
       }
-      if (grpc_closure_list_empty(ts->elems)) {
+      if (grpc_closure_list_empty(ts->elems) && !ts->shutdown) {
         GRPC_STATS_INC_EXECUTOR_WAKEUP_INITIATED();
         gpr_cv_signal(&ts->cv);
       }
diff --git a/src/core/lib/iomgr/resource_quota.cc b/src/core/lib/iomgr/resource_quota.cc
index cabe28e..eaf2f5d 100644
--- a/src/core/lib/iomgr/resource_quota.cc
+++ b/src/core/lib/iomgr/resource_quota.cc
@@ -507,6 +507,7 @@
     gpr_log(GPR_DEBUG, "RU shutdown %p", ru);
   }
   grpc_resource_user* resource_user = (grpc_resource_user*)ru;
+  gpr_mu_lock(&resource_user->mu);
   GRPC_CLOSURE_SCHED(resource_user->reclaimers[0], GRPC_ERROR_CANCELLED);
   GRPC_CLOSURE_SCHED(resource_user->reclaimers[1], GRPC_ERROR_CANCELLED);
   resource_user->reclaimers[0] = nullptr;
@@ -516,6 +517,7 @@
   if (resource_user->allocating) {
     rq_step_sched(resource_user->resource_quota);
   }
+  gpr_mu_unlock(&resource_user->mu);
 }
 
 static void ru_destroy(void* ru, grpc_error* error) {
diff --git a/src/core/lib/iomgr/tcp_posix.cc b/src/core/lib/iomgr/tcp_posix.cc
index 155329d..816acf2 100644
--- a/src/core/lib/iomgr/tcp_posix.cc
+++ b/src/core/lib/iomgr/tcp_posix.cc
@@ -270,7 +270,11 @@
 
 static grpc_error* tcp_annotate_error(grpc_error* src_error, grpc_tcp* tcp) {
   return grpc_error_set_str(
-      grpc_error_set_int(src_error, GRPC_ERROR_INT_FD, tcp->fd),
+      grpc_error_set_int(
+          grpc_error_set_int(src_error, GRPC_ERROR_INT_FD, tcp->fd),
+          /* All TCP errors are marked with UNAVAILABLE so that the
+           * application may choose to retry. */
+          GRPC_ERROR_INT_GRPC_STATUS, GRPC_STATUS_UNAVAILABLE),
       GRPC_ERROR_STR_TARGET_ADDRESS,
       grpc_slice_from_copied_string(tcp->peer_string));
 }
@@ -565,9 +569,7 @@
         }
         return false;
       } else if (errno == EPIPE) {
-        *error = grpc_error_set_int(GRPC_OS_ERROR(errno, "sendmsg"),
-                                    GRPC_ERROR_INT_GRPC_STATUS,
-                                    GRPC_STATUS_UNAVAILABLE);
+        *error = tcp_annotate_error(GRPC_OS_ERROR(errno, "sendmsg"), tcp);
         grpc_slice_buffer_reset_and_unref_internal(tcp->outgoing_buffer);
         return true;
       } else {
diff --git a/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi b/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
index e3cad9a..53e06a1 100644
--- a/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
+++ b/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
@@ -26,15 +26,20 @@
     raise TypeError('Expected bytes, str, or unicode, not {}'.format(type(s)))
 
 
-cdef bytes _encode(str native_string_or_none):
-  if native_string_or_none is None:
+# TODO(https://github.com/grpc/grpc/issues/13782): It would be nice for us if
+# the type of metadata that we accept were exactly the same as the type of
+# metadata that we deliver to our users (so "str" for this function's
+# parameter rather than "object"), but would it be nice for our users? Right
+# now we haven't yet heard from enough users to know one way or another.
+cdef bytes _encode(object string_or_none):
+  if string_or_none is None:
     return b''
-  elif isinstance(native_string_or_none, (bytes,)):
-    return <bytes>native_string_or_none
-  elif isinstance(native_string_or_none, (unicode,)):
-    return native_string_or_none.encode('ascii')
+  elif isinstance(string_or_none, (bytes,)):
+    return <bytes>string_or_none
+  elif isinstance(string_or_none, (unicode,)):
+    return string_or_none.encode('ascii')
   else:
-    raise TypeError('Expected str, not {}'.format(type(native_string_or_none)))
+    raise TypeError('Expected str, not {}'.format(type(string_or_none)))
 
 
 cdef str _decode(bytes bytestring):
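
For readers who don't speak Cython: `_encode` normalizes whatever the caller passed (None, `bytes`, or text) into the `bytes` the core library needs. A plain Python 2 rendering of the same logic (a sketch; `unicode` is the Python 2 text type, which Cython also exposes under Python 3):

```python
def encode(string_or_none):
    # None becomes the empty byte string.
    if string_or_none is None:
        return b''
    # bytes pass through untouched.
    elif isinstance(string_or_none, bytes):
        return string_or_none
    # Text is encoded as ASCII; non-ASCII input raises UnicodeEncodeError.
    elif isinstance(string_or_none, unicode):  # Python 2 text type
        return string_or_none.encode('ascii')
    else:
        raise TypeError('Expected str, not {}'.format(type(string_or_none)))

assert encode(None) == b''
assert encode(b'key-bin') == b'key-bin'
assert encode(u'key') == b'key'
```
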
diff --git a/src/python/grpcio/grpc/_server.py b/src/python/grpcio/grpc/_server.py
index 8857bd3..02d3af8 100644
--- a/src/python/grpcio/grpc/_server.py
+++ b/src/python/grpcio/grpc/_server.py
@@ -634,7 +634,7 @@
     # pylint: disable=too-many-arguments
     def __init__(self, completion_queue, server, generic_handlers,
                  interceptor_pipeline, thread_pool, maximum_concurrent_rpcs):
-        self.lock = threading.Lock()
+        self.lock = threading.RLock()
         self.completion_queue = completion_queue
         self.server = server
         self.generic_handlers = list(generic_handlers)
@@ -747,22 +747,12 @@
             state.shutdown_events.append(shutdown_event)
             if grace is None:
                 state.server.cancel_all_calls()
-                # TODO(https://github.com/grpc/grpc/issues/6597): delete this loop.
-                for rpc_state in state.rpc_states:
-                    with rpc_state.condition:
-                        rpc_state.client = _CANCELLED
-                        rpc_state.condition.notify_all()
             else:
 
                 def cancel_all_calls_after_grace():
                     shutdown_event.wait(timeout=grace)
                     with state.lock:
                         state.server.cancel_all_calls()
-                        # TODO(https://github.com/grpc/grpc/issues/6597): delete this loop.
-                        for rpc_state in state.rpc_states:
-                            with rpc_state.condition:
-                                rpc_state.client = _CANCELLED
-                                rpc_state.condition.notify_all()
 
                 thread = threading.Thread(target=cancel_all_calls_after_grace)
                 thread.start()
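
The switch from `Lock` to `RLock` means `state.lock` can be re-acquired by the thread that already holds it. This hunk alone doesn't show the re-entrant call chain in `_server.py`, so the following is only a minimal illustration of the primitive's behavior, with hypothetical function names:

```python
import threading

lock = threading.RLock()  # with threading.Lock() this program would hang

def stop_server():
    with lock:
        cancel_all_calls()  # re-enters the lock on the same thread

def cancel_all_calls():
    with lock:  # re-acquisition: legal for RLock, deadlock for Lock
        print('calls cancelled')

stop_server()
```
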
diff --git a/src/python/grpcio_tests/tests/unit/_metadata_test.py b/src/python/grpcio_tests/tests/unit/_metadata_test.py
index 557d527..0669486 100644
--- a/src/python/grpcio_tests/tests/unit/_metadata_test.py
+++ b/src/python/grpcio_tests/tests/unit/_metadata_test.py
@@ -34,16 +34,19 @@
 _STREAM_UNARY = '/test/StreamUnary'
 _STREAM_STREAM = '/test/StreamStream'
 
-_CLIENT_METADATA = (('client-md-key', 'client-md-key'),
-                    ('client-md-key-bin', b'\x00\x01'))
+_INVOCATION_METADATA = ((b'invocation-md-key', u'invocation-md-value',),
+                        (u'invocation-md-key-bin', b'\x00\x01',),)
+_EXPECTED_INVOCATION_METADATA = (('invocation-md-key', 'invocation-md-value',),
+                                 ('invocation-md-key-bin', b'\x00\x01',),)
 
-_SERVER_INITIAL_METADATA = (
-    ('server-initial-md-key', 'server-initial-md-value'),
-    ('server-initial-md-key-bin', b'\x00\x02'))
+_INITIAL_METADATA = ((b'initial-md-key', u'initial-md-value'),
+                     (u'initial-md-key-bin', b'\x00\x02'))
+_EXPECTED_INITIAL_METADATA = (('initial-md-key', 'initial-md-value',),
+                              ('initial-md-key-bin', b'\x00\x02',),)
 
-_SERVER_TRAILING_METADATA = (
-    ('server-trailing-md-key', 'server-trailing-md-value'),
-    ('server-trailing-md-key-bin', b'\x00\x03'))
+_TRAILING_METADATA = (('server-trailing-md-key', 'server-trailing-md-value',),
+                      ('server-trailing-md-key-bin', b'\x00\x03',),)
+_EXPECTED_TRAILING_METADATA = _TRAILING_METADATA
 
 
 def user_agent(metadata):
@@ -56,7 +59,8 @@
 def validate_client_metadata(test, servicer_context):
     test.assertTrue(
         test_common.metadata_transmitted(
-            _CLIENT_METADATA, servicer_context.invocation_metadata()))
+            _EXPECTED_INVOCATION_METADATA,
+            servicer_context.invocation_metadata()))
     test.assertTrue(
         user_agent(servicer_context.invocation_metadata())
         .startswith('primary-agent ' + _channel._USER_AGENT))
@@ -67,23 +71,23 @@
 
 def handle_unary_unary(test, request, servicer_context):
     validate_client_metadata(test, servicer_context)
-    servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
-    servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
+    servicer_context.send_initial_metadata(_INITIAL_METADATA)
+    servicer_context.set_trailing_metadata(_TRAILING_METADATA)
     return _RESPONSE
 
 
 def handle_unary_stream(test, request, servicer_context):
     validate_client_metadata(test, servicer_context)
-    servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
-    servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
+    servicer_context.send_initial_metadata(_INITIAL_METADATA)
+    servicer_context.set_trailing_metadata(_TRAILING_METADATA)
     for _ in range(test_constants.STREAM_LENGTH):
         yield _RESPONSE
 
 
 def handle_stream_unary(test, request_iterator, servicer_context):
     validate_client_metadata(test, servicer_context)
-    servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
-    servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
+    servicer_context.send_initial_metadata(_INITIAL_METADATA)
+    servicer_context.set_trailing_metadata(_TRAILING_METADATA)
     # TODO(issue:#6891) We should be able to remove this loop
     for request in request_iterator:
         pass
@@ -92,8 +96,8 @@
 
 def handle_stream_stream(test, request_iterator, servicer_context):
     validate_client_metadata(test, servicer_context)
-    servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
-    servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
+    servicer_context.send_initial_metadata(_INITIAL_METADATA)
+    servicer_context.set_trailing_metadata(_TRAILING_METADATA)
     # TODO(issue:#6891) We should be able to remove this loop,
     # and replace with return; yield
     for request in request_iterator:
@@ -156,50 +160,50 @@
     def testUnaryUnary(self):
         multi_callable = self._channel.unary_unary(_UNARY_UNARY)
         unused_response, call = multi_callable.with_call(
-            _REQUEST, metadata=_CLIENT_METADATA)
+            _REQUEST, metadata=_INVOCATION_METADATA)
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_INITIAL_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA,
                                              call.initial_metadata()))
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_TRAILING_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA,
                                              call.trailing_metadata()))
 
     def testUnaryStream(self):
         multi_callable = self._channel.unary_stream(_UNARY_STREAM)
-        call = multi_callable(_REQUEST, metadata=_CLIENT_METADATA)
+        call = multi_callable(_REQUEST, metadata=_INVOCATION_METADATA)
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_INITIAL_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA,
                                              call.initial_metadata()))
         for _ in call:
             pass
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_TRAILING_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA,
                                              call.trailing_metadata()))
 
     def testStreamUnary(self):
         multi_callable = self._channel.stream_unary(_STREAM_UNARY)
         unused_response, call = multi_callable.with_call(
             iter([_REQUEST] * test_constants.STREAM_LENGTH),
-            metadata=_CLIENT_METADATA)
+            metadata=_INVOCATION_METADATA)
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_INITIAL_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA,
                                              call.initial_metadata()))
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_TRAILING_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA,
                                              call.trailing_metadata()))
 
     def testStreamStream(self):
         multi_callable = self._channel.stream_stream(_STREAM_STREAM)
         call = multi_callable(
             iter([_REQUEST] * test_constants.STREAM_LENGTH),
-            metadata=_CLIENT_METADATA)
+            metadata=_INVOCATION_METADATA)
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_INITIAL_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA,
                                              call.initial_metadata()))
         for _ in call:
             pass
         self.assertTrue(
-            test_common.metadata_transmitted(_SERVER_TRAILING_METADATA,
+            test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA,
                                              call.trailing_metadata()))
 
 
diff --git a/src/ruby/README.md b/src/ruby/README.md
index 5c7dae6..f6fce3e 100644
--- a/src/ruby/README.md
+++ b/src/ruby/README.md
@@ -22,6 +22,12 @@
 ---------------------
 - Clone this repository
 
+- Init submodules
+
+```sh
+git submodule update --init
+```
+
 - Install Ruby 2.x. Consider doing this with [RVM](http://rvm.io); it's a nice way of controlling
   the exact ruby version that's used.
 ```sh
diff --git a/src/ruby/end2end/channel_closing_client.rb b/src/ruby/end2end/channel_closing_client.rb
index 8f6888c..62c7421 100755
--- a/src/ruby/end2end/channel_closing_client.rb
+++ b/src/ruby/end2end/channel_closing_client.rb
@@ -44,7 +44,7 @@
   ch = GRPC::Core::Channel.new("localhost:#{server_port}", {},
                                :this_channel_is_insecure)
 
-  srv = GRPC::RpcServer.new
+  srv = new_rpc_server_for_testing
   thd = Thread.new do
     srv.add_http2_port("0.0.0.0:#{client_control_port}", :this_port_is_insecure)
     srv.handle(ChannelClosingClientController.new(ch))
diff --git a/src/ruby/end2end/end2end_common.rb b/src/ruby/end2end/end2end_common.rb
index 790fc23..ffbaa19 100755
--- a/src/ruby/end2end/end2end_common.rb
+++ b/src/ruby/end2end/end2end_common.rb
@@ -29,6 +29,9 @@
 require 'thread'
 require 'timeout'
 require 'English' # see https://github.com/bbatsov/rubocop/issues/1747
+require_relative '../spec/support/helpers'
+
+include GRPC::Spec::Helpers
 
 # GreeterServer is simple server that implements the Helloworld Greeter server.
 class EchoServerImpl < Echo::EchoServer::Service
@@ -46,7 +49,7 @@
   end
 
   def run
-    @srv = GRPC::RpcServer.new(@rpc_server_args)
+    @srv = new_rpc_server_for_testing(@rpc_server_args)
     port = @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
     @srv.handle(@service_impl)
 
diff --git a/src/ruby/end2end/sig_handling_client.rb b/src/ruby/end2end/sig_handling_client.rb
index 129ad7c..6cd289a 100755
--- a/src/ruby/end2end/sig_handling_client.rb
+++ b/src/ruby/end2end/sig_handling_client.rb
@@ -66,7 +66,7 @@
 
   # The "shutdown" RPC should end very quickly.
   # Allow a few seconds to be safe.
-  srv = GRPC::RpcServer.new(poll_period: 3)
+  srv = new_rpc_server_for_testing(poll_period: 3)
   srv.add_http2_port("0.0.0.0:#{client_control_port}",
                      :this_port_is_insecure)
   stub = Echo::EchoServer::Stub.new("localhost:#{server_port}",
diff --git a/src/ruby/spec/channel_connection_spec.rb b/src/ruby/spec/channel_connection_spec.rb
index ce3e3b1..5c31f41 100644
--- a/src/ruby/spec/channel_connection_spec.rb
+++ b/src/ruby/spec/channel_connection_spec.rb
@@ -16,9 +16,10 @@
 
 include Timeout
 include GRPC::Core
+include GRPC::Spec::Helpers
 
 def start_server(port = 0)
-  @srv = GRPC::RpcServer.new(pool_size: 1)
+  @srv = new_rpc_server_for_testing(pool_size: 1)
   server_port = @srv.add_http2_port("localhost:#{port}", :this_port_is_insecure)
   @srv.handle(EchoService)
   @server_thd = Thread.new { @srv.run }
diff --git a/src/ruby/spec/client_auth_spec.rb b/src/ruby/spec/client_auth_spec.rb
index 79c9192..b955ad2 100644
--- a/src/ruby/spec/client_auth_spec.rb
+++ b/src/ruby/spec/client_auth_spec.rb
@@ -95,7 +95,7 @@
     server_opts = {
       poll_period: 1
     }
-    @srv = RpcServer.new(**server_opts)
+    @srv = new_rpc_server_for_testing(**server_opts)
     port = @srv.add_http2_port('0.0.0.0:0', create_server_creds)
     @srv.handle(SslTestService)
     @srv_thd = Thread.new { @srv.run }
diff --git a/src/ruby/spec/client_server_spec.rb b/src/ruby/spec/client_server_spec.rb
index adab8c9..14ad369 100644
--- a/src/ruby/spec/client_server_spec.rb
+++ b/src/ruby/spec/client_server_spec.rb
@@ -542,7 +542,7 @@
 describe 'the http client/server' do
   before(:example) do
     server_host = '0.0.0.0:0'
-    @server = GRPC::Core::Server.new(nil)
+    @server = new_core_server_for_testing(nil)
     server_port = @server.add_http2_port(server_host, :this_port_is_insecure)
     @server.start
     @ch = Channel.new("0.0.0.0:#{server_port}", nil, :this_channel_is_insecure)
@@ -574,7 +574,7 @@
     server_host = '0.0.0.0:0'
     server_creds = GRPC::Core::ServerCredentials.new(
       nil, [{ private_key: certs[1], cert_chain: certs[2] }], false)
-    @server = GRPC::Core::Server.new(nil)
+    @server = new_core_server_for_testing(nil)
     server_port = @server.add_http2_port(server_host, server_creds)
     @server.start
     args = { Channel::SSL_TARGET => 'foo.test.google.fr' }
diff --git a/src/ruby/spec/generic/active_call_spec.rb b/src/ruby/spec/generic/active_call_spec.rb
index 120acc3..135d1f2 100644
--- a/src/ruby/spec/generic/active_call_spec.rb
+++ b/src/ruby/spec/generic/active_call_spec.rb
@@ -40,7 +40,7 @@
   before(:each) do
     @pass_through = proc { |x| x }
     host = '0.0.0.0:0'
-    @server = GRPC::Core::Server.new(nil)
+    @server = new_core_server_for_testing(nil)
     server_port = @server.add_http2_port(host, :this_port_is_insecure)
     @server.start
     @ch = GRPC::Core::Channel.new("0.0.0.0:#{server_port}", nil,
diff --git a/src/ruby/spec/generic/client_stub_spec.rb b/src/ruby/spec/generic/client_stub_spec.rb
index 9539e56..79eeca9 100644
--- a/src/ruby/spec/generic/client_stub_spec.rb
+++ b/src/ruby/spec/generic/client_stub_spec.rb
@@ -888,12 +888,12 @@
     secure_credentials = GRPC::Core::ServerCredentials.new(
       nil, [{ private_key: certs[1], cert_chain: certs[2] }], false)
 
-    @server = GRPC::Core::Server.new(nil)
+    @server = new_core_server_for_testing(nil)
     @server.add_http2_port('0.0.0.0:0', secure_credentials)
   end
 
   def create_test_server
-    @server = GRPC::Core::Server.new(nil)
+    @server = new_core_server_for_testing(nil)
     @server.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
   end
 
diff --git a/src/ruby/spec/generic/interceptor_registry_spec.rb b/src/ruby/spec/generic/interceptor_registry_spec.rb
index f93f5ce..eb75d1e 100644
--- a/src/ruby/spec/generic/interceptor_registry_spec.rb
+++ b/src/ruby/spec/generic/interceptor_registry_spec.rb
@@ -14,7 +14,7 @@
 require 'spec_helper'
 
 describe GRPC::InterceptorRegistry do
-  let(:server) { RpcServer.new }
+  let(:server) { new_rpc_server_for_testing }
   let(:interceptor) { TestServerInterceptor.new }
   let(:interceptors) { [interceptor] }
   let(:registry) { described_class.new(interceptors) }
diff --git a/src/ruby/spec/generic/rpc_server_spec.rb b/src/ruby/spec/generic/rpc_server_spec.rb
index 05059fb..e072d0c 100644
--- a/src/ruby/spec/generic/rpc_server_spec.rb
+++ b/src/ruby/spec/generic/rpc_server_spec.rb
@@ -172,7 +172,7 @@
     it 'can be created with just some args' do
       opts = { server_args: { a_channel_arg: 'an_arg' } }
       blk = proc do
-        RpcServer.new(**opts)
+        new_rpc_server_for_testing(**opts)
       end
       expect(&blk).not_to raise_error
     end
@@ -183,7 +183,7 @@
           server_args: { a_channel_arg: 'an_arg' },
           creds: Object.new
         }
-        RpcServer.new(**opts)
+        new_rpc_server_for_testing(**opts)
       end
       expect(&blk).to raise_error
     end
@@ -192,7 +192,7 @@
   describe '#stopped?' do
     before(:each) do
       opts = { server_args: { a_channel_arg: 'an_arg' }, poll_period: 1.5 }
-      @srv = RpcServer.new(**opts)
+      @srv = new_rpc_server_for_testing(**opts)
       @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
     end
 
@@ -224,7 +224,7 @@
       opts = {
         server_args: { a_channel_arg: 'an_arg' }
       }
-      r = RpcServer.new(**opts)
+      r = new_rpc_server_for_testing(**opts)
       expect(r.running?).to be(false)
     end
 
@@ -233,7 +233,7 @@
         server_args: { a_channel_arg: 'an_arg' },
         poll_period: 2
       }
-      r = RpcServer.new(**opts)
+      r = new_rpc_server_for_testing(**opts)
       r.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
       expect { r.run }.to raise_error(RuntimeError)
     end
@@ -243,7 +243,7 @@
         server_args: { a_channel_arg: 'an_arg' },
         poll_period: 2.5
       }
-      r = RpcServer.new(**opts)
+      r = new_rpc_server_for_testing(**opts)
       r.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
       r.handle(EchoService)
       t = Thread.new { r.run }
@@ -257,7 +257,7 @@
   describe '#handle' do
     before(:each) do
       @opts = { server_args: { a_channel_arg: 'an_arg' }, poll_period: 1 }
-      @srv = RpcServer.new(**@opts)
+      @srv = new_rpc_server_for_testing(**@opts)
       @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
     end
 
@@ -303,7 +303,7 @@
         server_opts = {
           poll_period: 1
         }
-        @srv = RpcServer.new(**server_opts)
+        @srv = new_rpc_server_for_testing(**server_opts)
         server_port = @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         @host = "localhost:#{server_port}"
         @ch = GRPC::Core::Channel.new(@host, nil, :this_channel_is_insecure)
@@ -474,7 +474,7 @@
           poll_period: 1,
           max_waiting_requests: 1
         }
-        alt_srv = RpcServer.new(**opts)
+        alt_srv = new_rpc_server_for_testing(**opts)
         alt_srv.handle(SlowService)
         alt_port = alt_srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         alt_host = "0.0.0.0:#{alt_port}"
@@ -538,7 +538,7 @@
           poll_period: 1,
           connect_md_proc: test_md_proc
         }
-        @srv = RpcServer.new(**server_opts)
+        @srv = new_rpc_server_for_testing(**server_opts)
         alt_port = @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         @alt_host = "0.0.0.0:#{alt_port}"
       end
@@ -573,7 +573,7 @@
         server_opts = {
           poll_period: 1
         }
-        @srv = RpcServer.new(**server_opts)
+        @srv = new_rpc_server_for_testing(**server_opts)
         alt_port = @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         @alt_host = "0.0.0.0:#{alt_port}"
       end
@@ -624,7 +624,7 @@
         server_opts = {
           poll_period: 1
         }
-        @srv = RpcServer.new(**server_opts)
+        @srv = new_rpc_server_for_testing(**server_opts)
         alt_port = @srv.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         @alt_host = "0.0.0.0:#{alt_port}"
 
diff --git a/src/ruby/spec/google_rpc_status_utils_spec.rb b/src/ruby/spec/google_rpc_status_utils_spec.rb
index 6f2a06b..3263589 100644
--- a/src/ruby/spec/google_rpc_status_utils_spec.rb
+++ b/src/ruby/spec/google_rpc_status_utils_spec.rb
@@ -19,6 +19,7 @@
 require 'google/protobuf/well_known_types'
 
 include GRPC::Core
+include GRPC::Spec::Helpers
 
 describe 'conversion from a status struct to a google protobuf status' do
   it 'fails if the input is not a status struct' do
@@ -150,7 +151,7 @@
 
 describe 'receiving a google rpc status from a remote endpoint' do
   def start_server(encoded_rpc_status)
-    @srv = GRPC::RpcServer.new(pool_size: 1)
+    @srv = new_rpc_server_for_testing(pool_size: 1)
     @server_port = @srv.add_http2_port('localhost:0',
                                        :this_port_is_insecure)
     @srv.handle(GoogleRpcStatusTestService.new(encoded_rpc_status))
@@ -238,7 +239,7 @@
 
 describe "when the endpoint doesn't send grpc-status-details-bin" do
   def start_server
-    @srv = GRPC::RpcServer.new(pool_size: 1)
+    @srv = new_rpc_server_for_testing(pool_size: 1)
     @server_port = @srv.add_http2_port('localhost:0',
                                        :this_port_is_insecure)
     @srv.handle(NoStatusDetailsBinTestService)
diff --git a/src/ruby/spec/pb/health/checker_spec.rb b/src/ruby/spec/pb/health/checker_spec.rb
index c79ccfd..58a6023 100644
--- a/src/ruby/spec/pb/health/checker_spec.rb
+++ b/src/ruby/spec/pb/health/checker_spec.rb
@@ -192,7 +192,7 @@
       server_opts = {
         poll_period: 1
       }
-      @srv = RpcServer.new(**server_opts)
+      @srv = new_rpc_server_for_testing(**server_opts)
       server_port = @srv.add_http2_port(server_host, :this_port_is_insecure)
       @host = "localhost:#{server_port}"
       @ch = GRPC::Core::Channel.new(@host, nil, :this_channel_is_insecure)
diff --git a/src/ruby/spec/server_spec.rb b/src/ruby/spec/server_spec.rb
index c0a5957..a0d27b6 100644
--- a/src/ruby/spec/server_spec.rb
+++ b/src/ruby/spec/server_spec.rb
@@ -30,12 +30,12 @@
 
   describe '#start' do
     it 'runs without failing' do
-      blk = proc { Server.new(nil).start }
+      blk = proc { new_core_server_for_testing(nil).start }
       expect(&blk).to_not raise_error
     end
 
     it 'fails if the server is closed' do
-      s = Server.new(nil)
+      s = new_core_server_for_testing(nil)
       s.close
       expect { s.start }.to raise_error(RuntimeError)
     end
@@ -85,7 +85,7 @@
     describe 'for insecure servers' do
       it 'runs without failing' do
         blk = proc do
-          s = Server.new(nil)
+          s = new_core_server_for_testing(nil)
           s.add_http2_port('localhost:0', :this_port_is_insecure)
           s.close
         end
@@ -93,7 +93,7 @@
       end
 
       it 'fails if the server is closed' do
-        s = Server.new(nil)
+        s = new_core_server_for_testing(nil)
         s.close
         blk = proc do
           s.add_http2_port('localhost:0', :this_port_is_insecure)
@@ -106,7 +106,7 @@
       let(:cert) { create_test_cert }
       it 'runs without failing' do
         blk = proc do
-          s = Server.new(nil)
+          s = new_core_server_for_testing(nil)
           s.add_http2_port('localhost:0', cert)
           s.close
         end
@@ -114,7 +114,7 @@
       end
 
       it 'fails if the server is closed' do
-        s = Server.new(nil)
+        s = new_core_server_for_testing(nil)
         s.close
         blk = proc { s.add_http2_port('localhost:0', cert) }
         expect(&blk).to raise_error(RuntimeError)
@@ -124,7 +124,7 @@
 
   shared_examples '#new' do
     it 'takes nil channel args' do
-      expect { Server.new(nil) }.to_not raise_error
+      expect { new_core_server_for_testing(nil) }.to_not raise_error
     end
 
     it 'does not take a hash with bad keys as channel args' do
@@ -175,14 +175,14 @@
 
   describe '#new with an insecure channel' do
     def construct_with_args(a)
-      proc { Server.new(a) }
+      proc { new_core_server_for_testing(a) }
     end
 
     it_behaves_like '#new'
   end
 
   def start_a_server
-    s = Server.new(nil)
+    s = new_core_server_for_testing(nil)
     s.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
     s.start
     s
diff --git a/src/ruby/spec/support/helpers.rb b/src/ruby/spec/support/helpers.rb
index 65fffff..29028df 100644
--- a/src/ruby/spec/support/helpers.rb
+++ b/src/ruby/spec/support/helpers.rb
@@ -31,7 +31,7 @@
       #
       def build_rpc_server(server_opts: {},
                            client_opts: {})
-        @server = RpcServer.new({ poll_period: 1 }.merge(server_opts))
+        @server = new_rpc_server_for_testing({ poll_period: 1 }.merge(server_opts))
         @port = @server.add_http2_port('0.0.0.0:0', :this_port_is_insecure)
         @host = "0.0.0.0:#{@port}"
         @client_opts = client_opts
@@ -68,6 +68,40 @@
         opts ||= @client_opts
         klass.new(host, :this_channel_is_insecure, **opts)
       end
+
+      ##
+      # Build an RPCServer for use in tests. Adds args
+      # that are useful for all tests.
+      #
+      # @param [Hash] server_opts
+      #
+      def new_rpc_server_for_testing(server_opts = {})
+        server_opts[:server_args] ||= {}
+        update_server_args_hash(server_opts[:server_args])
+        RpcServer.new(**server_opts)
+      end
+
+      ##
+      # Build a GRPC::Core::Server for use in tests. Adds args
+      # that are useful for all tests.
+      #
+      # @param [Hash] server_args
+      #
+      def new_core_server_for_testing(server_args)
+        server_args.nil? && server_args = {}
+        update_server_args_hash(server_args)
+        GRPC::Core::Server.new(server_args)
+      end
+
+      def update_server_args_hash(server_args)
+        so_reuseport_arg = 'grpc.so_reuseport'
+        unless server_args[so_reuseport_arg].nil?
+          fail 'Unexpected. grpc.so_reuseport already set.'
+        end
+        # Run tests without so_reuseport to eliminate the chance of
+        # cross-talk.
+        server_args[so_reuseport_arg] = 0
+      end
     end
   end
 end
diff --git a/test/cpp/end2end/BUILD b/test/cpp/end2end/BUILD
index 265f754..fa77c30 100644
--- a/test/cpp/end2end/BUILD
+++ b/test/cpp/end2end/BUILD
@@ -76,6 +76,7 @@
 
 grpc_cc_binary(
     name = "client_crash_test_server",
+    testonly = True,
     srcs = ["client_crash_test_server.cc"],
     deps = [
         "//:gpr",
@@ -311,6 +312,7 @@
 
 grpc_cc_binary(
     name = "server_crash_test_client",
+    testonly = True,
     srcs = ["server_crash_test_client.cc"],
     deps = [
         "//:gpr",
diff --git a/test/cpp/util/cli_credentials.cc b/test/cpp/util/cli_credentials.cc
index f1f43f8..aa4eafb 100644
--- a/test/cpp/util/cli_credentials.cc
+++ b/test/cpp/util/cli_credentials.cc
@@ -22,27 +22,43 @@
 
 DEFINE_bool(enable_ssl, false, "Whether to use ssl/tls.");
 DEFINE_bool(use_auth, false, "Whether to create default google credentials.");
+DEFINE_string(
+    access_token, "",
+    "The access token that will be sent to the server to authenticate RPCs.");
 
 namespace grpc {
 namespace testing {
 
 std::shared_ptr<grpc::ChannelCredentials> CliCredentials::GetCredentials()
     const {
-  if (!FLAGS_enable_ssl) {
-    return grpc::InsecureChannelCredentials();
-  } else {
+  if (!FLAGS_access_token.empty()) {
     if (FLAGS_use_auth) {
-      return grpc::GoogleDefaultCredentials();
-    } else {
-      return grpc::SslCredentials(grpc::SslCredentialsOptions());
+      fprintf(stderr,
+              "warning: use_auth is ignored when access_token is provided.\n");
     }
+
+    return grpc::CompositeChannelCredentials(
+        grpc::SslCredentials(grpc::SslCredentialsOptions()),
+        grpc::AccessTokenCredentials(FLAGS_access_token));
   }
+
+  if (FLAGS_use_auth) {
+    return grpc::GoogleDefaultCredentials();
+  }
+
+  if (FLAGS_enable_ssl) {
+    return grpc::SslCredentials(grpc::SslCredentialsOptions());
+  }
+
+  return grpc::InsecureChannelCredentials();
 }
 
 const grpc::string CliCredentials::GetCredentialUsage() const {
   return "    --enable_ssl             ; Set whether to use tls\n"
          "    --use_auth               ; Set whether to create default google"
-         " credentials\n";
+         " credentials\n"
+         "    --access_token           ; Set the access token in metadata,"
+         " overrides --use_auth\n";
 }
 }  // namespace testing
 }  // namespace grpc
diff --git a/test/distrib/ruby/distribtest.gemspec b/test/distrib/ruby/distribtest.gemspec
index d72892f..f11f521 100644
--- a/test/distrib/ruby/distribtest.gemspec
+++ b/test/distrib/ruby/distribtest.gemspec
@@ -14,6 +14,8 @@
   s.platform      = Gem::Platform::RUBY
 
   s.add_dependency 'grpc', '>=0'
+  s.add_dependency 'public_suffix', '< 3.0'
+  s.add_dependency 'jwt', '< 2.0'
 
   s.add_development_dependency 'bundler', '~> 1.7'
 end
diff --git a/test/distrib/ruby/run_distrib_test.sh b/test/distrib/ruby/run_distrib_test.sh
index 0c214e3..d74f4cd 100755
--- a/test/distrib/ruby/run_distrib_test.sh
+++ b/test/distrib/ruby/run_distrib_test.sh
@@ -17,10 +17,16 @@
 
 cd $(dirname $0)
 
+ARCH=$1
+PLATFORM=$2
 # Create an indexed local gem source with gRPC gems to test
 GEM_SOURCE=../../../gem_source
 mkdir -p ${GEM_SOURCE}/gems
-cp -r $EXTERNAL_GIT_ROOT/input_artifacts/*.gem ${GEM_SOURCE}/gems
+cp $EXTERNAL_GIT_ROOT/input_artifacts/grpc-*$ARCH-$PLATFORM.gem ${GEM_SOURCE}/gems
+if [[ "$(ls ${GEM_SOURCE}/gems | grep grpc | wc -l)" != 1 ]]; then
+  echo "Sanity check failed: expected exactly one grpc gem in the gem source directory."
+  exit 1
+fi;
 gem install builder
 gem generate_index --directory ${GEM_SOURCE}
 
diff --git a/third_party/BUILD b/third_party/BUILD
new file mode 100644
index 0000000..dea1229
--- /dev/null
+++ b/third_party/BUILD
@@ -0,0 +1,6 @@
+exports_files([
+    "benchmark.BUILD",
+    "gtest.BUILD",
+    "objective_c/Cronet/bidirectional_stream_c.h",
+    "zlib.BUILD",
+])
diff --git a/third_party/cares/BUILD b/third_party/cares/BUILD
new file mode 100644
index 0000000..ad27d93
--- /dev/null
+++ b/third_party/cares/BUILD
@@ -0,0 +1,9 @@
+exports_files([
+    "ares_build.h",
+    "cares.BUILD",
+    "config_android/ares_config.h",
+    "config_darwin/ares_config.h",
+    "config_freebsd/ares_config.h",
+    "config_linux/ares_config.h",
+    "config_openbsd/ares_config.h",
+])
diff --git a/third_party/cares/cares.BUILD b/third_party/cares/cares.BUILD
index 85ca506..3ac0287 100644
--- a/third_party/cares/cares.BUILD
+++ b/third_party/cares/cares.BUILD
@@ -35,33 +35,27 @@
 )
 
 genrule(
-    name = "ares_build",
-    srcs = ["@cares_local_files//:ares_build_h"],
+    name = "ares_build_h",
+    srcs = ["@com_github_grpc_grpc//third_party/cares:ares_build.h"],
     outs = ["ares_build.h"],
-    cmd = "cat $(location @cares_local_files//:ares_build_h) > $@",
+    cmd = "cat $< > $@",
 )
 
-# cc_library(
-#     name = "ares_build_h",
-#     hdrs = ["ares_build.h"],
-#     data = [":ares_build"],
-#     includes = ["."],
-# )
-
 genrule(
-    name = "ares_config",
-    srcs = ["@cares_local_files//:ares_config_h"],
+    name = "ares_config_h",
+    srcs = select({
+        ":ios_x86_64": ["@com_github_grpc_grpc//third_party/cares:config_darwin/ares_config.h"],
+        ":ios_armv7": ["@com_github_grpc_grpc//third_party/cares:config_darwin/ares_config.h"],
+        ":ios_armv7s": ["@com_github_grpc_grpc//third_party/cares:config_darwin/ares_config.h"],
+        ":ios_arm64": ["@com_github_grpc_grpc//third_party/cares:config_darwin/ares_config.h"],
+        ":darwin": ["@com_github_grpc_grpc//third_party/cares:config_darwin/ares_config.h"],
+        ":android": ["@com_github_grpc_grpc//third_party/cares:config_android/ares_config.h"],
+        "//conditions:default": ["@com_github_grpc_grpc//third_party/cares:config_linux/ares_config.h"],
+    }),
     outs = ["ares_config.h"],
-    cmd = "cat $(location @cares_local_files//:ares_config_h) > $@",
+    cmd = "cat $< > $@",
 )
 
-# cc_library(
-#     name = "ares_config_h",
-#     hdrs = ["ares_config.h"],
-#     data = [":ares_config"],
-#     includes = ["."],
-# )
-
 cc_library(
     name = "ares",
     srcs = [
@@ -147,10 +141,6 @@
         "-DNOMINMAX",
         "-DHAVE_CONFIG_H",
     ],
-    data = [
-        ":ares_build",
-        ":ares_config",
-    ],
     includes = ["."],
     linkstatic = 1,
     visibility = [
diff --git a/third_party/cares/cares_local_files.BUILD b/third_party/cares/cares_local_files.BUILD
deleted file mode 100644
index fe59447..0000000
--- a/third_party/cares/cares_local_files.BUILD
+++ /dev/null
@@ -1,57 +0,0 @@
-package(
-    default_visibility = ["//visibility:public"],
-)
-
-config_setting(
-    name = "darwin",
-    values = {"cpu": "darwin"},
-)
-
-# Android is not officially supported through C++.
-# This just helps with the build for now.
-config_setting(
-    name = "android",
-    values = {
-        "crosstool_top": "//external:android/crosstool",
-    },
-)
-
-# iOS is not officially supported through C++.
-# This just helps with the build for now.
-config_setting(
-    name = "ios_x86_64",
-    values = {"cpu": "ios_x86_64"},
-)
-
-config_setting(
-    name = "ios_armv7",
-    values = {"cpu": "ios_armv7"},
-)
-
-config_setting(
-    name = "ios_armv7s",
-    values = {"cpu": "ios_armv7s"},
-)
-
-config_setting(
-    name = "ios_arm64",
-    values = {"cpu": "ios_arm64"},
-)
-
-filegroup(
-    name = "ares_build_h",
-    srcs = ["ares_build.h"],
-)
-
-filegroup(
-    name = "ares_config_h",
-    srcs = select({
-        ":ios_x86_64": ["config_darwin/ares_config.h"],
-        ":ios_armv7": ["config_darwin/ares_config.h"],
-        ":ios_armv7s": ["config_darwin/ares_config.h"],
-        ":ios_arm64": ["config_darwin/ares_config.h"],
-        ":darwin": ["config_darwin/ares_config.h"],
-        ":android": ["config_android/ares_config.h"],
-        "//conditions:default": ["config_linux/ares_config.h"],
-    }),
-)
diff --git a/tools/failures/detect_new_failures.py b/tools/failures/detect_new_failures.py
new file mode 100644
index 0000000..87fd1d9
--- /dev/null
+++ b/tools/failures/detect_new_failures.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Detect new flakes and create issues for them"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import datetime
+import json
+import logging
+import os
+import pprint
+import sys
+import urllib
+import urllib2
+from collections import namedtuple
+
+gcp_utils_dir = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '../gcp/utils'))
+sys.path.append(gcp_utils_dir)
+
+import big_query_utils
+
+GH_ISSUE_CREATION_URL = 'https://api.github.com/repos/grpc/grpc/issues'
+GH_ISSUE_SEARCH_URL = 'https://api.github.com/search/issues'
+KOKORO_BASE_URL = 'https://kokoro2.corp.google.com/job/'
+
+
+def gh(url, data=None):
+    request = urllib2.Request(url, data=data)
+    assert TOKEN
+    request.add_header('Authorization', 'token {}'.format(TOKEN))
+    if data:
+        request.add_header('Content-type', 'application/json')
+    response = urllib2.urlopen(request)
+    if 200 <= response.getcode() < 300:
+        return json.loads(response.read())
+    else:
+        raise ValueError('Error ({}) accessing {}'.format(response.getcode(),
+                                                          response.geturl()))
+
+
+def search_gh_issues(search_term, status='open'):
+    params = ' '.join((search_term, 'is:issue', 'is:' + status, 'repo:grpc/grpc'))
+    qargs = urllib.urlencode({'q': params})
+    url = '?'.join((GH_ISSUE_SEARCH_URL, qargs))
+    response = gh(url)
+    return response
+
+
+def create_gh_issue(title, body, labels, assignees=[]):
+    params = {'title': title, 'body': body, 'labels': labels}
+    if assignees:
+        params['assignees'] = assignees
+    data = json.dumps(params)
+    response = gh(GH_ISSUE_CREATION_URL, data)
+    issue_url = response['html_url']
+    print('Created issue {} for {}'.format(issue_url, title))
+
+
+def build_kokoro_url(job_name, build_id):
+    job_path = '{}/{}'.format('/job/'.join(job_name.split('/')), build_id)
+    return KOKORO_BASE_URL + job_path
+
+
+def create_issues(new_flakes, always_create):
+    for test_name, results_row in new_flakes.items():
+        poll_strategy, job_name, build_id, timestamp = results_row
+        # TODO(dgq): the Kokoro URL has a limited lifetime. The permanent and ideal
+        # URL would be the sponge one, but there's currently no easy way to retrieve
+        # it.
+        url = build_kokoro_url(job_name, build_id)
+        title = 'New Failure: ' + test_name
+        body = '- Test: {}\n- Poll Strategy: {}\n- URL: {}'.format(
+            test_name, poll_strategy, url)
+        labels = ['infra/New Failure']
+        if always_create:
+            proceed = True
+        else:
+            preexisting_issues = search_gh_issues(test_name)
+            if preexisting_issues['total_count'] > 0:
+                print('\nFound {} issues for "{}":'.format(preexisting_issues[
+                    'total_count'], test_name))
+                for issue in preexisting_issues['items']:
+                    print('\t"{}" ; URL: {}'.format(issue['title'], issue[
+                        'html_url']))
+            else:
+                print(
+                    '\nNo preexisting issues found for "{}"'.format(test_name))
+            proceed = raw_input(
+                'Create issue for:\nTitle: {}\nBody: {}\n[Y/n] '.format(
+                    title, body)) in ('y', 'Y', '')
+        if proceed:
+            assignees_str = raw_input(
+                'Assignees? (comma-separated, leave blank for unassigned): ')
+            assignees = [
+                assignee.strip() for assignee in assignees_str.split(',')
+            ]
+            create_gh_issue(title, body, labels, assignees)
+
+
+def print_table(table, format):
+    first_time = True
+    for test_name, results_row in table.items():
+        poll_strategy, job_name, build_id, timestamp = results_row
+        full_kokoro_url = build_kokoro_url(job_name, build_id)
+        if format == 'human':
+            print("\t- Test: {}, Polling: {}, Timestamp: {}, url: {}".format(
+                test_name, poll_strategy, timestamp, full_kokoro_url))
+        else:
+            assert (format == 'csv')
+            if first_time:
+                print('test,timestamp,url')
+                first_time = False
+            print("{},{},{}".format(test_name, timestamp, full_kokoro_url))
+
+
+Row = namedtuple('Row', ['poll_strategy', 'job_name', 'build_id', 'timestamp'])
+
+
+def get_new_failures(dates):
+    bq = big_query_utils.create_big_query()
+    this_script_path = os.path.join(os.path.dirname(__file__))
+    sql_script = os.path.join(this_script_path, 'sql/new_failures_24h.sql')
+    with open(sql_script) as query_file:
+        query = query_file.read().format(
+            calibration_begin=dates['calibration']['begin'],
+            calibration_end=dates['calibration']['end'],
+            reporting_begin=dates['reporting']['begin'],
+            reporting_end=dates['reporting']['end'])
+    logging.debug("Query:\n%s", query)
+    query_job = big_query_utils.sync_query_job(bq, 'grpc-testing', query)
+    page = bq.jobs().getQueryResults(
+        pageToken=None, **query_job['jobReference']).execute(num_retries=3)
+    rows = page.get('rows')
+    if rows:
+        return {
+            row['f'][0]['v']: Row(poll_strategy=row['f'][1]['v'],
+                                  job_name=row['f'][2]['v'],
+                                  build_id=row['f'][3]['v'],
+                                  timestamp=row['f'][4]['v'])
+            for row in rows
+        }
+    else:
+        return {}
+
+
+def parse_isodate(date_str):
+    return datetime.datetime.strptime(date_str, "%Y-%m-%d").date()
+
+
+def get_new_flakes(args):
+    """The calibration window, "calibration_days" long, establishes the set
+  of pre-existing flakes; the "reporting_days" window that follows it is the
+  period over which new flakes are reported. Both windows are computed by
+  process_date_args from the corresponding command-line arguments.
+
+  calibration
+  begin date
+  |--------------------|---------------|
+  ^____________________^_______________^
+       calibration         reporting
+         days                days
+  """
+    dates = process_date_args(args)
+    new_failures = get_new_failures(dates)
+    logging.info('|new failures| = %d', len(new_failures))
+    return new_failures
+
+
+def build_args_parser():
+    import argparse, datetime
+    parser = argparse.ArgumentParser()
+    today = datetime.date.today()
+    a_week_ago = today - datetime.timedelta(days=7)
+    parser.add_argument(
+        '--calibration_days',
+        type=int,
+        default=7,
+        help='How many days to consider for pre-existing flakes.')
+    parser.add_argument(
+        '--reporting_days',
+        type=int,
+        default=1,
+        help='How many days to consider for the detection of new flakes.')
+    parser.add_argument(
+        '--count_only',
+        dest='count_only',
+        action='store_true',
+        help='Display only the number of new flakes.')
+    parser.set_defaults(count_only=False)
+    parser.add_argument(
+        '--create_issues',
+        dest='create_issues',
+        action='store_true',
+        help='Create issues for all new flakes.')
+    parser.set_defaults(create_issues=False)
+    parser.add_argument(
+        '--always_create_issues',
+        dest='always_create_issues',
+        action='store_true',
+        help='Always create issues for all new flakes. Otherwise,'
+        ' interactively prompt for every issue.')
+    parser.set_defaults(always_create_issues=False)
+    parser.add_argument(
+        '--token',
+        type=str,
+        default='',
+        help='GitHub token, used to call the GitHub API with a higher rate limit')
+    parser.add_argument(
+        '--format',
+        type=str,
+        choices=['human', 'csv'],
+        default='human',
+        help='Output format: are you a human or a machine?')
+    parser.add_argument(
+        '--loglevel',
+        type=str,
+        choices=['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'],
+        default='WARNING',
+        help='Logging level.')
+    return parser
+
+
+def process_date_args(args):
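+    """Splits the period before today into back-to-back calibration and
+    reporting windows.
+
+    Illustrative example: with today = 2018-01-10, calibration_days=7 and
+    reporting_days=1, calibration runs from 2018-01-02 to 2018-01-09 and
+    reporting runs from 2018-01-09 to 2018-01-10.
+    """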
+    calibration_begin = (
+        datetime.date.today() - datetime.timedelta(days=args.calibration_days) -
+        datetime.timedelta(days=args.reporting_days))
+    calibration_end = calibration_begin + datetime.timedelta(
+        days=args.calibration_days)
+    reporting_begin = calibration_end
+    reporting_end = reporting_begin + datetime.timedelta(
+        days=args.reporting_days)
+    return {
+        'calibration': {
+            'begin': calibration_begin,
+            'end': calibration_end
+        },
+        'reporting': {
+            'begin': reporting_begin,
+            'end': reporting_end
+        }
+    }
+
+
+def main():
+    global TOKEN
+    args_parser = build_args_parser()
+    args = args_parser.parse_args()
+    if args.create_issues and not args.token:
+        raise ValueError(
+            'Missing --token argument, needed to create GitHub issues')
+    TOKEN = args.token
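+    # The GitHub issue helpers (e.g. create_issues, called below) read the
+    # module-level TOKEN rather than taking it as a parameter.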
+
+    logging_level = getattr(logging, args.loglevel)
+    logging.basicConfig(format='%(asctime)s %(message)s', level=logging_level)
+    new_flakes = get_new_flakes(args)
+
+    dates = process_date_args(args)
+
+    dates_info_string = 'from {} until {} (calibrated from {} until {})'.format(
+        dates['reporting']['begin'].isoformat(),
+        dates['reporting']['end'].isoformat(),
+        dates['calibration']['begin'].isoformat(),
+        dates['calibration']['end'].isoformat())
+
+    if args.format == 'human':
+        if args.count_only:
+            print(len(new_flakes), dates_info_string)
+        elif new_flakes:
+            found_msg = 'Found {} new flakes {}'.format(
+                len(new_flakes), dates_info_string)
+            print(found_msg)
+            print('*' * len(found_msg))
+            print_table(new_flakes, 'human')
+            if args.create_issues:
+                create_issues(new_flakes, args.always_create_issues)
+        else:
+            print('No new flakes found', dates_info_string)
+    elif args.format == 'csv':
+        if args.count_only:
+            print('from_date,to_date,count')
+            print('{},{},{}'.format(dates['reporting']['begin'].isoformat(
+            ), dates['reporting']['end'].isoformat(), len(new_flakes)))
+        else:
+            print_table(new_flakes, 'csv')
+    else:
+        raise ValueError(
+            'Invalid argument for --format: {}'.format(args.format))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/failures/sql/new_failures_24h.sql b/tools/failures/sql/new_failures_24h.sql
new file mode 100644
index 0000000..6ce0c5d
--- /dev/null
+++ b/tools/failures/sql/new_failures_24h.sql
@@ -0,0 +1,62 @@
+#standardSQL
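+-- Test names are first normalized (shard suffixes "/<n>", GRPC_POLL_STRATEGY
+-- annotations and bins/cmake path prefixes stripped) so reruns of the same
+-- binary aggregate together. A reporting-window failure is a new flake only
+-- if its test_binary never failed in the calibration window: the final LEFT
+-- JOIN keeps rows with no calibration match (calibration.test_binary IS NULL).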
+WITH calibration AS (
+  SELECT
+    RTRIM(LTRIM(REGEXP_REPLACE(filtered_test_name, r'(/\d+)|(bins/.+/)|(cmake/.+/.+/)', ''))) AS test_binary,
+    REGEXP_EXTRACT(test_name, r'GRPC_POLL_STRATEGY=(\w+)') AS poll_strategy,
+    job_name,
+    build_id
+  FROM (
+    SELECT
+      REGEXP_REPLACE(test_name, r'(/\d+)|(GRPC_POLL_STRATEGY=.+)', '') AS filtered_test_name,
+      test_name,
+      job_name,
+      build_id,
+      timestamp
+    FROM
+      `grpc-testing.jenkins_test_results.aggregate_results`
+    WHERE
+      timestamp > TIMESTAMP(DATETIME("{calibration_begin} 00:00:00", "America/Los_Angeles"))
+      AND timestamp <= TIMESTAMP(DATETIME("{calibration_end} 23:59:59", "America/Los_Angeles"))
+      AND NOT REGEXP_CONTAINS(job_name,
+        'portability')
+      AND result != 'PASSED'
+      AND result != 'SKIPPED' )),
+  reporting AS (
+  SELECT
+    RTRIM(LTRIM(REGEXP_REPLACE(filtered_test_name, r'(/\d+)|(bins/.+/)|(cmake/.+/.+/)', ''))) AS test_binary,
+    REGEXP_EXTRACT(test_name, r'GRPC_POLL_STRATEGY=(\w+)') AS poll_strategy,
+    job_name,
+    build_id,
+    timestamp
+  FROM (
+    SELECT
+      REGEXP_REPLACE(test_name, r'(/\d+)|(GRPC_POLL_STRATEGY=.+)', '') AS filtered_test_name,
+      test_name,
+      job_name,
+      build_id,
+      timestamp
+    FROM
+      `grpc-testing.jenkins_test_results.aggregate_results`
+    WHERE
+      timestamp > TIMESTAMP(DATETIME("{reporting_begin} 00:00:00", "America/Los_Angeles"))
+      AND timestamp <= TIMESTAMP(DATETIME("{reporting_end} 23:59:59", "America/Los_Angeles"))
+      AND NOT REGEXP_CONTAINS(job_name,
+        'portability')
+      AND result != 'PASSED'
+      AND result != 'SKIPPED' ))
+SELECT
+  reporting.test_binary,
+  reporting.poll_strategy,
+  reporting.job_name,
+  reporting.build_id,
+  STRING(reporting.timestamp, "America/Los_Angeles") as timestamp_MTV
+FROM
+  reporting
+LEFT JOIN
+  calibration
+ON
+  reporting.test_binary = calibration.test_binary
+WHERE
+  calibration.test_binary IS NULL
+ORDER BY
+  timestamp DESC;
diff --git a/tools/flakes/detect_flakes.py b/tools/flakes/detect_flakes.py
deleted file mode 100644
index b066ee6..0000000
--- a/tools/flakes/detect_flakes.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Detect new flakes introduced in the last 24h hours with respect to the
-previous six days"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import datetime
-import os
-import sys
-import logging
-logging.basicConfig(format='%(asctime)s %(message)s')
-
-gcp_utils_dir = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), '../gcp/utils'))
-sys.path.append(gcp_utils_dir)
-
-import big_query_utils
-
-
-def print_table(table):
-    kokoro_base_url = 'https://kokoro.corp.google.com/job/'
-    for k, v in table.items():
-        job_name = v[0]
-        build_id = v[1]
-        ts = int(float(v[2]))
-        # TODO(dgq): timezone handling is wrong. We need to determine the timezone
-        # of the computer running this script.
-        human_ts = datetime.datetime.utcfromtimestamp(ts).strftime(
-            '%Y-%m-%d %H:%M:%S PDT')
-        job_path = '{}/{}'.format('/job/'.join(job_name.split('/')), build_id)
-        full_kokoro_url = kokoro_base_url + job_path
-        print("Test: {}, Timestamp: {}, url: {}\n".format(k, human_ts,
-                                                          full_kokoro_url))
-
-
-def get_flaky_tests(days_lower_bound, days_upper_bound, limit=None):
-    """ period is one of "WEEK", "DAY", etc.
-  (see https://cloud.google.com/bigquery/docs/reference/standard-sql/functions-and-operators#date_add). """
-
-    bq = big_query_utils.create_big_query()
-    query = """
-SELECT
-  REGEXP_REPLACE(test_name, r'/\d+', '') AS filtered_test_name,
-  job_name,
-  build_id,
-  timestamp
-FROM
-  [grpc-testing:jenkins_test_results.aggregate_results]
-WHERE
-    timestamp > DATE_ADD(CURRENT_DATE(), {days_lower_bound}, "DAY")
-    AND timestamp <= DATE_ADD(CURRENT_DATE(), {days_upper_bound}, "DAY")
-  AND NOT REGEXP_MATCH(job_name, '.*portability.*')
-  AND result != 'PASSED' AND result != 'SKIPPED'
-ORDER BY timestamp desc
-""".format(
-        days_lower_bound=days_lower_bound, days_upper_bound=days_upper_bound)
-    if limit:
-        query += '\n LIMIT {}'.format(limit)
-    query_job = big_query_utils.sync_query_job(bq, 'grpc-testing', query)
-    page = bq.jobs().getQueryResults(
-        pageToken=None, **query_job['jobReference']).execute(num_retries=3)
-    rows = page.get('rows')
-    if rows:
-        return {
-            row['f'][0]['v']:
-            (row['f'][1]['v'], row['f'][2]['v'], row['f'][3]['v'])
-            for row in rows
-        }
-    else:
-        return {}
-
-
-def get_new_flakes():
-    last_week_sans_yesterday = get_flaky_tests(-14, -1)
-    last_24 = get_flaky_tests(0, +1)
-    last_week_sans_yesterday_names = set(last_week_sans_yesterday.keys())
-    last_24_names = set(last_24.keys())
-    logging.debug('|last_week_sans_yesterday| =',
-                  len(last_week_sans_yesterday_names))
-    logging.debug('|last_24_names| =', len(last_24_names))
-    new_flakes = last_24_names - last_week_sans_yesterday_names
-    logging.debug('|new_flakes| = ', len(new_flakes))
-    return {k: last_24[k] for k in new_flakes}
-
-
-def main():
-    new_flakes = get_new_flakes()
-    if new_flakes:
-        print("Found {} new flakes:".format(len(new_flakes)))
-        print_table(new_flakes)
-    else:
-        print("No new flakes found!")
-
-
-if __name__ == '__main__':
-    main()
diff --git a/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh b/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
index c43ac0e..a767218 100644
--- a/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
+++ b/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
@@ -50,7 +50,7 @@
   --genrule_strategy=remote  \
   --experimental_strict_action_env=true \
   --experimental_remote_platform_override='properties:{name:"container-image" value:"docker://gcr.io/asci-toolchain/nosla-debian8-clang-fl@sha256:aa20628a902f06a11a015caa94b0432eb60690de2d2525bd046b9eea046f5d8a" }' \
-  --crosstool_top=@bazel_toolchains//configs/debian8_clang/0.2.0/bazel_0.7.0:toolchain \
+  --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/debian8_clang/0.2.0/bazel_0.7.0:toolchain \
   --define GRPC_PORT_ISOLATED_RUNTIME=1 \
   -c dbg \
   -- //test/...
diff --git a/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh b/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
index b106b71..defe664 100644
--- a/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
+++ b/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
@@ -50,7 +50,7 @@
   --genrule_strategy=remote  \
   --experimental_strict_action_env=true \
   --experimental_remote_platform_override='properties:{name:"container-image" value:"docker://gcr.io/asci-toolchain/nosla-debian8-clang-fl@sha256:aa20628a902f06a11a015caa94b0432eb60690de2d2525bd046b9eea046f5d8a" }' \
-  --crosstool_top=@bazel_toolchains//configs/debian8_clang/0.2.0/bazel_0.7.0:toolchain \
+  --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/debian8_clang/0.2.0/bazel_0.7.0:toolchain \
   --define GRPC_PORT_ISOLATED_RUNTIME=1 \
   -c opt \
   -- //test/...
diff --git a/tools/run_tests/artifacts/distribtest_targets.py b/tools/run_tests/artifacts/distribtest_targets.py
index 83f656b..94a2d53 100644
--- a/tools/run_tests/artifacts/distribtest_targets.py
+++ b/tools/run_tests/artifacts/distribtest_targets.py
@@ -185,6 +185,10 @@
         return []
 
     def build_jobspec(self):
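+        # Translates run_tests arch names into the arch names used in Ruby
+        # gem platform strings.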
+        arch_to_gem_arch = {
+            'x64': 'x86_64',
+            'x86': 'x86',
+        }
         if not self.platform == 'linux':
             raise Exception("Not supported yet.")
 
@@ -192,7 +196,8 @@
             self.name,
             'tools/dockerfile/distribtest/ruby_%s_%s' % (self.docker_suffix,
                                                          self.arch),
-            'test/distrib/ruby/run_distrib_test.sh',
+            'test/distrib/ruby/run_distrib_test.sh %s %s' %
+            (arch_to_gem_arch[self.arch], self.platform),
             copy_rel_path='test/distrib')
 
     def __str__(self):
diff --git a/tools/run_tests/helper_scripts/build_csharp.sh b/tools/run_tests/helper_scripts/build_csharp.sh
index ec0a441..c6bee82 100755
--- a/tools/run_tests/helper_scripts/build_csharp.sh
+++ b/tools/run_tests/helper_scripts/build_csharp.sh
@@ -15,12 +15,12 @@
 
 set -ex
 
-cd $(dirname $0)/../../../src/csharp
+cd "$(dirname "$0")/../../../src/csharp"
 
 if [ "$CONFIG" == "gcov" ]
 then
   # overriding NativeDependenciesConfigurationUnix makes C# project pick up the gcov flavor of grpc_csharp_ext
-  dotnet build --configuration $MSBUILD_CONFIG /p:NativeDependenciesConfigurationUnix=gcov Grpc.sln
+  dotnet build --configuration "$MSBUILD_CONFIG" /p:NativeDependenciesConfigurationUnix=gcov Grpc.sln
 else
-  dotnet build --configuration $MSBUILD_CONFIG Grpc.sln
+  dotnet build --configuration "$MSBUILD_CONFIG" Grpc.sln
 fi
diff --git a/tools/run_tests/helper_scripts/build_php.sh b/tools/run_tests/helper_scripts/build_php.sh
index 856e5b6..443be34 100755
--- a/tools/run_tests/helper_scripts/build_php.sh
+++ b/tools/run_tests/helper_scripts/build_php.sh
@@ -18,9 +18,9 @@
 CONFIG=${CONFIG:-opt}
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
-root=`pwd`
+root=$(pwd)
 export GRPC_LIB_SUBDIR=libs/$CONFIG
 export CFLAGS="-Wno-parentheses-equality"
 
@@ -30,8 +30,8 @@
 cd ext/grpc
 phpize
 if [ "$CONFIG" != "gcov" ] ; then
-  ./configure --enable-grpc=$root
+  ./configure --enable-grpc="$root"
 else
-  ./configure --enable-grpc=$root --enable-coverage
+  ./configure --enable-grpc="$root" --enable-coverage
 fi
 make
diff --git a/tools/run_tests/helper_scripts/build_python.sh b/tools/run_tests/helper_scripts/build_python.sh
index e362082..b809fe0 100755
--- a/tools/run_tests/helper_scripts/build_python.sh
+++ b/tools/run_tests/helper_scripts/build_python.sh
@@ -16,13 +16,13 @@
 set -ex
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 ##########################
 # Portability operations #
 ##########################
 
-PLATFORM=`uname -s`
+PLATFORM=$(uname -s)
 
 function is_msys() {
   if [ "${PLATFORM/MSYS}" != "$PLATFORM" ]; then
@@ -64,7 +64,7 @@
 # Path to python executable within a virtual environment depending on the
 # system.
 function venv_relative_python() {
-  if [ $(is_mingw) ]; then
+  if [ "$(is_mingw)" ]; then
     echo 'Scripts/python.exe'
   else
     echo 'bin/python'
@@ -73,7 +73,7 @@
 
 # Distutils toolchain to use depending on the system.
 function toolchain() {
-  if [ $(is_mingw) ]; then
+  if [ "$(is_mingw)" ]; then
     echo 'mingw32'
   else
     echo 'unix'
@@ -97,17 +97,17 @@
 ####################
 
 PYTHON=${1:-python2.7}
-VENV=${2:-$(venv $PYTHON)}
+VENV=${2:-$(venv "$PYTHON")}
 VENV_RELATIVE_PYTHON=${3:-$(venv_relative_python)}
 TOOLCHAIN=${4:-$(toolchain)}
 
-if [ $(is_msys) ]; then
+if [ "$(is_msys)" ]; then
   echo "MSYS doesn't directly provide the right compiler(s);"
   echo "switch to a MinGW shell."
   exit 1
 fi
 
-ROOT=`pwd`
+ROOT=$(pwd)
 export CFLAGS="-I$ROOT/include -std=gnu99 -fno-wrapv $CFLAGS"
 export GRPC_PYTHON_BUILD_WITH_CYTHON=1
 export LANG=en_US.UTF-8
@@ -117,7 +117,7 @@
 HOST_PYTHON=${HOST_PYTHON:-python}
 
 # If ccache is available on Linux, use it.
-if [ $(is_linux) ]; then
+if [ "$(is_linux)" ]; then
   # We're not on Darwin (Mac OS X)
   if [ -x "$(command -v ccache)" ]; then
     if [ -x "$(command -v gcc)" ]; then
@@ -137,46 +137,46 @@
 # it's possible that the virtualenv is still usable and we trust the tester to
 # be able to 'figure it out' instead of us e.g. doing potentially expensive and
 # unnecessary error recovery by `rm -rf`ing the virtualenv.
-($PYTHON -m virtualenv $VENV ||
- $HOST_PYTHON -m virtualenv -p $PYTHON $VENV ||
+($PYTHON -m virtualenv "$VENV" ||
+ $HOST_PYTHON -m virtualenv -p "$PYTHON" "$VENV" ||
  true)
-VENV_PYTHON=`script_realpath "$VENV/$VENV_RELATIVE_PYTHON"`
+VENV_PYTHON=$(script_realpath "$VENV/$VENV_RELATIVE_PYTHON")
 
 # pip-installs the directory specified. Used because on MSYS the vanilla Windows
 # Python gets confused when parsing paths.
 pip_install_dir() {
-  PWD=`pwd`
-  cd $1
-  ($VENV_PYTHON setup.py build_ext -c $TOOLCHAIN || true)
+  PWD=$(pwd)
+  cd "$1"
+  ($VENV_PYTHON setup.py build_ext -c "$TOOLCHAIN" || true)
   $VENV_PYTHON -m pip install --no-deps .
-  cd $PWD
+  cd "$PWD"
 }
 
 $VENV_PYTHON -m pip install --upgrade pip==9.0.1
 $VENV_PYTHON -m pip install setuptools
 $VENV_PYTHON -m pip install cython
 $VENV_PYTHON -m pip install six enum34 protobuf futures
-pip_install_dir $ROOT
+pip_install_dir "$ROOT"
 
-$VENV_PYTHON $ROOT/tools/distrib/python/make_grpcio_tools.py
-pip_install_dir $ROOT/tools/distrib/python/grpcio_tools
+$VENV_PYTHON "$ROOT/tools/distrib/python/make_grpcio_tools.py"
+pip_install_dir "$ROOT/tools/distrib/python/grpcio_tools"
 
 # Build/install health checking
-$VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py preprocess
-$VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py build_package_protos
-pip_install_dir $ROOT/src/python/grpcio_health_checking
+$VENV_PYTHON "$ROOT/src/python/grpcio_health_checking/setup.py" preprocess
+$VENV_PYTHON "$ROOT/src/python/grpcio_health_checking/setup.py" build_package_protos
+pip_install_dir "$ROOT/src/python/grpcio_health_checking"
 
 # Build/install reflection
-$VENV_PYTHON $ROOT/src/python/grpcio_reflection/setup.py preprocess
-$VENV_PYTHON $ROOT/src/python/grpcio_reflection/setup.py build_package_protos
-pip_install_dir $ROOT/src/python/grpcio_reflection
+$VENV_PYTHON "$ROOT/src/python/grpcio_reflection/setup.py" preprocess
+$VENV_PYTHON "$ROOT/src/python/grpcio_reflection/setup.py" build_package_protos
+pip_install_dir "$ROOT/src/python/grpcio_reflection"
 
 # Install testing
-pip_install_dir $ROOT/src/python/grpcio_testing
+pip_install_dir "$ROOT/src/python/grpcio_testing"
 
 # Build/install tests
 $VENV_PYTHON -m pip install coverage==4.4 oauth2client==4.1.0 \
                             google-auth==1.0.0 requests==2.14.2
-$VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py preprocess
-$VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py build_package_protos
-pip_install_dir $ROOT/src/python/grpcio_tests
+$VENV_PYTHON "$ROOT/src/python/grpcio_tests/setup.py" preprocess
+$VENV_PYTHON "$ROOT/src/python/grpcio_tests/setup.py" build_package_protos
+pip_install_dir "$ROOT/src/python/grpcio_tests"
diff --git a/tools/run_tests/helper_scripts/build_python_msys2.sh b/tools/run_tests/helper_scripts/build_python_msys2.sh
index 4c54f1c..f388b4b 100644
--- a/tools/run_tests/helper_scripts/build_python_msys2.sh
+++ b/tools/run_tests/helper_scripts/build_python_msys2.sh
@@ -15,7 +15,7 @@
 
 set -ex
 
-BUILD_PYTHON=`realpath "$(dirname $0)/build_python.sh"`
+BUILD_PYTHON=$(realpath "$(dirname "$0")/build_python.sh")
 export MSYSTEM=$1
 shift 1
-bash --login $BUILD_PYTHON "$@"
+bash --login "$BUILD_PYTHON" "$@"
diff --git a/tools/run_tests/helper_scripts/build_ruby.sh b/tools/run_tests/helper_scripts/build_ruby.sh
index a9267e1..b15a863 100755
--- a/tools/run_tests/helper_scripts/build_ruby.sh
+++ b/tools/run_tests/helper_scripts/build_ruby.sh
@@ -19,7 +19,7 @@
 export GRPC_CONFIG=${CONFIG:-opt}
 
 # change to grpc's ruby directory
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 rm -rf ./tmp
 rake compile
diff --git a/tools/run_tests/helper_scripts/bundle_install_wrapper.sh b/tools/run_tests/helper_scripts/bundle_install_wrapper.sh
index 27b8fce..ab31dd5 100755
--- a/tools/run_tests/helper_scripts/bundle_install_wrapper.sh
+++ b/tools/run_tests/helper_scripts/bundle_install_wrapper.sh
@@ -17,9 +17,9 @@
 set -ex
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
-SYSTEM=`uname | cut -f 1 -d_`
+SYSTEM=$(uname | cut -f 1 -d_)
 
 if [ "$SYSTEM" == "Darwin" ] ; then
   # Workaround for crash during bundle install
diff --git a/tools/run_tests/helper_scripts/post_tests_c.sh b/tools/run_tests/helper_scripts/post_tests_c.sh
index a4a8f44..e4ab203 100755
--- a/tools/run_tests/helper_scripts/post_tests_c.sh
+++ b/tools/run_tests/helper_scripts/post_tests_c.sh
@@ -17,14 +17,14 @@
 
 if [ "$CONFIG" != "gcov" ] ; then exit ; fi
 
-root=$(readlink -f $(dirname $0)/../../..)
+root=$(readlink -f "$(dirname "$0")/../../..")
 out=$root/reports/c_cxx_coverage
 tmp1=$(mktemp)
 tmp2=$(mktemp)
-cd $root
-lcov --capture --directory . --output-file $tmp1
-lcov --extract $tmp1 "$root/src/*" "$root/include/*" --output-file $tmp2
-genhtml $tmp2 --output-directory $out
-rm $tmp2
-rm $tmp1
+cd "$root"
+lcov --capture --directory . --output-file "$tmp1"
+lcov --extract "$tmp1" "$root/src/*" "$root/include/*" --output-file "$tmp2"
+genhtml "$tmp2" --output-directory "$out"
+rm "$tmp2"
+rm "$tmp1"
 
diff --git a/tools/run_tests/helper_scripts/post_tests_csharp.sh b/tools/run_tests/helper_scripts/post_tests_csharp.sh
index f92ea00..6473dfd 100755
--- a/tools/run_tests/helper_scripts/post_tests_csharp.sh
+++ b/tools/run_tests/helper_scripts/post_tests_csharp.sh
@@ -18,7 +18,7 @@
 if [ "$CONFIG" != "gcov" ] ; then exit ; fi
 
 # change to gRPC repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 # Generate the csharp extension coverage report
 gcov objs/gcov/src/csharp/ext/*.o
diff --git a/tools/run_tests/helper_scripts/post_tests_php.sh b/tools/run_tests/helper_scripts/post_tests_php.sh
index 8ebc1e4..b23e4bd 100755
--- a/tools/run_tests/helper_scripts/post_tests_php.sh
+++ b/tools/run_tests/helper_scripts/post_tests_php.sh
@@ -17,15 +17,15 @@
 
 if [ "$CONFIG" != "gcov" ] ; then exit ; fi
 
-root=$(readlink -f $(dirname $0)/../../..)
+root=$(readlink -f "$(dirname "$0")/../../..")
 out=$root/reports/php_ext_coverage
 tmp1=$(mktemp)
 tmp2=$(mktemp)
-cd $root
-lcov --capture --directory . --output-file $tmp1
-lcov --extract $tmp1 "$root/src/php/ext/grpc/*" --output-file $tmp2
-genhtml $tmp2 --output-directory $out
-rm $tmp2
-rm $tmp1
+cd "$root"
+lcov --capture --directory . --output-file "$tmp1"
+lcov --extract "$tmp1" "$root/src/php/ext/grpc/*" --output-file "$tmp2"
+genhtml "$tmp2" --output-directory "$out"
+rm "$tmp2"
+rm "$tmp1"
 
 # todo(mattkwong): generate coverage report for php and copy to reports/php
diff --git a/tools/run_tests/helper_scripts/post_tests_python.sh b/tools/run_tests/helper_scripts/post_tests_python.sh
index 071e81a..bca9b20 100755
--- a/tools/run_tests/helper_scripts/post_tests_python.sh
+++ b/tools/run_tests/helper_scripts/post_tests_python.sh
@@ -18,7 +18,7 @@
 if [ "$CONFIG" != "gcov" ] ; then exit ; fi
 
 # change to directory of Python coverage files
-cd $(dirname $0)/../../../src/python/grpcio_tests/
+cd "$(dirname "$0")/../../../src/python/grpcio_tests/"
 
 coverage combine .
 coverage html -i -d ./../../../reports/python
diff --git a/tools/run_tests/helper_scripts/post_tests_ruby.sh b/tools/run_tests/helper_scripts/post_tests_ruby.sh
index a0b0736..f086001 100755
--- a/tools/run_tests/helper_scripts/post_tests_ruby.sh
+++ b/tools/run_tests/helper_scripts/post_tests_ruby.sh
@@ -17,15 +17,15 @@
 
 if [ "$CONFIG" != "gcov" ] ; then exit ; fi
 
-root=$(readlink -f $(dirname $0)/../../..)
+root=$(readlink -f "$(dirname "$0")/../../..")
 out=$root/reports/ruby_ext_coverage
 tmp1=$(mktemp)
 tmp2=$(mktemp)
-cd $root
-lcov --capture --directory . --output-file $tmp1
-lcov --extract $tmp1 "$root/src/ruby/*" --output-file $tmp2
-genhtml $tmp2 --output-directory $out
-rm $tmp2
-rm $tmp1
+cd "$root"
+lcov --capture --directory . --output-file "$tmp1"
+lcov --extract "$tmp1" "$root/src/ruby/*" --output-file "$tmp2"
+genhtml "$tmp2" --output-directory "$out"
+rm "$tmp2"
+rm "$tmp1"
 
-cp -rv $root/coverage $root/reports/ruby
+cp -rv "$root/coverage" "$root/reports/ruby"
diff --git a/tools/run_tests/helper_scripts/pre_build_cmake.sh b/tools/run_tests/helper_scripts/pre_build_cmake.sh
index 0300cd8..bb36588 100755
--- a/tools/run_tests/helper_scripts/pre_build_cmake.sh
+++ b/tools/run_tests/helper_scripts/pre_build_cmake.sh
@@ -15,10 +15,10 @@
 
 set -ex
 
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 mkdir -p cmake/build
 cd cmake/build
 
 # MSBUILD_CONFIG's values are suitable for cmake as well
-cmake -DgRPC_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE=${MSBUILD_CONFIG} ../..
+cmake -DgRPC_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE="${MSBUILD_CONFIG}" ../..
diff --git a/tools/run_tests/helper_scripts/pre_build_csharp.sh b/tools/run_tests/helper_scripts/pre_build_csharp.sh
index e2aeddc..f9f5440 100755
--- a/tools/run_tests/helper_scripts/pre_build_csharp.sh
+++ b/tools/run_tests/helper_scripts/pre_build_csharp.sh
@@ -16,6 +16,6 @@
 set -ex
 
 # cd to gRPC csharp directory
-cd $(dirname $0)/../../../src/csharp
+cd "$(dirname "$0")/../../../src/csharp"
 
 dotnet restore Grpc.sln
diff --git a/tools/run_tests/helper_scripts/pre_build_ruby.sh b/tools/run_tests/helper_scripts/pre_build_ruby.sh
index d68f7e9..b574096 100755
--- a/tools/run_tests/helper_scripts/pre_build_ruby.sh
+++ b/tools/run_tests/helper_scripts/pre_build_ruby.sh
@@ -19,6 +19,6 @@
 export GRPC_CONFIG=${CONFIG:-opt}
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 tools/run_tests/helper_scripts/bundle_install_wrapper.sh
diff --git a/tools/run_tests/helper_scripts/run_grpc-node.sh b/tools/run_tests/helper_scripts/run_grpc-node.sh
index 25f149f..747aae7 100755
--- a/tools/run_tests/helper_scripts/run_grpc-node.sh
+++ b/tools/run_tests/helper_scripts/run_grpc-node.sh
@@ -17,12 +17,12 @@
 # to this reference
 
 # cd to gRPC root directory
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
-CURRENT_COMMIT=$(git rev-parse --verify HEAD)
+CURRENT_COMMIT="$(git rev-parse --verify HEAD)"
 
 rm -rf ./../grpc-node
 git clone --recursive https://github.com/grpc/grpc-node ./../grpc-node
 cd ./../grpc-node
 
-./test-grpc-submodule.sh $CURRENT_COMMIT
+./test-grpc-submodule.sh "$CURRENT_COMMIT"
diff --git a/tools/run_tests/helper_scripts/run_lcov.sh b/tools/run_tests/helper_scripts/run_lcov.sh
index c7b2cfe..9d8b679 100755
--- a/tools/run_tests/helper_scripts/run_lcov.sh
+++ b/tools/run_tests/helper_scripts/run_lcov.sh
@@ -15,17 +15,17 @@
 
 set -ex
 
-out=$(readlink -f ${1:-coverage})
+out=$(readlink -f "${1:-coverage}")
 
-root=$(readlink -f $(dirname $0)/../../..)
+root=$(readlink -f "$(dirname "$0")/../../..")
 shift || true
 tmp=$(mktemp)
-cd $root
-tools/run_tests/run_tests.py -c gcov -l c c++ $@ || true
-lcov --capture --directory . --output-file $tmp
-genhtml $tmp --output-directory $out
-rm $tmp
+cd "$root"
+tools/run_tests/run_tests.py -c gcov -l c c++ "$@" || true
+lcov --capture --directory . --output-file "$tmp"
+genhtml "$tmp" --output-directory "$out"
+rm "$tmp"
 if which xdg-open > /dev/null
 then
-  xdg-open file://$out/index.html
+  xdg-open "file://$out/index.html"
 fi
diff --git a/tools/run_tests/helper_scripts/run_python.sh b/tools/run_tests/helper_scripts/run_python.sh
index 90f28c8..bcfe3a6 100755
--- a/tools/run_tests/helper_scripts/run_python.sh
+++ b/tools/run_tests/helper_scripts/run_python.sh
@@ -16,15 +16,15 @@
 set -ex
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
-PYTHON=`realpath "${1:-py27/bin/python}"`
+PYTHON=$(realpath "${1:-py27/bin/python}")
 
-ROOT=`pwd`
+ROOT=$(pwd)
 
-$PYTHON $ROOT/src/python/grpcio_tests/setup.py test_lite
+$PYTHON "$ROOT/src/python/grpcio_tests/setup.py" test_lite
 
-mkdir -p $ROOT/reports
-rm -rf $ROOT/reports/python-coverage
-(mv -T $ROOT/htmlcov $ROOT/reports/python-coverage) || true
+mkdir -p "$ROOT/reports"
+rm -rf "$ROOT/reports/python-coverage"
+(mv -T "$ROOT/htmlcov" "$ROOT/reports/python-coverage") || true
 
diff --git a/tools/run_tests/helper_scripts/run_ruby.sh b/tools/run_tests/helper_scripts/run_ruby.sh
index 4bd7d74..4e9c212 100755
--- a/tools/run_tests/helper_scripts/run_ruby.sh
+++ b/tools/run_tests/helper_scripts/run_ruby.sh
@@ -16,6 +16,6 @@
 set -ex
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 rake
diff --git a/tools/run_tests/helper_scripts/run_ruby_end2end_tests.sh b/tools/run_tests/helper_scripts/run_ruby_end2end_tests.sh
index 5cfab14..1955442 100755
--- a/tools/run_tests/helper_scripts/run_ruby_end2end_tests.sh
+++ b/tools/run_tests/helper_scripts/run_ruby_end2end_tests.sh
@@ -16,7 +16,7 @@
 set -ex
 
 # change to grpc repo root
-cd $(dirname $0)/../../..
+cd "$(dirname "$0")/../../.."
 
 EXIT_CODE=0
 ruby src/ruby/end2end/sig_handling_driver.rb || EXIT_CODE=1
diff --git a/tools/run_tests/helper_scripts/run_tests_in_workspace.sh b/tools/run_tests/helper_scripts/run_tests_in_workspace.sh
index 529dc04..790c041 100755
--- a/tools/run_tests/helper_scripts/run_tests_in_workspace.sh
+++ b/tools/run_tests/helper_scripts/run_tests_in_workspace.sh
@@ -19,15 +19,15 @@
 # newly created workspace)
 set -ex
 
-cd $(dirname $0)/../../..
-export repo_root=$(pwd)
+cd "$(dirname "$0")/../../.."
+export repo_root="$(pwd)"
 
 rm -rf "${WORKSPACE_NAME}"
 git clone . "${WORKSPACE_NAME}"
 # clone gRPC submodules, use data from locally cloned submodules where possible
+# shellcheck disable=SC2016,SC1004
 git submodule foreach 'cd "${repo_root}/${WORKSPACE_NAME}" \
     && git submodule update --init --reference ${repo_root}/${name} ${name}'
 
 echo "Running run_tests.py in workspace ${WORKSPACE_NAME}" 
-python "${WORKSPACE_NAME}/tools/run_tests/run_tests.py" $@
-
+python "${WORKSPACE_NAME}/tools/run_tests/run_tests.py" "$@"
diff --git a/tools/run_tests/sanity/check_bazel_workspace.py b/tools/run_tests/sanity/check_bazel_workspace.py
index b5a77f4..62a6229 100755
--- a/tools/run_tests/sanity/check_bazel_workspace.py
+++ b/tools/run_tests/sanity/check_bazel_workspace.py
@@ -34,21 +34,76 @@
     for s in git_submodules
 }
 
-# Parse git hashes from Bazel WORKSPACE {new_}http_archive rules
-with open('WORKSPACE', 'r') as f:
-    workspace_rules = [expr.value for expr in ast.parse(f.read()).body]
+_BAZEL_TOOLCHAINS_DEP_NAME = 'com_github_bazelbuild_bazeltoolchains'
 
-http_archive_rules = [
-    rule for rule in workspace_rules if rule.func.id.endswith('http_archive')
+_GRPC_DEP_NAMES = [
+    'boringssl',
+    'com_github_madler_zlib',
+    'com_google_protobuf',
+    'com_github_google_googletest',
+    'com_github_gflags_gflags',
+    'com_github_google_benchmark',
+    'com_github_cares_cares',
+    'com_google_absl',
+    _BAZEL_TOOLCHAINS_DEP_NAME,
 ]
-archive_urls = [
-    kw.value.s for rule in http_archive_rules for kw in rule.keywords
-    if kw.arg == 'url'
-]
+
+
+class BazelEvalState(object):
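+    """Emulates just enough of Bazel's 'native' module for grpc_deps.bzl to
+    be exec'd as plain Python: archive rules record the dependency's name and
+    URL instead of downloading anything, and bind() calls are ignored.
+    """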
+
+    def __init__(self, names_and_urls, overridden_name=None):
+        self.names_and_urls = names_and_urls
+        self.overridden_name = overridden_name
+
+    def http_archive(self, **args):
+        self.archive(**args)
+
+    def new_http_archive(self, **args):
+        self.archive(**args)
+
+    def bind(self, **args):
+        pass
+
+    def existing_rules(self):
+        if self.overridden_name:
+            return [self.overridden_name]
+        return []
+
+    def archive(self, **args):
+        if args['name'] == _BAZEL_TOOLCHAINS_DEP_NAME:
+            self.names_and_urls[args['name']] = 'dont care'
+            return
+        self.names_and_urls[args['name']] = args['url']
+
+
+# Parse git hashes from bazel/grpc_deps.bzl {new_}http_archive rules
+with open(os.path.join('bazel', 'grpc_deps.bzl'), 'r') as f:
+    names_and_urls = {}
+    eval_state = BazelEvalState(names_and_urls)
+    bazel_file = f.read()
+
+# grpc_deps.bzl only defines 'grpc_deps', add this to call it
+bazel_file += '\ngrpc_deps()\n'
+build_rules = {
+    'native': eval_state,
+}
+exec bazel_file in build_rules
+for name in _GRPC_DEP_NAMES:
+    assert name in names_and_urls.keys()
+assert len(_GRPC_DEP_NAMES) == len(names_and_urls.keys())
+
+# bazeltoolchains is an exception to this sanity check;
+# we don't require that there is a corresponding git submodule.
+names_without_bazeltoolchains = names_and_urls.keys()
+names_without_bazeltoolchains.remove(_BAZEL_TOOLCHAINS_DEP_NAME)
+archive_urls = [names_and_urls[name] for name in names_without_bazeltoolchains]
 workspace_git_hashes = {
     re.search(git_hash_pattern, url).group()
     for url in archive_urls
 }
+if len(workspace_git_hashes) == 0:
+    print("(Likely) parse error, did not find any bazel git dependencies.")
+    sys.exit(1)
 
 # Validate the equivalence of the git submodules and Bazel git dependencies. The
 # condition we impose is that there is a git submodule for every dependency in
@@ -60,4 +115,15 @@
     )
     sys.exit(1)
 
+# Also check that we can override each dependency
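+# (grpc_deps.bzl is expected to consult native.existing_rules() and skip any
+# dependency that is already defined)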
+for name in _GRPC_DEP_NAMES:
+    names_and_urls_with_overridden_name = {}
+    state = BazelEvalState(
+        names_and_urls_with_overridden_name, overridden_name=name)
+    rules = {
+        'native': state,
+    }
+    exec bazel_file in rules
+    assert name not in names_and_urls_with_overridden_name.keys()
+
 sys.exit(0)