Merge github.com:grpc/grpc into slice_with_exec_ctx
diff --git a/.gitmodules b/.gitmodules
index c32881c..04d155c 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -17,6 +17,6 @@
 [submodule "third_party/thrift"]
 	path = third_party/thrift
 	url = https://github.com/apache/thrift.git
-[submodule "third_party/google_benchmark"]
-	path = third_party/google_benchmark
+[submodule "third_party/benchmark"]
+	path = third_party/benchmark
 	url = https://github.com/google/benchmark
diff --git a/Makefile b/Makefile
index 3861f6a..3359985 100644
--- a/Makefile
+++ b/Makefile
@@ -1260,9 +1260,9 @@
 pc_cxx_unsecure: $(LIBDIR)/$(CONFIG)/pkgconfig/grpc++_unsecure.pc
 
 ifeq ($(EMBED_OPENSSL),true)
-privatelibs_cxx:  $(LIBDIR)/$(CONFIG)/libgrpc++_proto_reflection_desc_db.a $(LIBDIR)/$(CONFIG)/libgrpc++_test.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_config.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_cli_libs.a $(LIBDIR)/$(CONFIG)/libinterop_client_helper.a $(LIBDIR)/$(CONFIG)/libinterop_client_main.a $(LIBDIR)/$(CONFIG)/libinterop_server_helper.a $(LIBDIR)/$(CONFIG)/libinterop_server_lib.a $(LIBDIR)/$(CONFIG)/libinterop_server_main.a $(LIBDIR)/$(CONFIG)/libqps.a $(LIBDIR)/$(CONFIG)/libboringssl_test_util.a $(LIBDIR)/$(CONFIG)/libboringssl_aes_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_asn1_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_base64_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bio_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bn_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bytestring_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_aead_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_cipher_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_cmac_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ed25519_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_x25519_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_dh_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_digest_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ec_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ecdsa_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_err_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_evp_extra_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_evp_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pbkdf_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_hmac_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pkcs12_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pkcs8_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_poly1305_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_rsa_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_x509_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ssl_test_lib.a $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
+privatelibs_cxx:  $(LIBDIR)/$(CONFIG)/libgrpc++_proto_reflection_desc_db.a $(LIBDIR)/$(CONFIG)/libgrpc++_test.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_config.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_cli_libs.a $(LIBDIR)/$(CONFIG)/libinterop_client_helper.a $(LIBDIR)/$(CONFIG)/libinterop_client_main.a $(LIBDIR)/$(CONFIG)/libinterop_server_helper.a $(LIBDIR)/$(CONFIG)/libinterop_server_lib.a $(LIBDIR)/$(CONFIG)/libinterop_server_main.a $(LIBDIR)/$(CONFIG)/libqps.a $(LIBDIR)/$(CONFIG)/libboringssl_test_util.a $(LIBDIR)/$(CONFIG)/libboringssl_aes_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_asn1_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_base64_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bio_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bn_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_bytestring_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_aead_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_cipher_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_cmac_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ed25519_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_x25519_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_dh_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_digest_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ec_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ecdsa_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_err_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_evp_extra_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_evp_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pbkdf_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_hmac_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pkcs12_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_pkcs8_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_poly1305_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_rsa_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_x509_test_lib.a $(LIBDIR)/$(CONFIG)/libboringssl_ssl_test_lib.a $(LIBDIR)/$(CONFIG)/libbenchmark.a
 else
-privatelibs_cxx:  $(LIBDIR)/$(CONFIG)/libgrpc++_proto_reflection_desc_db.a $(LIBDIR)/$(CONFIG)/libgrpc++_test.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_config.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_cli_libs.a $(LIBDIR)/$(CONFIG)/libinterop_client_helper.a $(LIBDIR)/$(CONFIG)/libinterop_client_main.a $(LIBDIR)/$(CONFIG)/libinterop_server_helper.a $(LIBDIR)/$(CONFIG)/libinterop_server_lib.a $(LIBDIR)/$(CONFIG)/libinterop_server_main.a $(LIBDIR)/$(CONFIG)/libqps.a $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
+privatelibs_cxx:  $(LIBDIR)/$(CONFIG)/libgrpc++_proto_reflection_desc_db.a $(LIBDIR)/$(CONFIG)/libgrpc++_test.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_config.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_cli_libs.a $(LIBDIR)/$(CONFIG)/libinterop_client_helper.a $(LIBDIR)/$(CONFIG)/libinterop_client_main.a $(LIBDIR)/$(CONFIG)/libinterop_server_helper.a $(LIBDIR)/$(CONFIG)/libinterop_server_lib.a $(LIBDIR)/$(CONFIG)/libinterop_server_main.a $(LIBDIR)/$(CONFIG)/libqps.a $(LIBDIR)/$(CONFIG)/libbenchmark.a
 endif
 
 
@@ -7007,43 +7007,43 @@
 endif
 
 
-LIBGOOGLE_BENCHMARK_SRC = \
-    third_party/google_benchmark/src/benchmark.cc \
-    third_party/google_benchmark/src/benchmark_register.cc \
-    third_party/google_benchmark/src/colorprint.cc \
-    third_party/google_benchmark/src/commandlineflags.cc \
-    third_party/google_benchmark/src/complexity.cc \
-    third_party/google_benchmark/src/console_reporter.cc \
-    third_party/google_benchmark/src/csv_reporter.cc \
-    third_party/google_benchmark/src/json_reporter.cc \
-    third_party/google_benchmark/src/reporter.cc \
-    third_party/google_benchmark/src/sleep.cc \
-    third_party/google_benchmark/src/string_util.cc \
-    third_party/google_benchmark/src/sysinfo.cc \
-    third_party/google_benchmark/src/timers.cc \
+LIBBENCHMARK_SRC = \
+    third_party/benchmark/src/benchmark.cc \
+    third_party/benchmark/src/benchmark_register.cc \
+    third_party/benchmark/src/colorprint.cc \
+    third_party/benchmark/src/commandlineflags.cc \
+    third_party/benchmark/src/complexity.cc \
+    third_party/benchmark/src/console_reporter.cc \
+    third_party/benchmark/src/csv_reporter.cc \
+    third_party/benchmark/src/json_reporter.cc \
+    third_party/benchmark/src/reporter.cc \
+    third_party/benchmark/src/sleep.cc \
+    third_party/benchmark/src/string_util.cc \
+    third_party/benchmark/src/sysinfo.cc \
+    third_party/benchmark/src/timers.cc \
 
 PUBLIC_HEADERS_CXX += \
 
-LIBGOOGLE_BENCHMARK_OBJS = $(addprefix $(OBJDIR)/$(CONFIG)/, $(addsuffix .o, $(basename $(LIBGOOGLE_BENCHMARK_SRC))))
+LIBBENCHMARK_OBJS = $(addprefix $(OBJDIR)/$(CONFIG)/, $(addsuffix .o, $(basename $(LIBBENCHMARK_SRC))))
 
-$(LIBGOOGLE_BENCHMARK_OBJS): CPPFLAGS += -Ithird_party/google_benchmark/include -DHAVE_POSIX_REGEX
+$(LIBBENCHMARK_OBJS): CPPFLAGS += -Ithird_party/benchmark/include -DHAVE_POSIX_REGEX
 
 ifeq ($(NO_PROTOBUF),true)
 
 # You can't build a C++ library if you don't have protobuf - a bit overreached, but still okay.
 
-$(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a: protobuf_dep_error
+$(LIBDIR)/$(CONFIG)/libbenchmark.a: protobuf_dep_error
 
 
 else
 
-$(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a: $(ZLIB_DEP)  $(PROTOBUF_DEP) $(LIBGOOGLE_BENCHMARK_OBJS) 
+$(LIBDIR)/$(CONFIG)/libbenchmark.a: $(ZLIB_DEP)  $(PROTOBUF_DEP) $(LIBBENCHMARK_OBJS) 
 	$(E) "[AR]      Creating $@"
 	$(Q) mkdir -p `dirname $@`
-	$(Q) rm -f $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
-	$(Q) $(AR) $(AROPTS) $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a $(LIBGOOGLE_BENCHMARK_OBJS) 
+	$(Q) rm -f $(LIBDIR)/$(CONFIG)/libbenchmark.a
+	$(Q) $(AR) $(AROPTS) $(LIBDIR)/$(CONFIG)/libbenchmark.a $(LIBBENCHMARK_OBJS) 
 ifeq ($(SYSTEM),Darwin)
-	$(Q) ranlib -no_warning_for_no_symbols $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
+	$(Q) ranlib -no_warning_for_no_symbols $(LIBDIR)/$(CONFIG)/libbenchmark.a
 endif
 
 
@@ -7052,7 +7052,7 @@
 endif
 
 ifneq ($(NO_DEPS),true)
--include $(LIBGOOGLE_BENCHMARK_OBJS:.o=.dep)
+-include $(LIBBENCHMARK_OBJS:.o=.dep)
 endif
 
 
@@ -11745,16 +11745,16 @@
 
 else
 
-$(BINDIR)/$(CONFIG)/bm_fullstack: $(PROTOBUF_DEP) $(BM_FULLSTACK_OBJS) $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a
+$(BINDIR)/$(CONFIG)/bm_fullstack: $(PROTOBUF_DEP) $(BM_FULLSTACK_OBJS) $(LIBDIR)/$(CONFIG)/libbenchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a
 	$(E) "[LD]      Linking $@"
 	$(Q) mkdir -p `dirname $@`
-	$(Q) $(LDXX) $(LDFLAGS) $(BM_FULLSTACK_OBJS) $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LDLIBSXX) $(LDLIBS_PROTOBUF) $(LDLIBS) $(LDLIBS_SECURE) $(GTEST_LIB) -o $(BINDIR)/$(CONFIG)/bm_fullstack
+	$(Q) $(LDXX) $(LDFLAGS) $(BM_FULLSTACK_OBJS) $(LIBDIR)/$(CONFIG)/libbenchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LDLIBSXX) $(LDLIBS_PROTOBUF) $(LDLIBS) $(LDLIBS_SECURE) $(GTEST_LIB) -o $(BINDIR)/$(CONFIG)/bm_fullstack
 
 endif
 
 endif
 
-$(OBJDIR)/$(CONFIG)/test/cpp/microbenchmarks/bm_fullstack.o:  $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a
+$(OBJDIR)/$(CONFIG)/test/cpp/microbenchmarks/bm_fullstack.o:  $(LIBDIR)/$(CONFIG)/libbenchmark.a $(LIBDIR)/$(CONFIG)/libgrpc++_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc_test_util.a $(LIBDIR)/$(CONFIG)/libgrpc++.a $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr_test_util.a $(LIBDIR)/$(CONFIG)/libgpr.a
 
 deps_bm_fullstack: $(BM_FULLSTACK_OBJS:.o=.dep)
 
@@ -13201,16 +13201,16 @@
 
 else
 
-$(BINDIR)/$(CONFIG)/noop-benchmark: $(PROTOBUF_DEP) $(NOOP-BENCHMARK_OBJS) $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
+$(BINDIR)/$(CONFIG)/noop-benchmark: $(PROTOBUF_DEP) $(NOOP-BENCHMARK_OBJS) $(LIBDIR)/$(CONFIG)/libbenchmark.a
 	$(E) "[LD]      Linking $@"
 	$(Q) mkdir -p `dirname $@`
-	$(Q) $(LDXX) $(LDFLAGS) $(NOOP-BENCHMARK_OBJS) $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a $(LDLIBSXX) $(LDLIBS_PROTOBUF) $(LDLIBS) $(LDLIBS_SECURE) $(GTEST_LIB) -o $(BINDIR)/$(CONFIG)/noop-benchmark
+	$(Q) $(LDXX) $(LDFLAGS) $(NOOP-BENCHMARK_OBJS) $(LIBDIR)/$(CONFIG)/libbenchmark.a $(LDLIBSXX) $(LDLIBS_PROTOBUF) $(LDLIBS) $(LDLIBS_SECURE) $(GTEST_LIB) -o $(BINDIR)/$(CONFIG)/noop-benchmark
 
 endif
 
 endif
 
-$(OBJDIR)/$(CONFIG)/test/cpp/microbenchmarks/noop-benchmark.o:  $(LIBDIR)/$(CONFIG)/libgoogle_benchmark.a
+$(OBJDIR)/$(CONFIG)/test/cpp/microbenchmarks/noop-benchmark.o:  $(LIBDIR)/$(CONFIG)/libbenchmark.a
 
 deps_noop-benchmark: $(NOOP-BENCHMARK_OBJS:.o=.dep)
 
diff --git a/build.yaml b/build.yaml
index 51ee141..7740c40 100644
--- a/build.yaml
+++ b/build.yaml
@@ -2852,7 +2852,7 @@
   src:
   - test/cpp/microbenchmarks/bm_fullstack.cc
   deps:
-  - google_benchmark
+  - benchmark
   - grpc++_test_util
   - grpc_test_util
   - grpc++
@@ -3302,7 +3302,7 @@
   src:
   - test/cpp/microbenchmarks/noop-benchmark.cc
   deps:
-  - google_benchmark
+  - benchmark
 - name: proto_server_reflection_test
   gtest: true
   build: test
@@ -3788,6 +3788,8 @@
       UBSAN_OPTIONS: halt_on_error=1:print_stacktrace=1
     timeout_multiplier: 1.5
 defaults:
+  benchmark:
+    CPPFLAGS: -Ithird_party/benchmark/include -DHAVE_POSIX_REGEX
   boringssl:
     CFLAGS: -Wno-sign-conversion -Wno-conversion -Wno-unused-value -Wno-unknown-pragmas
       -Wno-implicit-function-declaration -Wno-unused-variable -Wno-sign-compare $(NO_W_EXTRA_SEMI)
@@ -3796,8 +3798,6 @@
   global:
     CPPFLAGS: -g -Wall -Wextra -Werror -Wno-long-long -Wno-unused-parameter
     LDFLAGS: -g
-  google_benchmark:
-    CPPFLAGS: -Ithird_party/google_benchmark/include -DHAVE_POSIX_REGEX
   zlib:
     CFLAGS: -Wno-sign-conversion -Wno-conversion -Wno-unused-value -Wno-implicit-function-declaration
       $(W_NO_SHIFT_NEGATIVE_VALUE) -fvisibility=hidden
diff --git a/src/google_benchmark/gen_build_yaml.py b/src/benchmark/gen_build_yaml.py
similarity index 86%
rename from src/google_benchmark/gen_build_yaml.py
rename to src/benchmark/gen_build_yaml.py
index 302e087..09b7611 100755
--- a/src/google_benchmark/gen_build_yaml.py
+++ b/src/benchmark/gen_build_yaml.py
@@ -39,15 +39,15 @@
 out = {}
 
 out['libs'] = [{
-    'name': 'google_benchmark',
+    'name': 'benchmark',
     'build': 'private',
     'language': 'c++',
     'secure': 'no',
-    'defaults': 'google_benchmark',
-    'src': sorted(glob.glob('third_party/google_benchmark/src/*.cc')),
+    'defaults': 'benchmark',
+    'src': sorted(glob.glob('third_party/benchmark/src/*.cc')),
     'headers': sorted(
-        glob.glob('third_party/google_benchmark/src/*.h') +
-        glob.glob('third_party/google_benchmark/include/benchmark/*.h')),
+        glob.glob('third_party/benchmark/src/*.h') +
+        glob.glob('third_party/benchmark/include/benchmark/*.h')),
 }]
 
 print yaml.dump(out)
diff --git a/src/core/ext/transport/chttp2/transport/chttp2_transport.c b/src/core/ext/transport/chttp2/transport/chttp2_transport.c
index 9b56903..5c1a1db 100644
--- a/src/core/ext/transport/chttp2/transport/chttp2_transport.c
+++ b/src/core/ext/transport/chttp2/transport/chttp2_transport.c
@@ -1626,6 +1626,9 @@
     if (s->id != 0) {
       remove_stream(exec_ctx, t, s->id,
                     removal_error(GRPC_ERROR_REF(error), s, "Stream removed"));
+    } else {
+      /* Purge streams waiting on concurrency (i.e. still waiting for an id) */
+      grpc_chttp2_list_remove_waiting_for_concurrency(t, s);
     }
     GRPC_CHTTP2_STREAM_UNREF(exec_ctx, s, "chttp2");
   }
diff --git a/src/core/ext/transport/chttp2/transport/internal.h b/src/core/ext/transport/chttp2/transport/internal.h
index 31eb1e0..b727965 100644
--- a/src/core/ext/transport/chttp2/transport/internal.h
+++ b/src/core/ext/transport/chttp2/transport/internal.h
@@ -496,6 +496,8 @@
                                                   grpc_chttp2_stream *s);
 int grpc_chttp2_list_pop_waiting_for_concurrency(grpc_chttp2_transport *t,
                                                  grpc_chttp2_stream **s);
+void grpc_chttp2_list_remove_waiting_for_concurrency(grpc_chttp2_transport *t,
+                                                     grpc_chttp2_stream *s);
 
 void grpc_chttp2_list_add_stalled_by_transport(grpc_chttp2_transport *t,
                                                grpc_chttp2_stream *s);
diff --git a/src/core/ext/transport/chttp2/transport/stream_lists.c b/src/core/ext/transport/chttp2/transport/stream_lists.c
index 6d25b3a..a60264c 100644
--- a/src/core/ext/transport/chttp2/transport/stream_lists.c
+++ b/src/core/ext/transport/chttp2/transport/stream_lists.c
@@ -158,6 +158,11 @@
   return stream_list_pop(t, s, GRPC_CHTTP2_LIST_WAITING_FOR_CONCURRENCY);
 }
 
+void grpc_chttp2_list_remove_waiting_for_concurrency(grpc_chttp2_transport *t,
+                                                     grpc_chttp2_stream *s) {
+  stream_list_maybe_remove(t, s, GRPC_CHTTP2_LIST_WAITING_FOR_CONCURRENCY);
+}
+
 void grpc_chttp2_list_add_stalled_by_transport(grpc_chttp2_transport *t,
                                                grpc_chttp2_stream *s) {
   stream_list_add(t, s, GRPC_CHTTP2_LIST_STALLED_BY_TRANSPORT);
diff --git a/src/core/ext/transport/cronet/transport/cronet_transport.c b/src/core/ext/transport/cronet/transport/cronet_transport.c
index f9dae9e..355cb7e 100644
--- a/src/core/ext/transport/cronet/transport/cronet_transport.c
+++ b/src/core/ext/transport/cronet/transport/cronet_transport.c
@@ -149,6 +149,9 @@
 struct op_state {
   bool state_op_done[OP_NUM_OPS];
   bool state_callback_received[OP_NUM_OPS];
+  bool fail_state;
+  bool flush_read;
+  grpc_error *cancel_error;
   /* data structure for storing data coming from server */
   struct read_state rs;
   /* data structure for storing data going to the server */
@@ -248,6 +251,12 @@
   }
 }
 
+static grpc_error *make_error_with_desc(int error_code, const char *desc) {
+  grpc_error *error = GRPC_ERROR_CREATE(desc);
+  error = grpc_error_set_int(error, GRPC_ERROR_INT_GRPC_STATUS, error_code);
+  return error;
+}
+
 /*
   Add a new stream op to op storage.
 */
@@ -434,6 +443,18 @@
             grpc_mdstr_from_string(headers->headers[i].value)));
   }
   s->state.state_callback_received[OP_RECV_INITIAL_METADATA] = true;
+  if (!(s->state.state_op_done[OP_CANCEL_ERROR] ||
+        s->state.state_callback_received[OP_FAILED])) {
+    /* Do an extra read to trigger on_succeeded() callback in case connection
+     is closed */
+    GPR_ASSERT(s->state.rs.length_field_received == false);
+    s->state.rs.read_buffer = s->state.rs.grpc_header_bytes;
+    s->state.rs.received_bytes = 0;
+    s->state.rs.remaining_bytes = GRPC_HEADER_SIZE_IN_BYTES;
+    CRONET_LOG(GPR_DEBUG, "cronet_bidirectional_stream_read(%p)", s->cbs);
+    cronet_bidirectional_stream_read(s->cbs, s->state.rs.read_buffer,
+                                     s->state.rs.remaining_bytes);
+  }
   gpr_mu_unlock(&s->mu);
   grpc_exec_ctx_finish(&exec_ctx);
   execute_from_storage(s);
@@ -466,7 +487,11 @@
              count);
   gpr_mu_lock(&s->mu);
   s->state.state_callback_received[OP_RECV_MESSAGE] = true;
-  if (count > 0) {
+  if (count > 0 && s->state.flush_read) {
+    CRONET_LOG(GPR_DEBUG, "cronet_bidirectional_stream_read(%p)", s->cbs);
+    cronet_bidirectional_stream_read(s->cbs, s->state.rs.read_buffer, 4096);
+    gpr_mu_unlock(&s->mu);
+  } else if (count > 0) {
     s->state.rs.received_bytes += count;
     s->state.rs.remaining_bytes -= count;
     if (s->state.rs.remaining_bytes > 0) {
@@ -481,6 +506,10 @@
       execute_from_storage(s);
     }
   } else {
+    if (s->state.flush_read) {
+      gpr_free(s->state.rs.read_buffer);
+      s->state.rs.read_buffer = NULL;
+    }
     s->state.rs.read_stream_closed = true;
     gpr_mu_unlock(&s->mu);
     execute_from_storage(s);
@@ -511,11 +540,29 @@
             &exec_ctx, grpc_mdstr_from_string(trailers->headers[i].key),
             grpc_mdstr_from_string(trailers->headers[i].value)));
     s->state.rs.trailing_metadata_valid = true;
+    if (0 == strcmp(trailers->headers[i].key, "grpc-status") &&
+        0 != strcmp(trailers->headers[i].value, "0")) {
+      s->state.fail_state = true;
+    }
   }
   s->state.state_callback_received[OP_RECV_TRAILING_METADATA] = true;
-  gpr_mu_unlock(&s->mu);
-  grpc_exec_ctx_finish(&exec_ctx);
-  execute_from_storage(s);
+  /* Send an EOS when the server terminates the stream (testServerFinishesRequest) to
+   * trigger on_succeeded */
+  if (!s->state.state_op_done[OP_SEND_TRAILING_METADATA] &&
+      !(s->state.state_op_done[OP_CANCEL_ERROR] ||
+        s->state.state_callback_received[OP_FAILED])) {
+    CRONET_LOG(GPR_DEBUG, "cronet_bidirectional_stream_write (%p, 0)", s->cbs);
+    s->state.state_callback_received[OP_SEND_MESSAGE] = false;
+    cronet_bidirectional_stream_write(s->cbs, "", 0, true);
+    s->state.state_op_done[OP_SEND_TRAILING_METADATA] = true;
+
+    gpr_mu_unlock(&s->mu);
+    grpc_exec_ctx_finish(&exec_ctx);
+  } else {
+    gpr_mu_unlock(&s->mu);
+    grpc_exec_ctx_finish(&exec_ctx);
+    execute_from_storage(s);
+  }
 }
 
 /*
@@ -636,9 +683,9 @@
   /* When call is canceled, every op can be run, except under following
   conditions
   */
-  bool is_canceled_of_failed = stream_state->state_op_done[OP_CANCEL_ERROR] ||
+  bool is_canceled_or_failed = stream_state->state_op_done[OP_CANCEL_ERROR] ||
                                stream_state->state_callback_received[OP_FAILED];
-  if (is_canceled_of_failed) {
+  if (is_canceled_or_failed) {
     if (op_id == OP_SEND_INITIAL_METADATA) result = false;
     if (op_id == OP_SEND_MESSAGE) result = false;
     if (op_id == OP_SEND_TRAILING_METADATA) result = false;
@@ -782,16 +829,10 @@
       op_can_be_run(stream_op, stream_state, &oas->state,
                     OP_SEND_INITIAL_METADATA)) {
     CRONET_LOG(GPR_DEBUG, "running: %p OP_SEND_INITIAL_METADATA", oas);
-    /* This OP is the beginning. Reset various states */
-    memset(&s->header_array, 0, sizeof(s->header_array));
-    memset(&stream_state->rs, 0, sizeof(stream_state->rs));
-    memset(&stream_state->ws, 0, sizeof(stream_state->ws));
-    memset(stream_state->state_op_done, 0, sizeof(stream_state->state_op_done));
-    memset(stream_state->state_callback_received, 0,
-           sizeof(stream_state->state_callback_received));
     /* Start new cronet stream. It is destroyed in on_succeeded, on_canceled,
      * on_failed */
     GPR_ASSERT(s->cbs == NULL);
+    GPR_ASSERT(!stream_state->state_op_done[OP_SEND_INITIAL_METADATA]);
     s->cbs = cronet_bidirectional_stream_create(s->curr_ct.engine, s->curr_gs,
                                                 &cronet_callbacks);
     CRONET_LOG(GPR_DEBUG, "%p = cronet_bidirectional_stream_create()", s->cbs);
@@ -812,10 +853,13 @@
              op_can_be_run(stream_op, stream_state, &oas->state,
                            OP_RECV_INITIAL_METADATA)) {
     CRONET_LOG(GPR_DEBUG, "running: %p  OP_RECV_INITIAL_METADATA", oas);
-    if (stream_state->state_op_done[OP_CANCEL_ERROR] ||
-        stream_state->state_callback_received[OP_FAILED]) {
+    if (stream_state->state_op_done[OP_CANCEL_ERROR]) {
       grpc_exec_ctx_sched(exec_ctx, stream_op->recv_initial_metadata_ready,
                           GRPC_ERROR_CANCELLED, NULL);
+    } else if (stream_state->state_callback_received[OP_FAILED]) {
+      grpc_exec_ctx_sched(
+          exec_ctx, stream_op->recv_initial_metadata_ready,
+          make_error_with_desc(GRPC_STATUS_UNAVAILABLE, "Unavailable."), NULL);
     } else {
       grpc_chttp2_incoming_metadata_buffer_publish(
           &oas->s->state.rs.initial_metadata, stream_op->recv_initial_metadata);
@@ -869,12 +913,19 @@
              op_can_be_run(stream_op, stream_state, &oas->state,
                            OP_RECV_MESSAGE)) {
     CRONET_LOG(GPR_DEBUG, "running: %p  OP_RECV_MESSAGE", oas);
-    if (stream_state->state_op_done[OP_CANCEL_ERROR] ||
-        stream_state->state_callback_received[OP_FAILED]) {
-      CRONET_LOG(GPR_DEBUG, "Stream is either cancelled or failed.");
+    if (stream_state->state_op_done[OP_CANCEL_ERROR]) {
+      CRONET_LOG(GPR_DEBUG, "Stream is cancelled.");
       grpc_exec_ctx_sched(exec_ctx, stream_op->recv_message_ready,
                           GRPC_ERROR_CANCELLED, NULL);
       stream_state->state_op_done[OP_RECV_MESSAGE] = true;
+      result = ACTION_TAKEN_NO_CALLBACK;
+    } else if (stream_state->state_callback_received[OP_FAILED]) {
+      CRONET_LOG(GPR_DEBUG, "Stream failed.");
+      grpc_exec_ctx_sched(
+          exec_ctx, stream_op->recv_message_ready,
+          make_error_with_desc(GRPC_STATUS_UNAVAILABLE, "Unavailable."), NULL);
+      stream_state->state_op_done[OP_RECV_MESSAGE] = true;
+      result = ACTION_TAKEN_NO_CALLBACK;
     } else if (stream_state->rs.read_stream_closed == true) {
       /* No more data will be received */
       CRONET_LOG(GPR_DEBUG, "read stream closed");
@@ -882,6 +933,7 @@
                           GRPC_ERROR_NONE, NULL);
       stream_state->state_op_done[OP_RECV_MESSAGE] = true;
       oas->state.state_op_done[OP_RECV_MESSAGE] = true;
+      result = ACTION_TAKEN_NO_CALLBACK;
     } else if (stream_state->rs.length_field_received == false) {
       if (stream_state->rs.received_bytes == GRPC_HEADER_SIZE_IN_BYTES &&
           stream_state->rs.remaining_bytes == 0) {
@@ -950,10 +1002,15 @@
                           GRPC_ERROR_NONE, NULL);
       stream_state->state_op_done[OP_RECV_MESSAGE] = true;
       oas->state.state_op_done[OP_RECV_MESSAGE] = true;
-      /* Clear read state of the stream, so next read op (if it were to come)
-       * will work */
-      stream_state->rs.received_bytes = stream_state->rs.remaining_bytes =
-          stream_state->rs.length_field_received = 0;
+      /* Do an extra read to trigger on_succeeded() callback in case connection
+         is closed */
+      stream_state->rs.read_buffer = stream_state->rs.grpc_header_bytes;
+      stream_state->rs.received_bytes = 0;
+      stream_state->rs.remaining_bytes = GRPC_HEADER_SIZE_IN_BYTES;
+      stream_state->rs.length_field_received = false;
+      CRONET_LOG(GPR_DEBUG, "cronet_bidirectional_stream_read(%p)", s->cbs);
+      cronet_bidirectional_stream_read(s->cbs, stream_state->rs.read_buffer,
+                                       stream_state->rs.remaining_bytes);
       result = ACTION_TAKEN_NO_CALLBACK;
     }
   } else if (stream_op->recv_trailing_metadata &&
@@ -990,17 +1047,25 @@
     CRONET_LOG(GPR_DEBUG, "W: cronet_bidirectional_stream_cancel(%p)", s->cbs);
     if (s->cbs) {
       cronet_bidirectional_stream_cancel(s->cbs);
+      result = ACTION_TAKEN_WITH_CALLBACK;
+    } else {
+      result = ACTION_TAKEN_NO_CALLBACK;
     }
     stream_state->state_op_done[OP_CANCEL_ERROR] = true;
-    result = ACTION_TAKEN_WITH_CALLBACK;
+    if (!stream_state->cancel_error) {
+      stream_state->cancel_error = GRPC_ERROR_REF(stream_op->cancel_error);
+    }
   } else if (stream_op->on_complete &&
              op_can_be_run(stream_op, stream_state, &oas->state,
                            OP_ON_COMPLETE)) {
     CRONET_LOG(GPR_DEBUG, "running: %p  OP_ON_COMPLETE", oas);
-    if (stream_state->state_op_done[OP_CANCEL_ERROR] ||
-        stream_state->state_callback_received[OP_FAILED]) {
+    if (stream_state->state_op_done[OP_CANCEL_ERROR]) {
       grpc_exec_ctx_sched(exec_ctx, stream_op->on_complete,
-                          GRPC_ERROR_CANCELLED, NULL);
+                          GRPC_ERROR_REF(stream_state->cancel_error), NULL);
+    } else if (stream_state->state_callback_received[OP_FAILED]) {
+      grpc_exec_ctx_sched(
+          exec_ctx, stream_op->on_complete,
+          make_error_with_desc(GRPC_STATUS_UNAVAILABLE, "Unavailable."), NULL);
     } else {
       /* All actions in this stream_op are complete. Call the on_complete
        * callback
@@ -1021,6 +1086,15 @@
       make a note */
     if (stream_op->recv_message)
       stream_state->state_op_done[OP_RECV_MESSAGE_AND_ON_COMPLETE] = true;
+  } else if (stream_state->fail_state && !stream_state->flush_read) {
+    CRONET_LOG(GPR_DEBUG, "running: %p  flush read", oas);
+    if (stream_state->rs.read_buffer &&
+        stream_state->rs.read_buffer != stream_state->rs.grpc_header_bytes) {
+      gpr_free(stream_state->rs.read_buffer);
+      stream_state->rs.read_buffer = NULL;
+    }
+    stream_state->rs.read_buffer = gpr_malloc(4096);
+    stream_state->flush_read = true;
   } else {
     result = NO_ACTION_POSSIBLE;
   }
@@ -1046,6 +1120,8 @@
   memset(s->state.state_op_done, 0, sizeof(s->state.state_op_done));
   memset(s->state.state_callback_received, 0,
          sizeof(s->state.state_callback_received));
+  s->state.fail_state = s->state.flush_read = false;
+  s->state.cancel_error = NULL;
   gpr_mu_init(&s->mu);
   return 0;
 }
@@ -1092,7 +1168,10 @@
 }
 
 static void destroy_stream(grpc_exec_ctx *exec_ctx, grpc_transport *gt,
-                           grpc_stream *gs, void *and_free_memory) {}
+                           grpc_stream *gs, void *and_free_memory) {
+  stream_obj *s = (stream_obj *)gs;
+  GRPC_ERROR_UNREF(s->state.cancel_error);
+}
 
 static void destroy_transport(grpc_exec_ctx *exec_ctx, grpc_transport *gt) {}
 
diff --git a/src/core/lib/iomgr/tcp_posix.c b/src/core/lib/iomgr/tcp_posix.c
index ba288c7..7cc66fb 100644
--- a/src/core/lib/iomgr/tcp_posix.c
+++ b/src/core/lib/iomgr/tcp_posix.c
@@ -108,6 +108,12 @@
   grpc_resource_user_slice_allocator slice_allocator;
 } grpc_tcp;
 
+static grpc_error *tcp_annotate_error(grpc_error *src_error, grpc_tcp *tcp) {
+  return grpc_error_set_str(
+      grpc_error_set_int(src_error, GRPC_ERROR_INT_FD, tcp->fd),
+      GRPC_ERROR_STR_TARGET_ADDRESS, tcp->peer_string);
+}
+
 static void tcp_handle_read(grpc_exec_ctx *exec_ctx, void *arg /* grpc_tcp */,
                             grpc_error *error);
 static void tcp_handle_write(grpc_exec_ctx *exec_ctx, void *arg /* grpc_tcp */,
@@ -230,15 +236,16 @@
       /* We've consumed the edge, request a new one */
       grpc_fd_notify_on_read(exec_ctx, tcp->em_fd, &tcp->read_closure);
     } else {
-      grpc_slice_buffer_reset_and_unref_internal(exec_ctx,
-                                                 tcp->incoming_buffer);
-      call_read_cb(exec_ctx, tcp, GRPC_OS_ERROR(errno, "recvmsg"));
+      grpc_slice_buffer_reset_and_unref_internal(exec_ctx, tcp->incoming_buffer);
+      call_read_cb(exec_ctx, tcp,
+                   tcp_annotate_error(GRPC_OS_ERROR(errno, "recvmsg"), tcp));
       TCP_UNREF(exec_ctx, tcp, "read");
     }
   } else if (read_bytes == 0) {
     /* 0 read size ==> end of stream */
     grpc_slice_buffer_reset_and_unref_internal(exec_ctx, tcp->incoming_buffer);
-    call_read_cb(exec_ctx, tcp, GRPC_ERROR_CREATE("Socket closed"));
+    call_read_cb(exec_ctx, tcp,
+                 tcp_annotate_error(GRPC_ERROR_CREATE("Socket closed"), tcp));
     TCP_UNREF(exec_ctx, tcp, "read");
   } else {
     GPR_ASSERT((size_t)read_bytes <= tcp->incoming_buffer->length);
@@ -370,7 +377,7 @@
         tcp->outgoing_byte_idx = unwind_byte_idx;
         return false;
       } else {
-        *error = GRPC_OS_ERROR(errno, "sendmsg");
+        *error = tcp_annotate_error(GRPC_OS_ERROR(errno, "sendmsg"), tcp);
         return true;
       }
     }
@@ -451,9 +458,10 @@
 
   if (buf->length == 0) {
     GPR_TIMER_END("tcp_write", 0);
-    grpc_exec_ctx_sched(exec_ctx, cb, grpc_fd_is_shutdown(tcp->em_fd)
-                                          ? GRPC_ERROR_CREATE("EOF")
-                                          : GRPC_ERROR_NONE,
+    grpc_exec_ctx_sched(exec_ctx, cb,
+                        grpc_fd_is_shutdown(tcp->em_fd)
+                            ? tcp_annotate_error(GRPC_ERROR_CREATE("EOF"), tcp)
+                            : GRPC_ERROR_NONE,
                         NULL);
     return;
   }
diff --git a/src/core/lib/surface/completion_queue.c b/src/core/lib/surface/completion_queue.c
index 4e0feb5..184c1a1 100644
--- a/src/core/lib/surface/completion_queue.c
+++ b/src/core/lib/surface/completion_queue.c
@@ -354,11 +354,13 @@
   gpr_strvec v;
   gpr_strvec_init(&v);
   gpr_strvec_add(&v, gpr_strdup("PENDING TAGS:"));
+  gpr_mu_lock(cc->mu);
   for (size_t i = 0; i < cc->outstanding_tag_count; i++) {
     char *s;
     gpr_asprintf(&s, " %p", cc->outstanding_tags[i]);
     gpr_strvec_add(&v, s);
   }
+  gpr_mu_unlock(cc->mu);
   char *out = gpr_strvec_flatten(&v, NULL);
   gpr_strvec_destroy(&v);
   gpr_log(GPR_DEBUG, "%s", out);
diff --git a/src/node/performance/worker_service_impl.js b/src/node/performance/worker_service_impl.js
index 3f317f6..38888a7 100644
--- a/src/node/performance/worker_service_impl.js
+++ b/src/node/performance/worker_service_impl.js
@@ -55,9 +55,8 @@
   }
 
   this.quitWorker = function quitWorker(call, callback) {
-    server.tryShutdown(function() {
-      callback(null, {});
-    });
+    callback(null, {});
+    server.tryShutdown(function() {});
   };
 
   this.runClient = function runClient(call) {
diff --git a/src/objective-c/tests/CoreCronetEnd2EndTests/CoreCronetEnd2EndTests.m b/src/objective-c/tests/CoreCronetEnd2EndTests/CoreCronetEnd2EndTests.m
index 4a92cc8..4ba7bad 100644
--- a/src/objective-c/tests/CoreCronetEnd2EndTests/CoreCronetEnd2EndTests.m
+++ b/src/objective-c/tests/CoreCronetEnd2EndTests/CoreCronetEnd2EndTests.m
@@ -316,7 +316,8 @@
 }
 
 - (void)testInvokeLargeRequest {
-  [self testIndividualCase:"invoke_large_request"];
+  // NOT SUPPORTED (frame size)
+  // [self testIndividualCase:"invoke_large_request"];
 }
 
 - (void)testLargeMetadata {
@@ -329,7 +330,8 @@
 }
 
 - (void)testMaxMessageLength {
-  [self testIndividualCase:"max_message_length"];
+  // NOT SUPPORTED (close_error)
+  // [self testIndividualCase:"max_message_length"];
 }
 
 - (void)testNegativeDeadline {
diff --git a/test/cpp/microbenchmarks/bm_fullstack.cc b/test/cpp/microbenchmarks/bm_fullstack.cc
index 6cc780d..6c0bf80 100644
--- a/test/cpp/microbenchmarks/bm_fullstack.cc
+++ b/test/cpp/microbenchmarks/bm_fullstack.cc
@@ -59,7 +59,7 @@
 }
 #include "src/cpp/client/create_channel_internal.h"
 #include "src/proto/grpc/testing/echo.grpc.pb.h"
-#include "third_party/google_benchmark/include/benchmark/benchmark.h"
+#include "third_party/benchmark/include/benchmark/benchmark.h"
 
 namespace grpc {
 namespace testing {
diff --git a/test/cpp/microbenchmarks/noop-benchmark.cc b/test/cpp/microbenchmarks/noop-benchmark.cc
index 6b06c69..99fa6d5 100644
--- a/test/cpp/microbenchmarks/noop-benchmark.cc
+++ b/test/cpp/microbenchmarks/noop-benchmark.cc
@@ -31,10 +31,10 @@
  *
  */
 
-/* This benchmark exists to ensure that the google_benchmark integration is
+/* This benchmark exists to ensure that the benchmark integration is
  * working */
 
-#include "third_party/google_benchmark/include/benchmark/benchmark.h"
+#include "third_party/benchmark/include/benchmark/benchmark.h"
 
 static void BM_NoOp(benchmark::State& state) {
   while (state.KeepRunning()) {
diff --git a/test/cpp/qps/driver.cc b/test/cpp/qps/driver.cc
index ea0b38e..22b2cd0 100644
--- a/test/cpp/qps/driver.cc
+++ b/test/cpp/qps/driver.cc
@@ -101,7 +101,7 @@
 
 static deque<string> get_workers(const string& name) {
   char* env = gpr_getenv(name.c_str());
-  if (!env) return deque<string>();
+  if (!env || strlen(env) == 0) return deque<string>();
 
   deque<string> out;
   char* p = env;
diff --git a/third_party/google_benchmark b/third_party/benchmark
similarity index 100%
rename from third_party/google_benchmark
rename to third_party/benchmark
diff --git a/tools/buildgen/generate_build_additions.sh b/tools/buildgen/generate_build_additions.sh
index 1ea4704..53c30c7 100644
--- a/tools/buildgen/generate_build_additions.sh
+++ b/tools/buildgen/generate_build_additions.sh
@@ -30,7 +30,7 @@
 
 gen_build_yaml_dirs="  \
   src/boringssl        \
-  src/google_benchmark \
+  src/benchmark        \
   src/proto            \
   src/zlib             \
   test/core/bad_client \
diff --git a/tools/gce/linux_performance_worker_init.sh b/tools/gce/linux_performance_worker_init.sh
index 523749e..ab29e01 100755
--- a/tools/gce/linux_performance_worker_init.sh
+++ b/tools/gce/linux_performance_worker_init.sh
@@ -150,3 +150,19 @@
 # Put go on the PATH, keep the usual installation dir
 sudo ln -s /usr/local/go/bin/go /usr/bin/go
 rm go$GO_VERSION.$OS-$ARCH.tar.gz
+
+# Install perf, to profile benchmarks. (need to get the right linux-tools-<> for kernel version)
+sudo apt-get install -y linux-tools-common linux-tools-generic linux-tools-`uname -r`
+# see http://unix.stackexchange.com/questions/14227/do-i-need-root-admin-permissions-to-run-userspace-perf-tool-perf-events-ar
+echo 0 | sudo tee /proc/sys/kernel/perf_event_paranoid
+# see http://stackoverflow.com/questions/21284906/perf-couldnt-record-kernel-reference-relocation-symbol
+echo 0 | sudo tee /proc/sys/kernel/kptr_restrict
+
+# qps workers under perf appear to need a lot of mmap pages under certain scenarios and perf args in
+# order to not lose perf events or time out
+echo 4096 | sudo tee /proc/sys/kernel/perf_event_mlock_kb
+
+# Fetch scripts to generate flame graphs from perf data collected
+# on benchmarks
+git clone -v https://github.com/brendangregg/FlameGraph ~/FlameGraph
+
diff --git a/tools/run_tests/build_python.sh b/tools/run_tests/build_python.sh
index fb884ad..7cac394 100755
--- a/tools/run_tests/build_python.sh
+++ b/tools/run_tests/build_python.sh
@@ -171,8 +171,7 @@
 }
 
 $VENV_PYTHON -m pip install --upgrade pip
-# TODO(https://github.com/pypa/setuptools/issues/709) get the latest setuptools
-$VENV_PYTHON -m pip install setuptools==25.1.1
+$VENV_PYTHON -m pip install setuptools
 $VENV_PYTHON -m pip install cython
 pip_install_dir $ROOT
 $VENV_PYTHON $ROOT/tools/distrib/python/make_grpcio_tools.py
diff --git a/src/google_benchmark/gen_build_yaml.py b/tools/run_tests/performance/process_local_perf_flamegraphs.sh
similarity index 73%
copy from src/google_benchmark/gen_build_yaml.py
copy to tools/run_tests/performance/process_local_perf_flamegraphs.sh
index 302e087..d15610f 100755
--- a/src/google_benchmark/gen_build_yaml.py
+++ b/tools/run_tests/performance/process_local_perf_flamegraphs.sh
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2.7
-
+#!/bin/bash
 # Copyright 2015, Google Inc.
 # All rights reserved.
 #
@@ -29,25 +28,13 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import os
-import sys
-import glob
-import yaml
+mkdir -p $OUTPUT_DIR
 
-os.chdir(os.path.dirname(sys.argv[0])+'/../..')
+PERF_DATA_FILE=${PERF_BASE_NAME}-perf.data
+PERF_SCRIPT_OUTPUT=${PERF_BASE_NAME}-out.perf
 
-out = {}
+# Generate Flame graphs
+echo "running perf script on $PERF_DATA_FILE"
+perf script -i $PERF_DATA_FILE > $PERF_SCRIPT_OUTPUT
 
-out['libs'] = [{
-    'name': 'google_benchmark',
-    'build': 'private',
-    'language': 'c++',
-    'secure': 'no',
-    'defaults': 'google_benchmark',
-    'src': sorted(glob.glob('third_party/google_benchmark/src/*.cc')),
-    'headers': sorted(
-        glob.glob('third_party/google_benchmark/src/*.h') +
-        glob.glob('third_party/google_benchmark/include/benchmark/*.h')),
-}]
-
-print yaml.dump(out)
+~/FlameGraph/stackcollapse-perf.pl $PERF_SCRIPT_OUTPUT | ~/FlameGraph/flamegraph.pl > ${OUTPUT_DIR}/${OUTPUT_FILENAME}.svg
diff --git a/src/google_benchmark/gen_build_yaml.py b/tools/run_tests/performance/process_remote_perf_flamegraphs.sh
similarity index 71%
copy from src/google_benchmark/gen_build_yaml.py
copy to tools/run_tests/performance/process_remote_perf_flamegraphs.sh
index 302e087..cc07535 100755
--- a/src/google_benchmark/gen_build_yaml.py
+++ b/tools/run_tests/performance/process_remote_perf_flamegraphs.sh
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2.7
-
+#!/bin/bash
 # Copyright 2015, Google Inc.
 # All rights reserved.
 #
@@ -29,25 +28,17 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import os
-import sys
-import glob
-import yaml
+mkdir -p $OUTPUT_DIR
 
-os.chdir(os.path.dirname(sys.argv[0])+'/../..')
+PERF_DATA_FILE=${PERF_BASE_NAME}-perf.data
+PERF_SCRIPT_OUTPUT=${PERF_BASE_NAME}-out.perf
 
-out = {}
+# Generate Flame graphs
+echo "running perf script on $USER_AT_HOST with perf.data"
+ssh $USER_AT_HOST "cd ~/performance_workspace/grpc && perf script -i $PERF_DATA_FILE | gzip > ${PERF_SCRIPT_OUTPUT}.gz"
 
-out['libs'] = [{
-    'name': 'google_benchmark',
-    'build': 'private',
-    'language': 'c++',
-    'secure': 'no',
-    'defaults': 'google_benchmark',
-    'src': sorted(glob.glob('third_party/google_benchmark/src/*.cc')),
-    'headers': sorted(
-        glob.glob('third_party/google_benchmark/src/*.h') +
-        glob.glob('third_party/google_benchmark/include/benchmark/*.h')),
-}]
+scp $USER_AT_HOST:~/performance_workspace/grpc/$PERF_SCRIPT_OUTPUT.gz .
 
-print yaml.dump(out)
+gzip -d -f $PERF_SCRIPT_OUTPUT.gz
+
+~/FlameGraph/stackcollapse-perf.pl --kernel $PERF_SCRIPT_OUTPUT | ~/FlameGraph/flamegraph.pl --color=java --hash > ${OUTPUT_DIR}/${OUTPUT_FILENAME}.svg
diff --git a/tools/run_tests/report_utils.py b/tools/run_tests/report_utils.py
index 90055e3..5ce2a87 100644
--- a/tools/run_tests/report_utils.py
+++ b/tools/run_tests/report_utils.py
@@ -122,3 +122,10 @@
   except:
     print(exceptions.text_error_template().render())
     raise
+
+def render_perf_profiling_results(output_filepath, profile_names):
+  with open(output_filepath, 'w') as output_file:
+    output_file.write('<ul>\n')
+    for name in profile_names:
+      output_file.write('<li><a href="%s">%s</a></li>\n' % (name, name))
+    output_file.write('</ul>\n')
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index 1d0c98f..69ccff8 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -49,6 +49,7 @@
 import time
 import traceback
 import uuid
+import report_utils
 
 
 _ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
@@ -57,15 +58,18 @@
 
 _REMOTE_HOST_USERNAME = 'jenkins'
 
+_PERF_REPORT_OUTPUT_DIR = 'perf_reports'
+
 
 class QpsWorkerJob:
   """Encapsulates a qps worker server job."""
 
-  def __init__(self, spec, language, host_and_port):
+  def __init__(self, spec, language, host_and_port, perf_file_base_name=None):
     self._spec = spec
     self.language = language
     self.host_and_port = host_and_port
     self._job = None
+    self.perf_file_base_name = perf_file_base_name
 
   def start(self):
     self._job = jobset.Job(self._spec, newline_on_success=True, travis=True, add_env={})
@@ -80,24 +84,32 @@
       self._job = None
 
 
-def create_qpsworker_job(language, shortname=None,
-                         port=10000, remote_host=None):
-  cmdline = language.worker_cmdline() + ['--driver_port=%s' % port]
+def create_qpsworker_job(language, shortname=None, port=10000, remote_host=None, perf_cmd=None):
+  cmdline = (language.worker_cmdline() + ['--driver_port=%s' % port])
+
   if remote_host:
-    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
-    cmdline = ['ssh',
-               str(user_at_host),
-               'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)]
     host_and_port='%s:%s' % (remote_host, port)
   else:
     host_and_port='localhost:%s' % port
 
+  perf_file_base_name = None
+  if perf_cmd:
+    perf_file_base_name = '%s-%s' % (host_and_port, shortname)
+    # specify the -o output file so perf.data gets collected when the worker is stopped
+    cmdline = perf_cmd + ['-o', '%s-perf.data' % perf_file_base_name] + cmdline
+
+  if remote_host:
+    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
+    ssh_cmd = ['ssh']
+    ssh_cmd.extend([str(user_at_host), 'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)])
+    cmdline = ssh_cmd
+
   jobspec = jobset.JobSpec(
       cmdline=cmdline,
       shortname=shortname,
       timeout_seconds=5*60,  # workers get restarted after each scenario
       verbose_success=True)
-  return QpsWorkerJob(jobspec, language, host_and_port)
+  return QpsWorkerJob(jobspec, language, host_and_port, perf_file_base_name)
 
 
 def create_scenario_jobspec(scenario_json, workers, remote_host=None,
@@ -259,7 +271,7 @@
     sys.exit(1)
 
 
-def create_qpsworkers(languages, worker_hosts):
+def create_qpsworkers(languages, worker_hosts, perf_cmd=None):
   """Creates QPS workers (but does not start them)."""
   if not worker_hosts:
     # run two workers locally (for each language)
@@ -275,11 +287,32 @@
                                shortname= 'qps_worker_%s_%s' % (language,
                                                                 worker_idx),
                                port=worker[1] + language.worker_port_offset(),
-                               remote_host=worker[0])
+                               remote_host=worker[0],
+                               perf_cmd=perf_cmd)
           for language in languages
           for worker_idx, worker in enumerate(workers)]
 
 
+def perf_report_processor_job(worker_host, perf_base_name, output_filename):
+  print('Creating perf report collection job for %s' % worker_host)
+  cmd = ''
+  if worker_host != 'localhost':
+    user_at_host = "%s@%s" % (_REMOTE_HOST_USERNAME, worker_host)
+    cmd = "USER_AT_HOST=%s OUTPUT_FILENAME=%s OUTPUT_DIR=%s PERF_BASE_NAME=%s\
+         tools/run_tests/performance/process_remote_perf_flamegraphs.sh" \
+          % (user_at_host, output_filename, _PERF_REPORT_OUTPUT_DIR, perf_base_name)
+  else:
+    cmd = "OUTPUT_FILENAME=%s OUTPUT_DIR=%s PERF_BASE_NAME=%s\
+          tools/run_tests/performance/process_local_perf_flamegraphs.sh" \
+          % (output_filename, _PERF_REPORT_OUTPUT_DIR, perf_base_name)
+
+  return jobset.JobSpec(cmdline=cmd,
+                        timeout_seconds=3*60,
+                        shell=True,
+                        verbose_success=True,
+                        shortname='process perf report')
+
+
 Scenario = collections.namedtuple('Scenario', 'jobspec workers name')
 
 
@@ -372,6 +405,31 @@
   print('All QPS workers finished.')
   return num_killed
 
+profile_output_files = []
+
+# Collect perf text reports and flamegraphs if perf_cmd was used.
+# Note that the base names of the perf text reports are used when creating and
+# processing perf data. The scenario name makes the output name unique in the
+# final perf reports directory.
+# Also, the perf profiles need to be fetched and processed after each scenario
+# in order to avoid clobbering the output files.
+def run_collect_perf_profile_jobs(hosts_and_base_names, scenario_name):
+  perf_report_jobs = []
+  global profile_output_files
+  for host_and_port in hosts_and_base_names:
+    perf_base_name = hosts_and_base_names[host_and_port]
+    output_filename = '%s-%s' % (scenario_name, perf_base_name)
+    # from the base filename, create .svg output filename
+    host = host_and_port.split(':')[0]
+    profile_output_files.append('%s.svg' % output_filename)
+    perf_report_jobs.append(perf_report_processor_job(host, perf_base_name, output_filename))
+
+  jobset.message('START', 'Collecting perf reports from qps workers', do_newline=True)
+  failures, _ = jobset.run(perf_report_jobs, newline_on_success=True, maxjobs=1)
+  jobset.message('END', 'Collecting perf reports from qps workers', do_newline=True)
+  return failures
+
+
 argp = argparse.ArgumentParser(description='Run performance tests.')
 argp.add_argument('-l', '--language',
                   choices=['all'] + sorted(scenario_config.LANGUAGES.keys()),
@@ -405,6 +463,33 @@
                   help='Run netperf benchmark as one of the scenarios.')
 argp.add_argument('-x', '--xml_report', default='report.xml', type=str,
                   help='Name of XML report file to generate.')
+argp.add_argument('--perf_args',
+                  help=('Example usage: "--perf_args=record -F 99 -g". '
+                        'Wrap QPS workers in a perf command '
+                        'with the arguments to perf specified here. '
+                        '".svg" flame graph profiles will be '
+                        'created for each QPS worker on each scenario. '
+                        'Files will be output to the "<repo_root>/perf_reports" '
+                        'directory. Output files from running the worker '
+                        'under perf are saved in the repo root where it is run. '
+                        'Note that the perf "-g" flag is necessary for '
+                        'flame graph generation to work (assuming the binary '
+                        'being profiled uses frame pointers; otherwise check out '
+                        'the "--call-graph dwarf" option, which uses libunwind). '
+                        'Also note that the entire "--perf_args=<arg(s)>" must '
+                        'be wrapped in quotes as in the example usage. '
+                        'If the "--perg_args" is unspecified, "perf" will '
+                        'not be used at all. '
+                        'See http://www.brendangregg.com/perf.html '
+                        'for more general perf examples.'))
+argp.add_argument('--skip_generate_flamegraphs',
+                  default=False,
+                  action='store_const',
+                  const=True,
+                  help=('Turn flame graph generation off. '
+                        'May be useful if "perf_args" arguments do not make sense for '
+                        'generating flamegraphs (e.g., "--perf_args=stat ...")'))
+
 
 args = argp.parse_args()
 
@@ -435,7 +520,13 @@
 if not args.dry_run:
   build_on_remote_hosts(remote_hosts, languages=[str(l) for l in languages], build_local=build_local)
 
-qpsworker_jobs = create_qpsworkers(languages, args.remote_worker_host)
+perf_cmd = None
+if args.perf_args:
+  # Expect /usr/bin/perf to be installed here, as is usual
+  perf_cmd = ['/usr/bin/perf']
+  perf_cmd.extend(re.split('\s+', args.perf_args))
+
+qpsworker_jobs = create_qpsworkers(languages, args.remote_worker_host, perf_cmd=perf_cmd)
 
 # get list of worker addresses for each language.
 workers_by_lang = dict([(str(language), []) for language in languages])
@@ -457,16 +548,20 @@
 total_scenario_failures = 0
 qps_workers_killed = 0
 merged_resultset = {}
+perf_report_failures = 0
+
 for scenario in scenarios:
   if args.dry_run:
     print(scenario.name)
   else:
+    scenario_failures = 0
     try:
       for worker in scenario.workers:
         worker.start()
-      scenario_failures, resultset = jobset.run([scenario.jobspec,
-                                                create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host)],
-                                                newline_on_success=True, maxjobs=1)
+      jobs = [scenario.jobspec]
+      if scenario.workers:
+        jobs.append(create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host))
+      scenario_failures, resultset = jobset.run(jobs, newline_on_success=True, maxjobs=1)
       total_scenario_failures += scenario_failures
       merged_resultset = dict(itertools.chain(merged_resultset.iteritems(),
                                               resultset.iteritems()))
@@ -474,10 +569,27 @@
       # Consider qps workers that need to be killed as failures
       qps_workers_killed += finish_qps_workers(scenario.workers)
 
+    if perf_cmd and scenario_failures == 0 and not args.skip_generate_flamegraphs:
+      workers_and_base_names = {}
+      for worker in scenario.workers:
+        if not worker.perf_file_base_name:
+          raise Exception('using perf but perf report filename is unspecified')
+        workers_and_base_names[worker.host_and_port] = worker.perf_file_base_name
+      perf_report_failures += run_collect_perf_profile_jobs(workers_and_base_names, scenario.name)
+
+
+# Still write the index.html even if some scenarios failed.
+# 'profile_output_files' will only have names for scenarios that passed
+if perf_cmd and not args.skip_generate_flamegraphs:
+  # write the index file to the output dir, with all profiles from all scenarios/workers
+  report_utils.render_perf_profiling_results('%s/index.html' % _PERF_REPORT_OUTPUT_DIR, profile_output_files)
+
+if total_scenario_failures > 0 or qps_workers_killed > 0:
+  print('%s scenarios failed and %s qps worker jobs killed' % (total_scenario_failures, qps_workers_killed))
+  sys.exit(1)
 
 report_utils.render_junit_xml_report(merged_resultset, args.xml_report,
                                      suite_name='benchmarks')
-
-if total_scenario_failures > 0 or qps_workers_killed > 0:
-  print ("%s scenarios failed and %s qps worker jobs killed" % (total_scenario_failures, qps_workers_killed))
+if perf_report_failures > 0:
+  print('%s perf profile collection jobs failed' % perf_report_failures)
   sys.exit(1)
diff --git a/tools/run_tests/sanity/check_submodules.sh b/tools/run_tests/sanity/check_submodules.sh
index 6ec0786..be12f96 100755
--- a/tools/run_tests/sanity/check_submodules.sh
+++ b/tools/run_tests/sanity/check_submodules.sh
@@ -43,7 +43,7 @@
 cat << EOF | awk '{ print $1 }' | sort > $want_submodules
  c880e42ba1c8032d4cdde2aba0541d8a9d9fa2e9 third_party/boringssl (version_for_cocoapods_2.0-100-gc880e42)
  05b155ff59114735ec8cd089f669c4c3d8f59029 third_party/gflags (v2.1.0-45-g05b155f)
- 44c25c892a6229b20db7cd9dc05584ea865896de third_party/google_benchmark (v0.1.0-343-g44c25c8)
+ 44c25c892a6229b20db7cd9dc05584ea865896de third_party/benchmark (v0.1.0-343-g44c25c8)
  c99458533a9b4c743ed51537e25989ea55944908 third_party/googletest (release-1.7.0)
  a428e42072765993ff674fda72863c9f1aa2d268 third_party/protobuf (v3.1.0)
  50893291621658f355bc5b4d450a8d06a563053d third_party/zlib (v1.2.8)
diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json
index 6fedc9e..14700a0 100644
--- a/tools/run_tests/sources_and_headers.json
+++ b/tools/run_tests/sources_and_headers.json
@@ -2263,7 +2263,7 @@
   }, 
   {
     "deps": [
-      "google_benchmark", 
+      "benchmark", 
       "gpr", 
       "gpr_test_util", 
       "grpc", 
@@ -2913,7 +2913,7 @@
   }, 
   {
     "deps": [
-      "google_benchmark"
+      "benchmark"
     ], 
     "headers": [], 
     "is_filegroup": false, 
@@ -6207,30 +6207,30 @@
   {
     "deps": [], 
     "headers": [
-      "third_party/google_benchmark/include/benchmark/benchmark.h", 
-      "third_party/google_benchmark/include/benchmark/benchmark_api.h", 
-      "third_party/google_benchmark/include/benchmark/macros.h", 
-      "third_party/google_benchmark/include/benchmark/reporter.h", 
-      "third_party/google_benchmark/src/arraysize.h", 
-      "third_party/google_benchmark/src/benchmark_api_internal.h", 
-      "third_party/google_benchmark/src/check.h", 
-      "third_party/google_benchmark/src/colorprint.h", 
-      "third_party/google_benchmark/src/commandlineflags.h", 
-      "third_party/google_benchmark/src/complexity.h", 
-      "third_party/google_benchmark/src/cycleclock.h", 
-      "third_party/google_benchmark/src/internal_macros.h", 
-      "third_party/google_benchmark/src/log.h", 
-      "third_party/google_benchmark/src/mutex.h", 
-      "third_party/google_benchmark/src/re.h", 
-      "third_party/google_benchmark/src/sleep.h", 
-      "third_party/google_benchmark/src/stat.h", 
-      "third_party/google_benchmark/src/string_util.h", 
-      "third_party/google_benchmark/src/sysinfo.h", 
-      "third_party/google_benchmark/src/timers.h"
+      "third_party/benchmark/include/benchmark/benchmark.h", 
+      "third_party/benchmark/include/benchmark/benchmark_api.h", 
+      "third_party/benchmark/include/benchmark/macros.h", 
+      "third_party/benchmark/include/benchmark/reporter.h", 
+      "third_party/benchmark/src/arraysize.h", 
+      "third_party/benchmark/src/benchmark_api_internal.h", 
+      "third_party/benchmark/src/check.h", 
+      "third_party/benchmark/src/colorprint.h", 
+      "third_party/benchmark/src/commandlineflags.h", 
+      "third_party/benchmark/src/complexity.h", 
+      "third_party/benchmark/src/cycleclock.h", 
+      "third_party/benchmark/src/internal_macros.h", 
+      "third_party/benchmark/src/log.h", 
+      "third_party/benchmark/src/mutex.h", 
+      "third_party/benchmark/src/re.h", 
+      "third_party/benchmark/src/sleep.h", 
+      "third_party/benchmark/src/stat.h", 
+      "third_party/benchmark/src/string_util.h", 
+      "third_party/benchmark/src/sysinfo.h", 
+      "third_party/benchmark/src/timers.h"
     ], 
     "is_filegroup": false, 
     "language": "c++", 
-    "name": "google_benchmark", 
+    "name": "benchmark", 
     "src": [], 
     "third_party": false, 
     "type": "lib"
diff --git a/vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj b/vsprojects/vcxproj/benchmark/benchmark.vcxproj
similarity index 70%
rename from vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj
rename to vsprojects/vcxproj/benchmark/benchmark.vcxproj
index 52774e0..9f262b3 100644
--- a/vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj
+++ b/vsprojects/vcxproj/benchmark/benchmark.vcxproj
@@ -19,7 +19,7 @@
     </ProjectConfiguration>
   </ItemGroup>
   <PropertyGroup Label="Globals">
-    <ProjectGuid>{AAD4AEF3-DF1E-7A6D-EC35-233BD1031BF4}</ProjectGuid>
+    <ProjectGuid>{07978586-E47C-8709-A63E-895FBF3C3C7D}</ProjectGuid>
     <IgnoreWarnIntDirInTempDetected>true</IgnoreWarnIntDirInTempDetected>
     <IntDir>$(SolutionDir)IntDir\$(MSBuildProjectName)\</IntDir>
   </PropertyGroup>
@@ -57,10 +57,10 @@
   </ImportGroup>
   <PropertyGroup Label="UserMacros" />
   <PropertyGroup Condition="'$(Configuration)'=='Debug'">
-    <TargetName>google_benchmark</TargetName>
+    <TargetName>benchmark</TargetName>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)'=='Release'">
-    <TargetName>google_benchmark</TargetName>
+    <TargetName>benchmark</TargetName>
   </PropertyGroup>
     <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
     <ClCompile>
@@ -147,53 +147,53 @@
   </ItemDefinitionGroup>
 
   <ItemGroup>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\benchmark.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\benchmark_api.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\macros.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\reporter.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\arraysize.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark_api_internal.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\check.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\colorprint.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\commandlineflags.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\complexity.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\cycleclock.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\internal_macros.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\log.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\mutex.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\re.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\sleep.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\stat.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\string_util.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\sysinfo.h" />
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\timers.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\benchmark.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\benchmark_api.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\macros.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\reporter.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\arraysize.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark_api_internal.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\check.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\colorprint.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\commandlineflags.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\complexity.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\cycleclock.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\internal_macros.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\log.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\mutex.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\re.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\sleep.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\stat.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\string_util.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\sysinfo.h" />
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\timers.h" />
   </ItemGroup>
   <ItemGroup>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark_register.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark_register.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\colorprint.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\colorprint.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\commandlineflags.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\commandlineflags.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\complexity.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\complexity.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\console_reporter.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\console_reporter.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\csv_reporter.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\csv_reporter.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\json_reporter.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\json_reporter.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\reporter.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\reporter.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\sleep.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\sleep.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\string_util.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\string_util.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\sysinfo.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\sysinfo.cc">
     </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\timers.cc">
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\timers.cc">
     </ClCompile>
   </ItemGroup>
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
diff --git a/vsprojects/vcxproj/benchmark/benchmark.vcxproj.filters b/vsprojects/vcxproj/benchmark/benchmark.vcxproj.filters
new file mode 100644
index 0000000..ccc9ca2
--- /dev/null
+++ b/vsprojects/vcxproj/benchmark/benchmark.vcxproj.filters
@@ -0,0 +1,125 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark_register.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\colorprint.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\commandlineflags.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\complexity.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\console_reporter.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\csv_reporter.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\json_reporter.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\reporter.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\sleep.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\string_util.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\sysinfo.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+    <ClCompile Include="$(SolutionDir)\..\third_party\benchmark\src\timers.cc">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClCompile>
+  </ItemGroup>
+  <ItemGroup>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\benchmark.h">
+      <Filter>third_party\benchmark\include\benchmark</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\benchmark_api.h">
+      <Filter>third_party\benchmark\include\benchmark</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\macros.h">
+      <Filter>third_party\benchmark\include\benchmark</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\include\benchmark\reporter.h">
+      <Filter>third_party\benchmark\include\benchmark</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\arraysize.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\benchmark_api_internal.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\check.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\colorprint.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\commandlineflags.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\complexity.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\cycleclock.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\internal_macros.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\log.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\mutex.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\re.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\sleep.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\stat.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\string_util.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\sysinfo.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+    <ClInclude Include="$(SolutionDir)\..\third_party\benchmark\src\timers.h">
+      <Filter>third_party\benchmark\src</Filter>
+    </ClInclude>
+  </ItemGroup>
+
+  <ItemGroup>
+    <Filter Include="third_party">
+      <UniqueIdentifier>{7b593518-9fee-107e-6b64-24bdce73f939}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="third_party\benchmark">
+      <UniqueIdentifier>{f0d35de1-6b41-778d-0ba0-faad514fb0f4}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="third_party\benchmark\include">
+      <UniqueIdentifier>{cbc02dfa-face-8cc6-0efb-efacc0c3369c}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="third_party\benchmark\include\benchmark">
+      <UniqueIdentifier>{4f2f03fc-b82d-df33-63ee-bedebeb2c0ee}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="third_party\benchmark\src">
+      <UniqueIdentifier>{f42a8e0a-5a76-0e6f-d708-f0306858f673}</UniqueIdentifier>
+    </Filter>
+  </ItemGroup>
+</Project>
+
diff --git a/vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj.filters b/vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj.filters
deleted file mode 100644
index 9db6ed4..0000000
--- a/vsprojects/vcxproj/google_benchmark/google_benchmark.vcxproj.filters
+++ /dev/null
@@ -1,125 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-  <ItemGroup>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark_register.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\colorprint.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\commandlineflags.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\complexity.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\console_reporter.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\csv_reporter.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\json_reporter.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\reporter.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\sleep.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\string_util.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\sysinfo.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-    <ClCompile Include="$(SolutionDir)\..\third_party\google_benchmark\src\timers.cc">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClCompile>
-  </ItemGroup>
-  <ItemGroup>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\benchmark.h">
-      <Filter>third_party\google_benchmark\include\benchmark</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\benchmark_api.h">
-      <Filter>third_party\google_benchmark\include\benchmark</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\macros.h">
-      <Filter>third_party\google_benchmark\include\benchmark</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\include\benchmark\reporter.h">
-      <Filter>third_party\google_benchmark\include\benchmark</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\arraysize.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\benchmark_api_internal.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\check.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\colorprint.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\commandlineflags.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\complexity.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\cycleclock.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\internal_macros.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\log.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\mutex.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\re.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\sleep.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\stat.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\string_util.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\sysinfo.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-    <ClInclude Include="$(SolutionDir)\..\third_party\google_benchmark\src\timers.h">
-      <Filter>third_party\google_benchmark\src</Filter>
-    </ClInclude>
-  </ItemGroup>
-
-  <ItemGroup>
-    <Filter Include="third_party">
-      <UniqueIdentifier>{7458b63d-7ba4-103d-2bed-3e3ad30d8237}</UniqueIdentifier>
-    </Filter>
-    <Filter Include="third_party\google_benchmark">
-      <UniqueIdentifier>{54a154e8-669b-a7c1-9b6e-bd1aab2f86e3}</UniqueIdentifier>
-    </Filter>
-    <Filter Include="third_party\google_benchmark\include">
-      <UniqueIdentifier>{f54c3cb1-ec20-a651-6956-78379b51e1a5}</UniqueIdentifier>
-    </Filter>
-    <Filter Include="third_party\google_benchmark\include\benchmark">
-      <UniqueIdentifier>{0483a457-8050-4565-bc15-09695bf7b822}</UniqueIdentifier>
-    </Filter>
-    <Filter Include="third_party\google_benchmark\src">
-      <UniqueIdentifier>{c39ff2d1-691e-4614-4d75-4bc20db05e09}</UniqueIdentifier>
-    </Filter>
-  </ItemGroup>
-</Project>
-
diff --git a/vsprojects/vcxproj/test/bm_fullstack/bm_fullstack.vcxproj b/vsprojects/vcxproj/test/bm_fullstack/bm_fullstack.vcxproj
index 1ce993e..3809beb 100644
--- a/vsprojects/vcxproj/test/bm_fullstack/bm_fullstack.vcxproj
+++ b/vsprojects/vcxproj/test/bm_fullstack/bm_fullstack.vcxproj
@@ -164,8 +164,8 @@
     </ClCompile>
   </ItemGroup>
   <ItemGroup>
-    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\google_benchmark\google_benchmark.vcxproj">
-      <Project>{AAD4AEF3-DF1E-7A6D-EC35-233BD1031BF4}</Project>
+    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\benchmark\benchmark.vcxproj">
+      <Project>{07978586-E47C-8709-A63E-895FBF3C3C7D}</Project>
     </ProjectReference>
     <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\grpc++_test_util\grpc++_test_util.vcxproj">
       <Project>{0BE77741-552A-929B-A497-4EF7ECE17A64}</Project>
diff --git a/vsprojects/vcxproj/test/noop-benchmark/noop-benchmark.vcxproj b/vsprojects/vcxproj/test/noop-benchmark/noop-benchmark.vcxproj
index 99f33b2..15a82c0 100644
--- a/vsprojects/vcxproj/test/noop-benchmark/noop-benchmark.vcxproj
+++ b/vsprojects/vcxproj/test/noop-benchmark/noop-benchmark.vcxproj
@@ -164,8 +164,8 @@
     </ClCompile>
   </ItemGroup>
   <ItemGroup>
-    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\google_benchmark\google_benchmark.vcxproj">
-      <Project>{AAD4AEF3-DF1E-7A6D-EC35-233BD1031BF4}</Project>
+    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\benchmark\benchmark.vcxproj">
+      <Project>{07978586-E47C-8709-A63E-895FBF3C3C7D}</Project>
     </ProjectReference>
   </ItemGroup>
   <ItemGroup>