Merge branch 'master' of github.com:grpc/grpc into make-ruby-installable
diff --git a/.gitignore b/.gitignore
index 3294edb..cf05a2c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,6 +13,10 @@
 dist/
 *.egg
 
+# Node installation output
+node_modules/
+src/node/extension_binary/
+
 # gcov coverage data
 reports
 coverage
diff --git a/binding.gyp b/binding.gyp
index b1c0214..651c2e1 100644
--- a/binding.gyp
+++ b/binding.gyp
@@ -37,29 +37,45 @@
 # Some of this file is built with the help of
 # https://n8.io/converting-a-c-library-to-gyp/
 {
-  'variables': {
-    'config': '<!(echo $CONFIG)'
-  },
-  # TODO: Finish windows support
   'target_defaults': {
-      # Empirically, Node only exports ALPN symbols if its major version is >0.
-      # io.js always reports versions >0 and always exports ALPN symbols.
-      # Therefore, Node's major version will be truthy if and only if it
-      # supports ALPN. The output of "node -v" is v[major].[minor].[patch],
-      # like "v4.1.1" in a recent version. We use cut to split by period and
-      # take the first field (resulting in "v[major]"), then use cut again
-      # to take all but the first character, removing the "v".
-    'defines': [
-      'TSI_OPENSSL_ALPN_SUPPORT=<!(node --version | cut -d. -f1 | cut -c2-)'
-    ],
     'include_dirs': [
       '.',
       'include'
     ],
     'conditions': [
       ['OS == "win"', {
-        "include_dirs": [ "third_party/boringssl/include" ]
-      }, {
+        "include_dirs": [ "third_party/boringssl/include" ],
+        "defines": [
+          '_WIN32_WINNT=0x0600',
+          'WIN32_LEAN_AND_MEAN',
+          '_HAS_EXCEPTIONS=0',
+          'UNICODE',
+          '_UNICODE',
+          'NOMINMAX',
+          'OPENSSL_NO_ASM'
+        ],
+        "msvs_settings": {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': 1, # static debug
+          }
+        },
+        "libraries": [
+          "ws2_32"
+        ]
+      }, { # OS != "win"
+          # Empirically, Node only exports ALPN symbols if its major version is >0.
+          # io.js always reports versions >0 and always exports ALPN symbols.
+          # Therefore, Node's major version will be truthy if and only if it
+          # supports ALPN. The output of "node -v" is v[major].[minor].[patch],
+          # like "v4.1.1" in a recent version. We use cut to split by period and
+          # take the first field (resulting in "v[major]"), then use cut again
+          # to take all but the first character, removing the "v".
+        'defines': [
+          'TSI_OPENSSL_ALPN_SUPPORT=<!(node --version | cut -d. -f1 | cut -c2-)'
+        ],
+        'variables': {
+          'config': '<!(echo $CONFIG)'
+        },
         'include_dirs': [
           '<(node_root_dir)/deps/openssl/openssl/include',
           '<(node_root_dir)/deps/zlib'
@@ -93,6 +109,29 @@
   'conditions': [
     ['OS == "win"', {
       'targets': [
+        {
+          # IMPORTANT WINDOWS BUILD INFORMATION
+          # This library does not build on Windows without modifying the Node
+          # development packages that node-gyp downloads in order to build.
+          # Due to https://github.com/nodejs/node/issues/4932, the headers for
+          # BoringSSL conflict with the OpenSSL headers included by default
+          # when including the Node headers. The remedy for this is to remove
+          # the OpenSSL headers from the downloaded Node development package,
+          # which is typically located in `.node-gyp` in your home directory.
+          'target_name': 'WINDOWS_BUILD_WARNING',
+          'actions': [
+            {
+              'action_name': 'WINDOWS_BUILD_WARNING',
+              'inputs': [
+                'package.json'
+              ],
+              'outputs': [
+                'ignore_this_part'
+              ],
+              'action': ['echo', 'IMPORTANT: Due to https://github.com/nodejs/node/issues/4932, to build this library on Windows, you must first remove <(node_root_dir)/include/node/openssl/']
+            }
+          ]
+        },
         # Only want to compile BoringSSL and zlib under Windows
         {
           'cflags': [
@@ -400,8 +439,7 @@
             'third_party/boringssl/ssl/t1_enc.c',
             'third_party/boringssl/ssl/t1_lib.c',
             'third_party/boringssl/ssl/tls_record.c',
-          ],
-          "include_dirs": [ "third_party/boringssl/include" ]
+          ]
         },
         {
           'cflags': [
@@ -430,8 +468,7 @@
             'third_party/zlib/trees.c',
             'third_party/zlib/uncompr.c',
             'third_party/zlib/zutil.c',
-          ],
-          "include_dirs": [ "third_party/boringssl/include" ]
+          ]
         },
       ]
     }]
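
The non-Windows branch above derives the TSI_OPENSSL_ALPN_SUPPORT define from the shell pipeline `node --version | cut -d. -f1 | cut -c2-`, as the comment explains. A minimal sketch of the same extraction, assuming only that `node` is on the PATH and prints a version of the form v[major].[minor].[patch]:

    import subprocess

    def node_major_version():
      # "node --version" prints e.g. "v4.1.1"; the first dot-separated field
      # is "v4", and dropping the leading character leaves "4".
      version = subprocess.check_output(['node', '--version']).decode().strip()
      return version.split('.')[0][1:]

    # Node 0.x yields "0", which is falsy in the resulting preprocessor define,
    # so ALPN support ends up enabled only for Node/io.js major versions >= 1.
    print(node_major_version())
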
diff --git a/src/node/ext/node_grpc.cc b/src/node/ext/node_grpc.cc
index a2b8e0d..654c5ae 100644
--- a/src/node/ext/node_grpc.cc
+++ b/src/node/ext/node_grpc.cc
@@ -112,8 +112,8 @@
   Nan::Set(exports, Nan::New("callError").ToLocalChecked(), call_error);
   Local<Value> OK(Nan::New<Uint32, uint32_t>(GRPC_CALL_OK));
   Nan::Set(call_error, Nan::New("OK").ToLocalChecked(), OK);
-  Local<Value> ERROR(Nan::New<Uint32, uint32_t>(GRPC_CALL_ERROR));
-  Nan::Set(call_error, Nan::New("ERROR").ToLocalChecked(), ERROR);
+  Local<Value> CALL_ERROR(Nan::New<Uint32, uint32_t>(GRPC_CALL_ERROR));
+  Nan::Set(call_error, Nan::New("ERROR").ToLocalChecked(), CALL_ERROR);
   Local<Value> NOT_ON_SERVER(
       Nan::New<Uint32, uint32_t>(GRPC_CALL_ERROR_NOT_ON_SERVER));
   Nan::Set(call_error, Nan::New("NOT_ON_SERVER").ToLocalChecked(),
diff --git a/templates/binding.gyp.template b/templates/binding.gyp.template
index 71276e7..a913585 100644
--- a/templates/binding.gyp.template
+++ b/templates/binding.gyp.template
@@ -39,29 +39,45 @@
   # Some of this file is built with the help of
   # https://n8.io/converting-a-c-library-to-gyp/
   {
-    'variables': {
-      'config': '<!(echo $CONFIG)'
-    },
-    # TODO: Finish windows support
     'target_defaults': {
-        # Empirically, Node only exports ALPN symbols if its major version is >0.
-        # io.js always reports versions >0 and always exports ALPN symbols.
-        # Therefore, Node's major version will be truthy if and only if it
-        # supports ALPN. The output of "node -v" is v[major].[minor].[patch],
-        # like "v4.1.1" in a recent version. We use cut to split by period and
-        # take the first field (resulting in "v[major]"), then use cut again
-        # to take all but the first character, removing the "v".
-      'defines': [
-        'TSI_OPENSSL_ALPN_SUPPORT=<!(node --version | cut -d. -f1 | cut -c2-)'
-      ],
       'include_dirs': [
         '.',
         'include'
       ],
       'conditions': [
         ['OS == "win"', {
-          "include_dirs": [ "third_party/boringssl/include" ]
-        }, {
+          "include_dirs": [ "third_party/boringssl/include" ],
+          "defines": [
+            '_WIN32_WINNT=0x0600',
+            'WIN32_LEAN_AND_MEAN',
+            '_HAS_EXCEPTIONS=0',
+            'UNICODE',
+            '_UNICODE',
+            'NOMINMAX',
+            'OPENSSL_NO_ASM'
+          ],
+          "msvs_settings": {
+            'VCCLCompilerTool': {
+              'RuntimeLibrary': 1, # static debug
+            }
+          },
+          "libraries": [
+            "ws2_32"
+          ]
+        }, { # OS != "win"
+            # Empirically, Node only exports ALPN symbols if its major version is >0.
+            # io.js always reports versions >0 and always exports ALPN symbols.
+            # Therefore, Node's major version will be truthy if and only if it
+            # supports ALPN. The output of "node -v" is v[major].[minor].[patch],
+            # like "v4.1.1" in a recent version. We use cut to split by period and
+            # take the first field (resulting in "v[major]"), then use cut again
+            # to take all but the first character, removing the "v".
+          'defines': [
+            'TSI_OPENSSL_ALPN_SUPPORT=<!(node --version | cut -d. -f1 | cut -c2-)'
+          ],
+          'variables': {
+            'config': '<!(echo $CONFIG)'
+          },
           'include_dirs': [
             '<(node_root_dir)/deps/openssl/openssl/include',
             '<(node_root_dir)/deps/zlib'
@@ -95,6 +111,29 @@
     'conditions': [
       ['OS == "win"', {
         'targets': [
+          {
+            # IMPORTANT WINDOWS BUILD INFORMATION
+            # This library does not build on Windows without modifying the Node
+            # development packages that node-gyp downloads in order to build.
+            # Due to https://github.com/nodejs/node/issues/4932, the headers for
+            # BoringSSL conflict with the OpenSSL headers included by default
+            # when including the Node headers. The remedy for this is to remove
+            # the OpenSSL headers from the downloaded Node development package,
+            # which is typically located in `.node-gyp` in your home directory.
+            'target_name': 'WINDOWS_BUILD_WARNING',
+            'actions': [
+              {
+                'action_name': 'WINDOWS_BUILD_WARNING',
+                'inputs': [
+                  'package.json'
+                ],
+                'outputs': [
+                  'ignore_this_part'
+                ],
+                'action': ['echo', 'IMPORTANT: Due to https://github.com/nodejs/node/issues/4932, to build this library on Windows, you must first remove <(node_root_dir)/include/node/openssl/']
+              }
+            ]
+          },
           # Only want to compile BoringSSL and zlib under Windows
           % for module in node_modules:
           % for lib in libs:
@@ -117,8 +156,7 @@
               % for source in lib.src:
               '${source}',
               % endfor
-            ],
-            "include_dirs": [ "third_party/boringssl/include" ]
+            ]
           },
           % endif
           % endfor
diff --git a/tools/jenkins/build_artifacts.sh b/tools/jenkins/build_artifacts.sh
index d591201..9af553a 100755
--- a/tools/jenkins/build_artifacts.sh
+++ b/tools/jenkins/build_artifacts.sh
@@ -36,4 +36,7 @@
 # NOTE: No empty lines should appear in this file before igncr is set!
 set -ex -o igncr || set -ex
 
-python tools/run_tests/build_artifacts.py $@
+curr_platform="$platform"
+unset platform  # variable named 'platform' breaks the Windows build
+
+python tools/run_tests/task_runner.py -f artifact $language $curr_platform $architecture
diff --git a/tools/jenkins/build_packages.sh b/tools/jenkins/build_packages.sh
new file mode 100644
index 0000000..d795e35
--- /dev/null
+++ b/tools/jenkins/build_packages.sh
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# This script is invoked by Jenkins and triggers the build of distribution packages.
+#
+# To prevent cygwin bash complaining about empty lines ending with \r
+# we set the igncr option. The option doesn't exist on Linux, so we fall back
+# to just 'set -ex' there.
+# NOTE: No empty lines should appear in this file before igncr is set!
+set -ex -o igncr || set -ex
+
+curr_platform="$platform"
+unset platform  # variable named 'platform' breaks the Windows build
+
+python tools/run_tests/task_runner.py -f package $curr_platform
diff --git a/tools/run_tests/artifact_targets.py b/tools/run_tests/artifact_targets.py
new file mode 100644
index 0000000..a34fa8e
--- /dev/null
+++ b/tools/run_tests/artifact_targets.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Definition of targets to build artifacts."""
+
+import jobset
+
+
+def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
+                   flake_retries=0, timeout_retries=0):
+  """Creates jobspec for a task running under docker."""
+  environ = environ.copy()
+  environ['RUN_COMMAND'] = shell_command
+
+  docker_args=[]
+  for k,v in environ.iteritems():
+    docker_args += ['-e', '%s=%s' % (k, v)]
+  docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
+                'DOCKER_RUN_SCRIPT': 'tools/jenkins/docker_run.sh',
+                'OUTPUT_DIR': 'artifacts'}
+  jobspec = jobset.JobSpec(
+          cmdline=['tools/jenkins/build_and_run_docker.sh'] + docker_args,
+          environ=docker_env,
+          shortname='build_artifact.%s' % (name),
+          timeout_seconds=30*60,
+          flake_retries=flake_retries,
+          timeout_retries=timeout_retries)
+  return jobspec
+
+
+def create_jobspec(name, cmdline, environ=None, shell=False,
+                   flake_retries=0, timeout_retries=0):
+  """Creates jobspec."""
+  jobspec = jobset.JobSpec(
+          cmdline=cmdline,
+          environ=environ,
+          shortname='build_artifact.%s' % (name),
+          timeout_seconds=5*60,
+          flake_retries=flake_retries,
+          timeout_retries=timeout_retries,
+          shell=shell)
+  return jobspec
+
+
+def macos_arch_env(arch):
+  """Returns environ specifying -arch arguments for make."""
+  if arch == 'x86':
+    arch_arg = '-arch i386'
+  elif arch == 'x64':
+    arch_arg = '-arch x86_64'
+  else:
+    raise Exception('Unsupported arch')
+  return {'CFLAGS': arch_arg, 'LDFLAGS': arch_arg}
+
+
+class CSharpExtArtifact:
+  """Builds C# native extension library"""
+
+  def __init__(self, platform, arch):
+    self.name = 'csharp_ext_%s_%s' % (platform, arch)
+    self.platform = platform
+    self.arch = arch
+    self.labels = ['artifact', 'csharp', platform, arch]
+
+  def pre_build_jobspecs(self):
+    if self.platform == 'windows':
+      return [create_jobspec('prebuild_%s' % self.name,
+                             ['tools\\run_tests\\pre_build_c.bat'],
+                             shell=True,
+                             flake_retries=5,
+                             timeout_retries=2)]
+    else:
+      return []
+
+  def build_jobspec(self):
+    if self.platform == 'windows':
+      msbuild_platform = 'Win32' if self.arch == 'x86' else self.arch
+      return create_jobspec(self.name,
+                            ['tools\\run_tests\\build_artifact_csharp.bat',
+                             'vsprojects\\grpc_csharp_ext.sln',
+                             '/p:Configuration=Release',
+                             '/p:PlatformToolset=v120',
+                             '/p:Platform=%s' % msbuild_platform],
+                            shell=True)
+    else:
+      environ = {'CONFIG': 'opt',
+                 'EMBED_OPENSSL': 'true',
+                 'EMBED_ZLIB': 'true'}
+      if self.platform == 'linux':
+        return create_docker_jobspec(self.name,
+            'tools/dockerfile/grpc_artifact_linux_%s' % self.arch,
+            'tools/run_tests/build_artifact_csharp.sh')
+      else:
+        environ.update(macos_arch_env(self.arch))
+        return create_jobspec(self.name,
+                              ['tools/run_tests/build_artifact_csharp.sh'],
+                              environ=environ)
+
+  def __str__(self):
+    return self.name
+
+
+def targets():
+  """Gets list of supported targets"""
+  return [CSharpExtArtifact('linux', 'x86'),
+          CSharpExtArtifact('linux', 'x64'),
+          CSharpExtArtifact('macos', 'x86'),
+          CSharpExtArtifact('macos', 'x64'),
+          CSharpExtArtifact('windows', 'x86'),
+          CSharpExtArtifact('windows', 'x64')]
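
The 'artifact' entry added to each target's labels is what lets build_artifacts.sh above select these targets via `-f artifact`. A small sketch of that selection, mirroring the AND-semantics filter used by task_runner.py later in this change (assumes it is run from tools/run_tests so that jobset and this module import cleanly):

    import artifact_targets

    targets = artifact_targets.targets()
    requested = ['artifact', 'csharp', 'linux']  # e.g. task_runner.py -f artifact csharp linux
    selected = [t for t in targets if all(label in t.labels for label in requested)]
    print([t.name for t in selected])
    # -> ['csharp_ext_linux_x86', 'csharp_ext_linux_x64']
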
diff --git a/tools/run_tests/build_artifacts.py b/tools/run_tests/build_artifacts.py
deleted file mode 100755
index 0337f1b..0000000
--- a/tools/run_tests/build_artifacts.py
+++ /dev/null
@@ -1,238 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Builds gRPC distribution artifacts."""
-
-import argparse
-import atexit
-import dockerjob
-import itertools
-import jobset
-import json
-import multiprocessing
-import os
-import re
-import subprocess
-import sys
-import time
-import uuid
-
-# Docker doesn't clean up after itself, so we do it on exit.
-if jobset.platform_string() == 'linux':
-  atexit.register(lambda: subprocess.call(['stty', 'echo']))
-
-ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
-os.chdir(ROOT)
-
-
-def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
-                   flake_retries=0, timeout_retries=0):
-  """Creates jobspec for a task running under docker."""
-  environ = environ.copy()
-  environ['RUN_COMMAND'] = shell_command
-
-  #docker_args = ['-v', '%s/artifacts:/var/local/jenkins/grpc/artifacts' % ROOT]
-  docker_args=[]
-  for k,v in environ.iteritems():
-    docker_args += ['-e', '%s=%s' % (k, v)]
-  docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
-                'DOCKER_RUN_SCRIPT': 'tools/jenkins/docker_run.sh',
-                'OUTPUT_DIR': 'artifacts'}
-  jobspec = jobset.JobSpec(
-          cmdline=['tools/jenkins/build_and_run_docker.sh'] + docker_args,
-          environ=docker_env,
-          shortname='build_artifact.%s' % (name),
-          timeout_seconds=30*60,
-          flake_retries=flake_retries,
-          timeout_retries=timeout_retries)
-  return jobspec
-
-
-def create_jobspec(name, cmdline, environ=None, shell=False,
-                   flake_retries=0, timeout_retries=0):
-  """Creates jobspec."""
-  jobspec = jobset.JobSpec(
-          cmdline=cmdline,
-          environ=environ,
-          shortname='build_artifact.%s' % (name),
-          timeout_seconds=5*60,
-          flake_retries=flake_retries,
-          timeout_retries=timeout_retries,
-          shell=shell)
-  return jobspec
-
-
-def macos_arch_env(arch):
-  """Returns environ specifying -arch arguments for make."""
-  if arch == 'x86':
-    arch_arg = '-arch i386'
-  elif arch == 'x64':
-    arch_arg = '-arch x86_64'
-  else:
-    raise Exception('Unsupported arch')
-  return {'CFLAGS': arch_arg, 'LDFLAGS': arch_arg}
-
-
-class CSharpExtArtifact:
-  """Builds C# native extension library"""
-
-  def __init__(self, platform, arch):
-    self.name = 'csharp_ext_%s_%s' % (platform, arch)
-    self.platform = platform
-    self.arch = arch
-    self.labels = ['csharp', platform, arch]
-
-  def pre_build_jobspecs(self):
-    if self.platform == 'windows':
-      return [create_jobspec('prebuild_%s' % self.name,
-                             ['tools\\run_tests\\pre_build_c.bat'],
-                             shell=True,
-                             flake_retries=5,
-                             timeout_retries=2)]
-    else:
-      return []
-
-  def build_jobspec(self):
-    if self.platform == 'windows':
-      msbuild_platform = 'Win32' if self.arch == 'x86' else self.arch
-      return create_jobspec(self.name,
-                            ['tools\\run_tests\\build_artifact_csharp.bat',
-                             'vsprojects\\grpc_csharp_ext.sln',
-                             '/p:Configuration=Release',
-                             '/p:PlatformToolset=v120',
-                             '/p:Platform=%s' % msbuild_platform],
-                            shell=True)
-    else:
-      environ = {'CONFIG': 'opt',
-                 'EMBED_OPENSSL': 'true',
-                 'EMBED_ZLIB': 'true'}
-      if self.platform == 'linux':
-        return create_docker_jobspec(self.name,
-            'tools/dockerfile/grpc_artifact_linux_%s' % self.arch,
-            'tools/run_tests/build_artifact_csharp.sh')
-      else:
-        environ.update(macos_arch_env(self.arch))
-        return create_jobspec(self.name,
-                              ['tools/run_tests/build_artifact_csharp.sh'],
-                              environ=environ)
-
-  def __str__(self):
-    return self.name
-
-
-_ARTIFACTS = [
-    CSharpExtArtifact('linux', 'x86'),
-    CSharpExtArtifact('linux', 'x64'),
-    CSharpExtArtifact('macos', 'x86'),
-    CSharpExtArtifact('macos', 'x64'),
-    CSharpExtArtifact('windows', 'x86'),
-    CSharpExtArtifact('windows', 'x64')
-]
-
-
-def _create_build_map():
-  """Maps artifact names and labels to list of artifacts to be built."""
-  artifact_build_map = dict([(artifact.name, [artifact])
-                             for artifact in _ARTIFACTS])
-  if len(_ARTIFACTS) > len(artifact_build_map.keys()):
-    raise Exception('Artifact names need to be unique')
-
-  label_build_map = {}
-  label_build_map['all'] = [a for a in _ARTIFACTS]  # to build all artifacts
-  for artifact in _ARTIFACTS:
-    for label in artifact.labels:
-      if label in label_build_map:
-        label_build_map[label].append(artifact)
-      else:
-        label_build_map[label] = [artifact]
-
-  if set(artifact_build_map.keys()).intersection(label_build_map.keys()):
-    raise Exception('Artifact names need to be distinct from label names')
-  return dict( artifact_build_map.items() + label_build_map.items())
-
-
-_BUILD_MAP = _create_build_map()
-
-argp = argparse.ArgumentParser(description='Builds distribution artifacts.')
-argp.add_argument('-b', '--build',
-                  choices=sorted(_BUILD_MAP.keys()),
-                  nargs='+',
-                  default=['all'],
-                  help='Artifact name or artifact label to build.')
-argp.add_argument('-f', '--filter',
-                  choices=sorted(_BUILD_MAP.keys()),
-                  nargs='+',
-                  default=[],
-                  help='Filter artifacts to build with AND semantics.')
-argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
-argp.add_argument('-t', '--travis',
-                  default=False,
-                  action='store_const',
-                  const=True)
-
-args = argp.parse_args()
-
-# Figure out which artifacts to build
-artifacts = []
-for label in args.build:
-  artifacts += _BUILD_MAP[label]
-
-# Among target selected by -b, filter out those that don't match the filter
-artifacts = [a for a in artifacts if all(f in a.labels for f in args.filter)]
-artifacts = sorted(set(artifacts))
-
-# Execute pre-build phase
-prebuild_jobs = []
-for artifact in artifacts:
-  prebuild_jobs += artifact.pre_build_jobspecs()
-if prebuild_jobs:
-  num_failures, _ = jobset.run(
-    prebuild_jobs, newline_on_success=True, maxjobs=args.jobs)
-  if num_failures != 0:
-    jobset.message('FAILED', 'Pre-build phase failed.', do_newline=True)
-    sys.exit(1)
-
-build_jobs = []
-for artifact in artifacts:
-  build_jobs.append(artifact.build_jobspec())
-if not build_jobs:
-  print 'Nothing to build.'
-  sys.exit(1)
-
-jobset.message('START', 'Building artifacts.', do_newline=True)
-num_failures, _ = jobset.run(
-    build_jobs, newline_on_success=True, maxjobs=args.jobs)
-if num_failures == 0:
-  jobset.message('SUCCESS', 'All artifacts built successfully.',
-                 do_newline=True)
-else:
-  jobset.message('FAILED', 'Failed to build artifacts.',
-                 do_newline=True)
-  sys.exit(1)
diff --git a/tools/run_tests/package_targets.py b/tools/run_tests/package_targets.py
new file mode 100644
index 0000000..839991e
--- /dev/null
+++ b/tools/run_tests/package_targets.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Definition of targets to build distribution packages."""
+
+import jobset
+
+
+def create_jobspec(name, cmdline, environ=None, cwd=None, shell=False,
+                   flake_retries=0, timeout_retries=0):
+  """Creates jobspec."""
+  jobspec = jobset.JobSpec(
+          cmdline=cmdline,
+          environ=environ,
+          cwd=cwd,
+          shortname='build_package.%s' % (name),
+          timeout_seconds=10*60,
+          flake_retries=flake_retries,
+          timeout_retries=timeout_retries,
+          shell=shell)
+  return jobspec
+
+
+class CSharpNugetTarget:
+  """Builds C# nuget packages."""
+
+  def __init__(self):
+    self.name = 'csharp_nuget'
+    self.labels = ['package', 'csharp', 'windows']
+
+  def pre_build_jobspecs(self):
+    return []
+
+  def build_jobspec(self):
+    return create_jobspec(self.name,
+                          ['build_packages.bat'],
+                          cwd='src\\csharp',
+                          shell=True)
+
+  def __str__(self):
+    return self.name
+
+
+def targets():
+  """Gets list of supported targets"""
+  return [CSharpNugetTarget()]
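
Package targets follow the same informal interface as the artifact targets (name, labels, pre_build_jobspecs(), build_jobspec()), which is what lets the task runner below treat both uniformly. A hedged sketch of what an additional package target could look like if placed alongside create_jobspec in this module; the class name and script path are hypothetical, not part of this change:

    class PythonPackageTarget:
      """Hypothetical example; illustrates the target interface only."""

      def __init__(self):
        self.name = 'python_package'
        self.labels = ['package', 'python', 'linux']

      def pre_build_jobspecs(self):
        return []

      def build_jobspec(self):
        # build_package_python.sh is a placeholder script name.
        return create_jobspec(self.name,
                              ['tools/run_tests/build_package_python.sh'],
                              shell=True)

      def __str__(self):
        return self.name
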
diff --git a/tools/run_tests/task_runner.py b/tools/run_tests/task_runner.py
new file mode 100644
index 0000000..39b15cc
--- /dev/null
+++ b/tools/run_tests/task_runner.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Runs selected gRPC test/build tasks."""
+
+import argparse
+import atexit
+import jobset
+import multiprocessing
+import sys
+
+import artifact_targets
+import package_targets
+
+_TARGETS = []
+_TARGETS += artifact_targets.targets()
+_TARGETS += package_targets.targets()
+
+def _create_build_map():
+  """Maps task names and labels to list of tasks to be built."""
+  target_build_map = dict([(target.name, [target])
+                           for target in _TARGETS])
+  if len(_TARGETS) > len(target_build_map.keys()):
+    raise Exception('Target names need to be unique')
+
+  label_build_map = {}
+  label_build_map['all'] = [t for t in _TARGETS]  # to build all targets
+  for target in _TARGETS:
+    for label in target.labels:
+      if label in label_build_map:
+        label_build_map[label].append(target)
+      else:
+        label_build_map[label] = [target]
+
+  if set(target_build_map.keys()).intersection(label_build_map.keys()):
+    raise Exception('Target names need to be distinct from label names')
+  return dict(target_build_map.items() + label_build_map.items())
+
+
+_BUILD_MAP = _create_build_map()
+
+argp = argparse.ArgumentParser(description='Runs build/test targets.')
+argp.add_argument('-b', '--build',
+                  choices=sorted(_BUILD_MAP.keys()),
+                  nargs='+',
+                  default=['all'],
+                  help='Target name or target label to build.')
+argp.add_argument('-f', '--filter',
+                  choices=sorted(_BUILD_MAP.keys()),
+                  nargs='+',
+                  default=[],
+                  help='Filter targets to build with AND semantics.')
+argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
+argp.add_argument('-t', '--travis',
+                  default=False,
+                  action='store_const',
+                  const=True)
+
+args = argp.parse_args()
+
+# Figure out which targets to build
+targets = []
+for label in args.build:
+  targets += _BUILD_MAP[label]
+
+# Among targets selected by -b, filter out those that don't match the filter
+targets = [t for t in targets if all(f in t.labels for f in args.filter)]
+targets = sorted(set(targets))
+
+# Execute pre-build phase
+prebuild_jobs = []
+for target in targets:
+  prebuild_jobs += target.pre_build_jobspecs()
+if prebuild_jobs:
+  num_failures, _ = jobset.run(
+    prebuild_jobs, newline_on_success=True, maxjobs=args.jobs)
+  if num_failures != 0:
+    jobset.message('FAILED', 'Pre-build phase failed.', do_newline=True)
+    sys.exit(1)
+
+build_jobs = []
+for target in targets:
+  build_jobs.append(target.build_jobspec())
+if not build_jobs:
+  print 'Nothing to build.'
+  sys.exit(1)
+
+jobset.message('START', 'Building targets.', do_newline=True)
+num_failures, _ = jobset.run(
+    build_jobs, newline_on_success=True, maxjobs=args.jobs)
+if num_failures == 0:
+  jobset.message('SUCCESS', 'All targets built successfully.',
+                 do_newline=True)
+else:
+  jobset.message('FAILED', 'Failed to build targets.',
+                 do_newline=True)
+  sys.exit(1)
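
_create_build_map() keys every target both under its own name and under each of its labels, so -b accepts either form. A compact sketch of the resulting shape using two of the targets defined in this change (FakeTarget is a stand-in, not real code in the patch):

    class FakeTarget(object):
      def __init__(self, name, labels):
        self.name = name
        self.labels = labels

    targets = [FakeTarget('csharp_ext_linux_x64', ['artifact', 'csharp', 'linux', 'x64']),
               FakeTarget('csharp_nuget', ['package', 'csharp', 'windows'])]

    build_map = {'all': list(targets)}
    for t in targets:
      build_map[t.name] = [t]                      # one key per unique target name
      for label in t.labels:
        build_map.setdefault(label, []).append(t)  # labels fan out to many targets

    print(sorted(build_map.keys()))
    # ['all', 'artifact', 'csharp', 'csharp_ext_linux_x64', 'csharp_nuget',
    #  'linux', 'package', 'windows', 'x64']
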