Jan Tattermusch | 4ecf24b | 2016-01-15 09:12:11 -0800 | [diff] [blame] | 1 | #!/usr/bin/env python |
| 2 | # Copyright 2016, Google Inc. |
| 3 | # All rights reserved. |
| 4 | # |
| 5 | # Redistribution and use in source and binary forms, with or without |
| 6 | # modification, are permitted provided that the following conditions are |
| 7 | # met: |
| 8 | # |
| 9 | # * Redistributions of source code must retain the above copyright |
| 10 | # notice, this list of conditions and the following disclaimer. |
| 11 | # * Redistributions in binary form must reproduce the above |
| 12 | # copyright notice, this list of conditions and the following disclaimer |
| 13 | # in the documentation and/or other materials provided with the |
| 14 | # distribution. |
| 15 | # * Neither the name of Google Inc. nor the names of its |
| 16 | # contributors may be used to endorse or promote products derived from |
| 17 | # this software without specific prior written permission. |
| 18 | # |
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 30 | |
| 31 | """Builds gRPC distribution artifacts.""" |
| 32 | |
| 33 | import argparse |
| 34 | import atexit |
| 35 | import dockerjob |
| 36 | import itertools |
| 37 | import jobset |
| 38 | import json |
| 39 | import multiprocessing |
| 40 | import os |
| 41 | import re |
| 42 | import subprocess |
| 43 | import sys |
| 44 | import time |
| 45 | import uuid |
| 46 | |
# Docker doesn't clean up after itself, so we do it on exit.
#
# NOTE(review): docker runs can leave terminal echo disabled; restoring it
# with 'stty echo' is registered only on Linux (presumably where docker
# builds run — confirm against CI configuration).
if jobset.platform_string() == 'linux':
  atexit.register(lambda: subprocess.call(['stty', 'echo']))

# Run everything relative to the repository root (two directories up from
# this script) so the relative tool paths used below resolve consistently.
ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(ROOT)
| 53 | |
| 54 | |
def create_docker_jobspec(name, dockerfile_dir, shell_command, environ=None,
                          flake_retries=0, timeout_retries=0):
  """Creates jobspec for a task running under docker.

  Args:
    name: short artifact name; used to form the job's shortname.
    dockerfile_dir: directory with the Dockerfile to build and run.
    shell_command: command executed inside the container (via RUN_COMMAND).
    environ: optional dict of environment variables exported into the
        container as 'docker run -e' arguments; not mutated.
    flake_retries: retries allowed on flaky failures.
    timeout_retries: retries allowed on timeouts.

  Returns:
    A jobset.JobSpec invoking tools/jenkins/build_and_run_docker.sh.
  """
  # Copy into a fresh dict so the caller's mapping is never mutated; the
  # None default also avoids the mutable-default-argument pitfall the old
  # 'environ={}' signature had.
  environ = dict(environ or {})
  environ['RUN_COMMAND'] = shell_command

  # Forward each variable into the container via 'docker run -e K=V'.
  docker_args = []
  for k, v in environ.items():
    docker_args += ['-e', '%s=%s' % (k, v)]
  # Variables consumed by build_and_run_docker.sh itself.
  docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
                'DOCKER_RUN_SCRIPT': 'tools/jenkins/docker_run.sh',
                'OUTPUT_DIR': 'artifacts'}
  jobspec = jobset.JobSpec(
      cmdline=['tools/jenkins/build_and_run_docker.sh'] + docker_args,
      environ=docker_env,
      shortname='build_artifact.%s' % (name),
      timeout_seconds=30*60,
      flake_retries=flake_retries,
      timeout_retries=timeout_retries)
  return jobspec
| 76 | |
| 77 | |
def create_jobspec(name, cmdline, environ=None, shell=False,
                   flake_retries=0, timeout_retries=0):
  """Creates a jobspec that runs the given command directly on the host."""
  shortname = 'build_artifact.%s' % (name)
  return jobset.JobSpec(cmdline=cmdline,
                        environ=environ,
                        shortname=shortname,
                        timeout_seconds=5*60,
                        flake_retries=flake_retries,
                        timeout_retries=timeout_retries,
                        shell=shell)
Jan Tattermusch | 4ecf24b | 2016-01-15 09:12:11 -0800 | [diff] [blame] | 90 | |
| 91 | |
def macos_arch_env(arch):
  """Returns environ specifying -arch arguments for make.

  Args:
    arch: target architecture, either 'x86' or 'x64'.

  Returns:
    Dict with CFLAGS and LDFLAGS both set to the matching '-arch' flag.

  Raises:
    ValueError: if arch is not a supported value.
  """
  if arch == 'x86':
    arch_arg = '-arch i386'
  elif arch == 'x64':
    arch_arg = '-arch x86_64'
  else:
    # ValueError is more precise than a bare Exception, still caught by any
    # caller handling Exception, and the message now names the bad value.
    raise ValueError('Unsupported arch: %s' % arch)
  return {'CFLAGS': arch_arg, 'LDFLAGS': arch_arg}
| 101 | |
| 102 | |
class CSharpExtArtifact:
  """Builds C# native extension library.

  One instance represents the grpc_csharp_ext build for a single
  (platform, arch) pair; the build map indexes instances by name and labels.
  """

  def __init__(self, platform, arch):
    # platform is one of 'windows', 'linux', 'macos'; arch 'x86' or 'x64'.
    self.name = 'csharp_ext_%s_%s' % (platform, arch)
    self.platform = platform
    self.arch = arch
    self.labels = ['csharp', platform, arch]

  def pre_build_jobspecs(self):
    """Returns jobspecs to run before building; only windows needs one."""
    if self.platform == 'windows':
      return [create_jobspec('prebuild_%s' % self.name,
                             ['tools\\run_tests\\pre_build_c.bat'],
                             shell=True,
                             flake_retries=5,
                             timeout_retries=2)]
    else:
      return []

  def build_jobspec(self):
    """Returns the jobspec that builds this artifact for its platform."""
    if self.platform == 'windows':
      # msbuild calls its 32-bit platform 'Win32', not 'x86'.
      msbuild_platform = 'Win32' if self.arch == 'x86' else self.arch
      return create_jobspec(self.name,
                            ['tools\\run_tests\\build_artifact_csharp.bat',
                             'vsprojects\\grpc_csharp_ext.sln',
                             '/p:Configuration=Release',
                             '/p:PlatformToolset=v120',
                             '/p:Platform=%s' % msbuild_platform],
                            shell=True)
    if self.platform == 'linux':
      # Linux builds run under docker; the per-arch image fixes the arch.
      # (Removed a dead 'environ' assignment that was never passed along.)
      return create_docker_jobspec(
          self.name,
          'tools/jenkins/grpc_artifact_linux_%s' % self.arch,
          'tools/run_tests/build_artifact_csharp.sh')
    else:
      environ = {'CONFIG': 'opt'}
      if self.platform == 'macos':
        environ.update(macos_arch_env(self.arch))
      return create_jobspec(self.name,
                            ['tools/run_tests/build_artifact_csharp.sh'],
                            environ=environ)

  def __str__(self):
    return self.name
| 147 | |
| 148 | |
# Every supported artifact target: the C# native extension on each
# (platform, arch) combination, in platform-major order.
_ARTIFACTS = [
    CSharpExtArtifact(platform, arch)
    for platform in ('linux', 'macos', 'windows')
    for arch in ('x86', 'x64')
]
| 157 | |
| 158 | |
def _create_build_map():
  """Maps artifact names and labels to list of artifacts to be built.

  Returns:
    Dict mapping each artifact name to a one-element list with that
    artifact, merged with a dict mapping each label (plus 'all') to all
    artifacts carrying that label.

  Raises:
    Exception: if artifact names collide with each other or with labels.
  """
  artifact_build_map = dict([(artifact.name, [artifact])
                             for artifact in _ARTIFACTS])
  # A duplicate name would have silently dropped an artifact from the dict.
  if len(_ARTIFACTS) > len(artifact_build_map):
    raise Exception('Artifact names need to be unique')

  label_build_map = {}
  label_build_map['all'] = list(_ARTIFACTS)  # to build all artifacts
  for artifact in _ARTIFACTS:
    for label in artifact.labels:
      if label in label_build_map:
        label_build_map[label].append(artifact)
      else:
        label_build_map[label] = [artifact]

  # Names and labels share one command-line namespace, so they must not
  # overlap.
  if set(artifact_build_map.keys()).intersection(label_build_map.keys()):
    raise Exception('Artifact names need to be distinct from label names')
  # dict(a.items() + b.items()) is Python 2-only; copy-and-update works on
  # both Python 2 and 3.
  build_map = dict(artifact_build_map)
  build_map.update(label_build_map)
  return build_map
| 178 | |
| 179 | |
_BUILD_MAP = _create_build_map()

argp = argparse.ArgumentParser(description='Builds distribution artifacts.')
argp.add_argument('-b', '--build',
                  choices=sorted(_BUILD_MAP.keys()),
                  nargs='+',
                  default=['all'],
                  help='Artifact name or artifact label to build.')
argp.add_argument('-f', '--filter',
                  choices=sorted(_BUILD_MAP.keys()),
                  nargs='+',
                  default=[],
                  help='Filter artifacts to build with AND semantics.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument('-t', '--travis',
                  default=False,
                  action='store_const',
                  const=True)

args = argp.parse_args()

# Figure out which artifacts to build
artifacts = []
for label in args.build:
  artifacts += _BUILD_MAP[label]

# Among targets selected by -b, keep only those matching every -f label.
artifacts = [a for a in artifacts if all(f in a.labels for f in args.filter)]
# Deduplicate: one artifact can be selected through several labels.
artifacts = sorted(set(artifacts))

# Execute pre-build phase; abort before building if any prebuild job fails.
prebuild_jobs = []
for artifact in artifacts:
  prebuild_jobs += artifact.pre_build_jobspecs()
if prebuild_jobs:
  num_failures, _ = jobset.run(
      prebuild_jobs, newline_on_success=True, maxjobs=args.jobs)
  if num_failures != 0:
    jobset.message('FAILED', 'Pre-build phase failed.', do_newline=True)
    sys.exit(1)

build_jobs = []
for artifact in artifacts:
  build_jobs.append(artifact.build_jobspec())
if not build_jobs:
  # Function-call form prints identically under Python 2 (single argument)
  # and Python 3, unlike the old 'print' statement.
  print('Nothing to build.')
  sys.exit(1)

jobset.message('START', 'Building artifacts.', do_newline=True)
num_failures, _ = jobset.run(
    build_jobs, newline_on_success=True, maxjobs=args.jobs)
if num_failures == 0:
  jobset.message('SUCCESS', 'All artifacts built successfully.',
                 do_newline=True)
else:
  jobset.message('FAILED', 'Failed to build artifacts.',
                 do_newline=True)
  sys.exit(1)