Use manylinux
diff --git a/tools/run_tests/artifact_targets.py b/tools/run_tests/artifact_targets.py
index 3e08c1d..ec44e37 100644
--- a/tools/run_tests/artifact_targets.py
+++ b/tools/run_tests/artifact_targets.py
@@ -84,12 +84,16 @@
class PythonArtifact:
"""Builds Python artifacts."""
- def __init__(self, platform, arch):
- self.name = 'python_%s_%s' % (platform, arch)
+ def __init__(self, platform, arch, manylinux_build=None):
+ if manylinux_build:
+ self.name = 'python_%s_%s_%s' % (platform, arch, manylinux_build)
+ else:
+ self.name = 'python_%s_%s' % (platform, arch)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'python', platform, arch]
self.python_version = python_version_arch_map[arch]
+ self.manylinux_build = manylinux_build
def pre_build_jobspecs(self):
return []
@@ -99,8 +103,47 @@
if self.platform == 'linux':
if self.arch == 'x86':
environ['SETARCH_CMD'] = 'linux32'
+ # Inside the manylinux container, the python installations are located in
+ # special places...
+ environ['PYTHON'] = '/opt/python/{}/bin/python'.format(self.manylinux_build)
+ environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.manylinux_build)
+ # Our docker image has all the prerequisites pip-installed already.
+ environ['SKIP_PIP_INSTALL'] = '1'
+ # Platform autodetection for the manylinux1 image breaks so we set the
+ # defines ourselves.
+ # TODO(atash) get better platform-detection support in core so we don't
+ # need to do this manually...
+ environ['CFLAGS'] = " ".join([
+ '-DGPR_NO_AUTODETECT_PLATFORM',
+ '-DGPR_PLATFORM_STRING=\\"manylinux\\"',
+ '-DGPR_POSIX_CRASH_HANDLER=1',
+ '-DGPR_CPU_LINUX=1',
+ '-DGPR_GCC_ATOMIC=1',
+ '-DGPR_GCC_TLS=1',
+ '-DGPR_LINUX=1',
+ '-DGPR_LINUX_LOG=1',
+ #'-DGPR_LINUX_MULTIPOLL_WITH_EPOLL=1',
+ '-DGPR_POSIX_SOCKET=1',
+ '-DGPR_POSIX_WAKEUP_FD=1',
+ '-DGPR_POSIX_SOCKETADDR=1',
+ #'-DGPR_LINUX_EVENTFD=1',
+ #'-DGPR_LINUX_SOCKETUTILS=1',
+ '-DGPR_HAVE_UNIX_SOCKET=1',
+ '-DGPR_HAVE_IP_PKTINFO=1',
+ '-DGPR_HAVE_IPV6_RECVPKTINFO=1',
+ '-DGPR_LINUX_ENV=1',
+ '-DGPR_POSIX_FILE=1',
+ '-DGPR_POSIX_TMPFILE=1',
+ '-DGPR_POSIX_STRING=1',
+ '-DGPR_POSIX_SUBPROCESS=1',
+ '-DGPR_POSIX_SYNC=1',
+ '-DGPR_POSIX_TIME=1',
+ '-DGPR_GETPID_IN_UNISTD_H=1',
+ '-DGPR_HAVE_MSG_NOSIGNAL=1',
+ '-DGPR_ARCH_{arch}=1'.format(arch=('32' if self.arch == 'x86' else '64')),
+ ])
return create_docker_jobspec(self.name,
- 'tools/dockerfile/grpc_artifact_linux_%s' % self.arch,
+ 'tools/dockerfile/grpc_artifact_python_manylinux_%s' % self.arch,
'tools/run_tests/build_artifact_python.sh',
environ=environ)
elif self.platform == 'windows':
@@ -307,8 +350,10 @@
for Cls in (CSharpExtArtifact, NodeExtArtifact, ProtocArtifact)
for platform in ('linux', 'macos', 'windows')
for arch in ('x86', 'x64')] +
- [PythonArtifact('linux', 'x86'),
- PythonArtifact('linux', 'x64'),
+ [PythonArtifact('linux', 'x86', 'cp27-cp27m'),
+ PythonArtifact('linux', 'x86', 'cp27-cp27mu'),
+ PythonArtifact('linux', 'x64', 'cp27-cp27m'),
+ PythonArtifact('linux', 'x64', 'cp27-cp27mu'),
PythonArtifact('macos', 'x64'),
PythonArtifact('windows', 'x86'),
PythonArtifact('windows', 'x64'),
diff --git a/tools/run_tests/build_artifact_python.sh b/tools/run_tests/build_artifact_python.sh
index 454f472..920d255 100755
--- a/tools/run_tests/build_artifact_python.sh
+++ b/tools/run_tests/build_artifact_python.sh
@@ -32,43 +32,48 @@
cd $(dirname $0)/../..
-if [ "$SKIP_PIP_INSTALL" == "" ]
-then
- pip install --upgrade six
- # There's a bug in newer versions of setuptools (see
- # https://bitbucket.org/pypa/setuptools/issues/503/pkg_resources_vendorpackagingrequirementsi)
- pip install --upgrade 'setuptools==18'
- pip install -rrequirements.txt
-fi
-
export GRPC_PYTHON_USE_CUSTOM_BDIST=0
export GRPC_PYTHON_BUILD_WITH_CYTHON=1
+export PYTHON=${PYTHON:-python}
+export PIP=${PIP:-pip}
+export AUDITWHEEL=${AUDITWHEEL:-auditwheel}
+
+
+if [ "$SKIP_PIP_INSTALL" == "" ]
+then
+ ${PIP} install --upgrade six
+ # There's a bug in newer versions of setuptools (see
+ # https://bitbucket.org/pypa/setuptools/issues/503/pkg_resources_vendorpackagingrequirementsi)
+  ${PIP} install --upgrade 'setuptools==18'
+ ${PIP} install -rrequirements.txt
+fi
# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
-${SETARCH_CMD} python setup.py \
+${SETARCH_CMD} ${PYTHON} setup.py \
sdist
-# The bdist_wheel_grpc_custom command is finicky about command output ordering
-# and thus ought to be run in a shell command separate of others. Further, it
-# trashes the actual bdist_wheel output, so it should be run first so that
-# bdist_wheel may be run unmolested.
-${SETARCH_CMD} python setup.py \
- build_tagged_ext
-
# Wheel has a bug where directories don't get excluded.
# https://bitbucket.org/pypa/wheel/issues/99/cannot-exclude-directory
-${SETARCH_CMD} python setup.py \
+${SETARCH_CMD} ${PYTHON} setup.py \
bdist_wheel
# Build gRPC tools package
-python tools/distrib/python/make_grpcio_tools.py
-# Build with clang since there's a bug in GCC 4.x where some constant
-# expressions are treated as non-constant in the presence of the fwrapv flag
-# (fixed in at most GCC 5.3).
-CC=clang python tools/distrib/python/grpcio_tools/setup.py bdist_wheel
+${PYTHON} tools/distrib/python/make_grpcio_tools.py
+CFLAGS="$CFLAGS -fno-wrapv" ${SETARCH_CMD} \
+ ${PYTHON} tools/distrib/python/grpcio_tools/setup.py bdist_wheel
mkdir -p artifacts
-cp -r dist/* artifacts
-cp -r tools/distrib/python/grpcio_tools/dist/* artifacts
+if command -v ${AUDITWHEEL}
+then
+ for wheel in dist/*.whl; do
+ ${AUDITWHEEL} repair $wheel -w artifacts/
+ done
+ for wheel in tools/distrib/python/grpcio_tools/dist/*.whl; do
+ ${AUDITWHEEL} repair $wheel -w artifacts/
+ done
+else
+ cp -r dist/* artifacts
+ cp -r tools/distrib/python/grpcio_tools/dist/* artifacts
+fi