Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Crosscompile python aarch64 wheels with dockcross #25418

Merged
merged 6 commits into from Mar 2, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
13 changes: 13 additions & 0 deletions setup.py
Expand Up @@ -138,6 +138,16 @@
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_RE2 = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_RE2', False)

# Export this variable to force building the python extension with a statically linked libstdc++.
# At least on linux, this is normally not needed as we can build manylinux-compatible wheels on linux just fine
# without statically linking libstdc++ (which leads to a slight increase in the wheel size).
# This option is useful when crosscompiling wheels for aarch64 where
# it's difficult to ensure that the crosscompilation toolchain has a high-enough version
# of GCC (we require >4.9) but still uses old-enough libstdc++ symbols.
# TODO(jtattermusch): remove this workaround once issues with crosscompiler version are resolved.
# NOTE(review): os.environ.get returns the raw string when the variable is set,
# so ANY non-empty value (even "0" or "false") enables this option; only an
# unset or empty variable leaves it disabled.
BUILD_WITH_STATIC_LIBSTDCXX = os.environ.get(
    'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', False)

# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
# compilation by first building gRPC Core using make, then building only the
Expand Down Expand Up @@ -239,6 +249,9 @@ def check_linker_need_libatomic():
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)

# Statically link libstdc++ into the extension when requested via
# GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX (used when crosscompiling aarch64
# wheels; see the comment above the variable's definition).
if BUILD_WITH_STATIC_LIBSTDCXX:
    EXTRA_LINK_ARGS.append('-static-libstdc++')

CYTHON_EXTENSION_PACKAGE_NAMES = ()

CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',)
Expand Down
15 changes: 15 additions & 0 deletions src/python/grpcio/commands.py
Expand Up @@ -24,6 +24,7 @@
import shutil
import subprocess
import sys
import sysconfig
import traceback

import setuptools
Expand Down Expand Up @@ -211,6 +212,20 @@ class BuildExt(build_ext.build_ext):
}
LINK_OPTIONS = {}

def get_ext_filename(self, ext_name):
    """Return the extension's output filename, honoring a suffix override.

    Since Python 3.5, extension shared libraries carry a platform-specific
    suffix (sysconfig's EXT_SUFFIX, e.g. ".cpython-XYZ-x86_64-linux-gnu.so"
    on x64 linux). When crosscompiling wheels, the suffix must be overridable
    via GRPC_PYTHON_OVERRIDE_EXT_SUFFIX so the resulting file name matches
    the target architecture and the wheel is well-formed.
    """
    default_filename = build_ext.build_ext.get_ext_filename(self, ext_name)
    override_suffix = os.getenv('GRPC_PYTHON_OVERRIDE_EXT_SUFFIX')
    if not override_suffix:
        return default_filename
    host_suffix = sysconfig.get_config_var('EXT_SUFFIX')
    if not default_filename.endswith(host_suffix):
        return default_filename
    return default_filename[:-len(host_suffix)] + override_suffix

def build_extensions(self):

def compiler_ok_with_extra_std():
Expand Down
70 changes: 51 additions & 19 deletions tools/distrib/python/grpcio_tools/setup.py
Expand Up @@ -71,6 +71,16 @@
# to have been generated by building first *with* Cython support.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)

# Export this variable to force building the python extension with a statically linked libstdc++.
# At least on linux, this is normally not needed as we can build manylinux-compatible wheels on linux just fine
# without statically linking libstdc++ (which leads to a slight increase in the wheel size).
# This option is useful when crosscompiling wheels for aarch64 where
# it's difficult to ensure that the crosscompilation toolchain has a high-enough version
# of GCC (we require >4.9) but still uses old-enough libstdc++ symbols.
# TODO(jtattermusch): remove this workaround once issues with crosscompiler version are resolved.
# NOTE(review): os.environ.get returns the raw string when the variable is set,
# so ANY non-empty value (even "0" or "false") enables this option; only an
# unset or empty variable leaves it disabled.
BUILD_WITH_STATIC_LIBSTDCXX = os.environ.get(
    'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', False)


def check_linker_need_libatomic():
"""Test if linker on system needs libatomic."""
Expand All @@ -95,6 +105,24 @@ def check_linker_need_libatomic():
return cpp_test.returncode == 0


class BuildExt(build_ext.build_ext):
    """Custom build_ext command.

    Adds support for overriding the extension-module filename suffix, which
    is needed when crosscompiling wheels for a different architecture.
    """

    def get_ext_filename(self, ext_name):
        # Since Python 3.5 extension shared libraries are named with the
        # platform-dependent sysconfig EXT_SUFFIX (e.g. on x64 linux it is
        # ".cpython-XYZ-x86_64-linux-gnu.so"). When crosscompiling wheels we
        # must be able to replace that suffix (GRPC_PYTHON_OVERRIDE_EXT_SUFFIX)
        # so the .so name matches the target architecture and the resulting
        # wheel is well-formed.
        base_name = build_ext.build_ext.get_ext_filename(self, ext_name)
        host_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        target_suffix = os.getenv('GRPC_PYTHON_OVERRIDE_EXT_SUFFIX')
        if target_suffix and base_name.endswith(host_suffix):
            base_name = base_name[:-len(host_suffix)] + target_suffix
        return base_name


# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
Expand Down Expand Up @@ -159,6 +187,9 @@ def check_linker_need_libatomic():
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)

# Statically link libstdc++ into the extension when requested via
# GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX (used when crosscompiling aarch64
# wheels; see the comment above the variable's definition).
if BUILD_WITH_STATIC_LIBSTDCXX:
    EXTRA_LINK_ARGS.append('-static-libstdc++')

CC_FILES = [os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
PROTO_FILES = [
os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES
Expand Down Expand Up @@ -245,22 +276,23 @@ def extension_modules():
return extensions


setuptools.setup(
name='grpcio-tools',
version=grpc_version.VERSION,
description='Protobuf code generator for gRPC',
long_description=open(_README_PATH, 'r').read(),
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license='Apache License 2.0',
classifiers=CLASSIFIERS,
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.5.0.post1, < 4.0dev',
'grpcio>={version}'.format(version=grpc_version.VERSION),
'setuptools',
],
package_data=package_data(),
)
# Package metadata and build configuration for grpcio-tools. The cmdclass
# entry wires in the custom BuildExt command so the extension filename suffix
# can be overridden when crosscompiling (GRPC_PYTHON_OVERRIDE_EXT_SUFFIX).
setuptools.setup(name='grpcio-tools',
                 version=grpc_version.VERSION,
                 description='Protobuf code generator for gRPC',
                 long_description=open(_README_PATH, 'r').read(),
                 author='The gRPC Authors',
                 author_email='grpc-io@googlegroups.com',
                 url='https://grpc.io',
                 license='Apache License 2.0',
                 classifiers=CLASSIFIERS,
                 ext_modules=extension_modules(),
                 packages=setuptools.find_packages('.'),
                 install_requires=[
                     'protobuf>=3.5.0.post1, < 4.0dev',
                     'grpcio>={version}'.format(version=grpc_version.VERSION),
                     'setuptools',
                 ],
                 package_data=package_data(),
                 cmdclass={
                     'build_ext': BuildExt,
                 })
@@ -0,0 +1,31 @@
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The aarch64 wheels are being crosscompiled to allow running the build
# on x64 machine. The dockcross/manylinux2014-aarch64 image is a x86_64
# image with crosscompilation toolchain installed.
# Use an older version of dockcross image that has gcc4.9.4 because it was built
# before https://github.com/dockcross/dockcross/pull/449
FROM dockcross/manylinux2014-aarch64:20200929-608e6ac

# Update the package manager
# NOTE: this installs the x86_64 (host) dev libraries, not aarch64 ones; they
# are not strictly required for the crosscompiled build (line copied from the
# x64 dockerfile).
RUN yum update -y && yum install -y curl-devel expat-devel gettext-devel openssl-devel zlib-devel

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are these installing the aarch64 dev libs? When I tried it, it pulled in the x86 version.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nope, this is installing x86 libs. I guess the reason why this doesn't matter is that they are actually not strictly required for a successful build (I copied the line from x64 dockerfile and didn't investigate whether they are really needed).


###################################
# Install Python build requirements
# NOTE: these are the host (x86_64) python/pip/cython. That is intentional:
# when crosscompiling, the build commands themselves run on the x64 host
# (cython invokes the build), only the produced .so targets aarch64.
RUN /opt/python/cp35-cp35m/bin/pip install --upgrade cython
RUN /opt/python/cp36-cp36m/bin/pip install --upgrade cython
RUN /opt/python/cp37-cp37m/bin/pip install --upgrade cython
RUN /opt/python/cp38-cp38/bin/pip install --upgrade cython
RUN /opt/python/cp39-cp39/bin/pip install --upgrade cython

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same question as above.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

it's a x64 cython, but that doesn't really matter it seems. Even the python itself is a x64 python. That seems correct as we're crosscompiling and we need to run the build on a x64 machine (and cython is what invokes the build commands).

23 changes: 21 additions & 2 deletions tools/run_tests/artifacts/artifact_targets.py
Expand Up @@ -144,8 +144,24 @@ def build_jobspec(self):
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(
self.py_version)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
environ['GRPC_BUILD_MANYLINUX_WHEEL'] = 'TRUE'
if self.arch == 'aarch64':
environ['GRPC_SKIP_TWINE_CHECK'] = 'TRUE'
gnossen marked this conversation as resolved.
Show resolved Hide resolved
# when crosscompiling, we need to force statically linking libstdc++
# otherwise libstdc++ symbols would be too new and the resulting
# wheel wouldn't pass the auditwheel check.
# This is needed because C core won't build with GCC 4.8 that's
# included in the default dockcross toolchain and we needed
# to opt into using a slightly newer version of GCC.
environ['GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX'] = 'TRUE'

else:
# only run auditwheel if we're not crosscompiling
environ['GRPC_RUN_AUDITWHEEL_REPAIR'] = 'TRUE'
# only build the packages that depend on grpcio-tools
# if we're not crosscompiling.
# - they require protoc to run on current architecture
# - they only have sdist packages anyway, so it's useless to build them again
environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
return create_docker_jobspec(
self.name,
# NOTE(rbellevi): Do *not* update this without also ensuring the
Expand Down Expand Up @@ -375,6 +391,9 @@ def targets():
PythonArtifact('manylinux2010', 'x86', 'cp37-cp37m'),
PythonArtifact('manylinux2010', 'x86', 'cp38-cp38'),
PythonArtifact('manylinux2010', 'x86', 'cp39-cp39'),
PythonArtifact('manylinux2014', 'aarch64', 'cp37-cp37m'),
PythonArtifact('manylinux2014', 'aarch64', 'cp38-cp38'),
PythonArtifact('manylinux2014', 'aarch64', 'cp39-cp39'),
PythonArtifact('linux_extra', 'armv7', '2.7'),
PythonArtifact('linux_extra', 'armv7', '3.5'),
PythonArtifact('linux_extra', 'armv7', '3.6'),
Expand Down
41 changes: 32 additions & 9 deletions tools/run_tests/artifacts/build_artifact_python.sh
Expand Up @@ -33,14 +33,33 @@ export GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=${GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS
mkdir -p "${ARTIFACTS_OUT}"
ARTIFACT_DIR="$PWD/${ARTIFACTS_OUT}"

# check whether we are crosscompiling. AUDITWHEEL_ARCH is set by the dockcross docker image.
if [ "$AUDITWHEEL_ARCH" == "aarch64" ]
then
# when crosscompiling for aarch64, --plat-name needs to be set explicitly
# to end up with a correctly named wheel file
# the value should be manylinuxABC_ARCH and dockcross docker image
# conveniently provides the value in the AUDITWHEEL_PLAT env
WHEEL_PLAT_NAME_FLAG="--plat-name=$AUDITWHEEL_PLAT"

# override the value of EXT_SUFFIX to make sure the crosscompiled .so files in the wheel have the correct filename suffix
GRPC_PYTHON_OVERRIDE_EXT_SUFFIX="$(${PYTHON} -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX").replace("-x86_64-linux-gnu.so", "-aarch64-linux-gnu.so"))')"
export GRPC_PYTHON_OVERRIDE_EXT_SUFFIX

# Set to empty string to disable the option (see https://github.com/grpc/grpc/issues/24498)
# TODO: enable ASM optimizations for crosscompiled wheels
export GRPC_BUILD_WITH_BORING_SSL_ASM=""
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe link to the PR changing the BoringSSL ASM enable flag? #25444

fi

# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
${SETARCH_CMD} "${PYTHON}" setup.py sdist

# Wheel has a bug where directories don't get excluded.
# https://bitbucket.org/pypa/wheel/issues/99/cannot-exclude-directory
${SETARCH_CMD} "${PYTHON}" setup.py bdist_wheel
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Double quote (SC2086) seems applicable to this shell command. Quoting variables should fix the shellcheck complaint.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That wouldn't work. WHEEL_PLAT_NAME_FLAG can be empty and in that case "$WHEEL_PLAT_NAME_FLAG" becomes "" and an empty arg will be forcibly passed to the command (which does break it).


GRPCIO_STRIP_TEMPDIR=$(mktemp -d)
GRPCIO_TAR_GZ_LIST=( dist/grpcio-*.tar.gz )
Expand Down Expand Up @@ -78,9 +97,10 @@ mv "${GRPCIO_STRIPPED_TAR_GZ}" "${GRPCIO_TAR_GZ}"
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py sdist

# Build gRPC tools package binary distribution
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py bdist_wheel
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG

if [ "$GRPC_BUILD_MANYLINUX_WHEEL" != "" ]
if [ "$GRPC_RUN_AUDITWHEEL_REPAIR" != "" ]
then
for wheel in dist/*.whl; do
"${AUDITWHEEL}" show "$wheel" | tee /dev/stderr | grep -E -w "$AUDITWHEEL_PLAT"
Expand Down Expand Up @@ -136,12 +156,15 @@ then
cp -r src/python/grpcio_status/dist/* "$ARTIFACT_DIR"
fi

# Ensure the generated artifacts are valid.
"${PYTHON}" -m pip install virtualenv
"${PYTHON}" -m virtualenv venv || { "${PYTHON}" -m pip install virtualenv==16.7.9 && "${PYTHON}" -m virtualenv venv; }
venv/bin/python -m pip install "twine<=2.0"
venv/bin/python -m twine check dist/* tools/distrib/python/grpcio_tools/dist/*
rm -rf venv/
if [ "$GRPC_SKIP_TWINE_CHECK" == "" ]
then
# Ensure the generated artifacts are valid.
"${PYTHON}" -m pip install virtualenv
"${PYTHON}" -m virtualenv venv || { "${PYTHON}" -m pip install virtualenv==16.7.9 && "${PYTHON}" -m virtualenv venv; }
venv/bin/python -m pip install "twine<=2.0"
venv/bin/python -m twine check dist/* tools/distrib/python/grpcio_tools/dist/*
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there a reason for skipping twine checks? We are following manylinux2014, should be supported by PyPI official packages?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We need to skip auditwheel and twine check for now, since when crosscompiling we're running on x64 image, but we got aarch64 wheel. To perform checks, you'd likely need to run the check under qemu (available in the docker image), but also on aarch64 version of python etc.
The plan is to first test the aarch64 wheels manually. It should be possible to add back twine and auditwheel checks, but more work is needed.

rm -rf venv/
fi

cp -r dist/* "$ARTIFACT_DIR"
cp -r tools/distrib/python/grpcio_tools/dist/* "$ARTIFACT_DIR"