diff --git a/docker/python-builder/scripts/assemble b/docker/python-builder/scripts/assemble index 3d0131605b..83a9ba939d 100755 --- a/docker/python-builder/scripts/assemble +++ b/docker/python-builder/scripts/assemble @@ -27,53 +27,66 @@ cd /tmp/src apt-get update -# Protect from the bindep builder image use of the assemble script -# to produce a wheel -if [ -f bindep.txt -o -f other-requirements.txt ] ; then - bindep -l newline > /output/bindep/run.txt || true - compile_packages=$(bindep -b compile || true) - if [ ! -z "$compile_packages" ] ; then - apt-get install -y ${compile_packages} - fi -fi - # pbr needs git installed, else nothing will work apt-get install -y git -# Build a wheel so that we have an install target. -# pip install . in the container context with the mounted -# source dir gets ... exciting. -# We run sdist first to trigger code generation steps such -# as are found in zuul, since the sequencing otherwise -# happens in a way that makes wheel content copying unhappy. -# pip wheel isn't used here because it puts all of the output -# in the output dir and not the wheel cache, so it's not -# possible to tell what is the wheel for the project and -# what is the wheel cache. -python setup.py sdist bdist_wheel -d /output/wheels - -# Use a virtualenv for the next install steps in case to prevent -# things from the current environment from making us not build a -# wheel. +# Use a clean virtualenv for install steps to prevent things from the +# current environment making us not build a wheel. python -m venv /tmp/venv /tmp/venv/bin/pip install -U pip wheel -# Install everything so that the wheel cache is populated -# with transitive depends. If a requirements.txt file exists, -# install it directly so that people can use git url syntax -# to do things like pick up patched but unreleased versions -# of dependencies. 
-if [ -f /tmp/src/requirements.txt ] ; then - /tmp/venv/bin/pip install --cache-dir=/output/wheels -r /tmp/src/requirements.txt - cp /tmp/src/requirements.txt /output/requirements.txt -fi -/tmp/venv/bin/pip install --cache-dir=/output/wheels /output/wheels/*whl +function install_pwd { + # Protect from the bindep builder image use of the assemble script + # to produce a wheel. Note we append because we want all + # sibling packages in here too + if [ -f bindep.txt -o -f other-requirements.txt ] ; then + bindep -l newline >> /output/bindep/run.txt || true + compile_packages=$(bindep -b compile || true) + if [ ! -z "$compile_packages" ] ; then + apt-get install -y ${compile_packages} + fi + fi -# Install each of the extras so that we collect all possibly -# needed wheels in the wheel cache. get-extras-packages also -# writes out the req files into /output/$extra/requirements.txt. -for req in $(get-extras-packages) ; do - /tmp/venv/bin/pip install --cache-dir=/output/wheels "$req" + # Build a wheel so that we have an install target. + # pip install . in the container context with the mounted + # source dir gets ... exciting. + # We run sdist first to trigger code generation steps such + # as are found in zuul, since the sequencing otherwise + # happens in a way that makes wheel content copying unhappy. + # pip wheel isn't used here because it puts all of the output + # in the output dir and not the wheel cache, so it's not + # possible to tell what is the wheel for the project and + # what is the wheel cache. + python setup.py sdist bdist_wheel -d /output/wheels + + # Install everything so that the wheel cache is populated with + # transitive depends. If a requirements.txt file exists, install + # it directly so that people can use git url syntax to do things + # like pick up patched but unreleased versions of dependencies. + # Only do this for the main package (i.e. only write requirements + # once). + if [ -f /tmp/src/requirements.txt ] && [ ! 
-f /output/requirements.txt ] ; then +        /tmp/venv/bin/pip install --cache-dir=/output/wheels -r /tmp/src/requirements.txt +        cp /tmp/src/requirements.txt /output/requirements.txt +    fi +    /tmp/venv/bin/pip install --cache-dir=/output/wheels /output/wheels/*whl + +    # Install each of the extras so that we collect all possibly +    # needed wheels in the wheel cache.  get-extras-packages also +    # writes out the req files into /output/$extra/requirements.txt. +    for req in $(get-extras-packages) ; do +        /tmp/venv/bin/pip install --cache-dir=/output/wheels "$req" +    done +} + +# Install the main package +install_pwd + +# go through ZUUL_SIBLINGS, if any, and build those wheels too +for sibling in ${ZUUL_SIBLINGS:-}; do +    pushd ".zuul-siblings/${sibling}" +    install_pwd +    popd done rm -rf /tmp/venv diff --git a/docker/python-builder/scripts/install-from-bindep b/docker/python-builder/scripts/install-from-bindep index 4733ac9e31..b78343bc6e 100755 --- a/docker/python-builder/scripts/install-from-bindep +++ b/docker/python-builder/scripts/install-from-bindep @@ -25,7 +25,12 @@ apt-get -y install $(cat /output/bindep/run.txt) if [ -f /output/requirements.txt ] ; then pip install --cache-dir=/output/wheels -r /output/requirements.txt fi -pip install --cache-dir=/output/wheels /output/wheels/*.whl + +# Install the wheels.  Use --force-reinstall here because sibling wheels might +# be built with the same version number as the latest release, but we +# really want the speculatively built wheels installed over any +# automatic dependencies. +pip install --force-reinstall --cache-dir=/output/wheels /output/wheels/*.whl # clean up after ourselves apt-get clean