| #!/usr/bin/env python3 |
| # |
| # Build the required docker image to run package unit tests |
| # |
| # Script Variables: |
#   DOCKER_IMAGE_NAME:  <optional, the name of the docker image to generate>
#                       default is openbmc/ubuntu-unit-test
| # DISTRO: <optional, the distro to build a docker image against> |
| # default is ubuntu:focal |
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                       images to be rebuilt rather than reusing caches.>
| # BUILD_URL: <optional, used to detect running under CI context |
| # (ex. Jenkins)> |
| # BRANCH: <optional, branch to build from each of the openbmc/ |
| # repositories> |
| # default is master, which will be used if input branch not |
| # provided or not found |
| # UBUNTU_MIRROR: <optional, the URL of a mirror of Ubuntu to override the |
| # default ones in /etc/apt/sources.list> |
| # default is empty, and no mirror is used. |
| # http_proxy The HTTP address of the proxy server to connect to. |
| # Default: "", proxy is not setup if this is not set |
| |
| import os |
| import sys |
| import threading |
| from datetime import date |
| from hashlib import sha256 |
| from sh import docker, git, nproc, uname |
| |
# Read a bunch of environment variables.
# Name of the docker image to produce (env: DOCKER_IMAGE_NAME).
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# Any non-empty value forces rebuilds instead of reusing cached images.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (e.g. Jenkins); only its presence matters here.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:focal")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")
# Install prefix under which every staged package is installed/copied.
prefix = "/usr/local"

# Set up some common variables.
proc_count = nproc().strip()  # parallelism for make/ninja builds
username = os.environ.get("USER")
homedir = os.environ.get("HOME")
gid = os.getgid()
uid = os.getuid()
| |
# Determine the architecture for Docker.
# Map the host architecture onto the docker image namespace prefix.
docker_base_map = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
}
arch = uname("-m").strip()
if arch not in docker_base_map:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = docker_base_map[arch]
| |
| # Packages to include in image. |
| packages = { |
| "boost": { |
| "rev": "1.74.0", |
| "url": ( |
| lambda pkg, rev: f"https://dl.bintray.com/boostorg/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2" |
| ), |
| "build_type": "custom", |
| "build_steps": [ |
| f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine", |
| "./b2", |
| f"./b2 install --prefix={prefix}", |
| ], |
| }, |
| "USCiLab/cereal": { |
| "rev": "v1.3.0", |
| "build_type": "custom", |
| "build_steps": [f"cp -a include/cereal/ {prefix}/include/"], |
| }, |
| "catchorg/Catch2": { |
| "rev": "v2.12.2", |
| "build_type": "cmake", |
| "config_flags": ["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"], |
| }, |
| "CLIUtils/CLI11": { |
| "rev": "v1.9.0", |
| "build_type": "cmake", |
| "config_flags": [ |
| "-DBUILD_TESTING=OFF", |
| "-DCLI11_BUILD_DOCS=OFF", |
| "-DCLI11_BUILD_EXAMPLES=OFF", |
| ], |
| }, |
| "fmtlib/fmt": { |
| "rev": "6.2.1", |
| "build_type": "cmake", |
| "config_flags": [ |
| "-DFMT_DOC=OFF", |
| "-DFMT_TEST=OFF", |
| ], |
| }, |
| # Snapshot from 2020-01-03 |
| "Naios/function2": { |
| "rev": "3a0746bf5f601dfed05330aefcb6854354fce07d", |
| "build_type": "custom", |
| "build_steps": [ |
| f"mkdir {prefix}/include/function2", |
| f"cp include/function2/function2.hpp {prefix}/include/function2/", |
| ], |
| }, |
| # Snapshot from 2020-02-13 |
| "google/googletest": { |
| "rev": "23b2a3b1cf803999fb38175f6e9e038a4495c8a5", |
| "build_type": "cmake", |
| "config_env": ["CXXFLAGS=-std=c++17"], |
| "config_flags": ["-DTHREADS_PREFER_PTHREAD_FLAG=ON"], |
| }, |
| # Release 2020-08-06 |
| "nlohmann/json": { |
| "rev": "v3.9.1", |
| "build_type": "custom", |
| "build_steps": [ |
| f"mkdir {prefix}/include/nlohmann", |
| f"cp include/nlohmann/json.hpp {prefix}/include/nlohmann", |
| f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp", |
| ], |
| }, |
| # Snapshot from 2019-05-24 |
| "linux-test-project/lcov": { |
| "rev": "75fbae1cfc5027f818a0bb865bf6f96fab3202da", |
| "build_type": "make", |
| }, |
| # dev-5.0 2019-05-03 |
| "openbmc/linux": { |
| "rev": "8bf6567e77f7aa68975b7c9c6d044bba690bf327", |
| "build_type": "custom", |
| "build_steps": [ |
| f"make -j{proc_count} defconfig", |
| f"make INSTALL_HDR_PATH={prefix} headers_install", |
| ], |
| }, |
| # Snapshot from 2019-09-03 |
| "LibVNC/libvncserver": { |
| "rev": "1354f7f1bb6962dab209eddb9d6aac1f03408110", |
| "build_type": "cmake", |
| }, |
| "martinmoene/span-lite": { |
| "rev": "v0.7.0", |
| "build_type": "cmake", |
| "config_flags": [ |
| "-DSPAN_LITE_OPT_BUILD_TESTS=OFF", |
| ], |
| }, |
| # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_5.0.1.bb |
| "leethomason/tinyxml2": { |
| "rev": "37bc3aca429f0164adf68c23444540b4a24b5778", |
| "build_type": "cmake", |
| }, |
| # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb |
| "CPPAlliance/url": { |
| "rev": "a56ae0df6d3078319755fbaa67822b4fa7fd352b", |
| "build_type": "cmake", |
| "config_flags": [ |
| "-DBOOST_URL_BUILD_EXAMPLES=OFF", |
| "-DBOOST_URL_BUILD_TESTS=OFF", |
| "-DBOOST_URL_STANDALONE=ON", |
| ], |
| }, |
| # version from meta-openembedded/meta-oe/recipes-devtools/valijson/valijson_git.bb |
| "tristanpenman/valijson": { |
| "rev": "c2f22fddf599d04dc33fcd7ed257c698a05345d9", |
| "build_type": "cmake", |
| "config_flags": [ |
| "-DBUILD_TESTS=0", |
| "-DINSTALL_HEADERS=1", |
| ], |
| }, |
| # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb |
| "nlohmann/fifo_map": { |
| "rev": "0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9", |
| "build_type": "custom", |
| "build_steps": [f"cp src/fifo_map.hpp {prefix}/include/"], |
| }, |
| "open-power/pdbg": {"build_type": "autoconf"}, |
| "openbmc/gpioplus": { |
| "depends": ["openbmc/stdplus"], |
| "build_type": "meson", |
| "config_flags": [ |
| "-Dexamples=false", |
| "-Dtests=disabled", |
| ], |
| }, |
| "openbmc/phosphor-dbus-interfaces": { |
| "depends": ["openbmc/sdbusplus"], |
| "build_type": "meson", |
| "config_flags": [ |
| "-Ddata_com_ibm=true", |
| "-Ddata_org_open_power=true", |
| ], |
| }, |
| "openbmc/phosphor-logging": { |
| "depends": [ |
| "USCiLab/cereal", |
| "nlohmann/fifo_map", |
| "openbmc/phosphor-dbus-interfaces", |
| "openbmc/sdbusplus", |
| "openbmc/sdeventplus", |
| ], |
| "build_type": "autoconf", |
| "config_flags": [ |
| "--enable-metadata-processing", |
| f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml", |
| ], |
| }, |
| "openbmc/phosphor-objmgr": { |
| "depends": [ |
| "boost", |
| "leethomason/tinyxml2", |
| "openbmc/phosphor-logging", |
| "openbmc/sdbusplus", |
| ], |
| "build_type": "autoconf", |
| }, |
| "openbmc/pldm": { |
| "depends": [ |
| "CLIUtils/CLI11", |
| "boost", |
| "nlohmann/json", |
| "openbmc/phosphor-dbus-interfaces", |
| "openbmc/phosphor-logging", |
| "openbmc/sdbusplus", |
| "openbmc/sdeventplus", |
| ], |
| "build_type": "meson", |
| "config_flags": [ |
| "-Dlibpldm-only=enabled", |
| "-Doem-ibm=enabled", |
| "-Dtests=disabled", |
| ], |
| }, |
| "openbmc/sdbusplus": { |
| "build_type": "meson", |
| "custom_post_dl": [ |
| "cd tools", |
| f"./setup.py install --root=/ --prefix={prefix}", |
| "cd ..", |
| ], |
| "config_flags": [ |
| "-Dexamples=disabled", |
| "-Dtests=disabled", |
| ], |
| }, |
| "openbmc/sdeventplus": { |
| "depends": ["Naios/function2", "openbmc/stdplus"], |
| "build_type": "meson", |
| "config_flags": [ |
| "-Dexamples=false", |
| "-Dtests=disabled", |
| ], |
| }, |
| "openbmc/stdplus": { |
| "depends": ["fmtlib/fmt", "martinmoene/span-lite"], |
| "build_type": "meson", |
| "config_flags": [ |
| "-Dexamples=false", |
| "-Dtests=disabled", |
| ], |
| }, |
| } |
| |
| |
def pkg_rev(pkg):
    """Return the pinned revision (tag or commit hash) for *pkg*."""
    info = packages[pkg]
    return info["rev"]
| |
| |
def pkg_stagename(pkg):
    """Derive the docker stage name for *pkg*: lowercase, '/' -> '-'."""
    return "-".join(pkg.split("/")).lower()
| |
| |
def pkg_url(pkg):
    """Return the tarball URL for *pkg*, honoring a custom 'url' hook."""
    rev = pkg_rev(pkg)
    custom = packages[pkg].get("url")
    if custom is not None:
        return custom(pkg, rev)
    # Default: GitHub's auto-generated source tarball for the revision.
    return f"https://github.com/{pkg}/archive/{rev}.tar.gz"
| |
| |
def pkg_download(pkg):
    """Return a shell command that downloads and unpacks *pkg*'s tarball."""
    url = pkg_url(pkg)
    if ".tar." not in url:
        raise NotImplementedError(f"Unhandled download type for {pkg}: {url}")
    # Select the tar decompression flag from the URL suffix.
    flag = ""
    if url.endswith(".bz2"):
        flag = "j"
    if url.endswith(".gz"):
        flag = "z"
    return f"curl -L {url} | tar -x{flag}"
| |
| |
def pkg_copycmds(pkg=None):
    """Generate COPY directives pulling staged install trees into an image.

    With *pkg*, copy only that package's dependencies; with None, copy
    every staged package (used for the final image).
    """
    if pkg:
        if "depends" not in packages[pkg]:
            return ""
        wanted = sorted(packages[pkg]["depends"])
    else:
        wanted = sorted(packages.keys())

    lines = []
    for name in wanted:
        lines.append(f"COPY --from={packages[name]['__tag']} {prefix} {prefix}\n")
        # Workaround for upstream docker bug and multiple COPY cmds
        # https://github.com/moby/moby/issues/37965
        lines.append("RUN true\n")
    return "".join(lines)
| |
| |
def pkg_cd_srcdir(pkg):
    """Shell fragment entering the unpacked source directory of *pkg*."""
    repo = pkg.rpartition("/")[2]
    return f"cd {repo}* && "
| |
| |
def pkg_build(pkg):
    """Generate the Dockerfile RUN line that downloads and builds *pkg*."""
    # Dispatch table instead of an if/elif chain over build_type.
    builders = {
        "autoconf": pkg_build_autoconf,
        "cmake": pkg_build_cmake,
        "custom": pkg_build_custom,
        "make": pkg_build_make,
        "meson": pkg_build_meson,
    }

    result = f"RUN {pkg_download(pkg)} && "
    result += pkg_cd_srcdir(pkg)

    # Extra shell steps to run right after unpacking, if any.
    post_dl = packages[pkg].get("custom_post_dl")
    if post_dl is not None:
        result += " && ".join(post_dl) + " && "

    build_type = packages[pkg]["build_type"]
    builder = builders.get(build_type)
    if builder is None:
        raise NotImplementedError(
            f"Unhandled build type for {pkg}: {packages[pkg]['build_type']}"
        )
    return result + builder(pkg)
| |
| |
def pkg_build_autoconf(pkg):
    """Shell steps to configure/build/install an autotools package."""
    info = packages[pkg]
    options = " ".join(info.get("config_flags", []))
    env = " ".join(info.get("config_env", []))
    return (
        "./bootstrap.sh && "
        + f"{env} ./configure {configure_flags} {options} && "
        + f"make -j{proc_count} && "
        + "make install "
    )
| |
| |
def pkg_build_cmake(pkg):
    """Shell steps to configure/build/install a CMake package (out of tree)."""
    info = packages[pkg]
    options = " ".join(info.get("config_flags", []))
    env = " ".join(info.get("config_env", []))
    return (
        "mkdir builddir && cd builddir && "
        + f"{env} cmake {cmake_flags} {options} .. && "
        + "cmake --build . --target all && "
        + "cmake --build . --target install && "
        + "cd .. "
    )
| |
| |
def pkg_build_custom(pkg):
    """Shell steps for a package built entirely from its 'build_steps'."""
    steps = packages[pkg].get("build_steps", [])
    return " && ".join(steps)
| |
| |
def pkg_build_make(pkg):
    """Shell steps for a plain Makefile package."""
    return f"make -j{proc_count} && " + "make install "
| |
| |
def pkg_build_meson(pkg):
    """Shell steps to configure/build/install a meson package."""
    info = packages[pkg]
    options = " ".join(info.get("config_flags", []))
    env = " ".join(info.get("config_env", []))
    setup = f"{env} meson builddir {meson_flags} {options} && "
    return setup + "ninja -C builddir && ninja -C builddir install "
| |
| |
# Lock guarding mutation/reads of the shared 'packages' dict (the '__thread'
# and '__tag' entries) across the per-package builder threads.
pkg_lock = threading.Lock()
| |
| |
def pkg_generate(pkg):
    """Create (but do not start) the docker-build thread for *pkg*.

    The thread waits for its dependencies' threads, renders a Dockerfile
    stage for the package, records its tag in packages[pkg]["__tag"], and
    builds the image.  The thread object is stored in
    packages[pkg]["__thread"]; failures are recorded in its 'exception'
    and 'package' attributes for pkg_generate_packages() to report.
    """

    class pkg_thread(threading.Thread):
        def run(self):
            # Set the result attributes up front so the caller can always
            # read them, even if any step below raises.  (Previously they
            # were only assigned deep inside the body, so an early failure
            # left the caller with an AttributeError instead of the real
            # error.)
            self.package = pkg
            self.exception = None

            try:
                # Snapshot dependency threads under the lock; 'packages'
                # is shared with every other builder thread.  'with'
                # guarantees the lock is released even on error.
                with pkg_lock:
                    deps = [
                        packages[deppkg]["__thread"]
                        for deppkg in sorted(packages[pkg].get("depends", []))
                    ]
                # Dependencies must finish (publishing their '__tag')
                # before our COPY lines can be rendered.
                for deppkg in deps:
                    deppkg.join()

                dockerfile = f"""
FROM {docker_base_img_name}
{pkg_copycmds(pkg)}
{pkg_build(pkg)}
"""

                with pkg_lock:
                    tag = docker_img_tagname(pkg_stagename(pkg), dockerfile)
                    packages[pkg]["__tag"] = tag

                docker_img_build(pkg, tag, dockerfile)
            except Exception as e:
                self.exception = e

    packages[pkg]["__thread"] = pkg_thread()
| |
| |
def pkg_generate_packages():
    """Build every package's stage image in parallel and wait for all.

    Re-raises the first recorded exception from a failed builder thread.
    """
    for pkg in packages.keys():
        pkg_generate(pkg)

    # Hold the lock while starting so no thread can read the '__thread'
    # table before every entry exists.
    with pkg_lock:
        pkg_threads = [packages[p]["__thread"] for p in packages.keys()]
        for t in pkg_threads:
            t.start()

    for t in pkg_threads:
        t.join()
        # getattr guard: if a thread died before recording its status,
        # don't mask the failure with an AttributeError here.
        err = getattr(t, "exception", None)
        if err:
            failed = getattr(t, "package", "<unknown>")
            print(f"Package {failed} failed!", file=sys.stderr)
            raise err
| |
def timestamp():
    """Return a year/ISO-week stamp such as '2020-W37' (zero-padded week)."""
    year, week, _ = date.today().isocalendar()
    return f"{year}-W{week:02}"
| |
def docker_img_tagname(pkgname, dockerfile):
    """Derive an image tag: name[-stage]:<week>-<dockerfile sha256 prefix>."""
    digest = sha256(dockerfile.encode()).hexdigest()[0:16]
    parts = [docker_image_name]
    if pkgname:
        parts.append("-" + pkgname)
    parts.append(":" + timestamp())
    parts.append("-" + digest)
    return "".join(parts)
| |
| |
def docker_img_build(pkg, tag, dockerfile):
    # Build (or reuse) the docker image described by `dockerfile`.
    #
    #   pkg:        package/stage name, used for log prefixes and the
    #               'final' special case below
    #   tag:        full image tag to produce
    #   dockerfile: dockerfile contents, fed to `docker build` on stdin
    if not force_build and pkg != "final":
        # TODO: the 'final' is here because we do not tag the final image yet
        # so we always need to rebuild it. This will be changed in a future
        # commit so that we tag even the final image.
        # Reuse an existing image with the same content-derived tag, if any.
        if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
            print(f"Image {tag} already exists. Skipping.", file=sys.stderr)
            return

    docker.build(
        proxy_args,
        "--network=host",
        "--force-rm",
        "--no-cache=true" if force_build else "--no-cache=false",
        "-t",
        tag,
        "-",  # "-" = read the dockerfile from stdin (_in below)
        _in=dockerfile,
        _out=(
            lambda line: print(pkg + ":", line, end="", file=sys.stderr, flush=True)
        ),
    )
| |
| |
# Look up the HEAD for missing a static rev.
pkg_lookups = {}
for pkg in packages.keys():
    if "rev" in packages[pkg]:
        continue
    # Kick off the remote lookups in parallel (_bg) and collect below.
    pkg_lookups[pkg] = git(
        "ls-remote", "--heads", f"https://github.com/{pkg}", _bg=True
    )
for pkg, result in pkg_lookups.items():
    for line in result.stdout.decode().split("\n"):
        if f"refs/heads/{branch}" in line:
            packages[pkg]["rev"] = line.strip().split()[0]
        elif "refs/heads/master" in line and "rev" not in packages[pkg]:
            # Fall back to master only when the requested branch has not
            # been found for this package.  (FIX: this previously tested
            # the undefined name 'p', raising NameError and breaking the
            # documented fallback-to-master behavior.)
            packages[pkg]["rev"] = line.strip().split()[0]
| |
# Create the contents of the '/tmp/depcache'.
# This needs to be sorted for consistency.
depcache = "".join(
    "%s:%s," % (pkg, pkg_rev(pkg)) for pkg in sorted(packages.keys())
)
| |
# Define common flags used for builds
# Flags passed to every ./configure (autoconf packages).
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
# Flags passed to every cmake configure step (ninja generator, shared
# libs, debug-friendly optimized builds, install under {prefix}).
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
# Flags passed to every meson setup; nodownload keeps builds hermetic
# (no subproject wrap downloads).
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
| |
# Special flags if setting up a deb mirror.
# When UBUNTU_MIRROR is set (ubuntu distros only), rewrite every apt
# pocket in /etc/apt/sources.list to point at the mirror.  The
# VERSION_CODENAME is resolved inside the image at build time.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
| |
# Special flags for proxying.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    # Route git traffic inside the image through the proxy.
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # Pass the proxy into each `docker build` as build-args.  The same
    # value is used for https_proxy.  (FIX: the https_proxy arg was
    # missing its f-prefix, so docker received the literal string
    # "https_proxy={https_proxy}"; no https_proxy variable exists.)
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
| |
# Create docker image that can run package unit tests
# This is the "base" stage: distro image + mirror/proxy tweaks + compiler
# toolchain + every distro-level build dependency, with dbgsym repos
# enabled for debug symbols.  All per-package stage images (and the final
# image) build FROM this.  NOTE: the PYTHONPATH below hardcodes
# python3.8, matching the ubuntu:focal default distro.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libevdev2-dbgsym \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-10 \
    clang-format-10 \
    clang-tidy-10 \
    clang-tools-10 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py

"""
| |
if is_automated_ci_build:
    # Under CI (BUILD_URL set), bust docker's layer cache once per
    # timestamp() period so apt package lists are refreshed regularly.
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
| |
# Python tooling installed via pip (not packaged, or packaged too old,
# on the base distro); meson is pinned for reproducible builds.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.54.3
RUN pip3 install protobuf
"""
| |
# Build the stage docker images.
# The base image tag must exist before pkg_generate_packages(), since the
# package stage Dockerfiles reference docker_base_img_name in FROM.
docker_base_img_name = docker_img_tagname("base", dockerfile_base)
docker_img_build("base", docker_base_img_name, dockerfile_base)
pkg_generate_packages()

# Final image: base + every staged package's install tree, plus a user
# account matching the invoking user so bind-mounted workspaces keep
# sane ownership, with passwordless sudo for CI convenience.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{pkg_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{depcache}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_img_build("final", docker_image_name, dockerfile)