From: Adam Hassick
To: ci@dpdk.org
Cc: Owen Hilyard, aconole@redhat.com, alialnu@nvidia.com
Subject: [PATCH v5 03/11] containers/builder: Dockerfile creation script
Date: Tue, 23 May 2023 13:04:05 -0400
Message-Id: <20230523170413.812922-4-ahassick@iol.unh.edu>
In-Reply-To: <20230523170413.812922-1-ahassick@iol.unh.edu>
References: <20230523170413.812922-1-ahassick@iol.unh.edu>
X-Mailer: git-send-email 2.34.1

From: Owen Hilyard

This script templates out all of the Dockerfiles based on the definitions
provided in the inventory, using the Jinja2 templating library.
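For illustration, here is a minimal, self-contained sketch of the Jinja2 flow
the script relies on. It uses an in-memory template via DictLoader instead of
the real templates/ directory, and the template text, target fields, and
package names are made up for the example rather than taken from
inventory.yaml:

  # Hypothetical example only; none of these names come from the inventory.
  from jinja2 import DictLoader, Environment

  templates = {
      "containers/example.dockerfile.j2": (
          "FROM {{ target.base_image }}\n"
          "# generated {{ timestamp }}\n"
          "RUN {{ target.package_manager }} install -y"
          " {{ target.packages | join(' ') }}\n"
      )
  }

  env = Environment(loader=DictLoader(templates))
  template = env.get_template("containers/example.dockerfile.j2")

  print(
      template.render(
          timestamp="2023-05-23",
          target={
              "base_image": "ubuntu:22.04",
              "package_manager": "apt-get",
              "packages": ["gcc", "meson", "ninja-build"],
          },
      )
  )

The script below does the same thing with FileSystemLoader("templates") and
one render per target in the processed inventory.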
Signed-off-by: Owen Hilyard
---
 containers/template_engine/make_dockerfile.py | 240 ++++++++++++++++++
 1 file changed, 240 insertions(+)
 create mode 100755 containers/template_engine/make_dockerfile.py

diff --git a/containers/template_engine/make_dockerfile.py b/containers/template_engine/make_dockerfile.py
new file mode 100755
index 0000000..9a3c19b
--- /dev/null
+++ b/containers/template_engine/make_dockerfile.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2022 University of New Hampshire
+import argparse
+import json
+import logging
+import os
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+
+import jsonschema
+import yaml
+from jinja2 import Environment, FileSystemLoader, select_autoescape
+
+
+@dataclass(frozen=True)
+class Options:
+    on_rhel: bool
+    fail_on_unbuildable: bool
+    build_libabigail: bool
+    build_abi: bool
+    output_dir: str
+    registry_hostname: str
+
+
+def _get_arg_parser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description="Makes the dockerfile")
+    parser.add_argument("--output-dir", required=True)
+    parser.add_argument(
+        "--rhel",
+        action="store_true",
+        help="Override the check for running on RHEL",
+        default=False,
+    )
+    parser.add_argument(
+        "--fail-on-unbuildable",
+        action="store_true",
+        help="If any container would not be possible to build, fail and exit with a non-zero exit code.",
+        default=False,
+    )
+    parser.add_argument(
+        "--build-abi",
+        action="store_true",
+        help="Whether to build the ABI references into the image. Disabled by \
+            default due to producing 10+ GB images. \
+            Implies '--build-libabigail'.",
+    )
+    parser.add_argument(
+        "--build-libabigail",
+        action="store_true",
+        help="Whether to build libabigail from source for distros that do not \
+            package it. Implied by '--build-abi'",
+    )
+    return parser
+
+
+def parse_args() -> Options:
+    parser = _get_arg_parser()
+    args = parser.parse_args()
+
+    registry_hostname = (
+        os.environ.get("DPDK_CI_CONTAINERS_REGISTRY_HOSTNAME") or "localhost"
+    )
+
+    # In order to build the ABIs, libabigail must be built from source on
+    # some platforms
+    build_libabigail: bool = args.build_libabigail or args.build_abi
+
+    opts = Options(
+        on_rhel=args.rhel,
+        fail_on_unbuildable=args.fail_on_unbuildable,
+        build_libabigail=build_libabigail,
+        build_abi=args.build_abi,
+        output_dir=args.output_dir,
+        registry_hostname=registry_hostname,
+    )
+    logging.info(f"make_dockerfile.py options: {opts}")
+    return opts
+
+
+def running_on_RHEL(options: Options) -> bool:
+    """
+    RHEL containers can only be built on RHEL, so disable them and emit a
+    warning if not on RHEL.
+ """ + redhat_release_path = "/etc/redhat-release" + + if os.path.exists(redhat_release_path): + with open(redhat_release_path) as f: + first_line = f.readline() + on_rhel = "Red Hat Enterprise Linux" in first_line + if on_rhel: + logging.info("Running on RHEL, allowing RHEL containers") + return True + + logging.warning("Not on RHEL, disabling RHEL containers") + assert options is not None, "Internal state error, OPTIONS should not be None" + + if options.on_rhel: + logging.info("Override enabled, enabling RHEL containers") + + return options.on_rhel + + +def get_path_to_parent_directory() -> str: + return os.path.dirname(__file__) + + +def get_raw_inventory(): + parent_dir = get_path_to_parent_directory() + + schema_path = os.path.join(parent_dir, "inventory_schema.json") + inventory_path = os.path.join(parent_dir, "inventory.yaml") + + inventory: Dict[str, Any] + with open(inventory_path, "r") as f: + inventory = yaml.safe_load(f) + + schema: Dict[str, Any] + with open(schema_path, "r") as f: + schema = json.load(f) + + jsonschema.validate(instance=inventory, schema=schema) + return inventory + + +def apply_group_config_to_target( + target: Dict[str, Any], + raw_inventory: Dict[str, Any], + on_rhel: bool, + fail_on_unbuildable: bool, +) -> Optional[Dict[str, Any]]: + groups_for_target: List[Dict[str, Any]] = [] + groups: List[Dict[str, Any]] = raw_inventory["dockerfiles"]["groups"] + group = groups[target["group"]] + + target_primary_group = target["group"] + + assert isinstance(target_primary_group, str), "Target group name was not a string" + + requires_rhel = "rhel" in target_primary_group.lower() + + if requires_rhel and not on_rhel: + logging.warning( + f"Disabling target {target['name']}, because it must be built on RHEL." + ) + if fail_on_unbuildable: + raise AssertionError( + f"Not on RHEL and target {target['name']} must be built on RHEL" + ) + + return None + + while group["parent"] != "NONE": + groups_for_target.append(group) + group = groups[group["parent"]] + + groups_for_target.append(group) # add the "all" group + groups_for_target.reverse() # reverse it so overrides work + + target_packages: List[str] = target.get("packages") or [] + + for group in groups_for_target: + target_packages = [*target_packages, *(group.get("packages") or [])] + target = dict(target, **group) + + target["packages"] = target_packages + + return target + + +def get_processed_inventory(options: Options) -> Dict[str, Any]: + raw_inventory: Dict[str, Any] = get_raw_inventory() + on_rhel = running_on_RHEL(options) + targets = raw_inventory["dockerfiles"]["targets"] + targets = [ + apply_group_config_to_target( + target, raw_inventory, on_rhel, options.fail_on_unbuildable + ) + for target in targets + ] + # remove disabled options + targets = [target for target in targets if target is not None] + raw_inventory["dockerfiles"]["targets"] = targets + + return raw_inventory + + +def main(): + options: Options = parse_args() + + env = Environment( + loader=FileSystemLoader("templates"), + ) + + inventory = get_processed_inventory(options) + + timestamp = datetime.now().strftime("%Y-%m-%d") + + for target in inventory["dockerfiles"]["targets"]: + template = env.get_template(f"containers/{target['group']}.dockerfile.j2") + dockerfile_location = os.path.join( + options.output_dir, target["name"] + ".dockerfile" + ) + + tags: list[str] = target.get("extra_tags") or [] + tags.insert(0, "$R/$N:latest") + tags.insert(1, "$R/$N:$T") + + target["tags"] = tags + + rendered_dockerfile = template.render( + 
+            timestamp=timestamp,
+            target=target,
+            build_libabigail=options.build_libabigail,
+            build_abi=options.build_abi,
+            registry_hostname=options.registry_hostname,
+            **inventory,
+        )
+        with open(dockerfile_location, "w") as output_file:
+            output_file.write(rendered_dockerfile)
+
+    makefile_template = env.get_template("containers.makefile.j2")
+    rendered_makefile = makefile_template.render(
+        timestamp=timestamp,
+        build_libabigail=options.build_libabigail,
+        build_abi=options.build_abi,
+        registry_hostname=options.registry_hostname,
+        **inventory,
+    )
+    makefile_output_path = os.path.join(options.output_dir, "Makefile")
+    with open(makefile_output_path, "w") as f:
+        f.write(rendered_makefile)
+
+
+if __name__ == "__main__":
+    logging.basicConfig()
+    logging.root.setLevel(0)  # log everything
+    main()
--
2.34.1
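As a quick illustration of the group inheritance handled by
apply_group_config_to_target(), here is a minimal sketch with a toy inventory.
It assumes make_dockerfile.py is importable from the working directory; the
group, target, and package names are hypothetical, not taken from
inventory.yaml:

  # Toy inventory, not part of the patch: shows how a target inherits from
  # its group chain ("debian" -> "all") and how package lists accumulate.
  from make_dockerfile import apply_group_config_to_target

  toy_inventory = {
      "dockerfiles": {
          "groups": {
              "all": {"parent": "NONE", "packages": ["git"]},
              "debian": {"parent": "all", "packages": ["gcc"]},
          },
          "targets": [],
      }
  }

  target = {"name": "debian-11", "group": "debian", "packages": ["meson"]}

  merged = apply_group_config_to_target(
      target, toy_inventory, on_rhel=False, fail_on_unbuildable=False
  )

  # Packages accumulate in override order: the target's own list, then the
  # "all" group, then the more specific "debian" group.
  print(merged["packages"])  # ['meson', 'git', 'gcc']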