From d74e760b0d84b52439972931a6a1d8d264bc1608 Mon Sep 17 00:00:00 2001 From: Anas Husseini Date: Fri, 21 Jun 2024 15:42:36 +0300 Subject: [PATCH 01/10] add 'deprecated' field for deprecated section generation in image docs --- src/docs/generate_oci_doc_yaml.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/docs/generate_oci_doc_yaml.py b/src/docs/generate_oci_doc_yaml.py index c159e2b0..9a1ba959 100755 --- a/src/docs/generate_oci_doc_yaml.py +++ b/src/docs/generate_oci_doc_yaml.py @@ -12,7 +12,9 @@ import subprocess import sys import tempfile +import timezone from typing import Any, Dict, List +from datetime import datetime from dateutil import parser import boto3 @@ -269,6 +271,11 @@ def build_releases_data( "until": eol.strftime("%m/%Y") } + if eol > datetime.now(timezone.utc): + release_data["deprecated"] = { + "date": eol.strftime("%m/%Y") + } + releases.append(release_data) return releases From 11521c5d185b9e53eb67c9076210a4d9fdc952e9 Mon Sep 17 00:00:00 2001 From: linostar Date: Fri, 21 Jun 2024 13:01:12 +0000 Subject: [PATCH 02/10] ci: automatically update oci/mock-rock/_releases.json, from https://github.com/canonical/oci-factory/actions/runs/9613833853 --- oci/mock-rock/_releases.json | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index 9e312931..dcbeeee0 100644 --- a/oci/mock-rock/_releases.json +++ b/oci/mock-rock/_releases.json @@ -1,68 +1,68 @@ { "latest": { "candidate": { - "target": "283" + "target": "1.0-22.04_candidate" }, "beta": { - "target": "283" + "target": "latest_candidate" }, "edge": { - "target": "283" + "target": "latest_beta" }, "end-of-life": "2025-05-01T00:00:00Z" }, "1.0-22.04": { "candidate": { - "target": "283" + "target": "316" }, "beta": { - "target": "283" + "target": "316" }, "edge": { - "target": "283" + "target": "316" }, "end-of-life": "2025-05-01T00:00:00Z" }, "test": { "beta": { - "target": "283" + 
"target": "1.0-22.04_beta" }, "edge": { - "target": "283" + "target": "test_beta" }, "end-of-life": "2026-05-01T00:00:00Z" }, "1.1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "284" + "target": "317" }, "beta": { - "target": "284" + "target": "317" }, "edge": { - "target": "284" + "target": "317" } }, "1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "284" + "target": "317" }, "beta": { - "target": "284" + "target": "317" }, "edge": { - "target": "284" + "target": "317" } }, "1.2-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "beta": { - "target": "285" + "target": "318" }, "edge": { - "target": "285" + "target": "1.2-22.04_beta" } } } \ No newline at end of file From 22b17eba3fc1eeb056474f7690fa299603ea402a Mon Sep 17 00:00:00 2001 From: Linostar Date: Mon, 30 Sep 2024 14:39:06 +0300 Subject: [PATCH 03/10] Do not build/release/test/scan tracks with expired end-of-life values (#241) --- oci/mock-rock/image.yaml | 2 +- .../prepare_single_image_build_matrix.py | 23 +++++++++++++++---- src/image/utils/schema/triggers.py | 18 ++------------- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/oci/mock-rock/image.yaml b/oci/mock-rock/image.yaml index 72a34809..d8670ad5 100644 --- a/oci/mock-rock/image.yaml +++ b/oci/mock-rock/image.yaml @@ -14,7 +14,7 @@ upload: directory: examples/mock-rock/1.0 release: 1.0-22.04: - end-of-life: "2025-05-01T00:00:00Z" + end-of-life: "2024-05-01T00:00:00Z" risks: - candidate - edge diff --git a/src/image/prepare_single_image_build_matrix.py b/src/image/prepare_single_image_build_matrix.py index a3857838..9047d0bd 100755 --- a/src/image/prepare_single_image_build_matrix.py +++ b/src/image/prepare_single_image_build_matrix.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import argparse +from datetime import datetime, timezone import glob import json import os @@ -53,8 +54,10 @@ def validate_image_trigger(data: dict) -> None: builds = image_trigger.get("upload", []) 
release_to = "true" if "release" in image_trigger else "" + + img_number = 0 # inject some extra metadata into the matrix data - for img_number, _ in enumerate(builds): + while img_number < len(builds): builds[img_number]["name"] = args.oci_path.rstrip("/").split("/")[-1] builds[img_number]["path"] = args.oci_path # make sure every build of this image has a unique identifier @@ -69,13 +72,23 @@ def validate_image_trigger(data: dict) -> None: # set an output as a marker for later knowing if we need to release if "release" in builds[img_number]: - release_to = "true" - # the workflow GH matrix has a problem parsing nested JSON dicts - # so let's remove this field since we don't need it for the builds - builds[img_number]["release"] = "true" + min_eol = datetime.strptime(min( + v["end-of-life"] for v in builds[img_number]["release"].values() + ), "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc) + if min_eol < datetime.now(timezone.utc): + print("Track skipped because it reached its end of life") + del builds[img_number] + continue + else: + release_to = "true" + # the workflow GH matrix has a problem parsing nested JSON dicts + # so let's remove this field since we don't need it for the builds + builds[img_number]["release"] = "true" else: builds[img_number]["release"] = "" + img_number += 1 + matrix = {"include": builds} print(f"{args.oci_path} - build matrix:\n{json.dumps(matrix, indent=4)}") with open(os.environ["GITHUB_OUTPUT"], "a") as gh_out: diff --git a/src/image/utils/schema/triggers.py b/src/image/utils/schema/triggers.py index 221c9ce0..e9e6a4d2 100644 --- a/src/image/utils/schema/triggers.py +++ b/src/image/utils/schema/triggers.py @@ -1,6 +1,6 @@ import pydantic -from datetime import datetime, timezone +from datetime import datetime from typing import Dict, List, Literal, Optional @@ -25,13 +25,6 @@ class ImageUploadReleaseSchema(pydantic.BaseModel): class Config: extra = pydantic.Extra.forbid - @pydantic.validator("end_of_life") - def 
ensure_still_supported(cls, v: datetime) -> datetime: - """ensure that the end of life isn't reached.""" - if v < datetime.now(timezone.utc): - raise ImageReachedEol("This track has reached its end of life") - return v - class ImageUploadSchema(pydantic.BaseModel): """Schema of each upload within the image.yaml files.""" @@ -58,7 +51,7 @@ class Config: extra = pydantic.Extra.forbid @pydantic.validator("stable", "candidate", "beta", "edge", pre=True) - def _check_risks(cls, values: List) -> str: + def _check_risks(cls, values: List) -> List: """There must be at least one risk specified.""" error = "At least one risk must be specified per track." if not any(values): @@ -66,13 +59,6 @@ def _check_risks(cls, values: List) -> str: return values - @pydantic.validator("end_of_life") - def ensure_still_supported(cls, v: datetime) -> datetime: - """ensure that the end of life isn't reached.""" - if v < datetime.now(timezone.utc): - raise ImageReachedEol("This track has reached its end of life") - return v - class ImageSchema(pydantic.BaseModel): """Validates the schema of the image.yaml files.""" From 1cc99ee2e4e559b129b32ed3d6c5b03987b5d0ac Mon Sep 17 00:00:00 2001 From: Anas Husseini Date: Tue, 12 Nov 2024 15:47:21 +0200 Subject: [PATCH 04/10] update for trivy scans --- .github/workflows/Vulnerability-Scan.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/Vulnerability-Scan.yaml b/.github/workflows/Vulnerability-Scan.yaml index fb94c993..3d1c0876 100644 --- a/.github/workflows/Vulnerability-Scan.yaml +++ b/.github/workflows/Vulnerability-Scan.yaml @@ -32,6 +32,8 @@ env: TEST_IMAGE_NAME: 'test-img' TEST_IMAGE_TAG: 'test' SKOPEO_IMAGE: 'quay.io/skopeo/stable:v1.15.1' + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db,aquasec/trivy-db,ghcr.io/aquasecurity/trivy-db + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db,aquasec/trivy-java-db,ghcr.io/aquasecurity/trivy-java-db jobs: test-vulnerabilities: @@ -87,7 
+89,7 @@ jobs: echo "file=$file" >> "$GITHUB_OUTPUT" - name: Scan for vulnerabilities - uses: aquasecurity/trivy-action@0.9.2 + uses: aquasecurity/trivy-action@0.28.0 with: # NOTE: we're allowing images with vulnerabilities to be published ignore-unfixed: true From 9336b05ed57a8d737e8473792e80dd329fdd5812 Mon Sep 17 00:00:00 2001 From: Anas Husseini Date: Tue, 12 Nov 2024 17:10:27 +0200 Subject: [PATCH 05/10] fix: cpc build tools new deps --- src/image/requirements.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/image/requirements.sh b/src/image/requirements.sh index 9f7f1966..22c97065 100755 --- a/src/image/requirements.sh +++ b/src/image/requirements.sh @@ -22,8 +22,9 @@ git remote add origin git+ssh://${ROCKS_DEV_LP_USERNAME}@${CPC_BUILD_TOOLS_REPO} git fetch --depth 1 origin main # ${CPC_BUILD_TOOLS_REPO_REF} git checkout FETCH_HEAD -sudo mv oci_registry_upload.py /usr/local/bin/cpc-build-tools.oci-registry-upload -sudo chmod +x /usr/local/bin/cpc-build-tools.oci-registry-upload +sudo mv /tmp/cpc-build-tools/* /usr/local/bin/ +sudo chmod +x /usr/local/bin/oci_registry_upload.py +ln -s oci_registry_upload.py /usr/local/bin/cpc-build-tools.oci-registry-upload popd ## From 23694e577d680483965e36e656f967bf35d12377 Mon Sep 17 00:00:00 2001 From: linostar Date: Wed, 13 Nov 2024 07:05:45 +0000 Subject: [PATCH 06/10] ci: automatically update oci/mock-rock/_releases.json, from https://github.com/canonical/oci-factory/actions/runs/11800095181 --- oci/mock-rock/_releases.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index dcbeeee0..bd231fe5 100644 --- a/oci/mock-rock/_releases.json +++ b/oci/mock-rock/_releases.json @@ -35,31 +35,31 @@ "1.1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "317" + "target": "697" }, "beta": { - "target": "317" + "target": "697" }, "edge": { - "target": "317" + "target": "697" } }, 
"1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "317" + "target": "697" }, "beta": { - "target": "317" + "target": "697" }, "edge": { - "target": "317" + "target": "697" } }, "1.2-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "beta": { - "target": "318" + "target": "698" }, "edge": { "target": "1.2-22.04_beta" From 03270f4d04f9e82bea2756afb9f09fdbc4526afe Mon Sep 17 00:00:00 2001 From: Anas Husseini Date: Wed, 13 Nov 2024 09:40:28 +0200 Subject: [PATCH 07/10] fix conflict --- src/image/prepare_single_image_build_matrix.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/image/prepare_single_image_build_matrix.py b/src/image/prepare_single_image_build_matrix.py index 9047d0bd..5da5f25e 100755 --- a/src/image/prepare_single_image_build_matrix.py +++ b/src/image/prepare_single_image_build_matrix.py @@ -72,9 +72,10 @@ def validate_image_trigger(data: dict) -> None: # set an output as a marker for later knowing if we need to release if "release" in builds[img_number]: - min_eol = datetime.strptime(min( - v["end-of-life"] for v in builds[img_number]["release"].values() - ), "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc) + min_eol = datetime.strptime( + min(v["end-of-life"] for v in builds[img_number]["release"].values()), + "%Y-%m-%dT%H:%M:%SZ", + ).replace(tzinfo=timezone.utc) if min_eol < datetime.now(timezone.utc): print("Track skipped because it reached its end of life") del builds[img_number] From 16bae7743d670bb75d10268ea688de54639c57a7 Mon Sep 17 00:00:00 2001 From: linostar Date: Wed, 13 Nov 2024 08:17:43 +0000 Subject: [PATCH 08/10] ci: automatically update oci/mock-rock/_releases.json, from https://github.com/canonical/oci-factory/actions/runs/11812750186 --- oci/mock-rock/_releases.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index bd231fe5..ecbc807c 100644 --- a/oci/mock-rock/_releases.json +++ 
b/oci/mock-rock/_releases.json @@ -35,31 +35,31 @@ "1.1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "697" + "target": "699" }, "beta": { - "target": "697" + "target": "699" }, "edge": { - "target": "697" + "target": "699" } }, "1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "697" + "target": "699" }, "beta": { - "target": "697" + "target": "699" }, "edge": { - "target": "697" + "target": "699" } }, "1.2-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "beta": { - "target": "698" + "target": "700" }, "edge": { "target": "1.2-22.04_beta" From 460436c90681dcde52b2a7c0d8fc329309e58608 Mon Sep 17 00:00:00 2001 From: Anas Husseini Date: Wed, 13 Nov 2024 10:27:36 +0200 Subject: [PATCH 09/10] fix import --- src/docs/generate_oci_doc_yaml.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/docs/generate_oci_doc_yaml.py b/src/docs/generate_oci_doc_yaml.py index 9a1ba959..33b39531 100755 --- a/src/docs/generate_oci_doc_yaml.py +++ b/src/docs/generate_oci_doc_yaml.py @@ -12,9 +12,8 @@ import subprocess import sys import tempfile -import timezone from typing import Any, Dict, List -from datetime import datetime +from datetime import datetime, timezone from dateutil import parser import boto3 From 31c87856675769b62ccea13dee86c21709c5a617 Mon Sep 17 00:00:00 2001 From: Adrian Clay Lake Date: Thu, 17 Oct 2024 16:31:12 +0200 Subject: [PATCH 10/10] feat: ROCKS 1452 - Refactor Build Rock workflow to be externally reusable (#233) --- .github/actions/checkout/action.yaml | 52 +++ .github/workflows/Build-Rock.yaml | 315 ++++++++++-------- .github/workflows/Image.yaml | 171 +++++++++- .github/workflows/Tests.yaml | 12 + .github/workflows/_Test-OCI-Factory.yaml | 82 ++++- oci/mock-rock/_releases.json | 16 +- src/build_rock/assemble_rock/assemble.sh | 54 +++ src/build_rock/assemble_rock/requirements.sh | 6 + .../configure/generate_build_matrix.py | 120 +++++++ src/build_rock/configure/requirements.txt | 2 
+ src/build_rock/lpci_build/lpci_build.sh | 58 ++++ src/docs/schema/triggers.py | 1 + src/shared/github_output.py | 52 +++ src/shared/release_info.py | 2 +- tests/data/rockcraft.yaml | 18 + tests/etc/requirements.txt | 2 + tests/fixtures/buffers.py | 22 ++ tests/fixtures/sample_data.py | 26 ++ .../test_convert_junit_xml_to_markdown.py | 18 + .../test_junit_to_markdown_output.py | 18 + tests/unit/test_generate_build_matrix.py | 78 +++++ tests/unit/test_github_output.py | 49 +++ 22 files changed, 1006 insertions(+), 168 deletions(-) create mode 100644 .github/actions/checkout/action.yaml create mode 100755 src/build_rock/assemble_rock/assemble.sh create mode 100755 src/build_rock/assemble_rock/requirements.sh create mode 100755 src/build_rock/configure/generate_build_matrix.py create mode 100644 src/build_rock/configure/requirements.txt create mode 100755 src/build_rock/lpci_build/lpci_build.sh create mode 100755 src/shared/github_output.py mode change 100755 => 100644 src/shared/release_info.py create mode 100644 tests/data/rockcraft.yaml create mode 100644 tests/etc/requirements.txt create mode 100644 tests/fixtures/buffers.py create mode 100644 tests/fixtures/sample_data.py create mode 100644 tests/integration/test_convert_junit_xml_to_markdown.py create mode 100644 tests/integration/test_junit_to_markdown_output.py create mode 100644 tests/unit/test_generate_build_matrix.py create mode 100755 tests/unit/test_github_output.py diff --git a/.github/actions/checkout/action.yaml b/.github/actions/checkout/action.yaml new file mode 100644 index 00000000..f2f0dcf1 --- /dev/null +++ b/.github/actions/checkout/action.yaml @@ -0,0 +1,52 @@ +name: Git Checkout +description: 'Checkout action supporting both github and non github repositories.' 
+ + +inputs: + repository: + description: 'Github repository in the format owner/repo or external http(s) URL' + required: true + ref: + description: 'The branch, tag or SHA to checkout' + default: '' + path: + description: 'Relative path under $GITHUB_WORKSPACE to place the repository' + default: '.' + submodules: + description: 'Whether to checkout submodules. true|false|recursive according to actions/checkout@v4' + default: 'false' + github-server-url: + description: 'The base URL for the GitHub instance that you are trying to clone from' + default: 'https://github.com' + +runs: + using: "composite" + steps: + - name: Checkout + shell: bash + run: | + + # If URL lacks the protocol, assume it is a github repo + if [[ "${{ inputs.repository }}" =~ https?:// ]] + then + git_url="${{ inputs.repository }}" + else + git_url="${{ inputs.github-server-url }}/${{ inputs.repository }}.git" + fi + + # create repo path relative to GITHUB_WORKSPACE as per actions/checkout@v4 + repo_path="$GITHUB_WORKSPACE/${{ inputs.path }}" + + # clone the repo and cd into it + git clone $git_url "$repo_path" + cd "$repo_path" + + # checkout the correct ref + git config advice.detachedHead false + git checkout ${{ inputs.ref }} + + # and update sub modules if required + if ${{ inputs.submodules == 'true' || inputs.submodules == 'recursive' }} + then + git submodule update ${{ inputs.submodules == 'recursive' && '--recursive' || '' }} + fi diff --git a/.github/workflows/Build-Rock.yaml b/.github/workflows/Build-Rock.yaml index 495c8650..31dc1ef8 100644 --- a/.github/workflows/Build-Rock.yaml +++ b/.github/workflows/Build-Rock.yaml @@ -3,204 +3,231 @@ name: Build rock on: workflow_call: inputs: + # build parameters oci-archive-name: - description: "Final filename of the rock's OCI archive" + description: "Final filename of the rock OCI archive." 
type: string required: true - oci-factory-path: - description: "Path, in the OCI Factory, to this rock" + build-id: + description: "Optional string for identifying workflow jobs in GitHub UI" type: string - required: true - rock-name: - description: "Name of the rock" - type: string - required: true + + # source parameters rock-repo: - description: "Public Git repo where to build the rock from" + description: "Public Git repo where to build the rock from." type: string required: true rock-repo-commit: - description: "Git ref from where to build the rock from" + description: "Git ref from where to build the rock from." type: string required: true rockfile-directory: - description: "Directory, in 'rock-repo', where to find the rockcraft.yaml file" + description: "Directory in repository where to find the rockcraft.yaml file." type: string required: true + # parameters for multi-arch builds + arch-map: + description: "JSON string mapping target architecture to runners." + type: string + default: '{"amd64": ["linux", "X64"], "arm64": ["linux", "ARM64"]}' + lpci-fallback: + description: "Enable fallback to Launchpad build when runners for target arch are not available." + type: boolean + default: false + env: - ROCKS_CI_FOLDER: ci-rocks + ROCK_REPO_DIR: rock-repo # path where the image repo is cloned into + ROCK_CI_FOLDER: ci-rocks # path of uploaded/downloaded artifacts jobs: - prepare-multi-arch-matrix: + configure-build: + # configure-build reads the rockcraft.yaml, creating one or more *-build job runs + # depending on the target architecture. 
runs-on: ubuntu-22.04 outputs: - build-for: ${{ steps.rock-platforms.outputs.build-for }} - build-with-lpci: ${{ steps.rock-platforms.outputs.build-with-lpci }} + runner-build-matrix: ${{ steps.configure.outputs.runner-build-matrix }} + lpci-build-matrix: ${{ steps.configure.outputs.lpci-build-matrix }} + oci-factory-ref: ${{ steps.workflow-version.outputs.sha }} + name: "configure-build ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - - name: Clone GitHub image repository + + - name: Get Workflow Version + # Note: we may need to pass a github token when working with private repositories. + # https://github.com/canonical/get-workflow-version-action + id: workflow-version + uses: canonical/get-workflow-version-action@v1 + with: + repository-name: canonical/oci-factory + file-name: Build-Rock.yaml + + - name: Cloning OCI Factory uses: actions/checkout@v4 - id: clone-image-repo - continue-on-error: true with: - repository: ${{ inputs.rock-repo }} - fetch-depth: 0 - - name: Clone generic image repository - if: ${{ steps.clone-image-repo.outcome == 'failure' }} - run: | - git clone ${{ inputs.rock-repo }} . - - run: git checkout ${{ inputs.rock-repo-commit }} - - run: sudo snap install yq --channel=v4/stable - - name: Validate image naming and base - working-directory: ${{ inputs.rockfile-directory }} - run: | - rock_name=`cat rockcraft.y*ml | yq -r .name` - if [[ "${{ inputs.oci-factory-path }}" != *"${rock_name}"* ]] - then - echo "ERROR: the rock's name '${rock_name}' must match the OCI folder name!" 
- exit 1 - fi - - uses: actions/setup-python@v5 + repository: canonical/oci-factory + ref: ${{ steps.workflow-version.outputs.sha }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout with: - python-version: '3.x' - - run: pip install pyyaml - - name: Get rock archs - uses: jannekem/run-python-script-action@v1 - id: rock-platforms + repository: ${{ inputs.rock-repo }} + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Installing Python + uses: actions/setup-python@v5 with: - script: | - import yaml - import os - - BUILD_WITH_LPCI = 0 - - with open("${{ inputs.rockfile-directory }}/rockcraft.yaml") as rf: - rockcraft_yaml = yaml.safe_load(rf) - - platforms = rockcraft_yaml["platforms"] - - target_archs = [] - for platf, values in platforms.items(): - if isinstance(values, dict) and "build-for" in values: - target_archs += list(values["build-for"]) - continue - target_archs.append(platf) - - print(f"Target architectures: {set(target_archs)}") - - matrix = {"include": []} - gh_supported_archs = {"amd64": "ubuntu-22.04", "arm64": "Ubuntu_ARM64_4C_16G_01"} - if set(target_archs) - set(gh_supported_archs.keys()): - # Then there are other target archs, so we need to build in LP - matrix["include"].append( - {"architecture": "-".join(set(target_archs)), "runner": gh_supported_archs["amd64"]} - ) - BUILD_WITH_LPCI = 1 - else: - for runner_arch, runner_name in gh_supported_archs.items(): - if runner_arch in target_archs: - matrix["include"].append( - {"architecture": runner_arch, "runner": runner_name} - ) - - with open(os.environ["GITHUB_OUTPUT"], "a") as gh_out: - print(f"build-for={matrix}", file=gh_out) - print(f"build-with-lpci={BUILD_WITH_LPCI}", file=gh_out) - - build: - needs: [prepare-multi-arch-matrix] + python-version: "3.x" + + - name: Installing Python requirements + run: pip install -r src/build_rock/configure/requirements.txt + + # Configure matrices for each *-build job 
+ - name: Configuring Jobs + id: configure + run: | + python3 -m src.build_rock.configure.generate_build_matrix \ + --rockfile-directory "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" \ + --lpci-fallback "${{ toJSON(inputs.lpci-fallback) }}" \ + --config ${{ toJSON(inputs.arch-map) }} # important: do not use quotes here + + runner-build: + # runner-build builds rocks per target architecture using pre configured runner images. + needs: [configure-build] + if: fromJSON(needs.configure-build.outputs.runner-build-matrix).include[0] != '' strategy: fail-fast: true - matrix: ${{ fromJSON(needs.prepare-multi-arch-matrix.outputs.build-for) }} + matrix: ${{ fromJSON(needs.configure-build.outputs.runner-build-matrix) }} runs-on: ${{ matrix.runner }} - name: 'Build ${{ inputs.rock-name }} | ${{ matrix.architecture }}' + name: "runner-build | ${{ matrix.architecture }} ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - - name: Clone GitHub image repository + + - name: Cloning OCI Factory uses: actions/checkout@v4 - id: clone-image-repo - continue-on-error: true + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout with: repository: ${{ inputs.rock-repo }} - fetch-depth: 0 - - name: Clone generic image repository - if: ${{ steps.clone-image-repo.outcome == 'failure' }} - run: | - git clone ${{ inputs.rock-repo }} . 
- - run: git checkout ${{ inputs.rock-repo-commit }} - - name: Build rock ${{ inputs.rock-name }} + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Building Target id: rockcraft - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 0 uses: canonical/craft-actions/rockcraft-pack@main with: - path: "${{ inputs.rockfile-directory }}" + path: "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" verbosity: debug - - uses: actions/setup-python@v5 - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 1 + + - name: Collecting Artifacts + id: collect-artifacts + run: | + mkdir -p ${{ env.ROCK_CI_FOLDER }} && cp ${{ steps.rockcraft.outputs.rock }} "$_" + echo "filename=$(basename ${{ steps.rockcraft.outputs.rock }})" >> $GITHUB_OUTPUT + + - name: Uploading Artifacts + uses: actions/upload-artifact@v4 with: - python-version: '3.x' - - uses: nick-fields/retry@v3.0.0 - name: Build multi-arch ${{ inputs.rock-name }} in Launchpad - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 1 + name: ${{ inputs.oci-archive-name }}-${{ steps.collect-artifacts.outputs.filename }} + path: ${{ env.ROCK_CI_FOLDER }} + if-no-files-found: error + + lpci-build: + # lpci-build is a fallback for building rocks if no suitable runners are + # configured for the required architecture. Builds in this job will be + # outsourced to Launchpad for completion. 
+ # Note the Secret + needs: [configure-build] + if: fromJSON(needs.configure-build.outputs.lpci-build-matrix).include[0] != '' + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.configure-build.outputs.lpci-build-matrix) }} + runs-on: ubuntu-22.04 + name: "lpci-build | ${{ matrix.architecture }} ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" + steps: + + - name: Cloning OCI Factory + uses: actions/checkout@v4 + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout + with: + repository: ${{ inputs.rock-repo }} + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Building Target + # TODO: Replace this retry action with bash equivalent for better testing + uses: nick-fields/retry@v3.0.0 with: timeout_minutes: 180 max_attempts: 4 polling_interval_seconds: 5 retry_wait_seconds: 30 command: | - set -ex - cd ${{ inputs.rockfile-directory }} - rocks_toolbox="$(mktemp -d)" - git clone --depth 1 --branch v1.1.2 https://github.com/canonical/rocks-toolbox $rocks_toolbox - ${rocks_toolbox}/rockcraft_lpci_build/requirements.sh - pip3 install -r ${rocks_toolbox}/rockcraft_lpci_build/requirements.txt - - python3 ${rocks_toolbox}/rockcraft_lpci_build/rockcraft_lpci_build.py \ - --lp-credentials-b64 "${{ secrets.LP_CREDENTIALS_B64 }}" \ - --launchpad-accept-public-upload - - name: Rename rock OCI archive - id: rock + src/build_rock/lpci_build/lpci_build.sh \ + -c "${{ secrets.LP_CREDENTIALS_B64 }}" \ + -d "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" + + - name: Collecting Artifacts + id: collect-artifacts run: | - mkdir ${{ env.ROCKS_CI_FOLDER }} - if [ ${{ needs.prepare-multi-arch-matrix.outputs.build-with-lpci }} -eq 0 ] - then - cp ${{ steps.rockcraft.outputs.rock }} ${{ env.ROCKS_CI_FOLDER }}/$(basename ${{ steps.rockcraft.outputs.rock }}) - echo 
"filename=$(basename ${{ steps.rockcraft.outputs.rock }})" >> $GITHUB_OUTPUT - else - cp ${{ inputs.rockfile-directory }}/*.rock ${{ env.ROCKS_CI_FOLDER }} - echo "filename=${{ inputs.rock-name }}_${{ matrix.architecture }}" >> $GITHUB_OUTPUT - fi - - name: Upload ${{ inputs.rock-name }} for ${{ matrix.architecture }} + mkdir -p ${{ env.ROCK_CI_FOLDER }} && cp ${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}/*.rock "$_" + echo "filename=${{ matrix.rock-name }}_${{ matrix.architecture }}" >> $GITHUB_OUTPUT + + - name: Uploading Artifacts uses: actions/upload-artifact@v4 with: - name: ${{ inputs.oci-archive-name }}-${{ steps.rock.outputs.filename }} - path: ${{ env.ROCKS_CI_FOLDER }} + name: ${{ inputs.oci-archive-name }}-${{ steps.collect-artifacts.outputs.filename }} + path: ${{ env.ROCK_CI_FOLDER }} if-no-files-found: error assemble-rock: - needs: [prepare-multi-arch-matrix, build] + # Assemble individual single-arch rocks into multi-arch rocks + needs: [runner-build, lpci-build, configure-build] runs-on: ubuntu-22.04 + # Always run even if one of the *-build jobs are skipped + # Nice example from benjamin-bergia/github-workflow-patterns... 
+ if: ${{ always() && contains(needs.*.result, 'success') && !(contains(needs.*.result, 'failure')) }} + name: "assemble-rock ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - - uses: actions/download-artifact@v4 + # Job Setup + - name: Cloning OCI Factory + uses: actions/checkout@v4 + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - run: src/build_rock/assemble_rock/requirements.sh + - name: Downloading Single Arch rocks + uses: actions/download-artifact@v4 id: download - - run: sudo apt update && sudo apt install buildah -y - - name: Merge single-arch rocks into multi-arch OCI archive + with: + path: ${{ env.ROCK_CI_FOLDER }} + pattern: ${{ inputs.oci-archive-name }}-* + + - name: Assembling Multi Arch rock run: | - set -xe - ls ./${{ inputs.oci-archive-name }}* - buildah manifest create multi-arch-rock - for rock in `find ${{ inputs.oci-archive-name }}*/*.rock` - do - test -f $rock - buildah manifest add multi-arch-rock oci-archive:$rock - done - buildah manifest push --all multi-arch-rock oci-archive:${{ inputs.oci-archive-name }} - - name: Upload multi-arch ${{ inputs.oci-archive-name }} OCI archive + src/build_rock/assemble_rock/assemble.sh \ + -n "${{ inputs.oci-archive-name }}" \ + -d "${{ env.ROCK_CI_FOLDER }}" + + - name: Uploading Multi Arch rock uses: actions/upload-artifact@v4 with: name: ${{ inputs.oci-archive-name }} path: ${{ inputs.oci-archive-name }} if-no-files-found: error - - uses: actions/cache/save@v4 - with: - path: ${{ inputs.oci-archive-name }} - key: ${{ github.run_id }}-${{ inputs.oci-archive-name }} diff --git a/.github/workflows/Image.yaml b/.github/workflows/Image.yaml index 2bd21039..c132510f 100644 --- a/.github/workflows/Image.yaml +++ b/.github/workflows/Image.yaml @@ -1,5 +1,5 @@ name: Image -run-name: 'Image - ${{ inputs.oci-image-name || github.triggering_actor }} - ${{ github.ref }}' +run-name: "Image - ${{ 
inputs.oci-image-name || github.triggering_actor }} - ${{ github.ref }}" on: push: @@ -24,8 +24,8 @@ on: required: true type: boolean default: false - external_ref_id: #(1) - description: 'Optional ID for unique run detection' + external_ref_id: # (1) + description: "Optional ID for unique run detection" required: false type: string default: "default-id" @@ -136,23 +136,75 @@ jobs: path: ${{ steps.prepare-matrix.outputs.revision-data-dir }} key: ${{ steps.prepare-matrix.outputs.revision-data-cache-key }} - run-build: + validate-matrix: + # validate matrix prepared in previous job before running Build-Rock workflow. + runs-on: ubuntu-22.04 needs: [prepare-build] + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} + steps: + + - name: Clone GitHub image repository + uses: actions/checkout@v4 + with: + repository: ${{ matrix.source }} + ref: ${{ matrix.commit }} + submodules: "recursive" + fetch-depth: 1 + + - name: Installing yq + run: sudo snap install yq --channel=v4/stable + + - name: Validate image naming and base + run: | + rock_name=`cat "${{ matrix.directory }}"/rockcraft.y*ml | yq -r .name` + folder_name="${{ matrix.path }}" + if [[ "${folder_name}" != *"${rock_name}"* ]] + then + echo "ERROR: the OCI folder name '${folder_name}', must contain the rock's name '${rock_name}'." 
+ exit 1 + fi + + run-build: + needs: [prepare-build, validate-matrix] strategy: fail-fast: true matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} uses: ./.github/workflows/Build-Rock.yaml with: - oci-archive-name: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.revision }} - oci-factory-path: ${{ matrix.path }} - rock-name: ${{ matrix.name }} + oci-archive-name: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + build-id: ${{ matrix.name }} rock-repo: ${{ matrix.source }} rock-repo-commit: ${{ matrix.commit }} rockfile-directory: ${{ matrix.directory }} + lpci-fallback: true secrets: inherit - test: + tmp-cache-job: + # TODO: This is a temporary job that will be removed when the refactored test job is merged. + # Going forward we download the built rocks from artifacts instead of cache. This job takes + # the uploaded rocks then re-caches them for compatibility. + name: Temporary step to cache rocks + runs-on: ubuntu-22.04 needs: [prepare-build, run-build] + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} + steps: + - name: Download rock + uses: actions/download-artifact@v4 + with: + name: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + + - uses: actions/cache/save@v4 + with: + key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + path: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + + test: + needs: [prepare-build, run-build, tmp-cache-job] + # TODO: Remove tmp-cache-job when removing the job tmp-cache-job name: Test strategy: fail-fast: true @@ -162,9 +214,99 @@ jobs: oci-image-name: "${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.revision }}" oci-image-path: "oci/${{ matrix.name }}" test-from: "cache" - cache-key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.revision }} + cache-key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ 
matrix.dir_identifier }} secrets: inherit + prepare-upload: + runs-on: ubuntu-22.04 + needs: [prepare-build, run-build, test] + name: Prepare upload + if: ${{ inputs.upload || (github.ref_name == 'main' && github.event_name == 'push') }} + env: + OS_USERNAME: ${{ secrets.SWIFT_OS_USERNAME }} + OS_TENANT_NAME: ${{ secrets.SWIFT_OS_TENANT_NAME }} + OS_PASSWORD: ${{ secrets.SWIFT_OS_PASSWORD }} + OS_REGION_NAME: ${{ secrets.SWIFT_OS_REGION_NAME }} + OS_STORAGE_URL: ${{ secrets.SWIFT_OS_STORAGE_URL }} + IMAGE_NAME: ${{ needs.prepare-build.outputs.oci-img-name }} + SWIFT_CONTAINER_NAME: ${{ vars.SWIFT_CONTAINER_NAME }} + DATA_DIR: "revision-data" + outputs: + build-matrix: ${{ steps.prepare-matrix.outputs.build-matrix }} + revision-data-cache-key: ${{ steps.prepare-matrix.outputs.revision-data-cache-key }} + steps: + - uses: actions/checkout@v4 + + - name: Use custom image trigger + if: ${{ inputs.b64-image-trigger != '' }} + run: echo ${{ inputs.b64-image-trigger }} | base64 -d > ${{ needs.prepare-build.outputs.oci-img-path }}/image.yaml + + - uses: actions/setup-python@v5 + with: + python-version: "3.x" + + - run: | + ./src/uploads/requirements.sh + pip install -r src/image/requirements.txt -r src/uploads/requirements.txt + + - name: Upload the lockfile for the image + id: swift-lock + run: | + ./src/uploads/swift_lockfile_lock.sh \ + ${{ needs.prepare-build.outputs.oci-img-name }} + + # Here starts the critical section, have to be executed in sequence outside of matrix. 
+ - name: Get next revision number + id: get-next-revision + run: ./src/image/define_image_revision.sh + + - name: Prepare builds matrix for upload + id: prepare-matrix + run: | + set -ex + + mkdir ${{ env.DATA_DIR }} + + ./src/image/prepare_single_image_build_matrix.py \ + --oci-path ${{ needs.prepare-build.outputs.oci-img-path }} \ + --revision-data-dir ${{ env.DATA_DIR }} \ + --next-revision ${{ steps.get-next-revision.outputs.revision }} \ + --infer-image-track + + echo "revision-data-cache-key=${{ github.run_id }}-${{ env.DATA_DIR }}-$(date +%s)" >> "$GITHUB_OUTPUT" + + - name: Preempt Swift slot + run: | + ./src/uploads/preempt_swift_slots.sh ${{ env.DATA_DIR }} + + # Here leaves the critical section. + # The lock will be removed even if the steps above fail, + # or the workflow is cancelled. + - name: Remove the lockfile for the image + # Failing to lock the swift container can mean there are multiple + # workflows trying to upload the same image at the same time. + # Therefore we should not remove the lockfile if the swift lock failed. + if: ${{ always() && steps.swift-lock.outcome != 'failure' }} + run: | + ./src/uploads/swift_lockfile_unlock.sh \ + ${{ needs.prepare-build.outputs.oci-img-name }} + + # The revision files have to be sanitised before merging, + # since the `track` field should not be present. 
+ - name: Sanitise revision files + run: | + set -ex + for revision_file in `ls ${{ env.DATA_DIR }}` + do + jq 'del(.track, .base)' ${{ env.DATA_DIR }}/$revision_file > ${{ env.DATA_DIR }}/$revision_file.tmp + mv ${{ env.DATA_DIR }}/$revision_file.tmp ${{ env.DATA_DIR }}/$revision_file + done + + - uses: actions/cache/save@v4 + with: + path: ${{ steps.prepare-matrix.outputs.revision-data-dir }} + key: ${{ steps.prepare-matrix.outputs.revision-data-cache-key }} + upload: runs-on: ubuntu-22.04 needs: [prepare-build, run-build, test] @@ -519,15 +661,16 @@ jobs: notify: runs-on: ubuntu-22.04 name: Notify - needs: [prepare-build, run-build, upload, prepare-releases, generate-provenance] - if: ${{ always() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }} + needs: + [prepare-build, run-build, upload, prepare-releases, generate-provenance] + if: ${{ !cancelled() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }} steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.x' - + python-version: "3.x" + - name: Summarize workflow failure message id: get-summary run: | @@ -549,7 +692,7 @@ jobs: URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} SUMMARY: ${{ steps.get-summary.outputs.summary }} FOOTER: "Triggered by ${{ github.triggering_actor }}. Ref: ${{ github.ref }}. 
Attempts: ${{ github.run_attempt }}" - TITLE: '${{ needs.prepare-build.outputs.oci-img-name }}: failed to build->upload->release' + TITLE: "${{ needs.prepare-build.outputs.oci-img-name }}: failed to build->upload->release" run: | for channel in $(echo ${{ steps.get-contacts.outputs.mattermost-channels }} | tr ',' ' ') do diff --git a/.github/workflows/Tests.yaml b/.github/workflows/Tests.yaml index e2a3fce0..41d271f5 100644 --- a/.github/workflows/Tests.yaml +++ b/.github/workflows/Tests.yaml @@ -66,6 +66,18 @@ env: DIVE_IMAGE: 'wagoodman/dive:v0.12' jobs: + access-check: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - name: Validate access to triggered image + uses: ./.github/actions/validate-actor + if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} + with: + admin-only: true + image-path: ${{ inputs.oci-image-path }} + github-token: ${{ secrets.ROCKSBOT_TOKEN }} + fetch-oci-image: runs-on: ubuntu-22.04 name: Fetch OCI image for testing diff --git a/.github/workflows/_Test-OCI-Factory.yaml b/.github/workflows/_Test-OCI-Factory.yaml index dab257f9..ea914e78 100644 --- a/.github/workflows/_Test-OCI-Factory.yaml +++ b/.github/workflows/_Test-OCI-Factory.yaml @@ -11,9 +11,89 @@ on: - "examples/**" - "oci/mock*" - "src/**" - - "!src/workflow-engine/**" + - "tools/**" + - "tests/**" + - "!tools/workflow-engine/**" + - "!tools/cli-client/**" + +env: + # local path to clone the oci-factory to + + # path of pytest junit output + PYTEST_RESULT_PATH: pytest_results.xml jobs: + access-check: + name: Validate access to mock-rock + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/validate-actor + with: + admin-only: true + image-path: "oci/mock-rock" + github-token: ${{ secrets.ROCKSBOT_TOKEN }} + + pytest: + # Trigger python unit tests across the repository + name: pytest + runs-on: ubuntu-22.04 + steps: + # Job Setup + - uses: actions/checkout@v4 + with: + 
fetch-depth: 1 + + - uses: actions/setup-python@v5 + with: + python-version: "3.x" + + # Note: Add additional dependency installation lines as required below + # test-oci-factory/pytest requirements + - run: pip install -r tests/etc/requirements.txt + + # build_rock/configure requirements + - run: pip install -r src/build_rock/configure/requirements.txt + + - name: Run pytest + continue-on-error: true + run: | + python3 -m pytest --junit-xml "${{ env.PYTEST_RESULT_PATH }}" + + - name: Generate Summary + if: ${{ !cancelled() }} + run: | + python3 -m tools.junit_to_markdown --input-junit "${{ env.PYTEST_RESULT_PATH }}" >> $GITHUB_STEP_SUMMARY + + - name: Upload pytest Result + if: ${{ !cancelled() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ env.PYTEST_RESULT_PATH }} + path: ${{ env.PYTEST_RESULT_PATH }} + if-no-files-found: error + + bats-test: + # Trigger bash unit tests across the repository + name: bats + runs-on: ubuntu-22.04 + steps: + # Job Setup + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Install bats + run: | + sudo apt-get update + sudo apt-get install -y bats + + - name: Run bats + env: + GITHUB_TOKEN: ${{ secrets.ROCKSBOT_TOKEN }} + run: | + find ${{ github.workspace }} -name 'test-*.bats' | xargs bats + test-workflows: name: Trigger internal tests for mock-rock uses: ./.github/workflows/Image.yaml diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index ecbc807c..7eb048f2 100644 --- a/oci/mock-rock/_releases.json +++ b/oci/mock-rock/_releases.json @@ -35,34 +35,34 @@ "1.1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "699" + "target": "623" }, "beta": { - "target": "699" + "target": "623" }, "edge": { - "target": "699" + "target": "623" } }, "1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "699" + "target": "623" }, "beta": { - "target": "699" + "target": "623" }, "edge": { - "target": "699" + "target": "623" } }, "1.2-22.04": { 
"end-of-life": "2025-05-01T00:00:00Z", "beta": { - "target": "700" + "target": "624" }, "edge": { "target": "1.2-22.04_beta" } } -} \ No newline at end of file +} diff --git a/src/build_rock/assemble_rock/assemble.sh b/src/build_rock/assemble_rock/assemble.sh new file mode 100755 index 00000000..f2c3d8dd --- /dev/null +++ b/src/build_rock/assemble_rock/assemble.sh @@ -0,0 +1,54 @@ +#! /bin/bash + +set -e + + +function usage(){ + echo + echo "$(basename "$0") -d -n " + echo + echo "Merge multiple OCI rock images into one multi arch image." + echo + echo -e "-d \\t Directory to search for rock OCI images in." + echo -e "-n \\t Final output archive name. " +} + +while getopts "d:n:" opt +do + case $opt in + d) + ROCK_DIR="$OPTARG" + ;; + n) + ARCHIVE_NAME="$OPTARG" + ;; + ?) + usage + exit 1 + ;; + esac +done + +if [ -z "$ROCK_DIR" ] +then + echo "Error: Missing rock search directory argument (-d)" + usage + exit 1 +fi + +if [ -z "$ARCHIVE_NAME" ] +then + echo "Error: Missing final archive name (-n)" + usage + exit 1 +fi + +buildah manifest create multi-arch-rock + +for rock in `find "$ROCK_DIR" -name "*.rock" -type f` +do + buildah manifest add multi-arch-rock oci-archive:$rock +done + +buildah manifest push --all multi-arch-rock "oci-archive:$ARCHIVE_NAME" + diff --git a/src/build_rock/assemble_rock/requirements.sh b/src/build_rock/assemble_rock/requirements.sh new file mode 100755 index 00000000..1ff6d0d0 --- /dev/null +++ b/src/build_rock/assemble_rock/requirements.sh @@ -0,0 +1,6 @@ +#! 
/bin/bash + +set -e + +sudo apt update +sudo apt install buildah -y diff --git a/src/build_rock/configure/generate_build_matrix.py b/src/build_rock/configure/generate_build_matrix.py new file mode 100755 index 00000000..8c1dc9cf --- /dev/null +++ b/src/build_rock/configure/generate_build_matrix.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 + +import yaml +import os +import argparse +import json +from enum import Enum +from ...shared.github_output import GithubOutput +from pydantic import TypeAdapter + + +class MATRIX_NAMES(Enum): + RUNNER = "runner-build-matrix" + LPCI = "lpci-build-matrix" + + +class MissingArchSupport(Exception): + pass + + +def get_target_archs(rockcraft: dict) -> list: + """get list of target architectures from rockcraft project definition""" + + rock_platforms = rockcraft["platforms"] + + target_archs = set() + + for platf, values in rock_platforms.items(): + + if isinstance(values, dict) and "build-for" in values: + if isinstance(arches := values["build-for"], list): + target_archs.update(arches) + elif isinstance(values, str): + target_archs.add(arches) + else: + target_archs.add(platf) + + return target_archs + + +def configure_matrices(target_archs: list, arch_map: dict, lp_fallback: bool) -> dict: + """Sort build into appropriate build matrices""" + + # map configuration to individual job matrices + build_matrices = {name.value: {"include": []} for name in MATRIX_NAMES} + + # Check if we have runners for all supported architectures + if missing_archs := set(target_archs) - set(arch_map): + + # raise exception if we cannot fallback to LP builds + if not lp_fallback: + raise MissingArchSupport( + f"Missing support for runner arches: {missing_archs}" + ) + + # configure LP build + build_matrices[MATRIX_NAMES.LPCI.value]["include"].append( + {"architecture": "-".join(set(target_archs))} + ) + + else: + # configure runner matrix for list of supported runners + for runner_arch, runner_name in arch_map.items(): + if runner_arch in target_archs: + 
build_matrices[MATRIX_NAMES.RUNNER.value]["include"].append( + {"architecture": runner_arch, "runner": runner_name} + ) + + return build_matrices + + +def set_build_config_outputs(rock_name: str, build_matrices: dict): + """Update GITHUB_OUTPUT with build configuration.""" + + outputs = {"rock-name": rock_name, **build_matrices} + + with GithubOutput() as github_output: + github_output.write(**outputs) + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument( + "--rockfile-directory", + help="Path where to directory containing rockcraft.yaml.", + required=True, + ) + + parser.add_argument( + "--lpci-fallback", + help="Revert to lpci if architectures are not supported. ", + required=True, + type=TypeAdapter(bool).validate_python, + ) + + parser.add_argument( + "--config", + help="JSON mapping arch to runner for matrix generation.", + required=True, + ) + + args = parser.parse_args() + + # get configuration form rockcraft yaml + with open(f"{args.rockfile_directory}/rockcraft.yaml") as rf: + rockcraft_yaml = yaml.safe_load(rf) + + # load config + arch_map = json.loads(args.config) + + target_archs = get_target_archs(rockcraft_yaml) + build_matrices = configure_matrices(target_archs, arch_map, args.lpci_fallback) + + # set github outputs for use in later steps + set_build_config_outputs(rockcraft_yaml["name"], build_matrices) + + +if __name__ == "__main__": + main() diff --git a/src/build_rock/configure/requirements.txt b/src/build_rock/configure/requirements.txt new file mode 100644 index 00000000..d04c0606 --- /dev/null +++ b/src/build_rock/configure/requirements.txt @@ -0,0 +1,2 @@ +pyyaml==6.0.2 +pydantic==2.8.2 diff --git a/src/build_rock/lpci_build/lpci_build.sh b/src/build_rock/lpci_build/lpci_build.sh new file mode 100755 index 00000000..f1900694 --- /dev/null +++ b/src/build_rock/lpci_build/lpci_build.sh @@ -0,0 +1,58 @@ +#! 
/bin/bash + + +set -e + + +function usage(){ + echo + echo "$(basename "$0") -d -c " + echo + echo "Build local rockcraft project on Launchpad." + echo + echo -e "-d \\t Directory to rockcraft project file. " + echo -e "-c \\t Launchpad credentials. " +} + +while getopts "c:d:" opt +do + case $opt in + d) + ROCKCRAFT_DIR="$OPTARG" + ;; + c) + LP_CREDENTIALS_B64="$OPTARG" + ;; + ?) + usage + exit 1 + ;; + esac +done + +if [ -z "$ROCKCRAFT_DIR" ] +then + echo "Error: Missing rockcraft project directory argument (-d)" + usage + exit 1 +fi + +if [ -z "$LP_CREDENTIALS_B64" ] +then + echo "Error: Missing launchpad credentials argument (-c)" + usage + exit 1 +fi + + +cd "$ROCKCRAFT_DIR" +rocks_toolbox="$(mktemp -d)" + +# install dependencies +git clone --depth 1 --branch v1.1.2 https://github.com/canonical/rocks-toolbox $rocks_toolbox +${rocks_toolbox}/rockcraft_lpci_build/requirements.sh +pip3 install -r ${rocks_toolbox}/rockcraft_lpci_build/requirements.txt + +python3 ${rocks_toolbox}/rockcraft_lpci_build/rockcraft_lpci_build.py \ + --lp-credentials-b64 "$LP_CREDENTIALS_B64" \ + --launchpad-accept-public-upload diff --git a/src/docs/schema/triggers.py b/src/docs/schema/triggers.py index d9de4900..7cae1b3f 100755 --- a/src/docs/schema/triggers.py +++ b/src/docs/schema/triggers.py @@ -2,6 +2,7 @@ this module is the pydantic version of the documentation.yaml schema. 
""" + from typing import Optional from pydantic import BaseModel, Extra, constr, conlist diff --git a/src/shared/github_output.py b/src/shared/github_output.py new file mode 100755 index 00000000..ab9fe70f --- /dev/null +++ b/src/shared/github_output.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 + +import json +from os import environ + +"""This module provides support for writing Github Outputs.""" + +# locate +GITHUB_OUTPUT = environ.get("GITHUB_OUTPUT", None) + + +class GithubOutput: + + def __init__(self): + + self.output_path = environ["GITHUB_OUTPUT"] + + def __enter__(self): + + self.file_handler = open(self.output_path, "a") + + return self + + def __exit__(self, exc_type, exc_value, traceback): + + self.file_handler.close() + del self.file_handler + + def write(self, **kwargs): + """Format kwargs for Github Outputs and write to `output` File Object""" + + if not getattr(self, "file_handler", None): + raise AttributeError( + "file_handler not available. Please use in context block." 
+ ) + + for key, value in kwargs.items(): + + formatted_value = self.format_value(value) + print(f"{key}={formatted_value}", file=self.file_handler) + + @staticmethod + def format_value(value): + """Format `value` such that it can be stored as a github output""" + + if isinstance(value, str): + # str is an exception to casting with json.dumps as we do + # not need to represent the string itself, but just the data + return value + else: + json_value = json.dumps(value) + return json_value diff --git a/src/shared/release_info.py b/src/shared/release_info.py old mode 100755 new mode 100644 index b879d68d..a835b712 --- a/src/shared/release_info.py +++ b/src/shared/release_info.py @@ -6,7 +6,7 @@ """ import json -from src.image.utils.schema.triggers import KNOWN_RISKS_ORDERED +from ..image.utils.schema.triggers import KNOWN_RISKS_ORDERED class BadChannel(Exception): diff --git a/tests/data/rockcraft.yaml b/tests/data/rockcraft.yaml new file mode 100644 index 00000000..a26aeabe --- /dev/null +++ b/tests/data/rockcraft.yaml @@ -0,0 +1,18 @@ +# Metadata section + +name: hello +summary: Hello World +description: The most basic example of a rock. 
+version: "latest" +license: Apache-2.0 + +base: bare +build-base: ubuntu@22.04 +platforms: + amd64: + +parts: + hello: + plugin: nil + stage-packages: + - hello diff --git a/tests/etc/requirements.txt b/tests/etc/requirements.txt new file mode 100644 index 00000000..9ace6f53 --- /dev/null +++ b/tests/etc/requirements.txt @@ -0,0 +1,2 @@ +pytest==8.3.2 +-r ../../src/build_rock/configure/requirements.txt diff --git a/tests/fixtures/buffers.py b/tests/fixtures/buffers.py new file mode 100644 index 00000000..30993191 --- /dev/null +++ b/tests/fixtures/buffers.py @@ -0,0 +1,22 @@ +import pytest +from io import StringIO +import os +from pathlib import Path + + +@pytest.fixture +def str_buff(): + """String IO fixture for simulating a file object""" + with StringIO() as buffer: + yield buffer + + +@pytest.fixture +def github_output(monkeypatch, tmp_path): + + env_path = tmp_path / "env" + env_path.touch() + + monkeypatch.setitem(os.environ, "GITHUB_OUTPUT", str(env_path)) + + yield env_path diff --git a/tests/fixtures/sample_data.py b/tests/fixtures/sample_data.py new file mode 100644 index 00000000..c3df9af1 --- /dev/null +++ b/tests/fixtures/sample_data.py @@ -0,0 +1,26 @@ +import pytest +import xml.etree.ElementTree as ET +import yaml +from .. 
import DATA_DIR + + +@pytest.fixture +def junit_with_failure(): + """Load ET of junit xml report with failure.""" + sample_file = DATA_DIR / "junit_xml_failure.xml" + + tree = ET.parse(sample_file) + root = tree.getroot() + return root + + +@pytest.fixture +def rockcraft_project(): + """Get sample rockcraft project file for testing.""" + + sample = DATA_DIR / "rockcraft.yaml" + + with open(sample) as rf: + project = yaml.safe_load(rf) + + return project diff --git a/tests/integration/test_convert_junit_xml_to_markdown.py b/tests/integration/test_convert_junit_xml_to_markdown.py new file mode 100644 index 00000000..9305985c --- /dev/null +++ b/tests/integration/test_convert_junit_xml_to_markdown.py @@ -0,0 +1,18 @@ +from ..fixtures.buffers import str_buff +from ..fixtures.sample_data import junit_with_failure +import tools.junit_to_markdown.convert as report + + +def test_print_redirection(junit_with_failure, str_buff, capsys): + """Ensure that the report is entirely redirected when needed""" + + report.print_junit_report(junit_with_failure, str_buff) + report.print_junit_report(junit_with_failure, None) # print report to stdout + + str_buff.seek(0) + str_buff_content = str_buff.read() + + captured = capsys.readouterr() + stdout_content = captured.out + + assert stdout_content == str_buff_content, "Printing to multiple locations." 
diff --git a/tests/integration/test_junit_to_markdown_output.py b/tests/integration/test_junit_to_markdown_output.py new file mode 100644 index 00000000..9305985c --- /dev/null +++ b/tests/integration/test_junit_to_markdown_output.py @@ -0,0 +1,18 @@ +from ..fixtures.buffers import str_buff +from ..fixtures.sample_data import junit_with_failure +import tools.junit_to_markdown.convert as report + + +def test_print_redirection(junit_with_failure, str_buff, capsys): + """Ensure that the report is entirely redirected when needed""" + + report.print_junit_report(junit_with_failure, str_buff) + report.print_junit_report(junit_with_failure, None) # print report to stdout + + str_buff.seek(0) + str_buff_content = str_buff.read() + + captured = capsys.readouterr() + stdout_content = captured.out + + assert stdout_content == str_buff_content, "Printing to multiple locations." diff --git a/tests/unit/test_generate_build_matrix.py b/tests/unit/test_generate_build_matrix.py new file mode 100644 index 00000000..7fca208d --- /dev/null +++ b/tests/unit/test_generate_build_matrix.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 + +from src.build_rock.configure.generate_build_matrix import ( + get_target_archs, + configure_matrices, + MissingArchSupport, + set_build_config_outputs, +) +import pytest +from ..fixtures.buffers import github_output +from ..fixtures.sample_data import rockcraft_project + + +def test_get_target_archs(rockcraft_project): + """Test extraction of target architectures from rockcraft project configuration""" + + rockcraft_project["platforms"] = { + "amd64": None, + "armhf": {"build-for": ["armhf", "arm64"]}, + "ibm": {"build-on": ["s390x"], "build-for": "s390x"}, + } + + arches = get_target_archs(rockcraft_project) + assert arches == {"arm64", "armhf", "amd64"} + + +def test_configure_matrices(): + """Test correct configuration of build matrices from project's target arches""" + + build_matrices = configure_matrices(["amd64"], {"amd64": "ubuntu-22.04"}, False) + 
expected_result = { + "runner-build-matrix": { + "include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}] + }, + "lpci-build-matrix": {"include": []}, + } + + assert build_matrices == expected_result + + +def test_configure_matrices_fallback_exception(): + """Test proper exception is raised when target arch is not buildable""" + with pytest.raises(MissingArchSupport): + configure_matrices(["arm64"], {"amd64": "ubuntu-22.04"}, False) + + +def test_configure_matrices_lpci_fallback(): + """Test lpci fallback logic when target cannot be built on a runner""" + build_matrices = configure_matrices(["arm64"], {"amd64": "ubuntu-22.04"}, True) + expected_result = { + "runner-build-matrix": {"include": []}, + "lpci-build-matrix": {"include": [{"architecture": "arm64"}]}, + } + + assert build_matrices == expected_result + + +def test_set_build_config_outputs(github_output): + """Test correct generation of build matrices.""" + + test_build_matrices = { + "runner-build-matrix": { + "include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}] + }, + "lpci-build-matrix": {"include": []}, + } + + set_build_config_outputs("test", test_build_matrices) + + with open(github_output, "r") as fh: + gh_output = fh.read() + + expected_result = """rock-name=test +runner-build-matrix={"include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}]} +lpci-build-matrix={"include": []} +""" + + assert gh_output == expected_result diff --git a/tests/unit/test_github_output.py b/tests/unit/test_github_output.py new file mode 100755 index 00000000..5cf11445 --- /dev/null +++ b/tests/unit/test_github_output.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +from src.shared.github_output import GithubOutput +from ..fixtures.buffers import github_output + + +def test_write(github_output): + """Test github_output write function""" + + outputs = { + "hello-world": 42, + } + expected_result = "hello-world=42\n" + + with GithubOutput() as output: + + output.write(**outputs) + + with 
open(github_output, "r") as fh: + result = fh.read() + + assert result == expected_result + + +def test_format_value_string(): + """Test formatting of string for outputs""" + + expected_result = "foo" + result = GithubOutput.format_value("foo") + + assert expected_result == result + + +def test_format_value_number(): + """Test formatting of number for outputs""" + + expected_result = "1" + result = GithubOutput.format_value(1) + + assert expected_result == result + + +def test_format_value_json(): + """Test formatting of JSON for outputs""" + + expected_result = '{"foo": "bar"}' + result = GithubOutput.format_value({"foo": "bar"}) + + assert expected_result == result