diff --git a/.github/actions/checkout/action.yaml b/.github/actions/checkout/action.yaml new file mode 100644 index 000000000..f2f0dcf1c --- /dev/null +++ b/.github/actions/checkout/action.yaml @@ -0,0 +1,52 @@ +name: Git Checkout +description: 'Checkout action supporting both github and non github repositories.' + + +inputs: + repository: + description: 'Github repository in the format owner/repo or external http(s) URL' + required: true + ref: + description: 'The branch, tag or SHA to checkout' + default: '' + path: + description: 'Relative path under $GITHUB_WORKSPACE to place the repository' + default: '.' + submodules: + description: 'Whether to checkout submodules. true|false|recursive according to actions/checkout@v4' + default: 'false' + github-server-url: + description: 'The base URL for the GitHub instance that you are trying to clone from' + default: 'https://github.com' + +runs: + using: "composite" + steps: + - name: Checkout + shell: bash + run: | + + # If URL lacks the protocol, assume it is a github repo + if [[ "${{ inputs.repository }}" =~ https?:// ]] + then + git_url="${{ inputs.repository }}" + else + git_url="${{ inputs.github-server-url }}/${{ inputs.repository }}.git" + fi + + # create repo path relative to GITHUB_WORKSPACE as per actions/checkout@v4 + repo_path="$GITHUB_WORKSPACE/${{ inputs.path }}" + + # clone the repo and cd into it + git clone $git_url "$repo_path" + cd "$repo_path" + + # checkout the correct ref + git config advice.detachedHead false + git checkout ${{ inputs.ref }} + + # and update sub modules if required + if ${{ inputs.submodules == 'true' || inputs.submodules == 'recursive' }} + then + git submodule update ${{ inputs.submodules == 'recursive' && '--recursive' || '' }} + fi diff --git a/.github/base_digests/20.04 b/.github/base_digests/20.04 index 05843afe6..b5d44b69c 100644 --- a/.github/base_digests/20.04 +++ b/.github/base_digests/20.04 @@ -1 +1 @@ 
-public.ecr.aws/ubuntu/ubuntu:focal@sha256:036c51fb190af9d3ec7fbd654c2669d5a3da3549aa0e3408b40ba6fb6eb16dfc +public.ecr.aws/ubuntu/ubuntu:focal@sha256:4a885c102bc7de9ff2ffc4b11b65f35e46827d608069cd959181718aa7d14731 diff --git a/.github/workflows/Build-Rock.yaml b/.github/workflows/Build-Rock.yaml index eda9e812f..31dc1ef85 100644 --- a/.github/workflows/Build-Rock.yaml +++ b/.github/workflows/Build-Rock.yaml @@ -3,222 +3,231 @@ name: Build rock on: workflow_call: inputs: + # build parameters oci-archive-name: - description: "Final filename of the rock's OCI archive" + description: "Final filename of the rock OCI archive." type: string required: true - oci-factory-path: - description: "Path, in the OCI Factory, to this rock" + build-id: + description: "Optional string for identifying workflow jobs in GitHub UI" type: string - required: true - rock-name: - description: "Name of the rock" - type: string - required: true + + # source parameters rock-repo: - description: "Public Git repo where to build the rock from" + description: "Public Git repo where to build the rock from." type: string required: true rock-repo-commit: - description: "Git ref from where to build the rock from" + description: "Git ref from where to build the rock from." type: string required: true rockfile-directory: - description: "Directory, in 'rock-repo', where to find the rockcraft.yaml file" + description: "Directory in repository where to find the rockcraft.yaml file." type: string required: true + # parameters for multi-arch builds + arch-map: + description: "JSON string mapping target architecture to runners." + type: string + default: '{"amd64": ["linux", "X64"], "arm64": ["linux", "ARM64"]}' + lpci-fallback: + description: "Enable fallback to Launchpad build when runners for target arch are not available." 
+ type: boolean + default: false + env: - ROCKS_CI_FOLDER: ci-rocks + ROCK_REPO_DIR: rock-repo # path where the image repo is cloned into + ROCK_CI_FOLDER: ci-rocks # path of uploaded/downloaded artifacts jobs: - prepare-multi-arch-matrix: + configure-build: + # configure-build reads the rockcraft.yaml, creating one or more *-build job runs + # depending on the target architecture. runs-on: ubuntu-22.04 outputs: - build-for: ${{ steps.rock-platforms.outputs.build-for }} - build-with-lpci: ${{ steps.rock-platforms.outputs.build-with-lpci }} + runner-build-matrix: ${{ steps.configure.outputs.runner-build-matrix }} + lpci-build-matrix: ${{ steps.configure.outputs.lpci-build-matrix }} + oci-factory-ref: ${{ steps.workflow-version.outputs.sha }} + name: "configure-build ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - # Checkout the OCI Factory repo for the actor validation - - uses: actions/checkout@v4 - if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} - - name: Validate access to triggered image - uses: ./.github/actions/validate-actor - if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} + - name: Get Workflow Version + # Note: we may need to pass a github token when working with private repositories. 
+ # https://github.com/canonical/get-workflow-version-action + id: workflow-version + uses: canonical/get-workflow-version-action@v1 with: - admin-only: true - image-path: ${{ inputs.oci-factory-path }} - github-token: ${{ secrets.ROCKSBOT_TOKEN }} + repository-name: canonical/oci-factory + file-name: Build-Rock.yaml - # We clear the working directory to make space for the rock repo - - run: rm -rf ./* - if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} - - - name: Clone GitHub image repository + - name: Cloning OCI Factory uses: actions/checkout@v4 - id: clone-image-repo - continue-on-error: true + with: + repository: canonical/oci-factory + ref: ${{ steps.workflow-version.outputs.sha }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout with: repository: ${{ inputs.rock-repo }} - fetch-depth: 0 - - name: Clone generic image repository - if: ${{ steps.clone-image-repo.outcome == 'failure' }} - run: | - git clone ${{ inputs.rock-repo }} . + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Installing Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" - - run: git checkout ${{ inputs.rock-repo-commit }} + - name: Installing Python requirements + run: pip install -r src/build_rock/configure/requirements.txt - - run: sudo snap install yq --channel=v4/stable - - name: Validate image naming and base - working-directory: ${{ inputs.rockfile-directory }} + # Configure matrices for each *-build job + - name: Configuring Jobs + id: configure run: | - rock_name=`cat rockcraft.y*ml | yq -r .name` - if [[ "${{ inputs.oci-factory-path }}" != *"${rock_name}"* ]] - then - echo "ERROR: the rock's name '${rock_name}' must match the OCI folder name!" 
- exit 1 - fi - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - run: pip install pyyaml - - name: Get rock archs - uses: jannekem/run-python-script-action@v1 - id: rock-platforms - with: - script: | - import yaml - import os - - BUILD_WITH_LPCI = 0 - - with open("${{ inputs.rockfile-directory }}/rockcraft.yaml") as rf: - rockcraft_yaml = yaml.safe_load(rf) - - platforms = rockcraft_yaml["platforms"] - - target_archs = [] - for platf, values in platforms.items(): - if isinstance(values, dict) and "build-for" in values: - target_archs += list(values["build-for"]) - continue - target_archs.append(platf) - - print(f"Target architectures: {set(target_archs)}") - - matrix = {"include": []} - gh_supported_archs = {"amd64": "ubuntu-22.04", "arm64": "Ubuntu_ARM64_4C_16G_01"} - if set(target_archs) - set(gh_supported_archs.keys()): - # Then there are other target archs, so we need to build in LP - matrix["include"].append( - {"architecture": "-".join(set(target_archs)), "runner": gh_supported_archs["amd64"]} - ) - BUILD_WITH_LPCI = 1 - else: - for runner_arch, runner_name in gh_supported_archs.items(): - if runner_arch in target_archs: - matrix["include"].append( - {"architecture": runner_arch, "runner": runner_name} - ) - - with open(os.environ["GITHUB_OUTPUT"], "a") as gh_out: - print(f"build-for={matrix}", file=gh_out) - print(f"build-with-lpci={BUILD_WITH_LPCI}", file=gh_out) - - build: - needs: [prepare-multi-arch-matrix] + python3 -m src.build_rock.configure.generate_build_matrix \ + --rockfile-directory "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" \ + --lpci-fallback "${{ toJSON(inputs.lpci-fallback) }}" \ + --config ${{ toJSON(inputs.arch-map) }} # important: do not use quotes here + + runner-build: + # runner-build builds rocks per target architecture using pre configured runner images. 
+ needs: [configure-build] + if: fromJSON(needs.configure-build.outputs.runner-build-matrix).include[0] != '' strategy: fail-fast: true - matrix: ${{ fromJSON(needs.prepare-multi-arch-matrix.outputs.build-for) }} + matrix: ${{ fromJSON(needs.configure-build.outputs.runner-build-matrix) }} runs-on: ${{ matrix.runner }} - name: 'Build ${{ inputs.rock-name }} | ${{ matrix.architecture }}' + name: "runner-build | ${{ matrix.architecture }} ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - - name: Clone GitHub image repository + + - name: Cloning OCI Factory uses: actions/checkout@v4 - id: clone-image-repo - continue-on-error: true + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout with: repository: ${{ inputs.rock-repo }} - fetch-depth: 0 - - name: Clone generic image repository - if: ${{ steps.clone-image-repo.outcome == 'failure' }} - run: | - git clone ${{ inputs.rock-repo }} . 
- - run: git checkout ${{ inputs.rock-repo-commit }} - - name: Build rock ${{ inputs.rock-name }} + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Building Target id: rockcraft - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 0 uses: canonical/craft-actions/rockcraft-pack@main with: - path: "${{ inputs.rockfile-directory }}" + path: "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" verbosity: debug - - uses: actions/setup-python@v5 - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 1 + + - name: Collecting Artifacts + id: collect-artifacts + run: | + mkdir -p ${{ env.ROCK_CI_FOLDER }} && cp ${{ steps.rockcraft.outputs.rock }} "$_" + echo "filename=$(basename ${{ steps.rockcraft.outputs.rock }})" >> $GITHUB_OUTPUT + + - name: Uploading Artifacts + uses: actions/upload-artifact@v4 with: - python-version: '3.x' - - uses: nick-fields/retry@v3.0.0 - name: Build multi-arch ${{ inputs.rock-name }} in Launchpad - if: needs.prepare-multi-arch-matrix.outputs.build-with-lpci == 1 + name: ${{ inputs.oci-archive-name }}-${{ steps.collect-artifacts.outputs.filename }} + path: ${{ env.ROCK_CI_FOLDER }} + if-no-files-found: error + + lpci-build: + # lpci-build is a fallback for building rocks if no suitable runners are + # configured for the required architecture. Builds in this job will be + # outsourced to Launchpad for completion. 
+ # Note the Secret + needs: [configure-build] + if: fromJSON(needs.configure-build.outputs.lpci-build-matrix).include[0] != '' + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.configure-build.outputs.lpci-build-matrix) }} + runs-on: ubuntu-22.04 + name: "lpci-build | ${{ matrix.architecture }} ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" + steps: + + - name: Cloning OCI Factory + uses: actions/checkout@v4 + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - name: Cloning Target Repo + uses: ./.github/actions/checkout + with: + repository: ${{ inputs.rock-repo }} + path: ${{ env.ROCK_REPO_DIR }} + ref: ${{ inputs.rock-repo-commit }} + submodules: "recursive" + + - name: Building Target + # TODO: Replace this retry action with bash equivalent for better testing + uses: nick-fields/retry@v3.0.0 with: timeout_minutes: 180 max_attempts: 4 polling_interval_seconds: 5 retry_wait_seconds: 30 command: | - set -ex - cd ${{ inputs.rockfile-directory }} - rocks_toolbox="$(mktemp -d)" - git clone --depth 1 --branch v1.1.2 https://github.com/canonical/rocks-toolbox $rocks_toolbox - ${rocks_toolbox}/rockcraft_lpci_build/requirements.sh - pip3 install -r ${rocks_toolbox}/rockcraft_lpci_build/requirements.txt - - python3 ${rocks_toolbox}/rockcraft_lpci_build/rockcraft_lpci_build.py \ - --lp-credentials-b64 "${{ secrets.LP_CREDENTIALS_B64 }}" \ - --launchpad-accept-public-upload - - name: Rename rock OCI archive - id: rock + src/build_rock/lpci_build/lpci_build.sh \ + -c "${{ secrets.LP_CREDENTIALS_B64 }}" \ + -d "${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}" + + - name: Collecting Artifacts + id: collect-artifacts run: | - mkdir ${{ env.ROCKS_CI_FOLDER }} - if [ ${{ needs.prepare-multi-arch-matrix.outputs.build-with-lpci }} -eq 0 ] - then - cp ${{ steps.rockcraft.outputs.rock }} ${{ env.ROCKS_CI_FOLDER }}/$(basename ${{ steps.rockcraft.outputs.rock }}) - echo 
"filename=$(basename ${{ steps.rockcraft.outputs.rock }})" >> $GITHUB_OUTPUT - else - cp ${{ inputs.rockfile-directory }}/*.rock ${{ env.ROCKS_CI_FOLDER }} - echo "filename=${{ inputs.rock-name }}_${{ matrix.architecture }}" >> $GITHUB_OUTPUT - fi - - name: Upload ${{ inputs.rock-name }} for ${{ matrix.architecture }} + mkdir -p ${{ env.ROCK_CI_FOLDER }} && cp ${{ env.ROCK_REPO_DIR }}/${{ inputs.rockfile-directory }}/*.rock "$_" + echo "filename=${{ matrix.rock-name }}_${{ matrix.architecture }}" >> $GITHUB_OUTPUT + + - name: Uploading Artifacts uses: actions/upload-artifact@v4 with: - name: ${{ inputs.oci-archive-name }}-${{ steps.rock.outputs.filename }} - path: ${{ env.ROCKS_CI_FOLDER }} + name: ${{ inputs.oci-archive-name }}-${{ steps.collect-artifacts.outputs.filename }} + path: ${{ env.ROCK_CI_FOLDER }} if-no-files-found: error assemble-rock: - needs: [prepare-multi-arch-matrix, build] + # Assemble individual single-arch rocks into multi-arch rocks + needs: [runner-build, lpci-build, configure-build] runs-on: ubuntu-22.04 + # Always run even if one of the *-build jobs are skipped + # Nice example from benjamin-bergia/github-workflow-patterns... 
+ if: ${{ always() && contains(needs.*.result, 'success') && !(contains(needs.*.result, 'failure')) }} + name: "assemble-rock ${{ inputs.build-id != '' && format('| {0}', inputs.build-id) || ' '}}" steps: - - uses: actions/download-artifact@v4 + # Job Setup + - name: Cloning OCI Factory + uses: actions/checkout@v4 + with: + repository: canonical/oci-factory + ref: ${{ needs.configure-build.outputs.oci-factory-ref }} + fetch-depth: 1 + + - run: src/build_rock/assemble_rock/requirements.sh + - name: Downloading Single Arch rocks + uses: actions/download-artifact@v4 id: download - - run: sudo apt update && sudo apt install buildah -y - - name: Merge single-arch rocks into multi-arch OCI archive + with: + path: ${{ env.ROCK_CI_FOLDER }} + pattern: ${{ inputs.oci-archive-name }}-* + + - name: Assembling Multi Arch rock run: | - set -xe - ls ./${{ inputs.oci-archive-name }}* - buildah manifest create multi-arch-rock - for rock in `find ${{ inputs.oci-archive-name }}*/*.rock` - do - test -f $rock - buildah manifest add multi-arch-rock oci-archive:$rock - done - buildah manifest push --all multi-arch-rock oci-archive:${{ inputs.oci-archive-name }} - - name: Upload multi-arch ${{ inputs.oci-archive-name }} OCI archive + src/build_rock/assemble_rock/assemble.sh \ + -n "${{ inputs.oci-archive-name }}" \ + -d "${{ env.ROCK_CI_FOLDER }}" + + - name: Uploading Multi Arch rock uses: actions/upload-artifact@v4 with: name: ${{ inputs.oci-archive-name }} path: ${{ inputs.oci-archive-name }} if-no-files-found: error - - uses: actions/cache/save@v4 - with: - path: ${{ inputs.oci-archive-name }} - key: ${{ github.run_id }}-${{ inputs.oci-archive-name }} diff --git a/.github/workflows/Image.yaml b/.github/workflows/Image.yaml index ccec363b2..0ba4c7c39 100644 --- a/.github/workflows/Image.yaml +++ b/.github/workflows/Image.yaml @@ -1,5 +1,5 @@ name: Image -run-name: 'Image - ${{ inputs.oci-image-name || github.triggering_actor }} - ${{ github.ref }}' +run-name: "Image - ${{ 
inputs.oci-image-name || github.triggering_actor }} - ${{ github.ref }}" on: push: @@ -25,7 +25,7 @@ on: type: boolean default: false external_ref_id: # (1) - description: 'Optional ID for unique run detection' + description: "Optional ID for unique run detection" required: false type: string default: "default-id" @@ -123,23 +123,75 @@ jobs: --oci-path ${{ steps.validate-image.outputs.img-path }} \ --revision-data-dir ${{ env.DATA_DIR }} - run-build: + validate-matrix: + # validate matrix prepared in previous job before running Build-Rock workflow. + runs-on: ubuntu-22.04 needs: [prepare-build] + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} + steps: + + - name: Clone GitHub image repository + uses: actions/checkout@v4 + with: + repository: ${{ matrix.source }} + ref: ${{ matrix.commit }} + submodules: "recursive" + fetch-depth: 1 + + - name: Installing yq + run: sudo snap install yq --channel=v4/stable + + - name: Validate image naming and base + run: | + rock_name=`cat "${{ matrix.directory }}"/rockcraft.y*ml | yq -r .name` + folder_name="${{ matrix.path }}" + if [[ "${folder_name}" != *"${rock_name}"* ]] + then + echo "ERROR: the OCI folder name '${folder_name}', must contain the rock's name '${rock_name}'." + exit 1 + fi + + run-build: + needs: [prepare-build, validate-matrix] strategy: fail-fast: true matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} uses: ./.github/workflows/Build-Rock.yaml with: oci-archive-name: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} - oci-factory-path: ${{ matrix.path }} - rock-name: ${{ matrix.name }} + build-id: ${{ matrix.name }} rock-repo: ${{ matrix.source }} rock-repo-commit: ${{ matrix.commit }} rockfile-directory: ${{ matrix.directory }} + lpci-fallback: true secrets: inherit - test: + tmp-cache-job: + # TODO: This is a temporary job that will be removed when the refactored test job is merged. 
+ # Going forward we download the built rocks from artifacts instead of cache. This job takes + # the uploaded rocks then re-caches them for compatibility. + name: Temporary step to cache rocks + runs-on: ubuntu-22.04 needs: [prepare-build, run-build] + strategy: + fail-fast: true + matrix: ${{ fromJSON(needs.prepare-build.outputs.build-matrix) }} + steps: + - name: Download rock + uses: actions/download-artifact@v4 + with: + name: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + + - uses: actions/cache/save@v4 + with: + key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + path: ${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + + test: + needs: [prepare-build, run-build, tmp-cache-job] + # TODO: Remove tmp-cache-job when removing the job tmp-cache-job name: Test strategy: fail-fast: true @@ -149,10 +201,9 @@ jobs: oci-image-name: "${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }}" oci-image-path: "oci/${{ matrix.name }}" test-from: "cache" - cache-key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} + cache-key: ${{ github.run_id }}-${{ matrix.name }}_${{ matrix.commit }}_${{ matrix.dir_identifier }} secrets: inherit - prepare-upload: runs-on: ubuntu-22.04 needs: [prepare-build, run-build, test] @@ -575,15 +626,16 @@ jobs: notify: runs-on: ubuntu-22.04 name: Notify - needs: [prepare-build, run-build, upload, prepare-releases, generate-provenance] + needs: + [prepare-build, run-build, upload, prepare-releases, generate-provenance] if: ${{ !cancelled() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }} steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.x' - + python-version: "3.x" + - name: Summarize workflow failure message id: get-summary run: | @@ -605,7 +657,7 @@ jobs: URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ 
github.run_id }} SUMMARY: ${{ steps.get-summary.outputs.summary }} FOOTER: "Triggered by ${{ github.triggering_actor }}. Ref: ${{ github.ref }}. Attempts: ${{ github.run_attempt }}" - TITLE: '${{ needs.prepare-build.outputs.oci-img-name }}: failed to build->upload->release' + TITLE: "${{ needs.prepare-build.outputs.oci-img-name }}: failed to build->upload->release" run: | for channel in $(echo ${{ steps.get-contacts.outputs.mattermost-channels }} | tr ',' ' ') do diff --git a/.github/workflows/Tests.yaml b/.github/workflows/Tests.yaml index 15c205bd3..09be6e997 100644 --- a/.github/workflows/Tests.yaml +++ b/.github/workflows/Tests.yaml @@ -69,14 +69,14 @@ jobs: access-check: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - name: Validate access to triggered image - uses: ./.github/actions/validate-actor - if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} - with: - admin-only: true - image-path: ${{ inputs.oci-image-path }} - github-token: ${{ secrets.ROCKSBOT_TOKEN }} + - uses: actions/checkout@v4 + - name: Validate access to triggered image + uses: ./.github/actions/validate-actor + if: ${{ github.repository == 'canonical/oci-factory' && !github.event.pull_request.head.repo.fork }} + with: + admin-only: true + image-path: ${{ inputs.oci-image-path }} + github-token: ${{ secrets.ROCKSBOT_TOKEN }} fetch-oci-image: runs-on: ubuntu-22.04 diff --git a/.github/workflows/_Test-OCI-Factory.yaml b/.github/workflows/_Test-OCI-Factory.yaml index 25e1efb95..e11c18ebb 100644 --- a/.github/workflows/_Test-OCI-Factory.yaml +++ b/.github/workflows/_Test-OCI-Factory.yaml @@ -24,7 +24,6 @@ env: # path of pytest junit output PYTEST_RESULT_PATH: pytest_results.xml - jobs: access-check: name: Validate access to mock-rock @@ -42,7 +41,6 @@ jobs: name: pytest runs-on: ubuntu-22.04 steps: - # Job Setup - uses: actions/checkout@v4 with: @@ -51,11 +49,13 @@ jobs: - uses: actions/setup-python@v5 with: python-version: "3.x" - 
- # Note: Add additional dependency installation lines as required below + + # Note: Add additional dependency installation lines as required below # test-oci-factory/pytest requirements - run: pip install -r tests/etc/requirements.txt + # build_rock/configure requirements + - run: pip install -r src/build_rock/configure/requirements.txt - name: Run pytest continue-on-error: true @@ -80,7 +80,6 @@ jobs: name: bats runs-on: ubuntu-22.04 steps: - # Job Setup - uses: actions/checkout@v4 with: diff --git a/oci/hydra/.trivyignore b/oci/hydra/.trivyignore new file mode 100644 index 000000000..6a1ac181f --- /dev/null +++ b/oci/hydra/.trivyignore @@ -0,0 +1,8 @@ +# Upstream CVEs + +# github.com/jackc/pgproto3/v2 - pgproto3 SQL Injection via Protocol Message Size Overflow +GHSA-7jwh-3vrq-q3m8 +# github.com/jackc/pgx/v4 - pgx SQL Injection via Line Comment Creation +CVE-2024-27289 +# github.com/jackc/pgx/v4 - pgx SQL Injection via Protocol Message Size Overflow +CVE-2024-27304 \ No newline at end of file diff --git a/oci/hydra/_releases.json b/oci/hydra/_releases.json new file mode 100644 index 000000000..c6073e04c --- /dev/null +++ b/oci/hydra/_releases.json @@ -0,0 +1,17 @@ +{ + "2.2.0-22.04": { + "end-of-life": "2025-05-01T00:00:00Z", + "stable": { + "target": "1" + }, + "candidate": { + "target": "1" + }, + "beta": { + "target": "2.2.0-22.04_candidate" + }, + "edge": { + "target": "1" + } + } +} \ No newline at end of file diff --git a/oci/hydra/contacts.yaml b/oci/hydra/contacts.yaml new file mode 100644 index 000000000..2957e9a3a --- /dev/null +++ b/oci/hydra/contacts.yaml @@ -0,0 +1,5 @@ +notify: + emails: + - identity.charmers@lists.launchpad.net + mattermost-channels: + - ofi4for9obfq8m978h318x56ar \ No newline at end of file diff --git a/oci/hydra/documentation.yaml b/oci/hydra/documentation.yaml new file mode 100644 index 000000000..7f14835d9 --- /dev/null +++ b/oci/hydra/documentation.yaml @@ -0,0 +1,48 @@ +version: 1 +application: hydra +is_chiselled: True 
+description: | + Ory Hydra is a hardened, OpenID Certified OAuth 2.0 Server and OpenID Connect Provider + optimized for low-latency, high throughput, and low resource consumption. + Ory Hydra enables you to become an OAuth 2.0 and OpenID Connect provider. + If you're not writing a basic web app but something that has to work on different devices, + that has machine-2-machine interaction, or enables third-party developers to use your API + (and pay for it), then this is what you're looking for. +docker: + parameters: + - -p 4444:4444 + - -p 4445:4445 + access: Access your Hydra Public API at `http://localhost:4444`, Admin API at `http://localhost:4445`. +parameters: + - type: -e + value: 'TRACING_ENABLED=true' + description: Tracing enablement. + - type: -e + value: 'TRACING_PROVIDER=otel' + description: Tracing protocol to be used. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_INSECURE=true' + description: Allow Tracing via non TLS/insecure communication. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_SAMPLING_SAMPLING_RATIO=1.0' + description: Tracing sampling ratio. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_SERVER_URL=tempo.server.io:4318' + description: Tracing server url and port. + - type: -p + value: '4444:4444' + description: Hydra Public API port. + - type: -p + value: '4445:4445' + description: Hydra Admin API port. + - type: -v + value: "/path/to/hydra/config.yaml:/hydra.yaml" + description: > + Hydra config contains all the information needed to successfully configure it as an OIDC + Provider, see https://github.com/ory/hydra/blob/master/internal/config/config.yaml as a reference + - type: CMD + value: "hydra serve all --config /hydra.yaml" + description: > + Launch Hydra web server(s) using a mix of environment variables and the config mounted via volume. 
+debug: + text: "" diff --git a/oci/hydra/image.yaml b/oci/hydra/image.yaml new file mode 100644 index 000000000..2a2d27d20 --- /dev/null +++ b/oci/hydra/image.yaml @@ -0,0 +1,12 @@ +version: 1 +upload: + - source: "canonical/hydra-rock" + commit: 3c27fb428fad0a339c39355b2f8cd5a477d32014 + directory: . + release: + 2.2.0-22.04: + risks: + - stable + - candidate + - edge + end-of-life: "2025-05-01T00:00:00Z" \ No newline at end of file diff --git a/oci/identity-platform-admin-ui/_releases.json b/oci/identity-platform-admin-ui/_releases.json new file mode 100644 index 000000000..c360fad75 --- /dev/null +++ b/oci/identity-platform-admin-ui/_releases.json @@ -0,0 +1,17 @@ +{ + "1.19.0-22.04": { + "end-of-life": "2025-03-01T00:00:00Z", + "stable": { + "target": "1" + }, + "candidate": { + "target": "1" + }, + "beta": { + "target": "1.19.0-22.04_candidate" + }, + "edge": { + "target": "1" + } + } +} \ No newline at end of file diff --git a/oci/identity-platform-admin-ui/contacts.yaml b/oci/identity-platform-admin-ui/contacts.yaml new file mode 100644 index 000000000..8daeadeb8 --- /dev/null +++ b/oci/identity-platform-admin-ui/contacts.yaml @@ -0,0 +1,7 @@ +notify: + emails: + - identity.charmers@lists.launchpad.net + mattermost-channels: + - ofi4for9obfq8m978h318x56ar +maintainers: + - canonical-iam \ No newline at end of file diff --git a/oci/identity-platform-admin-ui/documentation.yaml b/oci/identity-platform-admin-ui/documentation.yaml new file mode 100644 index 000000000..b73b391fd --- /dev/null +++ b/oci/identity-platform-admin-ui/documentation.yaml @@ -0,0 +1,139 @@ +version: 1 +application: identity-platform-admin-ui +is_chiselled: True +description: | + Canonical IAM Admin UI is a component that allows you to interact with the components + that are part of the Identity Platform solution. 
+ + It provides a set of API to view,modify and delete resources on Ory Kratos, Ory Hydra + Ory Oathkeeper and OpenFGA + + For further information check our repository on Github https://github.com/canonical/identity-platform-admin-ui +docker: + parameters: + - -p 8080:8080 + access: Access the API at `http://localhost:8080`. +parameters: + - type: -e + value: 'TRACING_ENABLED=true' + description: Tracing enablement. + - type: -e + value: 'OTEL_GRPC_ENDPOINT=tempo-0.tempo-endpoints.stg-identity-jaas-dev.svc.cluster.local:4317' + description: Tracing server GRPC endpoint, has priority on OTEL_HTTP_ENDPOINT. + - type: -e + value: 'OTEL_HTTP_ENDPOINT=http://tempo-0.tempo-endpoints.stg-identity-jaas-dev.svc.cluster.local:4318' + description: Tracing server HTTP endpoint. + - type: -e + value: 'MFA_ENABLED="true"' + description: Enable MFA validation on logins. + - type: -e + value: 'HYDRA_ADMIN_URL=http://hydra.io:4445' + description: Hydra Admin API URL, used to manage clients + - type: -e + value: 'KRATOS_ADMIN_URL=http://kratos.io:4434' + description: Kratos Admin API URL, used to manage identities + - type: -e + value: 'KRATOS_PUBLIC_URL=http://kratos.io:4433' + description: Kratos Public API URL, used to manage identities + - type: -e + value: 'OATHKEEPER_PUBLIC_URL=http://oathkeeper.io:4455' + description: Oathkeeper Public API URL, used to manage rules + - type: -e + value: 'BASE_URL=https://iam.io/dev/path' + description: Public URL Login UI will be served from. + - type: -e + value: 'ACCESS_TOKEN_VERIFICATION_STRATEGY=jwks' + description: Strategy used to verify JWT tokens. + - type: -e + value: 'AUTHENTICATION_ENABLED="true"' + description: Authentication enable flag. + - type: -e + value: 'AUTHORIZATION_ENABLED="true"' + description: Authorization enable flag. + - type: -e + value: 'CONTEXT_PATH=/dev/path' + description: Path needed by the UI to work behind an ingress proxy. 
+ - type: -e + value: 'IDP_CONFIGMAP_NAME=providers' + description: Name of kubernetes configmap where Kratos IDP are configured. + - type: -e + value: 'IDP_CONFIGMAP_NAMESPACE=default' + description: Namespace of kubernetes configmap where Kratos IDP are configured. + - type: -e + value: 'RULES_CONFIGMAP_NAME=rules' + description: Name of kubernetes configmap where Oathkeeper rules are configured. + - type: -e + value: 'RULES_CONFIGMAP_NAMESPACE=default' + description: Namespace of kubernetes configmap where Oathkeeper rules are configured. + - type: -e + value: 'RULES_CONFIGMAP_FILENAME=rules.yaml' + description: Name of the file where Oathkeeper rules are configured. + - type: -e + value: 'SCHEMAS_CONFIGMAP_NAME=schemas' + description: Name of kubernetes configmap where Kratos identity schemas are configured. + - type: -e + value: 'SCHEMAS_CONFIGMAP_NAMESPACE=default' + description: Namespace of kubernetes configmap where Kratos identity schemas are configured. + - type: -e + value: 'MAIL_FROM_ADDRESS=iam@canonical.com' + description: Email sender + - type: -e + value: 'MAIL_HOST=smtp.io' + description: SMPT server host + - type: -e + value: 'MAIL_PASSWORD="***********************************"' + description: SMTP password + - type: -e + value: 'MAIL_PORT="1025"' + description: SMTP server port + - type: -e + value: 'MAIL_USERNAME="***********************************"' + description: SMTP password + - type: -e + value: 'OAUTH2_AUTH_COOKIES_ENCRYPTION_KEY="***********************************"' + description: Key used to encrypt authentication cookies + - type: -e + value: 'OAUTH2_CLIENT_ID=***********************************' + description: OAuth2 client ID, needed for OIDC authentication + - type: -e + value: 'OAUTH2_CLIENT_SECRET=***********************************' + description: OAuth2 client secret, needed for OIDC authentication + - type: -e + value: 'OAUTH2_CODEGRANT_SCOPES=openid,email,profile,offline_access' + description: OAuth2 scopes needed by the 
application, needed for OIDC authentication + - type: -e + value: 'OAUTH2_REDIRECT_URI=https://iam..io/dev/api/v0/auth/callback' + description: OAuth2 redirect uri where /api/v0/auth/callback is the endpoint used by the application, needed for OIDC authentication + - type: -e + value: 'OIDC_ISSUER=https://iam.dev.canonical.com/stg-identity-jaas-dev-hydra' + description: OAuth2 server issuer + - type: -e + value: 'OPENFGA_API_HOST=openfga:8443' + description: OpenFGA server address + - type: -e + value: 'OPENFGA_API_SCHEME=http' + description: OpenFGA server scheme + - type: -e + value: 'OPENFGA_API_TOKEN=***********************************' + description: OpenFGA server API token, needed for authentication to the server + - type: -e + value: 'OPENFGA_AUTHORIZATION_MODEL_ID=***********************************' + description: OpenFGA model ID + - type: -e + value: 'OPENFGA_STORE_ID=***********************************' + description: OpenFGA store ID + - type: -e + value: 'LOG_FILE=log.txt' + description: Destination file for logs. + - type: -e + value: 'LOG_LEVEL=error' + description: Log level. + - type: -p + value: '8080:8080' + description: Server API port. + - type: CMD + value: '/usr/bin/identity-platform-admin-ui serve' + description: > + Launch Admin UI web server(s) using environment variables. +debug: + text: "" \ No newline at end of file diff --git a/oci/identity-platform-admin-ui/image.yaml b/oci/identity-platform-admin-ui/image.yaml new file mode 100644 index 000000000..dab8f6565 --- /dev/null +++ b/oci/identity-platform-admin-ui/image.yaml @@ -0,0 +1,12 @@ +version: 1 +upload: + - source: "canonical/identity-platform-admin-ui" + commit: c46a9568f9be665f86aa5a274d8ac9d90054ba6b + directory: . 
+ release: + 1.19.0-22.04: + risks: + - stable + - candidate + - edge + end-of-life: "2025-03-01T00:00:00Z" \ No newline at end of file diff --git a/oci/identity-platform-login-ui/contacts.yaml b/oci/identity-platform-login-ui/contacts.yaml new file mode 100644 index 000000000..96633e707 --- /dev/null +++ b/oci/identity-platform-login-ui/contacts.yaml @@ -0,0 +1,5 @@ +notify: + emails: + - identity.charmers@lists.launchpad.net + mattermost-channels: + - ofi4for9obfq8m978h318x56ar diff --git a/oci/identity-platform-login-ui/documentation.yaml b/oci/identity-platform-login-ui/documentation.yaml new file mode 100644 index 000000000..e68b66296 --- /dev/null +++ b/oci/identity-platform-login-ui/documentation.yaml @@ -0,0 +1,54 @@ +version: 1 +application: identity-platform-login-ui +is_chiselled: True +description: | + Canonical IAM Login UI is a core component of the Identity Platform solution. + + It provides a way to login using OIDC via interactions with Ory Kratos and Ory Hydra, also allows + you to manage self service functionalities for everything related to authentication + + For further information check our repository on Github https://github.com/canonical/identity-platform-login-ui +docker: + parameters: + - -p 8080:8080 + access: Access the API at `http://localhost:8080`. +parameters: + - type: -e + value: 'TRACING_ENABLED=true' + description: Tracing enablement. + - type: -e + value: 'OTEL_GRPC_ENDPOINT=tempo-0.tempo-endpoints.stg-identity-jaas-dev.svc.cluster.local:4317' + description: Tracing server GRPC endpoint, has priority on OTEL_HTTP_ENDPOINT. + - type: -e + value: 'OTEL_HTTP_ENDPOINT=http://tempo-0.tempo-endpoints.stg-identity-jaas-dev.svc.cluster.local:4318' + description: Tracing server HTTP endpoint. + - type: -e + value: 'MFA_ENABLED="true"' + description: Enable MFA validation on logins. 
+ - type: -e + value: 'HYDRA_ADMIN_URL=http://hydra.io:4445' + description: Hydra Admin API URL, used to validate logins + - type: -e + value: 'KRATOS_ADMIN_URL=http://kratos.io:4434' + description: Kratos Admin API URL, used to manage identities + - type: -e + value: 'KRATOS_PUBLIC_URL=http://kratos.io:4433' + description: Kratos Public API URL, used to manage identities + - type: -e + value: 'BASE_URL=https://iam.io/dev/path' + description: Public URL Login UI will be served from. + - type: -e + value: 'LOG_FILE=log.txt' + description: Destination file for logs. + - type: -e + value: 'LOG_LEVEL=error' + description: Log level. + - type: -p + value: '8080:8080' + description: Server API port. + - type: CMD + value: '/usr/bin/identity-platform-login-ui serve' + description: > + Launch Login UI web server(s) using environment variables. +debug: + text: "" \ No newline at end of file diff --git a/oci/identity-platform-login-ui/image.yaml b/oci/identity-platform-login-ui/image.yaml new file mode 100644 index 000000000..e59f88226 --- /dev/null +++ b/oci/identity-platform-login-ui/image.yaml @@ -0,0 +1,12 @@ +version: 1 +upload: + - source: "canonical/identity-platform-login-ui" + commit: 3c03717429801d1334ca7feb4dd2a2e2793ca4ff + directory: . 
+ release: + 0.18.3-22.04: + risks: + - stable + - candidate + - edge + end-of-life: "2025-03-01T00:00:00Z" diff --git a/oci/kratos/.trivyignore b/oci/kratos/.trivyignore new file mode 100644 index 000000000..9ebdf9c14 --- /dev/null +++ b/oci/kratos/.trivyignore @@ -0,0 +1,10 @@ +# Upstream CVEs + +# github.com/jackc/pgproto3/v2 - pgproto3 SQL Injection via Protocol Message Size Overflow +GHSA-7jwh-3vrq-q3m8 +# github.com/jackc/pgx/v4 - pgx SQL Injection via Line Comment Creation +CVE-2024-27289 +# github.com/jackc/pgx/v4 - pgx SQL Injection via Protocol Message Size Overflow +CVE-2024-27304 +# github.com/docker/docker - Authz zero length regression +CVE-2024-41110 diff --git a/oci/kratos/_releases.json b/oci/kratos/_releases.json new file mode 100644 index 000000000..834e571a7 --- /dev/null +++ b/oci/kratos/_releases.json @@ -0,0 +1,17 @@ +{ + "1.1.0-22.04": { + "end-of-life": "2025-05-01T00:00:00Z", + "stable": { + "target": "1" + }, + "candidate": { + "target": "1" + }, + "beta": { + "target": "1.1.0-22.04_candidate" + }, + "edge": { + "target": "1" + } + } +} \ No newline at end of file diff --git a/oci/kratos/contacts.yaml b/oci/kratos/contacts.yaml new file mode 100644 index 000000000..2957e9a3a --- /dev/null +++ b/oci/kratos/contacts.yaml @@ -0,0 +1,5 @@ +notify: + emails: + - identity.charmers@lists.launchpad.net + mattermost-channels: + - ofi4for9obfq8m978h318x56ar \ No newline at end of file diff --git a/oci/kratos/documentation.yaml b/oci/kratos/documentation.yaml new file mode 100644 index 000000000..c4ca5ebef --- /dev/null +++ b/oci/kratos/documentation.yaml @@ -0,0 +1,67 @@ +version: 1 +application: kratos +is_chiselled: True +description: | + Ory Kratos is the developer-friendly, security-hardened and battle-tested Identity, + User Management and Authentication system for the Cloud. + + The identity management server Ory Kratos enables you to implement user management, + login and registration in a secure and straightforward way. 
+ Don't rewrite every aspect of identity management yourself. + Ory Kratos implements all common flows such as login and logout, account activation, + mfa/2fa, profile and session management, user facing errors and account recovery methods. + Just spin up a docker image and write a simple UI for it in the language or framework of + your choice. +docker: + parameters: + - -p 4433:4433 + - -p 4434:4434 + access: Access your Kratos Public API at `http://localhost:4433`, Admin API at `http://localhost:4434`. +parameters: + - type: -e + value: 'TRACING_ENABLED=true' + description: Tracing enablement. + - type: -e + value: 'TRACING_PROVIDER=otel' + description: Tracing protocol to be used. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_INSECURE=true' + description: Allow Tracing via non TLS/insecure communication. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_SAMPLING_SAMPLING_RATIO=1.0' + description: Tracing sampling ratio. + - type: -e + value: 'TRACING_PROVIDERS_OTLP_SERVER_URL=tempo.server.io:4318' + description: Tracing server url and port. + - type: -e + value: 'SERVE_PUBLIC_BASE_URL=https://kratos.io/dev/path' + description: Public URL kratos will be served from. + - type: -e + value: 'DSN=postgres://user:pass@postgresql:5432/db' + description: Database connection string for postgresql database. + - type: -e + value: 'HTTPS_PROXY=http://proxy.internal' + description: HTTPS proxy used in air gapped environments. + - type: -e + value: 'HTTP_PROXY=http://proxy.internal' + description: HTTP proxy used in air gapped environments. + - type: -e + value: 'NO_PROXY=*.canonical.com' + description: Domain that needs to be excluded from the proxy, used in air gapped environments. + - type: -p + value: '4433:4433' + description: Kratos Public API port. + - type: -p + value: '4434:4434' + description: Kratos Admin API port. 
+ - type: -v + value: '/path/to/Kratos/config.yaml:/kratos.yaml' + description: > + Kratos config contains all the information needed to successfully configure it as an OIDC + Provider, see https://www.ory.sh/docs/kratos/reference/configuration as a reference + - type: CMD + value: 'kratos serve all --config /kratos.yaml' + description: > + Launch Kratos web server(s) using a mix of environment variables and the config mounted via volume. +debug: + text: "" diff --git a/oci/kratos/image.yaml b/oci/kratos/image.yaml new file mode 100644 index 000000000..230eb0ad3 --- /dev/null +++ b/oci/kratos/image.yaml @@ -0,0 +1,12 @@ +version: 1 +upload: + - source: "canonical/kratos-rock" + commit: 396bf3a71cb65f97cf853540117858d6859ef43b + directory: . + release: + 1.1.0-22.04: + risks: + - stable + - candidate + - edge + end-of-life: "2025-05-01T00:00:00Z" \ No newline at end of file diff --git a/oci/mimir/_releases.json b/oci/mimir/_releases.json index 822d1fa49..e2fc31f38 100644 --- a/oci/mimir/_releases.json +++ b/oci/mimir/_releases.json @@ -31,18 +31,18 @@ }, "2-22.04": { "stable": { - "target": "105" + "target": "107" }, "candidate": { - "target": "105" + "target": "2-22.04_stable" }, "beta": { - "target": "105" + "target": "2-22.04_candidate" }, "edge": { - "target": "105" + "target": "2-22.04_beta" }, - "end-of-life": "2025-05-28T00:00:00Z" + "end-of-life": "2025-01-08T00:00:00Z" }, "2.10.4-22.04": { "stable": { @@ -148,5 +148,35 @@ "edge": { "target": "105" } + }, + "2.13.0-22.04": { + "end-of-life": "2025-01-08T00:00:00Z", + "stable": { + "target": "107" + }, + "candidate": { + "target": "2.13.0-22.04_stable" + }, + "beta": { + "target": "2.13.0-22.04_candidate" + }, + "edge": { + "target": "2.13.0-22.04_beta" + } + }, + "2.13-22.04": { + "end-of-life": "2025-01-08T00:00:00Z", + "stable": { + "target": "107" + }, + "candidate": { + "target": "2.13-22.04_stable" + }, + "beta": { + "target": "2.13-22.04_candidate" + }, + "edge": { + "target": "2.13-22.04_beta" + } } } 
\ No newline at end of file diff --git a/oci/mimir/image.yaml b/oci/mimir/image.yaml index 315972735..dc6644169 100644 --- a/oci/mimir/image.yaml +++ b/oci/mimir/image.yaml @@ -1,18 +1,18 @@ version: 1 upload: - source: canonical/mimir-rock - commit: 2d5de24d16bb5909204614314536ea855d2defea - directory: 2.12.0 + commit: 130a427f31db55028183aa9842f7fdd6778ad733 + directory: 2.13.0 release: - 2.12.0-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 2.13.0-22.04: + end-of-life: "2025-01-08T00:00:00Z" risks: - stable - 2.12-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 2.13-22.04: + end-of-life: "2025-01-08T00:00:00Z" risks: - stable 2-22.04: - end-of-life: "2025-05-28T00:00:00Z" + end-of-life: "2025-01-08T00:00:00Z" risks: - stable diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index 679461d17..ac773692b 100644 --- a/oci/mock-rock/_releases.json +++ b/oci/mock-rock/_releases.json @@ -35,31 +35,31 @@ "1.1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "584" + "target": "625" }, "beta": { - "target": "584" + "target": "625" }, "edge": { - "target": "584" + "target": "625" } }, "1-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "candidate": { - "target": "584" + "target": "625" }, "beta": { - "target": "584" + "target": "625" }, "edge": { - "target": "584" + "target": "625" } }, "1.2-22.04": { "end-of-life": "2025-05-01T00:00:00Z", "beta": { - "target": "585" + "target": "626" }, "edge": { "target": "1.2-22.04_beta" diff --git a/oci/tempo/_releases.json b/oci/tempo/_releases.json index 192fe6a41..a2b2f8ef2 100644 --- a/oci/tempo/_releases.json +++ b/oci/tempo/_releases.json @@ -30,9 +30,9 @@ } }, "2-22.04": { - "end-of-life": "2025-05-28T00:00:00Z", + "end-of-life": "2025-01-18T00:00:00Z", "stable": { - "target": "2" + "target": "3" }, "candidate": { "target": "2-22.04_stable" @@ -58,5 +58,35 @@ "edge": { "target": "2.4.2-22.04_beta" } + }, + "2.6.1-22.04": { + "end-of-life": "2025-01-18T00:00:00Z", + "stable": { + 
"target": "3" + }, + "candidate": { + "target": "2.6.1-22.04_stable" + }, + "beta": { + "target": "2.6.1-22.04_candidate" + }, + "edge": { + "target": "2.6.1-22.04_beta" + } + }, + "2.6-22.04": { + "end-of-life": "2025-01-18T00:00:00Z", + "stable": { + "target": "3" + }, + "candidate": { + "target": "2.6-22.04_stable" + }, + "beta": { + "target": "2.6-22.04_candidate" + }, + "edge": { + "target": "2.6-22.04_beta" + } } } \ No newline at end of file diff --git a/oci/tempo/image.yaml b/oci/tempo/image.yaml index 913d0b02a..57b46703a 100644 --- a/oci/tempo/image.yaml +++ b/oci/tempo/image.yaml @@ -1,18 +1,18 @@ version: 1 upload: - source: canonical/tempo-rock - commit: 791b11401656f10dd447b2385644bf847737ae6e - directory: 2.4.2 + commit: 61866670957aecbb67481f8c5250b72aa82fc7f4 + directory: 2.6.1 release: - 2.4.2-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 2.6.1-22.04: + end-of-life: "2025-01-18T00:00:00Z" risks: - stable - 2.4-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 2.6-22.04: + end-of-life: "2025-01-18T00:00:00Z" risks: - stable 2-22.04: - end-of-life: "2025-05-28T00:00:00Z" + end-of-life: "2025-01-18T00:00:00Z" risks: - stable diff --git a/src/build_rock/assemble_rock/assemble.sh b/src/build_rock/assemble_rock/assemble.sh new file mode 100755 index 000000000..f2c3d8dd4 --- /dev/null +++ b/src/build_rock/assemble_rock/assemble.sh @@ -0,0 +1,54 @@ +#! /bin/bash + +set -e + + +function usage(){ + echo + echo "$(basename "$0") -d -n " + echo + echo "Merge multiple OCI rock images into one multi arch image." + echo + echo -e "-d \\t Directory to search for rock OCI images in." + echo -e "-n \\t Final output archive name. " +} + +while getopts "d:n:" opt +do + case $opt in + d) + ROCK_DIR="$OPTARG" + ;; + n) + ARCHIVE_NAME="$OPTARG" + ;; + ?) 
+ usage + exit 1 + ;; + esac +done + +if [ -z "$ROCK_DIR" ] +then + echo "Error: Missing rock search directory argument (-d)" + usage + exit 1 +fi + +if [ -z "$ARCHIVE_NAME" ] +then + echo "Error: Missing final archive name (-n)" + usage + exit 1 +fi + +buildah manifest create multi-arch-rock + +for rock in `find "$ROCK_DIR" -name "*.rock" -type f` +do + buildah manifest add multi-arch-rock oci-archive:$rock +done + +buildah manifest push --all multi-arch-rock "oci-archive:$ARCHIVE_NAME" + diff --git a/src/build_rock/assemble_rock/requirements.sh b/src/build_rock/assemble_rock/requirements.sh new file mode 100755 index 000000000..1ff6d0d00 --- /dev/null +++ b/src/build_rock/assemble_rock/requirements.sh @@ -0,0 +1,6 @@ +#! /bin/bash + +set -e + +sudo apt update +sudo apt install buildah -y diff --git a/src/build_rock/configure/generate_build_matrix.py b/src/build_rock/configure/generate_build_matrix.py new file mode 100755 index 000000000..8c1dc9cfa --- /dev/null +++ b/src/build_rock/configure/generate_build_matrix.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 + +import yaml +import os +import argparse +import json +from enum import Enum +from ...shared.github_output import GithubOutput +from pydantic import TypeAdapter + + +class MATRIX_NAMES(Enum): + RUNNER = "runner-build-matrix" + LPCI = "lpci-build-matrix" + + +class MissingArchSupport(Exception): + pass + + +def get_target_archs(rockcraft: dict) -> list: + """get list of target architectures from rockcraft project definition""" + + rock_platforms = rockcraft["platforms"] + + target_archs = set() + + for platf, values in rock_platforms.items(): + + if isinstance(values, dict) and "build-for" in values: + if isinstance(arches := values["build-for"], list): + target_archs.update(arches) + elif isinstance(values, str): + target_archs.add(arches) + else: + target_archs.add(platf) + + return target_archs + + +def configure_matrices(target_archs: list, arch_map: dict, lp_fallback: bool) -> dict: + """Sort build into 
appropriate build matrices""" + + # map configuration to individual job matrices + build_matrices = {name.value: {"include": []} for name in MATRIX_NAMES} + + # Check if we have runners for all supported architectures + if missing_archs := set(target_archs) - set(arch_map): + + # raise exception if we cannot fallback to LP builds + if not lp_fallback: + raise MissingArchSupport( + f"Missing support for runner arches: {missing_archs}" + ) + + # configure LP build + build_matrices[MATRIX_NAMES.LPCI.value]["include"].append( + {"architecture": "-".join(set(target_archs))} + ) + + else: + # configure runner matrix for list of supported runners + for runner_arch, runner_name in arch_map.items(): + if runner_arch in target_archs: + build_matrices[MATRIX_NAMES.RUNNER.value]["include"].append( + {"architecture": runner_arch, "runner": runner_name} + ) + + return build_matrices + + +def set_build_config_outputs(rock_name: str, build_matrices: dict): + """Update GITHUB_OUTPUT with build configuration.""" + + outputs = {"rock-name": rock_name, **build_matrices} + + with GithubOutput() as github_output: + github_output.write(**outputs) + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument( + "--rockfile-directory", + help="Path where to directory containing rockcraft.yaml.", + required=True, + ) + + parser.add_argument( + "--lpci-fallback", + help="Revert to lpci if architectures are not supported. 
", + required=True, + type=TypeAdapter(bool).validate_python, + ) + + parser.add_argument( + "--config", + help="JSON mapping arch to runner for matrix generation.", + required=True, + ) + + args = parser.parse_args() + + # get configuration form rockcraft yaml + with open(f"{args.rockfile_directory}/rockcraft.yaml") as rf: + rockcraft_yaml = yaml.safe_load(rf) + + # load config + arch_map = json.loads(args.config) + + target_archs = get_target_archs(rockcraft_yaml) + build_matrices = configure_matrices(target_archs, arch_map, args.lpci_fallback) + + # set github outputs for use in later steps + set_build_config_outputs(rockcraft_yaml["name"], build_matrices) + + +if __name__ == "__main__": + main() diff --git a/src/build_rock/configure/requirements.txt b/src/build_rock/configure/requirements.txt new file mode 100644 index 000000000..d04c06062 --- /dev/null +++ b/src/build_rock/configure/requirements.txt @@ -0,0 +1,2 @@ +pyyaml==6.0.2 +pydantic==2.8.2 diff --git a/src/build_rock/lpci_build/lpci_build.sh b/src/build_rock/lpci_build/lpci_build.sh new file mode 100755 index 000000000..f1900694d --- /dev/null +++ b/src/build_rock/lpci_build/lpci_build.sh @@ -0,0 +1,58 @@ +#! /bin/bash + + +set -e + + +function usage(){ + echo + echo "$(basename "$0") -d -c " + echo + echo "Build local rockcraft project on Launchpad." + echo + echo -e "-d \\t Directory to rockcraft project file. " + echo -e "-c \\t Launchpad credentials. " +} + +while getopts "c:d:" opt +do + case $opt in + d) + ROCKCRAFT_DIR="$OPTARG" + ;; + c) + LP_CREDENTIALS_B64="$OPTARG" + ;; + ?) 
+ usage + exit 1 + ;; + esac +done + +if [ -z "$ROCKCRAFT_DIR" ] +then + echo "Error: Missing rockcraft project directory argument (-d)" + usage + exit 1 +fi + +if [ -z "$LP_CREDENTIALS_B64" ] +then + echo "Error: Missing launchpad credentials argument (-c)" + usage + exit 1 +fi + + +cd "$ROCKCRAFT_DIR" +rocks_toolbox="$(mktemp -d)" + +# install dependencies +git clone --depth 1 --branch v1.1.2 https://github.com/canonical/rocks-toolbox $rocks_toolbox +${rocks_toolbox}/rockcraft_lpci_build/requirements.sh +pip3 install -r ${rocks_toolbox}/rockcraft_lpci_build/requirements.txt + +python3 ${rocks_toolbox}/rockcraft_lpci_build/rockcraft_lpci_build.py \ + --lp-credentials-b64 "$LP_CREDENTIALS_B64" \ + --launchpad-accept-public-upload diff --git a/src/docs/schema/triggers.py b/src/docs/schema/triggers.py index d9de49003..7cae1b3f6 100755 --- a/src/docs/schema/triggers.py +++ b/src/docs/schema/triggers.py @@ -2,6 +2,7 @@ this module is the pydantic version of the documentation.yaml schema. """ + from typing import Optional from pydantic import BaseModel, Extra, constr, conlist diff --git a/src/image/prepare_single_image_build_matrix.py b/src/image/prepare_single_image_build_matrix.py index f94dfa6ae..09cecc664 100755 --- a/src/image/prepare_single_image_build_matrix.py +++ b/src/image/prepare_single_image_build_matrix.py @@ -106,9 +106,10 @@ def validate_image_trigger(data: dict) -> None: # set an output as a marker for later knowing if we need to release if "release" in builds[img_number]: - min_eol = datetime.strptime(min( - v["end-of-life"] for v in builds[img_number]["release"].values() - ), "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc) + min_eol = datetime.strptime( + min(v["end-of-life"] for v in builds[img_number]["release"].values()), + "%Y-%m-%dT%H:%M:%SZ", + ).replace(tzinfo=timezone.utc) if min_eol < datetime.now(timezone.utc): print("Track skipped because it reached its end of life") del builds[img_number] diff --git a/src/shared/github_output.py 
b/src/shared/github_output.py new file mode 100755 index 000000000..ab9fe70f4 --- /dev/null +++ b/src/shared/github_output.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 + +import json +from os import environ + +"""This module provides support for writing Github Outputs.""" + +# locate +GITHUB_OUTPUT = environ.get("GITHUB_OUTPUT", None) + + +class GithubOutput: + + def __init__(self): + + self.output_path = environ["GITHUB_OUTPUT"] + + def __enter__(self): + + self.file_handler = open(self.output_path, "a") + + return self + + def __exit__(self, exc_type, exc_value, traceback): + + self.file_handler.close() + del self.file_handler + + def write(self, **kwargs): + """Format kwargs for Github Outputs and write to `output` File Object""" + + if not getattr(self, "file_handler", None): + raise AttributeError( + "file_handler not available. Please use in context block." + ) + + for key, value in kwargs.items(): + + formatted_value = self.format_value(value) + print(f"{key}={formatted_value}", file=self.file_handler) + + @staticmethod + def format_value(value): + """Format `value` such that it can be stored as a github output""" + + if isinstance(value, str): + # str is an exception to casting with json.dumps as we do + # not need to represent the string itself, but just the data + return value + else: + json_value = json.dumps(value) + return json_value diff --git a/src/shared/release_info.py b/src/shared/release_info.py old mode 100755 new mode 100644 index 86a8d02d7..d07a06f6a --- a/src/shared/release_info.py +++ b/src/shared/release_info.py @@ -6,7 +6,7 @@ """ import json -from src.image.utils.schema.triggers import KNOWN_RISKS_ORDERED +from ..image.utils.schema.triggers import KNOWN_RISKS_ORDERED class BadChannel(Exception): diff --git a/tests/data/rockcraft.yaml b/tests/data/rockcraft.yaml new file mode 100644 index 000000000..a26aeabe0 --- /dev/null +++ b/tests/data/rockcraft.yaml @@ -0,0 +1,18 @@ +# Metadata section + +name: hello +summary: Hello World +description: 
The most basic example of a rock. +version: "latest" +license: Apache-2.0 + +base: bare +build-base: ubuntu@22.04 +platforms: + amd64: + +parts: + hello: + plugin: nil + stage-packages: + - hello diff --git a/tests/etc/requirements.txt b/tests/etc/requirements.txt index fe93bd52f..214657606 100644 --- a/tests/etc/requirements.txt +++ b/tests/etc/requirements.txt @@ -1 +1,2 @@ pytest==8.3.2 +-r ../../src/build_rock/configure/requirements.txt \ No newline at end of file diff --git a/tests/fixtures/buffers.py b/tests/fixtures/buffers.py index 694e9a448..309931910 100644 --- a/tests/fixtures/buffers.py +++ b/tests/fixtures/buffers.py @@ -1,5 +1,7 @@ import pytest from io import StringIO +import os +from pathlib import Path @pytest.fixture @@ -7,3 +9,14 @@ def str_buff(): """String IO fixture for simulating a file object""" with StringIO() as buffer: yield buffer + + +@pytest.fixture +def github_output(monkeypatch, tmp_path): + + env_path = tmp_path / "env" + env_path.touch() + + monkeypatch.setitem(os.environ, "GITHUB_OUTPUT", str(env_path)) + + yield env_path diff --git a/tests/fixtures/junit_et.py b/tests/fixtures/junit_et.py deleted file mode 100644 index 31616c82e..000000000 --- a/tests/fixtures/junit_et.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest -import xml.etree.ElementTree as ET -from .. import DATA_DIR - - -@pytest.fixture -def junit_with_failure(): - """Load ET of junit xml report with failure""" - sample = DATA_DIR / "junit_xml_failure.xml" - - tree = ET.parse(sample) - root = tree.getroot() - return root diff --git a/tests/fixtures/sample_data.py b/tests/fixtures/sample_data.py new file mode 100644 index 000000000..c3df9af1a --- /dev/null +++ b/tests/fixtures/sample_data.py @@ -0,0 +1,26 @@ +import pytest +import xml.etree.ElementTree as ET +import yaml +from .. 
import DATA_DIR + + +@pytest.fixture +def junit_with_failure(): + """Load ET of junit xml report with failure.""" + sample_file = DATA_DIR / "junit_xml_failure.xml" + + tree = ET.parse(sample_file) + root = tree.getroot() + return root + + +@pytest.fixture +def rockcraft_project(): + """Get sample rockcraft project file for testing.""" + + sample = DATA_DIR / "rockcraft.yaml" + + with open(sample) as rf: + project = yaml.safe_load(rf) + + return project diff --git a/tests/integration/test_convert_junit_xml_to_markdown.py b/tests/integration/test_convert_junit_xml_to_markdown.py new file mode 100644 index 000000000..9305985c2 --- /dev/null +++ b/tests/integration/test_convert_junit_xml_to_markdown.py @@ -0,0 +1,18 @@ +from ..fixtures.buffers import str_buff +from ..fixtures.sample_data import junit_with_failure +import tools.junit_to_markdown.convert as report + + +def test_print_redirection(junit_with_failure, str_buff, capsys): + """Ensure that the report is entirely redirected when needed""" + + report.print_junit_report(junit_with_failure, str_buff) + report.print_junit_report(junit_with_failure, None) # print report to stdout + + str_buff.seek(0) + str_buff_content = str_buff.read() + + captured = capsys.readouterr() + stdout_content = captured.out + + assert stdout_content == str_buff_content, "Printing to multiple locations." 
diff --git a/tests/integration/test_junit_to_markdown_output.py b/tests/integration/test_junit_to_markdown_output.py index 419389512..9305985c2 100644 --- a/tests/integration/test_junit_to_markdown_output.py +++ b/tests/integration/test_junit_to_markdown_output.py @@ -1,5 +1,5 @@ from ..fixtures.buffers import str_buff -from ..fixtures.junit_et import junit_with_failure +from ..fixtures.sample_data import junit_with_failure import tools.junit_to_markdown.convert as report diff --git a/tests/unit/test_generate_build_matrix.py b/tests/unit/test_generate_build_matrix.py new file mode 100644 index 000000000..7fca208dc --- /dev/null +++ b/tests/unit/test_generate_build_matrix.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 + +from src.build_rock.configure.generate_build_matrix import ( + get_target_archs, + configure_matrices, + MissingArchSupport, + set_build_config_outputs, +) +import pytest +from ..fixtures.buffers import github_output +from ..fixtures.sample_data import rockcraft_project + + +def test_get_target_archs(rockcraft_project): + """Test extraction of target architectures from rockcraft project configuration""" + + rockcraft_project["platforms"] = { + "amd64": None, + "armhf": {"build-for": ["armhf", "arm64"]}, + "ibm": {"build-on": ["s390x"], "build-for": "s390x"}, + } + + arches = get_target_archs(rockcraft_project) + assert arches == {"arm64", "armhf", "amd64"} + + +def test_configure_matrices(): + """Test correct configuration of build matrices from project's target arches""" + + build_matrices = configure_matrices(["amd64"], {"amd64": "ubuntu-22.04"}, False) + expected_result = { + "runner-build-matrix": { + "include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}] + }, + "lpci-build-matrix": {"include": []}, + } + + assert build_matrices == expected_result + + +def test_configure_matrices_fallback_exception(): + """Test proper exception is raised when target arch is not buildable""" + with pytest.raises(MissingArchSupport): + 
configure_matrices(["arm64"], {"amd64": "ubuntu-22.04"}, False) + + +def test_configure_matrices_lpci_fallback(): + """Test lpci fallback logic when target cannot be built on a runner""" + build_matrices = configure_matrices(["arm64"], {"amd64": "ubuntu-22.04"}, True) + expected_result = { + "runner-build-matrix": {"include": []}, + "lpci-build-matrix": {"include": [{"architecture": "arm64"}]}, + } + + assert build_matrices == expected_result + + +def test_set_build_config_outputs(github_output): + """Test correct generation of build matrices.""" + + test_build_matrices = { + "runner-build-matrix": { + "include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}] + }, + "lpci-build-matrix": {"include": []}, + } + + set_build_config_outputs("test", test_build_matrices) + + with open(github_output, "r") as fh: + gh_output = fh.read() + + expected_result = """rock-name=test +runner-build-matrix={"include": [{"architecture": "amd64", "runner": "ubuntu-22.04"}]} +lpci-build-matrix={"include": []} +""" + + assert gh_output == expected_result diff --git a/tests/unit/test_github_output.py b/tests/unit/test_github_output.py new file mode 100755 index 000000000..5cf11445a --- /dev/null +++ b/tests/unit/test_github_output.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +from src.shared.github_output import GithubOutput +from ..fixtures.buffers import github_output + + +def test_write(github_output): + """Test github_output write function""" + + outputs = { + "hello-world": 42, + } + expected_result = "hello-world=42\n" + + with GithubOutput() as output: + + output.write(**outputs) + + with open(github_output, "r") as fh: + result = fh.read() + + assert result == expected_result + + +def test_format_value_string(): + """Test formatting of string for outputs""" + + expected_result = "foo" + result = GithubOutput.format_value("foo") + + assert expected_result == result + + +def test_format_value_number(): + """Test formatting of number for outputs""" + + expected_result = "1" + 
result = GithubOutput.format_value(1) + + assert expected_result == result + + +def test_format_value_json(): + """Test formatting of JSON for outputs""" + + expected_result = '{"foo": "bar"}' + result = GithubOutput.format_value({"foo": "bar"}) + + assert expected_result == result