mirror of https://github.com/JasonN3/build-container-installer.git (synced 2025-12-25 10:57:55 +01:00)

Compare commits: no commits in common; "main" and "v1.1.0" have entirely different histories.
62 changed files with 772 additions and 1451 deletions.

The file sections below are reconstructed from the side-by-side view: lines prefixed with "-" are the "main" version, lines prefixed with "+" are the "v1.1.0" version, and unprefixed lines are common to both.
(deleted file; the file name is not captured in this mirror, but the contents match the markdownlint style file referenced by .mdlrc further below, likely .codacy/markdownlint.rb)
@@ -1,2 +0,0 @@
-all
-rule 'MD033', :allowed_elements => ["a","img","picture","source"]
.github/ISSUE_TEMPLATE/bug_report.md (6 changed lines, vendored)
@@ -12,7 +12,6 @@ A clear and concise description of what the bug is.

 **To Reproduce**
 Steps to reproduce the behavior:

 1. Go to '...'
 2. Click on '....'
 3. Scroll down to '....'
@@ -25,9 +24,8 @@ A clear and concise description of what you expected to happen.
 If applicable, add screenshots to help explain your problem.

 **Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Version [e.g. 22]

 **Additional context**
 Add any other context about the problem here.
(per the hunk counts, each hunk drops one line on the v1.1.0 side; the dropped lines are not visible in this capture)
.github/dependabot.yml (17 lines, vendored) - present on main, removed in v1.1.0
@@ -1,17 +0,0 @@
-# To get started with Dependabot version updates, you'll need to specify which
-# package ecosystems to update and where the package manifests are located.
-# Please see the documentation for all configuration options:
-# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
-
-version: 2
-updates:
-  - package-ecosystem: "github-actions" # See documentation for possible values
-    directory: "/" # Location of package manifests
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "github-actions" # See documentation for possible values
-    directory: "/external" # Location of package manifests
-    schedule:
-      interval: "daily"
-    ignore:
-      - dependency-name: "lorax"
.github/workflows/bot_commands.yml (6 changed lines, vendored)
@@ -85,6 +85,9 @@ jobs:
     with:
       pr: ${{ github.event.issue.number }}
       parent_job_name: Run ISO Tests
+      iso_name-38: ${{ needs.load_vars.outputs.IMAGE_NAME }}-38.iso
+      iso_name-39: ${{ needs.load_vars.outputs.IMAGE_NAME }}-39.iso
+      iso_name-40: ${{ needs.load_vars.outputs.IMAGE_NAME }}-40.iso

   run_test_deployment:
     name: Run ISO Deployment Tests
@@ -103,3 +106,6 @@ jobs:
     with:
       pr: ${{ github.event.issue.number }}
       parent_job_name: Run ISO Deployment Tests
+      iso_name-38: ${{ needs.load_vars.outputs.IMAGE_NAME }}-38.iso
+      iso_name-39: ${{ needs.load_vars.outputs.IMAGE_NAME }}-39.iso
+      iso_name-40: ${{ needs.load_vars.outputs.IMAGE_NAME }}-40.iso
.github/workflows/build_container.yml (41 changed lines, vendored)
@@ -1,5 +1,3 @@
-name: Build Container
-
 on:
   workflow_call:
     inputs:
@@ -26,7 +24,7 @@ jobs:
       statuses: write
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0
@@ -50,7 +48,6 @@ jobs:
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }}"
-          per_page: 100

       - name: Set status
        if: inputs.pr && always()
@@ -72,10 +69,8 @@ jobs:
          tags: |
            type=ref,event=branch
            type=ref,event=pr
-            type=raw,value=${{ github.sha }}
-            type=semver,pattern=v{{version}}
-            type=semver,pattern=v{{major}}.{{minor}}
-            type=semver,pattern=v{{major}}.{{minor}}.{{patch}}
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}

       - name: Docker meta for PR
        if: inputs.pr
@@ -86,7 +81,6 @@ jobs:
            ghcr.io/${{ github.repository }}
          tags: |
            pr-${{ inputs.pr }}
-            ${{ github.sha }}

       - name: Buildah Build
        id: build-image
@@ -96,13 +90,6 @@ jobs:
          tags: ${{ steps.meta.outputs.tags || steps.meta_pr.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels || steps.meta_pr.outputs.labels }}

-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.6.0
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
       - name: Push image
        uses: redhat-actions/push-to-registry@v2
        with:
@@ -120,25 +107,3 @@ jobs:
          context: ${{ env.JOB_NAME }}
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
-
-      - name: Install Cosign
-        if: startsWith(github.ref, 'refs/tags/v')
-        uses: sigstore/cosign-installer@v3.10.0
-
-      - name: Sign the images
-        if: startsWith(github.ref, 'refs/tags/v')
-        env:
-          TAGS: ${{ steps.build-image.outputs.tags }}
-          COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY }}
-          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
-        run: |
-          images=""
-          digest=""
-          for tag in ${TAGS}; do
-            if [[ -z "${digest}" ]]
-            then
-              digest=$(cat $(echo ${tag} | tr '/:' '--')_digest.txt)
-            fi
-            images+="${tag}@${digest} "
-          done
-          cosign sign --key env://COSIGN_PRIVATE_KEY --yes ${images}
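Note: the "Sign the images" step removed in v1.1.0 signs each pushed tag with a cosign key pair on main. As a minimal sketch of how a consumer could check such a signature (not part of the diff; it assumes the public half of COSIGN_PRIVATE_KEY has been published as cosign.pub, and the image path follows ghcr.io/${{ github.repository }} from the workflow):

    # verify a signed release tag against the published public key (hypothetical key file name)
    cosign verify --key cosign.pub ghcr.io/jasonn3/build-container-installer:v1.1.0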
.github/workflows/build_iso.yml (56 changed lines, vendored)
@@ -1,5 +1,3 @@
-name: Build ISO
-
 on:
   workflow_call:
     inputs:
@@ -9,6 +7,16 @@ on:
       parent_job_name:
        required: true
        type: string
+    outputs:
+      iso_name-38:
+        description: "Version 38 ISO Name"
+        value: ${{ jobs.build_iso.outputs.iso_name-38 }}
+      iso_name-39:
+        description: "Version 39 ISO Name"
+        value: ${{ jobs.build_iso.outputs.iso_name-39 }}
+      iso_name-40:
+        description: "Version 40 ISO Name"
+        value: ${{ jobs.build_iso.outputs.iso_name-40 }}

 jobs:
   load_vars:
@@ -29,10 +37,18 @@ jobs:
     continue-on-error: false
     strategy:
       fail-fast: false
-      matrix: ${{ fromJson(needs.load_vars.outputs.BUILD_MATRIX) }}
+      matrix:
+        version:
+          - 38
+          - 39
+          - 40
+    outputs:
+      iso_name-38: ${{ steps.save_output.outputs.iso_name-38 }}
+      iso_name-39: ${{ steps.save_output.outputs.iso_name-39 }}
+      iso_name-40: ${{ steps.save_output.outputs.iso_name-40 }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v4
        with:
          submodules: recursive
@@ -53,8 +69,7 @@ jobs:
        id: jobs
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
-          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
-          per_page: 100
+          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }})"

       - name: Set status
        if: inputs.pr && always()
@@ -62,7 +77,7 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: pending
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
@@ -105,28 +120,30 @@ jobs:
        id: build
        with:
          arch: ${{ needs.load_vars.outputs.ARCH }}
-          image_name: ${{ matrix.image_name }}
-          image_repo: ${{ matrix.image_repo}}
-          image_src: ${{ matrix.image_src }}
+          image_name: ${{ needs.load_vars.outputs.IMAGE_NAME }}
+          image_repo: ${{ needs.load_vars.outputs.IMAGE_REPO }}
          image_tag: ${{ matrix.version }}
          version: ${{ matrix.version }}
-          repos: ${{ matrix.repos }}
          variant: ${{ needs.load_vars.outputs.VARIANT }}
-          flatpak_remote_refs: ${{ matrix.flatpaks == 'flatpak_refs' && needs.load_vars.outputs.FLATPAK_REMOTE_REFS || '' }}
-          flatpak_remote_refs_dir: ${{ matrix.flatpaks == 'flatpak_refs_dir' && needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR || '' }}
+          flatpak_remote_refs_dir: /github/workspace/${{ needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR }}
          secure_boot_key_url: ${{ needs.load_vars.outputs.SECURE_BOOT_KEY_URL }}
          enrollment_password: ${{ needs.load_vars.outputs.ENROLLMENT_PASSWORD }}
-          iso_name: build/${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}.iso
+          iso_name: ${{ needs.load_vars.outputs.IMAGE_NAME }}-${{ matrix.version }}.iso

+      - name: Save output
+        id: save_output
+        shell: bash
+        run: |
+          echo "iso_name-${{ matrix.version }}=${{ steps.build.outputs.iso_name}}" >> $GITHUB_OUTPUT
+
       - name: Upload ISO as artifact
-        if: matrix.version != 'fake'
        id: upload
        uses: actions/upload-artifact@v4
        with:
-          name: ${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}
+          name: ${{ steps.build.outputs.iso_name }}
          path: |
-            build/${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}.iso
-            build/${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}.iso-CHECKSUM
+            ${{ steps.build.outputs.iso_path }}
+            ${{ steps.build.outputs.iso_path }}-CHECKSUM
          if-no-files-found: error
          retention-days: 0
          compression-level: 0
@@ -138,7 +155,6 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: ${{ job.status }}
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
.github/workflows/build_vars.yml (65 changed lines, vendored)
@@ -1,71 +1,18 @@
-name: Build Vars
-
 on:
   workflow_call:
     outputs:
       ARCH:
         value: 'x86_64'
-      BUILD_MATRIX:
-        value: '
-          {
-            "version": [
-              "41",
-              "42",
-              "43"
-            ],
-            "flatpaks": [
-              "false",
-              "flatpak_refs_dir",
-              "flatpak_refs"
-            ],
-            "image_repo": [
-              "ghcr.io/ublue-os",
-              "quay.io/fedora",
-              "quay.io/fedora-ostree-desktops"
-            ],
-            "include": [
-              {
-                "image_repo": "ghcr.io/ublue-os",
-                "image_name": "base-main",
-              },
-              {
-                "image_repo": "quay.io/fedora",
-                "image_name": "fedora-bootc"
-              },
-              {
-                "image_repo": "quay.io/fedora-ostree-desktops",
-                "image_name": "base-atomic"
-              }
-            ],
-            "exclude": [
-              {
-                "image_repo": "quay.io/fedora",
-                "flatpaks": "flatpak_refs_dir"
-              },
-              {
-                "image_repo": "quay.io/fedora",
-                "flatpaks": "flatpak_refs"
-              },
-              {
-                "image_repo": "quay.io/fedora-ostree-desktops",
-                "flatpaks": "flatpak_refs_dir"
-              },
-              {
-                "image_repo": "quay.io/fedora-ostree-desktops",
-                "flatpaks": "flatpak_refs"
-              },
-              {
-                "image_repo": "ghcr.io/ublue-os",
-                "version": "43"
-              }
-            ]
-          }'
+      IMAGE_NAME:
+        value: 'base'
+      IMAGE_REPO:
+        value: 'quay.io/fedora-ostree-desktops'
+      IMAGE_TAG:
+        value: '39'
       VARIANT:
         value: 'Server'
       FLATPAK_REMOTE_REFS_DIR:
         value: flatpak_refs
-      FLATPAK_REMOTE_REFS:
-        value: "app/org.mozilla.firefox/x86_64/stable app/org.videolan.VLC/x86_64/stable"
       SECURE_BOOT_KEY_URL:
         value: 'https://github.com/ublue-os/akmods/raw/main/certs/public_key.der'
       ENROLLMENT_PASSWORD:
.github/workflows/clean_repo.yml (143 lines, vendored) - present on main, removed in v1.1.0
@@ -1,143 +0,0 @@
-name: Clean Container Registry
-on:
-#  schedule:
-#    - cron: '0 21 * * 0'
-
-  workflow_dispatch:
-
-jobs:
-  delete_untagged:
-    name: Delete Untagged Packages
-    runs-on: ubuntu-latest
-    steps:
-      - name: Delete Untagged Packages
-        uses: Chizkiyahu/delete-untagged-ghcr-action@v5
-        with:
-          token: ${{ secrets.PACKAGE_DELETER }}
-          repository_owner: ${{ github.repository_owner }}
-          repository: ${{ github.repository }}
-          untagged_only: true
-          owner_type: user
-
-  delete_old_pr:
-    name: Delete Old PR Packages
-    runs-on: ubuntu-latest
-    permissions:
-      packages: read
-    steps:
-      - name: Delete Old PR Packages
-        id: all_tags
-        run: |
-          curl -L \
-            -H "Accept: application/vnd.github+json" \
-            -H "Authorization: Bearer ${{ github.token }}" \
-            -H "X-GitHub-Api-Version: 2022-11-28" \
-            "https://api.github.com/user/packages/container/build-container-installer/versions" > all_packages.json
-          curl -L \
-            -H "Accept: application/vnd.github+json" \
-            -H "Authorization: Bearer ${{ github.token }}" \
-            -H "X-GitHub-Api-Version: 2022-11-28" \
-            https://api.github.com/repos/${{ github.repository }}/pulls | \
-            jq -r '.[] | select(.state == "open") | .number' | \
-            sed 's/^/pr-/g' > open_prs
-          cat << EOF | python
-          import json
-          import re
-
-          prs = open("open_prs", "r")
-          open_prs = prs.readlines()
-          open_prs = [x.strip() for x in open_prs]
-
-          all_packages = open('all_packages.json')
-          data = json.load(all_packages)
-
-          delete_versions = open("delete_versions", "w")
-
-          for i in data:
-              delete = True
-              for tag in i['metadata']['container']['tags']:
-                  if not re.match('pr-.*', tag):
-                      delete = False
-                      continue
-                  if tag in open_prs:
-                      delete = False
-              if delete:
-                  print("delete", i['id'])
-                  delete_versions.write(str(i['id']))
-                  delete_versions.write("\n")
-                  print(i['metadata']['container']['tags'])
-          EOF
-
-          for id in $(cat delete_versions)
-          do
-            curl -L \
-              -X DELETE \
-              -H "Accept: application/vnd.github+json" \
-              -H "Authorization: Bearer ${{ secrets.PACKAGE_DELETER }}" \
-              -H "X-GitHub-Api-Version: 2022-11-28" \
-              https://api.github.com/user/packages/container/build-container-installer/versions/${id}
-          done
-
-  delete_old_branches:
-    name: Delete Old Branch Packages
-    runs-on: ubuntu-latest
-    permissions:
-      packages: read
-    steps:
-      - name: Delete Old Branch Packages
-        run: |
-          curl -L \
-            -H "Accept: application/vnd.github+json" \
-            -H "Authorization: Bearer ${{ github.token }}" \
-            -H "X-GitHub-Api-Version: 2022-11-28" \
-            "https://api.github.com/user/packages/container/build-container-installer/versions" > all_packages.json
-          curl -L \
-            -H "Accept: application/vnd.github+json" \
-            -H "Authorization: Bearer ${{ github.token }}" \
-            -H "X-GitHub-Api-Version: 2022-11-28" \
-            https://api.github.com/repos/${{ github.repository }}/branches | jq -r '.[].name' > branches
-
-          cat << EOF | python
-          import json
-          import re
-
-          branches_f = open("branches", "r")
-          branches = branches_f.readlines()
-          branches = [x.strip() for x in branches]
-
-          all_packages_f = open('all_packages.json')
-          data = json.load(all_packages_f)
-
-          delete_versions = open("delete_versions", "w")
-
-          for i in data:
-              delete = True
-              for tag in i['metadata']['container']['tags']:
-                  if re.match('v[0-9]+\\\.[0-9]+\\\.[0-9]+', tag):
-                      delete = False
-                      continue
-                  if re.match('pr-.*', tag):
-                      delete = False
-                      continue
-                  if tag in branches:
-                      delete = False
-                      continue
-                  if tag == "latest":
-                      delete = False
-              if delete:
-                  print("delete", i['id'])
-                  delete_versions.write(str(i['id']))
-                  delete_versions.write("\n")
-                  print(i['metadata']['container']['tags'])
-          EOF
-
-          for id in $(cat delete_versions)
-          do
-            curl -L \
-              -X DELETE \
-              -H "Accept: application/vnd.github+json" \
-              -H "Authorization: Bearer ${{ secrets.PACKAGE_DELETER }}" \
-              -H "X-GitHub-Api-Version: 2022-11-28" \
-              https://api.github.com/user/packages/container/build-container-installer/versions/${id}
-          done
.github/workflows/stale.yml (4 changed lines, vendored)
@@ -7,7 +7,7 @@ name: Mark stale issues and pull requests

 on:
   schedule:
-    - cron: '0 21 * * *'
+    - cron: '39 21 * * *'

 jobs:
   stale:
@@ -18,7 +18,7 @@ jobs:
       pull-requests: write

     steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Issue is stale and will be closed in 14 days if there is no further activity'
.github/workflows/test_deployment.yml (124 changed lines, vendored)
@@ -1,5 +1,3 @@
-name: Test Deployment
-
 on:
   workflow_call:
     inputs:
@@ -9,6 +7,17 @@ on:
       parent_job_name:
        required: true
        type: string
+      iso_name-38:
+        required: true
+        type: string
+      iso_name-39:
+        required: true
+        type: string
+      iso_name-40:
+        required: true
+        type: string

 jobs:
   load_vars:
@@ -28,10 +37,14 @@ jobs:
     continue-on-error: false
     strategy:
       fail-fast: false
-      matrix: ${{ fromJson(needs.load_vars.outputs.BUILD_MATRIX) }}
+      matrix:
+        version:
+          - 38
+          - 39
+          - 40
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v4
        with:
          submodules: recursive
@@ -52,8 +65,7 @@ jobs:
        id: jobs
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
-          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
-          per_page: 100
+          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }})"

       - name: Set status
        if: inputs.pr && always()
@@ -61,45 +73,105 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: pending
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}

       - name: Install test tools
        run: |
          sudo apt-get update
-          sudo apt-get install -y unzip make
-          sudo make test/vm/install-deps PACKAGE_MANAGER=apt-get
+          sudo apt-get install -y make
+          sudo make install-test-deps PACKAGE_MANAGER=apt-get

       - name: Download generated ISO
        uses: actions/download-artifact@v4
        with:
-          name: ${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}
+          name: ${{ inputs[format('iso_name-{0}', matrix.version)] }}

-      - name: Run VM Tests
+      - name: Add Kickstart and Grub options to ISO
+        run: |
+          mv ${{ inputs[format('iso_name-{0}', matrix.version)] }} deploy.iso
+          sudo mkdir /mnt/iso || true
+          sudo mount -o loop deploy.iso /mnt/iso
+          cp /mnt/iso/boot/grub2/grub.cfg grub.cfg
+          sudo umount /mnt/iso
+          sed -i 's/quiet/console=ttyS0,115200n8 inst.ks=cdrom:\/ks.cfg/' grub.cfg
+          sed -i 's/set default="1"/set default="0"/' grub.cfg
+          sed -i 's/set timeout=60/set timeout=1/' grub.cfg
+          cat << EOF > ks.cfg
+          lang en_US.UTF-8
+          keyboard us
+          timezone Americas/New_York
+          zerombr
+          clearpart --all --initlabel
+          autopart
+          poweroff
+          user --name=core --groups=wheel --password=foobar
+          %include /usr/share/anaconda/interactive-defaults.ks
+          EOF
+          xorriso -dialog on << EOF
+          -indev deploy.iso
+          -outdev test.iso
+          -boot_image any replay
+          -map ks.cfg ks.cfg
+          -chmod 0444 ks.cfg
+          -map grub.cfg boot/grub2/grub.cfg
+          -end
+          EOF
+
+      - name: Create VM disk
+        run: |
+          qemu-img create -f qcow2 disk.qcow2 50G
+
+      - name: Install the test VM
+        run: |
+          timeout 1h qemu-system-x86_64 -name "Anaconda" -boot d -m 4096 -cpu qemu64 -display none -cdrom test.iso -smp 2 -hda disk.qcow2 -serial telnet:localhost:4321,server=on,wait=off & QEMU_PID=$!
+          echo "PID: $QEMU_PID"
+          timeout 1m bash -c "while ! (echo > /dev/tcp/127.0.0.1/4321); do sleep 0.1; done"
+          (nc localhost 4321 | tee vm.stdout) &
+          wait $QEMU_PID
+
+      - name: Start the test VM
        env:
          VM_USER: core
          VM_PASS: foobar
          VM_IP: "127.0.0.1"
          VM_PORT: "5555"
        run: |
-          make test/vm \
+          mkfifo vm.stdin
+          qemu-system-x86_64 -name "Anaconda" \
+            -m 4096 -cpu qemu64 -display none -smp 2 \
+            -chardev socket,path=/tmp/qga.sock,server=on,wait=off,id=qga0 \
+            -device e1000,netdev=net0 \
+            -netdev user,id=net0,hostfwd=tcp::${VM_PORT}-:22 \
+            -device virtio-serial \
+            -device virtserialport,chardev=qga0,name=org.qemu.guest_agent.0 \
+            -boot c -hda disk.qcow2 -serial telnet:localhost:4321,server=on,wait=off & export QEMU_PID=$!
+          echo "PID: $QEMU_PID"
+
+          timeout 1m bash -c "while ! (echo > /dev/tcp/127.0.0.1/4321); do sleep 0.1; done"
+          (tail -f vm.stdin | nc localhost 4321 | tee vm.stdout) &
+
+          timeout 30m bash -c "while ! (echo > /dev/tcp/${VM_IP}/${VM_PORT}); do sleep 1; done"
+
+          if ! (echo > /dev/tcp/${VM_IP}/${VM_PORT})
+          then
+            echo "SSH must be installed and enabled inside the container"
+          fi
+
+          echo "VM ready for tests at IP ${VM_IP}:${VM_PORT}"
+
+          make test-vm VM_IP=${VM_IP} VM_PORT=${VM_PORT} VM_USER=${VM_USER} VM_PASS=${VM_PASS} \
            ARCH=${{ needs.load_vars.outputs.ARCH}} \
-            ENROLLMENT_PASSWORD=${{ needs.load_vars.outputs.ENROLLMENT_PASSWORD }} \
-            ${{ matrix.flatpaks == 'flatpak_refs' && format('FLATPAK_REMOTE_REFS="{0}"', needs.load_vars.outputs.FLATPAK_REMOTE_REFS) || '' }} \
-            ${{ matrix.flatpaks == 'flatpak_refs_dir' && format('FLATPAK_REMOTE_REFS_DIR="{0}"', needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR) || '' }} \
-            IMAGE_NAME=${{ matrix.image_name }} \
-            IMAGE_REPO=${{ matrix.image_repo }} \
+            IMAGE_NAME=${{ needs.load_vars.outputs.IMAGE_NAME}} \
+            IMAGE_REPO=${{ needs.load_vars.outputs.IMAGE_REPO}} \
            IMAGE_TAG=${{ matrix.version }} \
-            ISO_NAME=${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}.iso \
-            ${{ matrix.repos != '' && format('REPOS="{0}"', matrix.repos) || '' }} \
-            SECURE_BOOT_KEY_URL=${{ needs.load_vars.outputs.SECURE_BOOT_KEY_URL }} \
-            VARIANT=${{ needs.load_vars.outputs.VARIANT }} \
            VERSION=${{ matrix.version }} \
-            VM_IP=${VM_IP} \
-            VM_PASS=${VM_PASS} \
-            VM_PORT=${VM_PORT} \
-            VM_USER=${VM_USER}
+            VARIANT=${{ needs.load_vars.outputs.VARIANT }} \
+            FLATPAK_REMOTE_REFS_DIR=${{ needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR }} \
+            SECURE_BOOT_KEY_URL=${{ needs.load_vars.outputs.SECURE_BOOT_KEY_URL }} \
+            ENROLLMENT_PASSWORD=${{ needs.load_vars.outputs.ENROLLMENT_PASSWORD }}
+          kill $QEMU_PID

       - name: Set status
        if: inputs.pr && always()
@@ -107,6 +179,6 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: ${{ job.status }}
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
.github/workflows/test_iso.yml (54 changed lines, vendored)
@@ -1,5 +1,3 @@
-name: Test ISO
-
 on:
   workflow_call:
     inputs:
@@ -9,6 +7,16 @@ on:
       parent_job_name:
        required: true
        type: string
+      iso_name-38:
+        required: true
+        type: string
+      iso_name-39:
+        required: true
+        type: string
+      iso_name-40:
+        required: true
+        type: string

 jobs:
   load_vars:
@@ -28,10 +36,14 @@ jobs:
     continue-on-error: false
     strategy:
       fail-fast: false
-      matrix: ${{ fromJson(needs.load_vars.outputs.BUILD_MATRIX) }}
+      matrix:
+        version:
+          - 38
+          - 39
+          - 40
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v4
        with:
          submodules: recursive
@@ -52,8 +64,7 @@ jobs:
        id: jobs
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
-          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
-          per_page: 100
+          job_name: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }})"

       - name: Set status
        if: inputs.pr && always()
@@ -61,7 +72,7 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: pending
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
@@ -69,28 +80,31 @@ jobs:
        run: |
          sudo apt-get update
          sudo apt-get install -y make
-          sudo make test/iso/install-deps PACKAGE_MANAGER=apt-get
+          sudo make install-test-deps PACKAGE_MANAGER=apt-get

       - name: Download generated ISO
        uses: actions/download-artifact@v4
        with:
-          name: ${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}
+          name: ${{ inputs[format('iso_name-{0}', matrix.version)] }}

+      - name: Verify ISO
+        run: |
+          checkisomd5 ${{ inputs[format('iso_name-{0}', matrix.version)] }}
+          sha256sum -c ${{ inputs[format('iso_name-{0}', matrix.version)] }}-CHECKSUM
+
       - name: Run ISO checks
        run: |
-          make test/iso \
+          mv ${{ inputs[format('iso_name-{0}', matrix.version)] }} deploy.iso
+          make test-iso \
            ARCH=${{ needs.load_vars.outputs.ARCH}} \
-            ENROLLMENT_PASSWORD=${{ needs.load_vars.outputs.ENROLLMENT_PASSWORD }} \
-            ${{ matrix.flatpaks == 'flatpak_refs' && format('FLATPAK_REMOTE_REFS="{0}"', needs.load_vars.outputs.FLATPAK_REMOTE_REFS) || '' }} \
-            ${{ matrix.flatpaks == 'flatpak_refs_dir' && format('FLATPAK_REMOTE_REFS_DIR="{0}"', needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR) || '' }} \
-            IMAGE_NAME=${{ matrix.image_name }} \
-            IMAGE_REPO=${{ matrix.image_repo }} \
+            IMAGE_NAME=${{ needs.load_vars.outputs.IMAGE_NAME}} \
+            IMAGE_REPO=${{ needs.load_vars.outputs.IMAGE_REPO}} \
            IMAGE_TAG=${{ matrix.version }} \
-            ISO_NAME=${{ matrix.image_name }}-${{ matrix.version }}${{ matrix.flatpaks == 'false' && '' || format('-{0}', matrix.flatpaks) }}.iso \
-            ${{ matrix.repos != '' && format('REPOS="{0}"', matrix.repos) || '' }} \
-            SECURE_BOOT_KEY_URL=${{ needs.load_vars.outputs.SECURE_BOOT_KEY_URL }} \
+            VERSION=${{ matrix.version }} \
            VARIANT=${{ needs.load_vars.outputs.VARIANT }} \
-            VERSION=${{ matrix.version }}
+            FLATPAK_REMOTE_REFS_DIR=${{ needs.load_vars.outputs.FLATPAK_REMOTE_REFS_DIR }} \
+            SECURE_BOOT_KEY_URL=${{ needs.load_vars.outputs.SECURE_BOOT_KEY_URL }} \
+            ENROLLMENT_PASSWORD=${{ needs.load_vars.outputs.ENROLLMENT_PASSWORD }}

       - name: Set status
        if: inputs.pr && always()
@@ -98,6 +112,6 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: ${{ job.status }}
-          context: "${{ inputs.parent_job_name }} / ${{ env.JOB_NAME }} (${{ matrix.version }}, ${{ matrix.flatpaks }}, ${{ matrix.image_repo }})"
+          context: ${{ env.JOB_NAME }} (${{ matrix.version }})
          sha: ${{ env.sha }}
          targetUrl: ${{ steps.jobs.outputs.html_url }}
.github/workflows/tests.yml (31 changed lines, vendored)
@@ -1,4 +1,4 @@
-name: All Tests
+name: Tests

 on:
   push:
@@ -27,34 +27,39 @@ jobs:
   build_container:
     name: Build Container
     uses: ./.github/workflows/build_container.yml
-    secrets: inherit
     with:
       pr: ${{ inputs.pr }}
       parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Build Container

-  build_isos:
-    name: Build ISOs
+  build_iso:
+    name: Build ISO
     needs:
       - build_container
     uses: ./.github/workflows/build_iso.yml
     with:
       pr: ${{ inputs.pr }}
-      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Build ISOs
+      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Build ISO

-  test_isos:
-    name: Test ISOs
+  test_iso:
+    name: Test ISO
     needs:
-      - build_isos
+      - build_iso
     uses: ./.github/workflows/test_iso.yml
     with:
       pr: ${{ inputs.pr }}
-      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Test ISOs
+      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Test ISO
+      iso_name-38: ${{ needs.build_iso.outputs.iso_name-38 }}
+      iso_name-39: ${{ needs.build_iso.outputs.iso_name-39 }}
+      iso_name-40: ${{ needs.build_iso.outputs.iso_name-40 }}

-  test_deployments:
-    name: Test Deployments
+  test_deployment:
+    name: Test Deployment
     needs:
-      - build_isos
+      - build_iso
     uses: ./.github/workflows/test_deployment.yml
     with:
       pr: ${{ inputs.pr }}
-      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Test Deployments
+      parent_job_name: ${{ inputs.parent_job_name && format('{0} / ', inputs.parent_job_name) }}Test Deployment
+      iso_name-38: ${{ needs.build_iso.outputs.iso_name-38 }}
+      iso_name-39: ${{ needs.build_iso.outputs.iso_name-39 }}
+      iso_name-40: ${{ needs.build_iso.outputs.iso_name-40 }}
.github/workflows/update_wiki.yml (50 lines, vendored) - present on main, removed in v1.1.0
@@ -1,50 +0,0 @@
-name: Update Wiki
-on:
-  push:
-    branches:
-      - main
-    paths:
-      - 'docs/**'
-      - '.github/workflows/update_wiki.yml'
-
-jobs:
-  update-wiki:
-    name: Update Wiki
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-    steps:
-      - name: Install packages
-        run: |
-          sudo apt install -y make rsync
-      # Checkout Main Repo
-      - uses: actions/checkout@v5
-
-      # Checkout Wiki Repo
-      - uses: actions/checkout@v5
-        with:
-          repository: ${{github.repository}}.wiki
-          persist-credentials: true
-          path: wiki
-          ref: master
-
-      # Generate final files
-      - name: Generate Files
-        run: |
-          cd ${GITHUB_WORKSPACE}/docs
-          make
-
-      # Copy Docs
-      - name: Copy files
-        run: |
-          rsync -av --exclude='.git/*' ${GITHUB_WORKSPACE}/docs/ ${GITHUB_WORKSPACE}/wiki/
-
-      # Push Changes
-      - name: Push changes
-        run: |
-          cd ${GITHUB_WORKSPACE}/wiki/
-          git config --local user.email "action@github.com"
-          git config --local user.name "GitHub Action"
-          git add .
-          git commit -m "Add changes"
-          git push
(workflow file; the file name header is not captured in this mirror - the content is the repository self-test workflow)
@@ -1,4 +1,4 @@
-name: Test Repo
+name: Repo Tests

 on:
   push:
@@ -20,11 +20,8 @@ jobs:
       contents: read
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v4

       - name: Run test
        run: |
-          sudo apt-get update
-          sudo apt-get install -y make
-          sudo make test/repo/install-deps
-          make test/repo
+          /bin/bash tests/repo/vars.sh
.gitignore (6 changed lines, vendored)
@@ -1,13 +1,11 @@
 /debugdata
 /build
-/flatpaks/script.sh
-/flatpaks/repo
-/flatpaks/list.txt
 /lorax_templates/post_*
 /pkglists
-/repos/*.repo
+/repos
 /results
 /xorriso/input.txt
+/xorriso/*.sh
 /original-pkgsizes.txt
 /final-pkgsizes.txt
 /lorax.conf
.gitmodules (2 changed lines, vendored)
@@ -1,7 +1,7 @@
 [submodule "external/fedora-lorax-templates"]
 	path = external/fedora-lorax-templates
 	url = https://pagure.io/fedora-lorax-templates.git
-	branch = f40
+	branch = f39
 [submodule "external/lorax"]
 	path = external/lorax
 	url = https://github.com/weldr/lorax.git
.mdlrc (1 line) - present on main, removed in v1.1.0
@@ -1 +0,0 @@
-style "#{File.dirname(__FILE__)}/.codacy/markdownlint.rb"
.vscode/settings.json (5 lines, vendored) - present on main, removed in v1.1.0
@@ -1,5 +0,0 @@
-{
-  "files.associations": {
-    "Makefile.inputs": "makefile"
-  }
-}
(container build file; the file name header is not captured in this mirror - likely the Containerfile)
@@ -1,6 +1,6 @@
-FROM fedora:42
+FROM fedora:40

-ARG VERSION=42
+ARG VERSION=40

 ENV ARCH="x86_64"
 ENV IMAGE_NAME="base"
@@ -19,7 +19,7 @@ VOLUME /build-container-installer/build
 VOLUME /build-container-installer/repos
 VOLUME /cache

-RUN dnf install -y make && make install-deps && dnf clean all
+RUN dnf install -y make && make install-deps

 ENTRYPOINT ["/bin/bash", "/build-container-installer/entrypoint.sh"]
307
Makefile
307
Makefile
|
|
@ -1,51 +1,115 @@
|
||||||
include Makefile.inputs
|
# Configuration vars
|
||||||
|
## Formatting = UPPERCASE
|
||||||
|
# General
|
||||||
|
ADDITIONAL_TEMPLATES =
|
||||||
|
ARCH = x86_64
|
||||||
|
EXTRA_BOOT_PARAMS =
|
||||||
|
IMAGE_NAME = base
|
||||||
|
IMAGE_REPO = quay.io/fedora-ostree-desktops
|
||||||
|
IMAGE_TAG = $(VERSION)
|
||||||
|
REPOS = $(subst :,\:,$(shell ls /etc/yum.repos.d/*.repo))
|
||||||
|
ROOTFS_SIZE = 4
|
||||||
|
VARIANT = Server
|
||||||
|
VERSION = 39
|
||||||
|
WEB_UI = false
|
||||||
|
# Flatpak
|
||||||
|
FLATPAK_REMOTE_NAME = flathub
|
||||||
|
FLATPAK_REMOTE_URL = https://flathub.org/repo/flathub.flatpakrepo
|
||||||
|
FLATPAK_REMOTE_REFS =
|
||||||
|
FLATPAK_REMOTE_REFS_DIR =
|
||||||
|
FLATPAK_DIR =
|
||||||
|
# Secure boot
|
||||||
|
ENROLLMENT_PASSWORD =
|
||||||
|
SECURE_BOOT_KEY_URL =
|
||||||
|
|
||||||
###################
|
###################
|
||||||
# Hidden vars
|
# Hidden vars
|
||||||
|
|
||||||
export SHELL := /bin/sh
|
|
||||||
# Cache
|
# Cache
|
||||||
export DNF_CACHE :=
|
DNF_CACHE =
|
||||||
export PACKAGE_MANAGER := dnf
|
PACKAGE_MANAGER = dnf
|
||||||
|
|
||||||
# Functions
|
# Functions
|
||||||
## Formatting = lowercase
|
## Formatting = lowercase
|
||||||
# Get a list of templates for the feature
|
# Get a list of templates for the feature
|
||||||
# $1 = feature
|
# $1 = feature
|
||||||
define get_templates
|
get_templates = $(shell ls lorax_templates/$(1)_*.tmpl) \
|
||||||
$(wildcard lorax_templates/$(1)_*.tmpl)
|
$(foreach file,$(notdir $(shell ls lorax_templates/scripts/post/$(1)_*)),lorax_templates/post_$(file).tmpl)
|
||||||
$(foreach file,$(notdir $(wildcard lorax_templates/scripts/post/$(1)_*)),lorax_templates/post_$(file).tmpl)
|
|
||||||
endef
|
|
||||||
|
|
||||||
define install_pkg
|
# Get a list of tests for the feature
|
||||||
$(PACKAGE_MANAGER) install -y $(if $(findstring dnf,$(PACKAGE_MANAGER)),--disablerepo='*-testing')
|
# $1 = test type
|
||||||
endef
|
# $2 = feature
|
||||||
export install_pkg
|
run_tests = tests="$(shell ls tests/$(1)/$(2)_*)"; \
|
||||||
|
if [ -n "$$tests" ]; \
|
||||||
|
then \
|
||||||
|
chmod +x $$tests; \
|
||||||
|
for test in $$tests; \
|
||||||
|
do \
|
||||||
|
$(foreach var,$(_VARS),$(var)=$($(var))) ./$${test}; \
|
||||||
|
RC=$$?; if [ $$RC != 0 ]; then exit $$RC; fi; \
|
||||||
|
done; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Converts a post script to a template
|
||||||
|
# $1 = script to convert
|
||||||
|
# $2 = file on ISO to write
|
||||||
|
# $3 = whether to copy the '<%' lines to the template
|
||||||
|
convert_post_to_tmpl = header=0; \
|
||||||
|
skip=0; \
|
||||||
|
while read -r line; \
|
||||||
|
do \
|
||||||
|
if [[ $$line =~ ^\<\% ]]; \
|
||||||
|
then \
|
||||||
|
if [[ '$(3)' == 'true' ]]; \
|
||||||
|
then \
|
||||||
|
echo $$line >> lorax_templates/post_$(1).tmpl; \
|
||||||
|
fi; \
|
||||||
|
echo >> lorax_templates/post_$(1).tmpl; \
|
||||||
|
else \
|
||||||
|
if [[ $$header == 0 ]]; \
|
||||||
|
then \
|
||||||
|
if [[ $$line =~ ^\#\#\ (.*)$$ ]]; \
|
||||||
|
then \
|
||||||
|
echo "append $(2) \"%post --erroronfail $${BASH_REMATCH[1]}\"" >> lorax_templates/post_$(1).tmpl; \
|
||||||
|
skip=1; \
|
||||||
|
else \
|
||||||
|
echo "append $(2) \"%post --erroronfail\"" >> lorax_templates/post_$(1).tmpl; \
|
||||||
|
fi; \
|
||||||
|
header=1; \
|
||||||
|
fi; \
|
||||||
|
if [[ $$skip == 0 ]]; \
|
||||||
|
then \
|
||||||
|
echo "append $(2) \"$${line//\"/\\\"}\"" >> lorax_templates/post_$(1).tmpl; \
|
||||||
|
fi; \
|
||||||
|
skip=0; \
|
||||||
|
fi; \
|
||||||
|
done < lorax_templates/scripts/post/$(1); \
|
||||||
|
echo "append $(2) \"%end\"" >> lorax_templates/post_$(1).tmpl
|
||||||
|
|
||||||
# Generated/internal vars
|
# Generated/internal vars
|
||||||
## Formatting = _UPPERCASE
|
## Formatting = _UPPERCASE
|
||||||
_IMAGE_REPO_ESCAPED := $(subst /,\/,$(IMAGE_REPO))
|
_BASE_DIR = $(shell pwd)
|
||||||
_IMAGE_REPO_DOUBLE_ESCAPED := $(subst \,\\\,$(_IMAGE_REPO_ESCAPED))
|
_IMAGE_REPO_ESCAPED = $(subst /,\/,$(IMAGE_REPO))
|
||||||
_LORAX_ARGS :=
|
_IMAGE_REPO_DOUBLE_ESCAPED = $(subst \,\\\,$(_IMAGE_REPO_ESCAPED))
|
||||||
export _LORAX_TEMPLATES := $(call get_templates,install) lorax_templates/install_include_post.tmpl
|
_LORAX_ARGS =
|
||||||
_REPO_FILES := $(subst /etc/yum.repos.d,repos,$(REPOS))
|
_LORAX_TEMPLATES = $(call get_templates,install)
|
||||||
_TEMP_DIR := $(shell mktemp -d)
|
_REPO_FILES = $(subst /etc/yum.repos.d,repos,$(REPOS))
|
||||||
_TEMPLATE_VARS := ARCH IMAGE_NAME IMAGE_REPO _IMAGE_REPO_DOUBLE_ESCAPED _IMAGE_REPO_ESCAPED IMAGE_SIGNED IMAGE_TAG REPOS _RHEL VARIANT VERSION WEB_UI
|
_TEMP_DIR = $(shell mktemp -d)
|
||||||
_VOLID := $(firstword $(subst -, ,$(IMAGE_NAME)))-$(ARCH)-$(IMAGE_TAG)
|
_TEMPLATE_VARS = ARCH _BASE_DIR IMAGE_NAME IMAGE_REPO _IMAGE_REPO_DOUBLE_ESCAPED _IMAGE_REPO_ESCAPED IMAGE_TAG REPOS _RHEL VARIANT VERSION WEB_UI
|
||||||
|
_VOLID = $(firstword $(subst -, ,$(IMAGE_NAME)))-$(ARCH)-$(IMAGE_TAG)
|
||||||
|
|
||||||
ifeq ($(findstring redhat.repo,$(REPOS)),redhat.repo)
|
ifeq ($(findstring redhat.repo,$(REPOS)),redhat.repo)
|
||||||
export _RHEL := true
|
_RHEL = true
|
||||||
export _LORAX_TEMPLATES += $(call get_templates,rhel)
|
|
||||||
else
|
else
|
||||||
undefine _RHEL
|
_RHEL = false
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(_RHEL),true)
|
ifeq ($(_RHEL),true)
|
||||||
_LORAX_ARGS += --nomacboot --noupgrade
|
_LORAX_ARGS += --nomacboot --noupgrade
|
||||||
else ifeq ($(VARIANT),Server)
|
else ifeq ($(VARIANT),Server)
|
||||||
_LORAX_ARGS += --macboot --noupgrade --squashfs-only
|
_LORAX_ARGS += --macboot --noupgrade
|
||||||
else
|
else
|
||||||
_LORAX_ARGS += --nomacboot --squashfs-only
|
_LORAX_ARGS += --nomacboot
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(WEB_UI),true)
|
ifeq ($(WEB_UI),true)
|
||||||
|
|
@ -53,96 +117,173 @@ _LORAX_ARGS += -i anaconda-webui
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifneq ($(DNF_CACHE),)
|
ifneq ($(DNF_CACHE),)
|
||||||
_LORAX_ARGS += --cachedir $(DNF_CACHE)
|
_LORAX_ARGS += --cachedir $(DNF_CACHE)
|
||||||
export _LORAX_TEMPLATES += $(call get_templates,cache)
|
_LORAX_TEMPLATES += $(call get_templates,cache)
|
||||||
_TEMPLATE_VARS += DNF_CACHE
|
_TEMPLATE_VARS += DNF_CACHE
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifneq ($(FLATPAK_DIR),)
|
ifneq ($(FLATPAK_DIR),)
|
||||||
_FLATPAK_REPO_GPG := $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^GPGKey=' | cut -d= -f2)
|
_FLATPAK_REPO_GPG = $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^GPGKey=' | cut -d= -f2)
|
||||||
export _FLATPAK_REPO_URL := $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^URL=' | cut -d= -f2)
|
_FLATPAK_REPO_URL = $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^URL=' | cut -d= -f2)
|
||||||
_LORAX_ARGS += -i flatpak-libs
|
_LORAX_ARGS += -i flatpak-libs
|
||||||
export _LORAX_TEMPLATES += $(call get_templates,flatpak)
|
_LORAX_TEMPLATES += $(call get_templates,flatpak)
|
||||||
_TEMPLATE_VARS += FLATPAK_DIR FLATPAK_REMOTE_NAME FLATPAK_REMOTE_REFS FLATPAK_REMOTE_URL _FLATPAK_REPO_GPG _FLATPAK_REPO_URL
|
_TEMPLATE_VARS += FLATPAK_DIR FLATPAK_REMOTE_NAME FLATPAK_REMOTE_REFS FLATPAK_REMOTE_URL _FLATPAK_REPO_GPG _FLATPAK_REPO_URL
|
||||||
else
|
else
|
||||||
ifneq ($(FLATPAK_REMOTE_REFS_DIR),)
|
ifneq ($(FLATPAK_REMOTE_REFS_DIR),)
|
||||||
COLLECTED_REFS := $(foreach file,$(filter-out README.md Makefile,$(wildcard $(FLATPAK_REMOTE_REFS_DIR)/*)),$(shell cat $(file)))
|
COLLECTED_REFS = $(foreach file,$(shell ls $(FLATPAK_REMOTE_REFS_DIR)/*),$(shell cat $(file)))
|
||||||
export FLATPAK_REMOTE_REFS += $(sort $(COLLECTED_REFS))
|
FLATPAK_REMOTE_REFS += $(sort $(COLLECTED_REFS))
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifneq ($(FLATPAK_REMOTE_REFS),)
|
ifneq ($(FLATPAK_REMOTE_REFS),)
|
||||||
_FLATPAK_REPO_GPG := $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^GPGKey=' | cut -d= -f2)
|
_FLATPAK_REPO_GPG = $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^GPGKey=' | cut -d= -f2)
|
||||||
export _FLATPAK_REPO_URL := $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^URL=' | cut -d= -f2)
|
_FLATPAK_REPO_URL = $(shell curl -L $(FLATPAK_REMOTE_URL) | grep -i '^URL=' | cut -d= -f2)
|
||||||
_LORAX_ARGS += -i flatpak-libs
|
_LORAX_ARGS += -i flatpak-libs
|
||||||
export _LORAX_TEMPLATES += $(call get_templates,flatpak) \
|
_LORAX_TEMPLATES += $(call get_templates,flatpak) \
|
||||||
external/fedora-lorax-templates/ostree-based-installer/lorax-embed-flatpaks.tmpl
|
external/fedora-lorax-templates/ostree-based-installer/lorax-embed-flatpaks.tmpl
|
||||||
_TEMPLATE_VARS += FLATPAK_DIR FLATPAK_REMOTE_NAME FLATPAK_REMOTE_REFS FLATPAK_REMOTE_URL _FLATPAK_REPO_GPG _FLATPAK_REPO_URL
|
_TEMPLATE_VARS += FLATPAK_DIR FLATPAK_REMOTE_NAME FLATPAK_REMOTE_REFS FLATPAK_REMOTE_URL _FLATPAK_REPO_GPG _FLATPAK_REPO_URL
|
||||||
endif
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
|
||||||
ifneq ($(SECURE_BOOT_KEY_URL),)
|
ifneq ($(SECURE_BOOT_KEY_URL),)
|
||||||
export _LORAX_TEMPLATES += $(call get_templates,secureboot)
|
_LORAX_TEMPLATES += $(call get_templates,secureboot)
|
||||||
_TEMPLATE_VARS += ENROLLMENT_PASSWORD
|
_TEMPLATE_VARS += ENROLLMENT_PASSWORD
|
||||||
endif
|
endif
|
||||||
|
|
||||||
_SUBDIRS := container external flatpak_refs lorax_templates repos xorriso test
|
# Step 7: Build end ISO
|
||||||
|
|
||||||
# Create checksum
|
|
||||||
## Default action
|
## Default action
|
||||||
$(ISO_NAME)-CHECKSUM: $(ISO_NAME)
|
build/deploy.iso: boot.iso container/$(IMAGE_NAME)-$(IMAGE_TAG) xorriso/input.txt
|
||||||
cd $(dir $(ISO_NAME)) && sha256sum $(notdir $(ISO_NAME)) > $(notdir $(ISO_NAME))-CHECKSUM
|
mkdir $(_BASE_DIR)/build || true
|
||||||
|
xorriso -dialog on < $(_BASE_DIR)/xorriso/input.txt
|
||||||
|
implantisomd5 build/deploy.iso
|
||||||
|
|
||||||
# Build end ISO
|
external/lorax/branch-$(VERSION):
|
||||||
$(ISO_NAME): results/images/boot.iso container/$(IMAGE_NAME)-$(IMAGE_TAG) xorriso/input.txt
|
git config advice.detachedHead false
|
||||||
$(if $(wildcard $(dir $(ISO_NAME))),,mkdir -p $(dir $(ISO_NAME)); chmod ugo=rwX $(dir $(ISO_NAME)))
|
cd external/lorax && git reset --hard HEAD && git checkout tags/$(shell cd external/lorax && git tag -l lorax-$(VERSION).* --sort=creatordate | tail -n 1)
|
||||||
xorriso -dialog on < xorriso/input.txt
|
touch external/lorax/branch-$(VERSION)
|
||||||
implantisomd5 $(ISO_NAME)
|
|
||||||
chmod ugo=r $(ISO_NAME)
|
|
||||||
$(if $(GITHUB_OUTPUT), echo "iso_name=$(ISO_NAME)" >> $(GITUHB_OUTPUT))
|
|
||||||
|
|
||||||
# Download the secure boot key
|
# Step 1: Generate Lorax Templates
|
||||||
sb_pubkey.der:
|
lorax_templates/post_%.tmpl: lorax_templates/scripts/post/%
|
||||||
curl --fail -L -o sb_pubkey.der $(SECURE_BOOT_KEY_URL)
|
$(call convert_post_to_tmpl,$*,usr/share/anaconda/post-scripts/$*.ks,true)
|
||||||
|
|
||||||
# Build boot.iso using Lorax
|
repos: $(_REPO_FILES)
|
||||||
results/images/boot.iso: external/lorax/branch-$(VERSION) $(filter lorax_templates/%,$(_LORAX_TEMPLATES)) $(filter repos/%,$(_REPO_FILES)) $(if $(SECURE_BOOT_KEY_URL),sb_pubkey.der)
|
|
||||||
$(if $(wildcard results), rm -Rf results)
|
# Step 2: Replace vars in repo files
|
||||||
$(if $(wildcard /etc/rpm/macros.image-language-conf),mv /etc/rpm/macros.image-language-conf $(_TEMP_DIR)/macros.image-language-conf)
|
repos/%.repo: /etc/yum.repos.d/%.repo
|
||||||
|
mkdir repos || true
|
||||||
|
cp /etc/yum.repos.d/$*.repo $(_BASE_DIR)/repos/$*.repo
|
||||||
|
sed -i "s/\$$releasever/${VERSION}/g" $(_BASE_DIR)/repos/$*.repo
|
||||||
|
sed -i "s/\$$basearch/${ARCH}/g" $(_BASE_DIR)/repos/$*.repo
|
||||||
|
|
||||||
|
# Step 3: Build boot.iso using Lorax
|
||||||
|
boot.iso: external/lorax/branch-$(VERSION) $(filter lorax_templates/%,$(_LORAX_TEMPLATES)) $(_REPO_FILES)
|
||||||
|
rm -Rf $(_BASE_DIR)/results || true
|
||||||
|
mv /etc/rpm/macros.image-language-conf $(_TEMP_DIR)/macros.image-language-conf || true
|
||||||
|
|
||||||
|
# Download the secure boot key
|
||||||
|
if [ -n "$(SECURE_BOOT_KEY_URL)" ]; \
|
||||||
|
then \
|
||||||
|
curl --fail -L -o $(_BASE_DIR)/sb_pubkey.der $(SECURE_BOOT_KEY_URL); \
|
||||||
|
fi
|
||||||
|
|
||||||
lorax -p $(IMAGE_NAME) -v $(VERSION) -r $(VERSION) -t $(VARIANT) \
|
lorax -p $(IMAGE_NAME) -v $(VERSION) -r $(VERSION) -t $(VARIANT) \
|
||||||
--isfinal --buildarch=$(ARCH) --volid=$(_VOLID) --sharedir $(PWD)/external/lorax/share/templates.d/99-generic \
|
--isfinal --squashfs-only --buildarch=$(ARCH) --volid=$(_VOLID) --sharedir $(_BASE_DIR)/external/lorax/share/templates.d/99-generic \
|
||||||
$(_LORAX_ARGS) \
|
$(_LORAX_ARGS) \
|
||||||
$(foreach file,$(_REPO_FILES),--repo $(patsubst repos/%,$(PWD)/repos/%,$(file))) \
|
$(foreach file,$(_REPO_FILES),--repo $(_BASE_DIR)/$(file)) \
|
||||||
$(foreach file,$(_LORAX_TEMPLATES),--add-template $(PWD)/$(file)) \
|
$(foreach file,$(_LORAX_TEMPLATES),--add-template $(_BASE_DIR)/$(file)) \
|
||||||
$(foreach file,$(ADDITIONAL_TEMPLATES),--add-template $(file)) \
|
$(foreach file,$(ADDITIONAL_TEMPLATES),--add-template $(file)) \
|
||||||
$(foreach file,$(_FLATPAK_TEMPLATES),--add-template $(file)) \
|
$(foreach file,$(_FLATPAK_TEMPLATES),--add-template $(file)) \
|
||||||
$(foreach file,$(_EXTERNAL_TEMPLATES),--add-template $(PWD)/external/$(file)) \
|
$(foreach file,$(_EXTERNAL_TEMPLATES),--add-template $(_BASE_DIR)/external/$(file)) \
|
||||||
--rootfs-size $(ROOTFS_SIZE) \
|
--rootfs-size $(ROOTFS_SIZE) \
|
||||||
$(foreach var,$(_TEMPLATE_VARS),--add-template-var "$(shell echo $(var) | tr '[:upper:]' '[:lower:]')=$($(var))") \
|
$(foreach var,$(_TEMPLATE_VARS),--add-template-var "$(shell echo $(var) | tr '[:upper:]' '[:lower:]')=$($(var))") \
|
||||||
results/
|
$(_BASE_DIR)/results/
|
||||||
$(if $(wildcard $(_TEMP_DIR)/macros.image-language-conf),mv -f $(_TEMP_DIR)/macros.image-language-conf /etc/rpm/macros.image-language-conf)
|
mv $(_BASE_DIR)/results/images/boot.iso $(_BASE_DIR)/
|
||||||
|
mv -f $(_TEMP_DIR)/macros.image-language-conf /etc/rpm/macros.image-language-conf || true
|
||||||
|
|
||||||
|
# Step 4: Download container image
|
||||||
|
container/$(IMAGE_NAME)-$(IMAGE_TAG):
|
||||||
|
mkdir $(_BASE_DIR)/container || true
|
||||||
|
skopeo copy docker://$(IMAGE_REPO)/$(IMAGE_NAME):$(IMAGE_TAG) oci:$(_BASE_DIR)/container/$(IMAGE_NAME)-$(IMAGE_TAG)
|
||||||
|
|
||||||
|
# Step 5: Generate xorriso script
|
||||||
|
xorriso/%.sh: xorriso/%.sh.in
|
||||||
|
sed -i 's/quiet/quiet $(EXTRA_BOOT_PARAMS)/g' results/boot/grub2/grub.cfg
|
||||||
|
sed -i 's/quiet/quiet $(EXTRA_BOOT_PARAMS)/g' results/EFI/BOOT/grub.cfg
|
||||||
|
$(eval _VARS = FLATPAK_DIR IMAGE_NAME IMAGE_TAG ARCH VERSION)
|
||||||
|
$(foreach var,$(_VARS),$(var)=$($(var))) envsubst '$(foreach var,$(_VARS),$$$(var))' < $(_BASE_DIR)/xorriso/$*.sh.in > $(_BASE_DIR)/xorriso/$*.sh
|
||||||
|
|
||||||
|
# Step 6: Generate xorriso input
|
||||||
|
xorriso/input.txt: xorriso/gen_input.sh
|
||||||
|
bash $(_BASE_DIR)/xorriso/gen_input.sh | tee $(_BASE_DIR)/xorriso/input.txt
|
||||||
|
|
||||||
|
|
||||||
FILES_TO_CLEAN := $(wildcard build debugdata pkglists results original-pkgsizes.txt final-pkgsizes.txt lorax.conf *.iso *log)
|
|
||||||
.PHONY: clean
|
|
||||||
clean:
|
clean:
|
||||||
rm -Rf $(FILES_TO_CLEAN)
|
rm -Rf $(_BASE_DIR)/build || true
|
||||||
$(foreach DIR,$(_SUBDIRS),$(MAKE) -w -C $(DIR) clean;)
|
rm -Rf $(_BASE_DIR)/container || true
|
||||||
|
rm -Rf $(_BASE_DIR)/debugdata || true
|
||||||
|
rm -Rf $(_BASE_DIR)/pkglists || true
|
||||||
|
rm -Rf $(_BASE_DIR)/repos || true
|
||||||
|
rm -Rf $(_BASE_DIR)/results || true
|
||||||
|
rm -f $(_BASE_DIR)/lorax_templates/*.tmpl || true
|
||||||
|
rm -f $(_BASE_DIR)/xorriso/input.txt || true
|
||||||
|
rm -f $(_BASE_DIR)/xorriso/*.sh || true
|
||||||
|
rm -f $(_BASE_DIR)/{original,final}-pkgsizes.txt || true
|
||||||
|
rm -f $(_BASE_DIR)/lorax.conf || true
|
||||||
|
rm -f $(_BASE_DIR)/*.iso || true
|
||||||
|
rm -f $(_BASE_DIR)/*.log || true
|
||||||
|
|
||||||
.PHONY: install-deps
|
|
||||||
install-deps:
|
install-deps:
|
||||||
$(install_pkg) lorax xorriso coreutils gettext syslinux-nonlinux
|
if [ "$(PACKAGE_MANAGER)" =~ apt.* ]; then $(PACKAGE_MANAGER) update; fi
|
||||||
$(foreach DIR,$(filter-out test,$(_SUBDIRS)),$(MAKE) -w -C $(DIR) install-deps;)
|
$(PACKAGE_MANAGER) install -y lorax xorriso skopeo flatpak dbus-daemon ostree coreutils gettext git
|
||||||
|
|
||||||
|
install-test-deps:
|
||||||
|
if [ "$(PACKAGE_MANAGER)" =~ apt.* ]; then $(PACKAGE_MANAGER) update; fi
|
||||||
|
$(PACKAGE_MANAGER) install -y qemu qemu-utils xorriso unzip qemu-system-x86 netcat socat jq isomd5sum ansible make coreutils squashfs-tools
|
||||||
|
|
||||||
|
|
||||||
.PHONY: $(_SUBDIRS) $(wildcard test/*) $(wildcard test/*/*)
|
test: test-iso test-vm
|
||||||
test $(addsuffix /*,$(_SUBDIRS)):
|
|
||||||
$(eval DIR=$(firstword $(subst /, ,$@)))
|
|
||||||
$(if $(filter-out $(DIR),$@), $(eval TARGET=$(subst $(DIR)/,,$@)),$(eval TARGET=))
|
|
||||||
$(MAKE) -w -C $(DIR) $(TARGET)
|
|
||||||
|
|
||||||
.DEFAULT:
|
test-repo:
|
||||||
$(eval DIR=$(firstword $(subst /, ,$@)))
|
bash tests/repo/vars.sh
|
||||||
$(if $(filter-out $(DIR),$@), $(eval TARGET=$(subst $(DIR)/,,$@)),$(eval TARGET=))
|
|
||||||
$(MAKE) -w -C $(DIR) $(TARGET)
|
test-iso:
|
||||||
|
$(eval _VARS = VERSION FLATPAK_REMOTE_NAME _FLATPAK_REPO_URL)
|
||||||
|
|
||||||
|
sudo modprobe loop
|
||||||
|
sudo mkdir /mnt/iso /mnt/install
|
||||||
|
sudo mount -o loop deploy.iso /mnt/iso
|
||||||
|
sudo mount -t squashfs -o loop /mnt/iso/images/install.img /mnt/install
|
||||||
|
|
||||||
|
# install tests
|
||||||
|
$(call run_tests,iso,install)
|
||||||
|
|
||||||
|
# flatpak tests
|
||||||
|
if [ -n "$(FLATPAK_REMOTE_REFS)" ]; then $(call run_tests,iso,flatpak); fi
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
sudo umount /mnt/install
|
||||||
|
sudo umount /mnt/iso
|
||||||
|
|
||||||
|
ansible_inventory:
|
||||||
|
echo "ungrouped:" > ansible_inventory
|
||||||
|
echo " hosts:" >> ansible_inventory
|
||||||
|
echo " vm:" >> ansible_inventory
|
||||||
|
echo " ansible_host: ${VM_IP}" >> ansible_inventory
|
||||||
|
echo " ansible_port: ${VM_PORT}" >> ansible_inventory
|
||||||
|
echo " ansible_user: ${VM_USER}" >> ansible_inventory
|
||||||
|
echo " ansible_password: ${VM_PASS}" >> ansible_inventory
|
||||||
|
echo " ansible_become_pass: ${VM_PASS}" >> ansible_inventory
|
||||||
|
echo " ansible_ssh_common_args: '-o StrictHostKeyChecking=no'" >> ansible_inventory
|
||||||
|
|
||||||
|
test-vm: ansible_inventory
|
||||||
|
$(eval _VARS = IMAGE_REPO IMAGE_NAME IMAGE_TAG)
|
||||||
|
|
||||||
|
ansible -i ansible_inventory -m ansible.builtin.wait_for_connection vm
|
||||||
|
|
||||||
|
# install tests
|
||||||
|
$(call run_tests,vm,install)
|
||||||
|
|
||||||
|
# flatpak tests
|
||||||
|
if [ -n "$(FLATPAK_REMOTE_REFS)" ]; then $(call run_tests,vm,flatpak); fi
|
||||||
|
|
||||||
|
.PHONY: clean install-deps install-test-deps test test-iso test-vm
|
||||||
|
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
# Configuration vars
|
|
||||||
## Formatting = UPPERCASE
|
|
||||||
# General
|
|
||||||
export ADDITIONAL_TEMPLATES :=
|
|
||||||
export ARCH := x86_64
|
|
||||||
export EXTRA_BOOT_PARAMS :=
|
|
||||||
export IMAGE_NAME := base
|
|
||||||
export IMAGE_REPO := quay.io/fedora-ostree-desktops
|
|
||||||
export IMAGE_SRC :=
|
|
||||||
export IMAGE_TAG = $(VERSION)
|
|
||||||
export IMAGE_SIGNED := true
|
|
||||||
REPOS := $(subst :,\:,$(wildcard /etc/yum.repos.d/*.repo))
|
|
||||||
export ROOTFS_SIZE := 4
|
|
||||||
export VARIANT := Server
|
|
||||||
export VERSION := 39
|
|
||||||
export WEB_UI := false
|
|
||||||
# Flatpak
|
|
||||||
export FLATPAK_REMOTE_NAME := flathub
|
|
||||||
export FLATPAK_REMOTE_URL := https://flathub.org/repo/flathub.flatpakrepo
|
|
||||||
export FLATPAK_REMOTE_REFS :=
|
|
||||||
export FLATPAK_REMOTE_REFS_DIR :=
|
|
||||||
export FLATPAK_DIR :=
|
|
||||||
# Secure boot
|
|
||||||
export ENROLLMENT_PASSWORD :=
|
|
||||||
export SECURE_BOOT_KEY_URL :=
|
|
||||||
export ISO_NAME := build/deploy.iso
|
|
||||||
128
README.md
|
|
@ -1,14 +1,10 @@
|
||||||
[](https://github.com/jasonn3/build-container-installer/actions/workflows/tests.yml)
|

|
||||||
[](https://app.codacy.com/gh/JasonN3/build-container-installer/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade)
|
|
||||||
|
|
||||||
# Build Container Installer Action
|
# Build Container Installer Action
|
||||||
|
This action is used to generate an ISO for installing an OSTree stored in a container image. This utilizes the anaconda command `ostreecontainer`
|
||||||
This action is used to generate an ISO for installing an OSTree stored in a container image. This utilizes the anaconda command `ostreecontainer`, which also supports bootc.
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
This action is designed to be called from a GitHub workflow using the following format
|
This action is designed to be called from a GitHub workflow using the following format
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- name: Build ISO
|
- name: Build ISO
|
||||||
uses: jasonn3/build-container-installer@main
|
uses: jasonn3/build-container-installer@main
|
||||||
|
|
@ -29,22 +25,120 @@ This action is designed to be called from a GitHub workflow using the following
|
||||||
with:
|
with:
|
||||||
name: ${{ steps.build.outputs.iso_name }}
|
name: ${{ steps.build.outputs.iso_name }}
|
||||||
path: |
|
path: |
|
||||||
${{ steps.build.outputs.iso_path }}/${{ steps.build.outputs.iso_name }}
|
${{ steps.build.outputs.iso_path }}
|
||||||
${{ steps.build.outputs.iso_path }}/${{ steps.build.outputs.iso_name }}-CHECKSUM
|
${{ steps.build.outputs.iso_path }}-CHECKSUM
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 0
|
retention-days: 0
|
||||||
compression-level: 0
|
compression-level: 0
|
||||||
```
|
```
|
||||||
|
|
||||||
**See the [Wiki](https://github.com/JasonN3/build-container-installer/wiki) for development and usage information.**
|
See [Customizing](#customizing) for information about customizing the ISO that gets created using `with`
|
||||||
|
|
||||||
|
## Customizing
|
||||||
|
The following variables can be used to customize the created ISO.
|
||||||
|
|
||||||
## Star History
|
### Inputs
|
||||||
|
| Variable | Description | Default Value | Action | Container | Makefile |
|
||||||
|
| ----------------------- | ---------------------------------------------------------------------------- | -------------------------------------------- | ------------------ | ------------------ | ------------------ |
|
||||||
|
| additional_templates | Space delimited list of additional Lorax templates to include | \[empty\] | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| arch | Architecture for image to build | x86_64 | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| enrollment_password | Used for supporting secure boot (requires SECURE_BOOT_KEY_URL to be defined) | container-installer | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| extra_boot_params | Extra params used by grub to boot the anaconda installer | \[empty\] | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| flatpak_remote_name | Name of the Flatpak repo on the destination OS | flathub | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| flatpak_remote_refs | Space separated list of flatpak refs to install | \[empty\] | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| flatpak_remote_refs_dir | Directory that contains files that list the flatpak refs to install | \[empty\] | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| flatpak_remote_url | URL of the flatpakrepo file | https://flathub.org/repo/flathub.flatpakrepo | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| image_name | Name of the source container image | base | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| image_repo | Repository containing the source container image | quay.io/fedora-ostree-desktops | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| image_tag | Tag of the source container image | *VERSION* | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| iso_name | Name of the ISO you wish to output when completed | build/deploy.iso | :white_check_mark: | :x: | :x: |
|
||||||
|
| repos | List of repo files for Lorax to use | /etc/yum.repos.d/*.repo | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| rootfs_size | The size (in GiB) for the squashfs runtime volume | 2 | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| secure_boot_key_url | Secure boot key that is installed from URL location\*\* | \[empty\] | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| variant | Source container variant\* | Server | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| version | Fedora version of installer to build | 39 | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
| web_ui | Enable Anaconda WebUI (experimental) | false | :white_check_mark: | :white_check_mark: | :white_check_mark: |
|
||||||
|
|
||||||
<a href="https://star-history.com/#jasonn3/build-container-installer&Date">
|
\*Available options for VARIANT can be found by running `dnf provides system-release`.
|
||||||
<picture>
|
Variant will be the third item in the package name. Example: `fedora-release-kinoite-39-34.noarch` will be kinoite
|
||||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=jasonn3/build-container-installer&type=Date&theme=dark" />
|
|
||||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=jasonn3/build-container-installer&type=Date" />
|
\*\* If you need to reference a local file, you can use `file://*path*`
|
||||||
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=jasonn3/build-container-installer&type=Date" />
|
|
||||||
</picture>
|
### Outputs
|
||||||
</a>
|
| Variable | Description | Usage |
|
||||||
|
| -------- | ----------------------------------------| ------------------------------------------------ |
|
||||||
|
| iso_name | The name of the resulting .iso | ${{ steps.YOUR_ID_FOR_ACTION.outputs.iso_name }} |
|
||||||
|
| iso_path | The name and path of the resulting .iso | ${{ steps.YOUR_ID_FOR_ACTION.outputs.iso_path }} |
|
||||||
|
|
||||||
|
For outputs, see example above.
|
||||||
|
|
||||||
|
## Development
|
||||||
|
### Makefile
|
||||||
|
The Makefile contains all of the commands that are run in the action. There are separate targets for each file generated; however, `make` can be used to generate the final image, and `make clean` can be used to clean up the workspace. The resulting ISO will be stored in the `build` directory.
|
||||||
|
|
||||||
|
`make install-deps` can be used to install the necessary packages.
|
||||||
|
|
||||||
|
See [Customizing](#customizing) for information about customizing the ISO that gets created. All variables should be specified in CAPITALIZED form.
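
For example, a local build might look like this (a minimal sketch; the variable values are only illustrative, and any of the inputs listed under [Customizing](#customizing) can be swapped in):

```bash
# install build dependencies, then build the ISO with explicit variables
make install-deps
make VERSION=39 IMAGE_REPO=quay.io/fedora-ostree-desktops IMAGE_NAME=base IMAGE_TAG=39 VARIANT=Server

# remove generated files when finished
make clean
```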
|
||||||
|
|
||||||
|
### Container
|
||||||
|
A container with `make install-deps` already run is provided at `ghcr.io/jasonn3/build-container-installer:latest`
|
||||||
|
|
||||||
|
To use the container file, run `docker run --privileged --volume .:/build-container-installer/build ghcr.io/jasonn3/build-container-installer:latest`.
|
||||||
|
|
||||||
|
This will create an ISO with the baked-in defaults of the container image. The resulting file will be called `deploy.iso`.
|
||||||
|
|
||||||
|
See [Customizing](#customizing) for information about customizing the ISO that gets created. The variables can be defined as environment variables or command arguments. All variables should be specified in CAPITALIZED form.
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
Building an ISO to install Fedora 38
|
||||||
|
```bash
|
||||||
|
docker run --rm --privileged --volume .:/github/workspace/build ghcr.io/jasonn3/build-container-installer:latest VERSION=38 IMAGE_NAME=base IMAGE_TAG=38 VARIANT=Server
|
||||||
|
```
|
||||||
|
|
||||||
|
Building an ISO to install Fedora 39
|
||||||
|
```bash
|
||||||
|
docker run --rm --privileged --volume .:/github/workspace/build ghcr.io/jasonn3/build-container-installer:latest VERSION=39 IMAGE_NAME=base IMAGE_TAG=39 VARIANT=Server
|
||||||
|
```
|
||||||
|
|
||||||
|
### VSCode Dev Container
|
||||||
|
There is a dev container configuration provided for development. By default it will use the existing container image available at `ghcr.io/jasonn3/build-container-installer:latest`, however, you can have it build a new image by editing `.devcontainer/devcontainer.json` and replacing `image` with `build`. `Ctrl+/` can be used to comment and uncomment blocks of code within VSCode.
|
||||||
|
|
||||||
|
The code from VSCode will be available at `/workspaces/build-container-installer` once the container has started.
|
||||||
|
|
||||||
|
Privileged mode is required so lorax can access loop devices.
|
||||||
|
|
||||||
|
Use existing container image:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"name": "Existing Dockerfile",
|
||||||
|
// "build": {
|
||||||
|
// "context": "..",
|
||||||
|
// "dockerfile": "../Containerfile",
|
||||||
|
// "args": {
|
||||||
|
// "version": "39"
|
||||||
|
// }
|
||||||
|
// },
|
||||||
|
"image": "ghcr.io/jasonn3/build-container-installer:latest",
|
||||||
|
"overrideCommand": true,
|
||||||
|
"shutdownAction": "stopContainer",
|
||||||
|
"privileged": true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Build a new container image:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"name": "Existing Dockerfile",
|
||||||
|
"build": {
|
||||||
|
"context": "..",
|
||||||
|
"dockerfile": "../Containerfile",
|
||||||
|
"args": {
|
||||||
|
"version": "39"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
//"image": "ghcr.io/jasonn3/build-container-installer:latest",
|
||||||
|
"overrideCommand": true,
|
||||||
|
"shutdownAction": "stopContainer",
|
||||||
|
"privileged": true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
|
||||||
132
action.yml
|
|
@ -29,18 +29,18 @@ inputs:
|
||||||
required: false
|
required: false
|
||||||
default: "true"
|
default: "true"
|
||||||
enrollment_password:
|
enrollment_password:
|
||||||
description: Used for supporting secure boot (requires SECURE_BOOT_KEY_URL to be defined)
|
description: Used for supporting secure boot (requires secure_boot_key_url to be defined)
|
||||||
required: false
|
required: false
|
||||||
default: "container-installer"
|
default: "container-installer"
|
||||||
extra_boot_params:
|
extra_boot_params:
|
||||||
description: Extra params used by grub to boot the anaconda installer
|
description: Extra params used by grub to boot the anaconda installer
|
||||||
required: false
|
required: false
|
||||||
flatpak_remote_name:
|
flatpak_remote_name:
|
||||||
description: Name of the Flatpak repo on the destination OS
|
description: Name of the Flatpak remote repo
|
||||||
required: false
|
required: false
|
||||||
default: "flathub"
|
default: "flathub"
|
||||||
flatpak_remote_refs:
|
flatpak_remote_refs:
|
||||||
description: Space separated list of flatpak refs to install
|
description: Space delimited list of refs to the flatpak packages to install
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
flatpak_remote_refs_dir:
|
flatpak_remote_refs_dir:
|
||||||
|
|
@ -48,7 +48,7 @@ inputs:
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
flatpak_remote_url:
|
flatpak_remote_url:
|
||||||
description: URL of the flatpakrepo file
|
description: The URL of the Flatpak remote flatpakrepo file
|
||||||
required: false
|
required: false
|
||||||
default: https://flathub.org/repo/flathub.flatpakrepo
|
default: https://flathub.org/repo/flathub.flatpakrepo
|
||||||
image_name:
|
image_name:
|
||||||
|
|
@ -59,29 +59,18 @@ inputs:
|
||||||
description: Repository containing the source container image
|
description: Repository containing the source container image
|
||||||
required: true
|
required: true
|
||||||
default: quay.io/fedora-ostree-desktops
|
default: quay.io/fedora-ostree-desktops
|
||||||
image_signed:
|
|
||||||
description: Whether the container image is signed. The policy to test the signing must be configured inside the container image
|
|
||||||
required: false
|
|
||||||
default: "true"
|
|
||||||
image_src:
|
|
||||||
description: Overrides the source of the container image. Must be formatted for the skopeo copy command
|
|
||||||
required: false
|
|
||||||
image_tag:
|
image_tag:
|
||||||
description: Tag of the source container image
|
description: Tag of the source container image. Defaults to the installer version
|
||||||
required: false
|
required: false
|
||||||
iso_name:
|
iso_name:
|
||||||
description: Name of the ISO you wish to output when completed
|
description: "Name of the resulting ISO. Relative paths are relative to github.workspace"
|
||||||
required: false
|
required: false
|
||||||
default: build/deploy.iso
|
default: build/deploy.iso
|
||||||
make_target:
|
|
||||||
description: Overrides the default make target
|
|
||||||
required: false
|
|
||||||
repos:
|
repos:
|
||||||
description: List of repo files for Lorax to use
|
description: List of repo files for Lorax to use
|
||||||
required: false
|
required: false
|
||||||
rootfs_size:
|
rootfs_size:
|
||||||
description: The size (in GiB) for the squashfs runtime volume
|
description: The size (in GiB) for the squashfs runtime volume
|
||||||
default: "2"
|
|
||||||
secure_boot_key_url:
|
secure_boot_key_url:
|
||||||
description: Secure boot key that is installed from URL location
|
description: Secure boot key that is installed from URL location
|
||||||
required: false
|
required: false
|
||||||
|
|
@ -89,7 +78,7 @@ inputs:
|
||||||
description: Overrides the skopeo cache key
|
description: Overrides the skopeo cache key
|
||||||
required: false
|
required: false
|
||||||
variant:
|
variant:
|
||||||
description: "Source container variant. Available options can be found by running `dnf provides system-release`. Variant will be the third item in the package name. Example: `fedora-release-kinoite-39-34.noarch` will be kinoite"
|
description: "Source container variant. Available options can be found by running `dnf provides system-release`. Variant will be the third item in the package name. Example: `fedora-release-kinoite-39-34.noarch` will be kinonite"
|
||||||
required: true
|
required: true
|
||||||
default: Server
|
default: Server
|
||||||
version:
|
version:
|
||||||
|
|
@ -103,14 +92,11 @@ inputs:
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
iso_name:
|
iso_name:
|
||||||
value: ${{ steps.docker.outputs.iso_name }}
|
value: ${{ steps.rename_iso.outputs.iso_name }}
|
||||||
description: The name of the resulting .iso
|
description: The name of the resulting .iso
|
||||||
iso_path:
|
iso_path:
|
||||||
value: ${{ steps.docker.outputs.iso_path }}
|
value: ${{ steps.rename_iso.outputs.iso_path }}
|
||||||
description: The path of the resulting .iso
|
description: The name and path of the resulting .iso
|
||||||
flatpak_refs:
|
|
||||||
value: ${{ steps.docker.outputs.flatpak_refs }}
|
|
||||||
description: The list of Flatpak refs
|
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: composite
|
using: composite
|
||||||
|
|
@ -149,30 +135,49 @@ runs:
|
||||||
mkdir /cache/skopeo || true
|
mkdir /cache/skopeo || true
|
||||||
|
|
||||||
- name: Determine Flatpak dependencies
|
- name: Determine Flatpak dependencies
|
||||||
if: inputs.enable_flatpak_dependencies == 'true' && (inputs.flatpak_remote_refs != '' || inputs.flatpak_remote_refs_dir != '')
|
if: inputs.enable_flatpak_dependencies == 'true'
|
||||||
id: flatpak_dependencies
|
id: flatpak_dependencies
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cd ${{ github.action_path }}
|
image="${{ inputs.image_repo }}/${{ inputs.image_name }}:${{ inputs.image_tag }}"
|
||||||
make flatpaks/repo \
|
# Make temp space
|
||||||
FLATPAK_REMOTE_NAME="${{ inputs.flatpak_remote_name }}" \
|
FLATPAK_DIR=$(basename $(mktemp -d -p ${{ github.workspace }} flatpak.XXX))
|
||||||
${{ inputs.flatpak_remote_refs && format('FLATPAK_REMOTE_REFS="{0}"', inputs.flatpak_remote_refs) || ''}} \
|
# Get list of refs from directory
|
||||||
${{ inputs.flatpak_remote_refs_dir && format('FLATPAK_REMOTE_REFS_DIR="{0}/{1}"', github.workspace, inputs.flatpak_remote_refs_dir) || ''}} \
|
sudo mkdir /github || true
|
||||||
FLATPAK_REMOTE_URL="${{ inputs.flatpak_remote_url }}" \
|
sudo ln -s ${{ github.workspace }} /github/workspace
|
||||||
IMAGE_NAME="${{ inputs.image_name }}" \
|
DIR_REFS=$(cat ${{ inputs.flatpak_remote_refs_dir }}/* | tr '\n' ' ' )
|
||||||
IMAGE_REPO="${{ inputs.image_repo }}" \
|
# Generate install script
|
||||||
IMAGE_SRC="${{ inputs.image_src }}" \
|
cat << EOF > ${{ github.workspace }}/${FLATPAK_DIR}/script.sh
|
||||||
IMAGE_TAG="${{ inputs.image_tag || inputs.version }}"
|
cat /flatpak_dir/script.sh
|
||||||
|
mkdir -p /flatpak/flatpak /flatpak/triggers
|
||||||
|
mkdir /var/tmp || true
|
||||||
|
chmod -R 1777 /var/tmp
|
||||||
|
flatpak config --system --set languages "*"
|
||||||
|
flatpak remote-add --system ${{ inputs.flatpak_remote_name }} ${{ inputs.flatpak_remote_url }}
|
||||||
|
flatpak install --system -y ${{ inputs.flatpak_remote_refs }} ${DIR_REFS}
|
||||||
|
ostree init --repo=/flatpak_dir/repo --mode=archive-z2
|
||||||
|
for i in \$(ostree refs --repo=\${FLATPAK_SYSTEM_DIR}/repo | grep '^deploy/' | sed 's/^deploy\///g')
|
||||||
|
do
|
||||||
|
echo "Copying \${i}..."
|
||||||
|
ostree --repo=/flatpak_dir/repo pull-local \${FLATPAK_SYSTEM_DIR}/repo \$(ostree --repo=\${FLATPAK_SYSTEM_DIR}/repo rev-parse ${{ inputs.flatpak_remote_name }}/\${i})
|
||||||
|
mkdir -p \$(dirname /flatpak_dir/repo/refs/heads/\${i})
|
||||||
|
ostree --repo=\${FLATPAK_SYSTEM_DIR}/repo rev-parse ${{ inputs.flatpak_remote_name }}/\${i} > /flatpak_dir/repo/refs/heads/\${i}
|
||||||
|
done
|
||||||
|
flatpak build-update-repo /flatpak_dir/repo
|
||||||
|
ostree refs --repo=/flatpak_dir/repo
|
||||||
|
EOF
|
||||||
|
docker run --rm --privileged --entrypoint bash -e FLATPAK_SYSTEM_DIR=/flatpak/flatpak -e FLATPAK_TRIGGERSDIR=/flatpak/triggers --volume ${{ github.workspace }}/${FLATPAK_DIR}:/flatpak_dir ${image} /flatpak_dir/script.sh
|
||||||
|
echo "flatpak_dir=${FLATPAK_DIR}" >> $GITHUB_OUTPUT
|
||||||
|
docker rmi ${image}
|
||||||
|
|
||||||
- name: Run docker image
|
- name: Run docker image
|
||||||
id: docker
|
|
||||||
env:
|
env:
|
||||||
ACTION_REPO: ${{ github.action_repository }}
|
ACTION_REPO: ${{ github.action_repository }}
|
||||||
ACTION_REF: ${{ github.action_ref }}
|
ACTION_REF: ${{ github.action_ref }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
image=$(echo "ghcr.io/${ACTION_REPO}" | tr [:upper:] [:lower:])
|
image=$(echo "ghcr.io/${ACTION_REPO}" | tr [:upper:] [:lower:])
|
||||||
# Check if running inside of the action repo
|
# Check if running inside of the action repo
|
||||||
if [[ -z "${ACTION_REPO}" ]]
|
if [[ -z "${ACTION_REPO}" ]]
|
||||||
then
|
then
|
||||||
image=$(echo "ghcr.io/${{ github.repository }}" | tr [:upper:] [:lower:])
|
image=$(echo "ghcr.io/${{ github.repository }}" | tr [:upper:] [:lower:])
|
||||||
|
|
@ -202,43 +207,30 @@ runs:
|
||||||
then
|
then
|
||||||
echo "ERROR: flatpak_remote_refs is mutually exclusive to flatpak_remote_refs_dir"
|
echo "ERROR: flatpak_remote_refs is mutually exclusive to flatpak_remote_refs_dir"
|
||||||
exit 1
|
exit 1
|
||||||
|
else
|
||||||
|
if [[ -n "${{ inputs.flatpak_remote_refs }}" ]]
|
||||||
|
then
|
||||||
|
vars="${vars} FLATPAK_REMOTE_REFS=\"${{ inputs.flatpak_remote_refs }}\""
|
||||||
|
else
|
||||||
|
vars="${vars} FLATPAK_REMOTE_REFS_DIR=\"${{ inputs.flatpak_remote_refs_dir }}\""
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
docker run --privileged --volume ${{ github.workspace }}:/github/workspace/ ${cache} ${image}:${tag} \
|
docker run --privileged --volume ${{ github.workspace }}:/github/workspace/ ${cache} ${image}:${tag} \
|
||||||
${{ inputs.make_target }} \
|
|
||||||
ADDITIONAL_TEMPLATES="${{ inputs.additional_templates }}" \
|
ADDITIONAL_TEMPLATES="${{ inputs.additional_templates }}" \
|
||||||
ARCH="${{ inputs.arch }}" \
|
ARCH="${{ inputs.arch }}" \
|
||||||
DNF_CACHE="/cache/dnf" \
|
DNF_CACHE="/cache/dnf" \
|
||||||
ENROLLMENT_PASSWORD="${{ inputs.enrollment_password }}" \
|
ENROLLMENT_PASSWORD="${{ inputs.enrollment_password }}" \
|
||||||
EXTRA_BOOT_PARAMS="${{ inputs.extra_boot_params }}" \
|
|
||||||
FLATPAK_REMOTE_NAME="${{ inputs.flatpak_remote_name }}" \
|
FLATPAK_REMOTE_NAME="${{ inputs.flatpak_remote_name }}" \
|
||||||
${{ inputs.flatpak_remote_refs && format('FLATPAK_REMOTE_REFS="{0}"', inputs.flatpak_remote_refs) || ''}} \
|
${vars} \
|
||||||
${{ inputs.flatpak_remote_refs_dir && format('FLATPAK_REMOTE_REFS_DIR="/github/workspace/{0}"', inputs.flatpak_remote_refs_dir) || ''}} \
|
|
||||||
FLATPAK_REMOTE_URL="${{ inputs.flatpak_remote_url }}" \
|
FLATPAK_REMOTE_URL="${{ inputs.flatpak_remote_url }}" \
|
||||||
FLATPAK_DIR="${{ steps.flatpak_dependencies.outputs.flatpak_dir && format('/github/workspace/{0}', steps.flatpak_dependencies.outputs.flatpak_dir) || '' }}" \
|
FLATPAK_DIR="${{ steps.flatpak_dependencies.outputs.flatpak_dir && format('/github/workspace/{0}', steps.flatpak_dependencies.outputs.flatpak_dir) || '' }}" \
|
||||||
IMAGE_NAME="${{ inputs.image_name }}" \
|
IMAGE_NAME="${{ inputs.image_name }}" \
|
||||||
IMAGE_REPO="${{ inputs.image_repo }}" \
|
IMAGE_REPO="${{ inputs.image_repo }}" \
|
||||||
IMAGE_SIGNED="${{ inputs.image_signed }}" \
|
|
||||||
IMAGE_SRC="${{ inputs.image_src }}" \
|
|
||||||
IMAGE_TAG="${{ inputs.image_tag || inputs.version }}" \
|
IMAGE_TAG="${{ inputs.image_tag || inputs.version }}" \
|
||||||
ISO_NAME=/github/workspace/${{ inputs.iso_name }} \
|
|
||||||
${{ inputs.repos && format('REPOS="{0}"', inputs.repos) || '' }} \
|
|
||||||
SECURE_BOOT_KEY_URL="${{ inputs.secure_boot_key_url }}" \
|
SECURE_BOOT_KEY_URL="${{ inputs.secure_boot_key_url }}" \
|
||||||
VARIANT="${{ inputs.variant }}" \
|
VARIANT="${{ inputs.variant }}" \
|
||||||
VERSION="${{ inputs.version }}" \
|
VERSION="${{ inputs.version }}" \
|
||||||
WEB_UI="${{ inputs.web_ui }}"
|
WEB_UI="${{ inputs.web_ui }}"
|
||||||
echo "iso_path=$(dirname ${{ inputs.iso_name }})" >> $GITHUB_OUTPUT
|
|
||||||
echo "iso_name=$(basename ${{ inputs.iso_name }})" >> $GITHUB_OUTPUT
|
|
||||||
if [[ "${{ steps.flatpak_dependencies.outputs.flatpak_dir }}" != '' ]]
|
|
||||||
then
|
|
||||||
echo "flatpak_refs=$(cat ${{ github.workspace }}/${{ steps.flatpak_dependencies.outputs.flatpak_dir }}/list.txt | tr '\n' ' ')" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
if [[ "${{ inputs.flatpak_remote_refs_dir }}" != '' ]]
|
|
||||||
then
|
|
||||||
echo "flatpak_refs=$(cat ${{ github.workspace }}/${{ inputs.flatpak_remote_refs_dir }}/* | tr '\n' ' ')" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "flatpak_refs=${{ inputs.flatpak_remote_refs}}" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Save dnf cache
|
- name: Save dnf cache
|
||||||
env:
|
env:
|
||||||
|
|
@ -257,3 +249,27 @@ runs:
|
||||||
with:
|
with:
|
||||||
path: /cache/skopeo
|
path: /cache/skopeo
|
||||||
key: ${{ inputs.skopeo_cache_key || env.skopeo_cache_key }}
|
key: ${{ inputs.skopeo_cache_key || env.skopeo_cache_key }}
|
||||||
|
|
||||||
|
|
||||||
|
- name: Rename ISO file
|
||||||
|
id: rename_iso
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [[ ! ( "${{ inputs.iso_name }}" =~ \.iso$ ) ]]
|
||||||
|
then
|
||||||
|
iso_name="${{ inputs.iso_name }}.iso"
|
||||||
|
else
|
||||||
|
iso_name="${{ inputs.iso_name }}"
|
||||||
|
fi
|
||||||
|
if [[ "${{ inputs.iso_name }}" =~ ^/ ]]
|
||||||
|
then
|
||||||
|
full_path="${iso_name}"
|
||||||
|
else
|
||||||
|
full_path="${{ github.workspace }}/${iso_name}"
|
||||||
|
fi
|
||||||
|
mv ${{ github.workspace }}/build/deploy.iso ${full_path} || true
|
||||||
|
cd $(dirname ${full_path})
|
||||||
|
iso_fn=$(basename ${iso_name})
|
||||||
|
sha256sum ${iso_fn} > ${iso_fn}-CHECKSUM
|
||||||
|
echo "iso_path=${full_path}" >> $GITHUB_OUTPUT
|
||||||
|
echo "iso_name=${iso_fn}" >> $GITHUB_OUTPUT
|
||||||
|
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
$(IMAGE_NAME)-$(IMAGE_TAG):
|
|
||||||
skopeo copy $(if $(IMAGE_SRC),$(IMAGE_SRC),docker://$(IMAGE_REPO)/$(IMAGE_NAME):$(IMAGE_TAG)) oci:$(IMAGE_NAME)-$(IMAGE_TAG)
|
|
||||||
|
|
||||||
install-deps:
|
|
||||||
$(install_pkg) skopeo
|
|
||||||
|
|
||||||
FILES=$(filter-out Makefile,$(wildcard *))
|
|
||||||
clean:
|
|
||||||
ifneq ($(FILES),)
|
|
||||||
rm -Rf $(FILES)
|
|
||||||
endif
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
-----BEGIN PUBLIC KEY-----
|
|
||||||
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEY4ljyIhI2w9DOptB4WT20S+K5ts3
|
|
||||||
GJTEKRkXmIYEXGfyKpJMdlGCWeg2kOam5dNhWKXXl46d3eBBo9S53TPpyQ==
|
|
||||||
-----END PUBLIC KEY-----
|
|
||||||
|
|
@ -1,12 +0,0 @@
|
||||||
SHELL = /bin/bash
|
|
||||||
|
|
||||||
docs:
|
|
||||||
find -name '*.md' -print0 | xargs -0 -I {} bash -c ' \
|
|
||||||
source_file=$${1:2}; \
|
|
||||||
final_file=$${source_file//\//_}; \
|
|
||||||
mv "$${source_file}" "$${final_file}"; \
|
|
||||||
no_ext_source=$${source_file:0:-3}; \
|
|
||||||
no_ext_final=$${final_file:0:-3}; \
|
|
||||||
sed -i "s;(\(../\)*$${source_file});($${no_ext_final});g" $$(find -name '\''*.md'\''); \
|
|
||||||
' _ {}
|
|
||||||
find . -type d -empty -delete
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
These are the files for the [wiki](https://github.com/JasonN3/build-container-installer/wiki)
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
- [Home](home.md)
|
|
||||||
- [Usage](usage.md)
|
|
||||||
- Development
|
|
||||||
- [Using the Makefile](development/makefile.md)
|
|
||||||
- [Using the Container](development/container.md)
|
|
||||||
- [Using the VSCode Dev Container](development/vscode.md)
|
|
||||||
|
|
||||||
- Examples
|
|
||||||
- [Adding Flatpaks](examples/adding-flatpaks.md)
|
|
||||||
|
|
||||||
- [Known Errors](known_errors.md)
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
# Using the Container
|
|
||||||
|
|
||||||
A container with `make install-deps` already run is provided at `ghcr.io/jasonn3/build-container-installer:latest`
|
|
||||||
|
|
||||||
To use the container file, run `podman run --privileged --volume .:/build-container-installer/build ghcr.io/jasonn3/build-container-installer:latest`.
|
|
||||||
|
|
||||||
This will create an ISO with the baked-in defaults of the container image. The resulting file will be called `deploy.iso`.
|
|
||||||
|
|
||||||
See [Inputs](usage#inputs) for information about customizing the ISO that gets created. The variables can be defined as environment variables or command arguments. All variables should be specified in CAPITALIZED form.
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
Building an ISO to install Fedora 39
|
|
||||||
```bash
|
|
||||||
podman run --rm --privileged --volume .:/build-container-installer/build ghcr.io/jasonn3/build-container-installer:latest VERSION=39 IMAGE_NAME=base IMAGE_TAG=39 VARIANT=Server
|
|
||||||
```
|
|
||||||
|
|
||||||
Building an ISO to install Fedora 40
|
|
||||||
```bash
|
|
||||||
podman run --rm --privileged --volume .:/build-container-installer/build ghcr.io/jasonn3/build-container-installer:latest VERSION=40 IMAGE_NAME=base IMAGE_TAG=40 VARIANT=Server
|
|
||||||
```
|
|
||||||
|
|
||||||
The same commands are also available using `docker` by replacing `podman` with `docker` in each command.
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
# Using the Makefile
|
|
||||||
|
|
||||||
The Makefile contains all the commands that are run in the action. There are separate targets for each file generated; however, `make` can be used to generate the final image, and `make clean` can be used to clean up the workspace. The resulting ISO will be stored in the `build` directory.
|
|
||||||
|
|
||||||
`make install-deps` can be used to install the necessary packages.
|
|
||||||
|
|
||||||
See [Inputs](usage#inputs) for information about the available parameters. All variables should be specified in CAPITALIZED form.
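
A quick post-build sanity check (a sketch; it assumes the default `ISO_NAME` of `build/deploy.iso`, since the Makefile's checksum target writes a matching `-CHECKSUM` file next to the ISO):

```bash
# verify the ISO against the checksum file generated by the build
cd build && sha256sum -c deploy.iso-CHECKSUM
```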
|
|
||||||
|
|
@ -1,46 +0,0 @@
|
||||||
# Using the VSCode Dev Container
|
|
||||||
|
|
||||||
There is a dev container configuration provided for development. By default, it will use the existing container image available at `ghcr.io/jasonn3/build-container-installer:latest`. However, you can have it build a new image by editing `.devcontainer/devcontainer.json` and replacing `image` with `build`. `Ctrl+/` can be used to comment and uncomment blocks of code within VSCode.
|
|
||||||
|
|
||||||
The code from VSCode will be available at `/workspaces/build-container-installer` once the container has started.
|
|
||||||
|
|
||||||
Privileged mode is required so lorax can access loop devices.
|
|
||||||
|
|
||||||
## Use existing container image
|
|
||||||
|
|
||||||
```diff
|
|
||||||
{
|
|
||||||
"name": "Existing Image",
|
|
||||||
- "build": {
|
|
||||||
- "context": "..",
|
|
||||||
- "dockerfile": "../Containerfile",
|
|
||||||
- "args": {
|
|
||||||
- "version": "39"
|
|
||||||
- }
|
|
||||||
- },
|
|
||||||
+ "image": "ghcr.io/jasonn3/build-container-installer:latest",
|
|
||||||
"overrideCommand": true,
|
|
||||||
"shutdownAction": "stopContainer",
|
|
||||||
"privileged": true
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Build a new container image
|
|
||||||
|
|
||||||
```diff
|
|
||||||
{
|
|
||||||
"name": "New Image",
|
|
||||||
+ "build": {
|
|
||||||
+ "context": "..",
|
|
||||||
+ "dockerfile": "../Containerfile",
|
|
||||||
+ "args": {
|
|
||||||
+ "version": "39"
|
|
||||||
+ }
|
|
||||||
+ },
|
|
||||||
- "image": "ghcr.io/jasonn3/build-container-installer:latest",
|
|
||||||
"overrideCommand": true,
|
|
||||||
"shutdownAction": "stopContainer",
|
|
||||||
"privileged": true
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
@ -1,74 +0,0 @@
|
||||||
# Adding Flatpaks
|
|
||||||
|
|
||||||
- [Directly using refs](#directly-using-refs)
|
|
||||||
- [Using a directory](#using-a-directory)
|
|
||||||
|
|
||||||
## Directly using refs
|
|
||||||
|
|
||||||
Action:
|
|
||||||
Specify the following in your workflow:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- name: Build ISO
|
|
||||||
uses: jasonn3/build-container-installer@main
|
|
||||||
id: build
|
|
||||||
with:
|
|
||||||
flatpak_remote_name: flathub
|
|
||||||
flatpak_remote_url: https://flathub.org/repo/flathub.flatpakrepo
|
|
||||||
flatpak_remote_refs: app/org.videolan.VLC/x86_64/stable runtime/org.kde.Platform/x86_64/5.15-23.08
|
|
||||||
```
|
|
||||||
|
|
||||||
Podman:
|
|
||||||
Run the following command:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
podman run --privileged --volume ./:/github/workspace/ ghcr.io/jasonn3/build-container-installer:main \
|
|
||||||
FLATPAK_REMOTE_NAME=flathub \
|
|
||||||
FLATPAK_REMOTE_URL=https://flathub.org/repo/flathub.flatpakrepo \
|
|
||||||
FLATPAK_REMOTE_REFS="app/org.videolan.VLC/x86_64/stable runtime/org.kde.Platform/x86_64/5.15-23.08"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Using a directory
|
|
||||||
|
|
||||||
Action:
|
|
||||||
|
|
||||||
1. Create a directory within your GitHub repo named flatpak_refs
|
|
||||||
1. Create a file within flatpak_refs with the following content
|
|
||||||
|
|
||||||
```plaintext
|
|
||||||
app/org.videolan.VLC/x86_64/stable
|
|
||||||
runtime/org.kde.Platform/x86_64/5.15-23.08
|
|
||||||
```
|
|
||||||
|
|
||||||
Specify the following in your workflow:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- name: Build ISO
|
|
||||||
uses: jasonn3/build-container-installer@main
|
|
||||||
id: build
|
|
||||||
with:
|
|
||||||
flatpak_remote_name: flathub
|
|
||||||
flatpak_remote_url: https://flathub.org/repo/flathub.flatpakrepo
|
|
||||||
flatpak_remote_refs_dir: /github/workspace/flatpak_refs
|
|
||||||
```
|
|
||||||
|
|
||||||
Podman:
|
|
||||||
|
|
||||||
1. Create a directory named flatpak_refs
|
|
||||||
1. Create a file within flatpak_refs with the following content
|
|
||||||
|
|
||||||
```plaintext
|
|
||||||
app/org.videolan.VLC/x86_64/stable
|
|
||||||
runtime/org.kde.Platform/x86_64/5.15-23.08
|
|
||||||
```
|
|
||||||
|
|
||||||
Run the following command:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
podman run --privileged --volume ./:/github/workspace/ ghcr.io/jasonn3/build-container-installer:main \
|
|
||||||
FLATPAK_REMOTE_NAME=flathub \
|
|
||||||
FLATPAK_REMOTE_URL=https://flathub.org/repo/flathub.flatpakrepo \
|
|
||||||
FLATPAK_REMOTE_REFS="app/org.videolan.VLC/x86_64/stable runtime/org.kde.Platform/x86_64/5.15-23.08"
|
|
||||||
```
|
|
||||||
14
docs/home.md
|
|
@ -1,14 +0,0 @@
|
||||||
Welcome to the build-container-installer wiki!
|
|
||||||
|
|
||||||
## Index
|
|
||||||
|
|
||||||
- [Usage](usage.md)
|
|
||||||
- Development
|
|
||||||
- [Using the Makefile](development/makefile.md)
|
|
||||||
- [Using the Container](development/container.md)
|
|
||||||
- [Using the VSCode Dev Container](development/vscode.md)
|
|
||||||
|
|
||||||
- Examples
|
|
||||||
- [Adding Flatpaks](examples/adding-flatpaks.md)
|
|
||||||
|
|
||||||
- [Known Errors](known_errors.md)
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
# Known Errors
|
|
||||||
|
|
||||||
This page describes known errors and how to resolve them.
|
|
||||||
|
|
||||||
## failed to write boot loader configuration
|
|
||||||
|
|
||||||
Add `RUN bootupctl backend generate-update-metadata` at the end of your Dockerfile/Containerfile
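
For instance, a minimal way to apply the fix to an existing Containerfile (a sketch; use whatever file name your image build already uses):

```bash
# append the bootupd metadata step as the final layer of the image
echo 'RUN bootupctl backend generate-update-metadata' >> Containerfile
```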
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
||||||
# Usage
|
|
||||||
|
|
||||||
This action is designed to be called from a GitHub workflow using the following format
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- name: Build ISO
|
|
||||||
uses: jasonn3/build-container-installer@main
|
|
||||||
id: build
|
|
||||||
with:
|
|
||||||
arch: ${{ env.ARCH}}
|
|
||||||
image_name: ${{ env.IMAGE_NAME}}
|
|
||||||
image_repo: ${{ env.IMAGE_REPO}}
|
|
||||||
image_tag: ${{ env.IMAGE_TAG }}
|
|
||||||
version: ${{ env.VERSION }}
|
|
||||||
variant: ${{ env.VARIANT }}
|
|
||||||
iso_name: ${{ env.IMAGE_NAME }}-${{ env.IMAGE_TAG }}-${{ env.VERSION }}.iso
|
|
||||||
|
|
||||||
# This example is for uploading your ISO as a Github artifact. You can do something similar using any cloud storage, so long as you copy the output
|
|
||||||
- name: Upload ISO as artifact
|
|
||||||
id: upload
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ steps.build.outputs.iso_name }}
|
|
||||||
path: |
|
|
||||||
${{ steps.build.outputs.iso_path }}/${{ steps.build.outputs.iso_name }}
|
|
||||||
${{ steps.build.outputs.iso_path }}/${{ steps.build.outputs.iso_name }}-CHECKSUM
|
|
||||||
if-no-files-found: error
|
|
||||||
retention-days: 0
|
|
||||||
compression-level: 0
|
|
||||||
```
|
|
||||||
|
|
||||||
## Inputs
|
|
||||||
|
|
||||||
| Variable | Description | Default Value | Action | Container/Makefile |
|
|
||||||
| ----------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------- | ------------------ | ------------------ |
|
|
||||||
| additional_templates | Space delimited list of additional Lorax templates to include | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| arch | Architecture for image to build | x86_64 | :white_check_mark: | :white_check_mark: |
|
|
||||||
| enrollment_password | Used for supporting secure boot (requires SECURE_BOOT_KEY_URL to be defined) | container-installer | :white_check_mark: | :white_check_mark: |
|
|
||||||
| extra_boot_params | Extra params used by grub to boot the anaconda installer | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| flatpak_remote_name | Name of the Flatpak repo on the destination OS | flathub | :white_check_mark: | :white_check_mark: |
|
|
||||||
| flatpak_remote_refs | Space separated list of flatpak refs to install | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| flatpak_remote_refs_dir | Directory that contains files that list the flatpak refs to install | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| flatpak_remote_url | URL of the flatpakrepo file | <https://flathub.org/repo/flathub.flatpakrepo> | :white_check_mark: | :white_check_mark: |
|
|
||||||
| image_name | Name of the source container image | base | :white_check_mark: | :white_check_mark: |
|
|
||||||
| image_repo | Repository containing the source container image | quay.io/fedora-ostree-desktops | :white_check_mark: | :white_check_mark: |
|
|
||||||
| image_signed | Whether the container image is signed. The policy to test the signing must be configured inside the container image | true | :white_check_mark: | :white_check_mark: |
|
|
||||||
| image_src | Overrides the source of the container image. Must be formatted for the skopeo copy command | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| image_tag | Tag of the source container image | *VERSION* | :white_check_mark: | :white_check_mark: |
|
|
||||||
| iso_name | Name of the ISO you wish to output when completed | build/deploy.iso | :white_check_mark: | :white_check_mark: |
|
|
||||||
| make_target | Overrides the default make target | *ISO_NAME*-CHECKSUM | :white_check_mark: | :x: |
|
|
||||||
| repos | List of repo files for Lorax to use | /etc/yum.repos.d/*.repo | :white_check_mark: | :white_check_mark: |
|
|
||||||
| rootfs_size | The size (in GiB) for the squashfs runtime volume | 2 | :white_check_mark: | :white_check_mark: |
|
|
||||||
| secure_boot_key_url | Secure boot key that is installed from URL location\*\* | \[empty\] | :white_check_mark: | :white_check_mark: |
|
|
||||||
| variant | Source container variant\* | Server | :white_check_mark: | :white_check_mark: |
|
|
||||||
| version | Fedora version of installer to build | 39 | :white_check_mark: | :white_check_mark: |
|
|
||||||
| web_ui | Enable Anaconda WebUI (experimental) | false | :white_check_mark: | :white_check_mark: |
|
|
||||||
|
|
||||||
\*Available options for VARIANT can be found by running `dnf provides system-release`.
|
|
||||||
Variant will be the third item in the package name. Example: `fedora-release-kinoite-39-34.noarch` will be kinoite
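
A short illustration of the lookup (the package name shown is the example from above):

```bash
# the variant is the third dash-separated item of the providing package name
dnf provides system-release
# e.g. fedora-release-kinoite-39-34.noarch  ->  variant "kinoite"
```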
|
|
||||||
|
|
||||||
\*\* If you need to reference a local file, you can use `file://*path*`
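
For example, a locally stored key could be passed like this (a sketch; the path is hypothetical):

```bash
make SECURE_BOOT_KEY_URL=file:///path/to/sb_pubkey.der ENROLLMENT_PASSWORD=container-installer
```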
|
|
||||||
|
|
||||||
## Outputs
|
|
||||||
|
|
||||||
| Variable | Description | Usage |
|
|
||||||
| -------- | ----------------------------------------| ------------------------------------------------ |
|
|
||||||
| iso_name | The name of the resulting .iso | ${{ steps.YOUR_ID_FOR_ACTION.outputs.iso_name }} |
|
|
||||||
| iso_path | The path to the resulting .iso | ${{ steps.YOUR_ID_FOR_ACTION.outputs.iso_path }} |
|
|
||||||
|
|
@ -5,6 +5,13 @@ set -ex
|
||||||
# Create /dev/loop0 if it doesn't already exist. `losetup` has an issue creating it during the first run
|
# Create /dev/loop0 if it doesn't already exist. `losetup` has an issue creating it during the first run
|
||||||
mknod -m 0660 /dev/loop0 b 7 0 2>/dev/null || true
|
mknod -m 0660 /dev/loop0 b 7 0 2>/dev/null || true
|
||||||
|
|
||||||
|
for i
|
||||||
|
do
|
||||||
|
key=$(echo ${i} | cut -d= -f1)
|
||||||
|
value=$(echo ${i} | cut -d= -f2-)
|
||||||
|
export ${key}="${value}"
|
||||||
|
done
|
||||||
|
|
||||||
if [[ -d /cache/skopeo ]]
|
if [[ -d /cache/skopeo ]]
|
||||||
then
|
then
|
||||||
ln -s /cache/skopeo /build-container-installer/container
|
ln -s /cache/skopeo /build-container-installer/container
|
||||||
|
|
@ -15,5 +22,18 @@ then
|
||||||
mkdir /cache/dnf
|
mkdir /cache/dnf
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Run make command
|
# Pull container
|
||||||
make "$@"
|
make container/${IMAGE_NAME}-${IMAGE_TAG} "$@"
|
||||||
|
|
||||||
|
# Build base ISO
|
||||||
|
make boot.iso "$@"
|
||||||
|
|
||||||
|
# Add container to ISO
|
||||||
|
make build/deploy.iso "$@"
|
||||||
|
|
||||||
|
# Make output dir in github workspace
|
||||||
|
mkdir /github/workspace/build || true
|
||||||
|
|
||||||
|
# Copy resulting iso to github workspace and fix permissions
|
||||||
|
cp build/deploy.iso /github/workspace/build
|
||||||
|
chmod -R ugo=rwX /github/workspace/build
|
||||||
|
|
|
||||||
12
external/Makefile
vendored
|
|
@ -1,12 +0,0 @@
|
||||||
lorax/branch-$(VERSION):
|
|
||||||
git config advice.detachedHead false
|
|
||||||
cd lorax && git reset --hard HEAD && git checkout $(if $(_RHEL),rhel$(word 1,$(subst ., ,$(VERSION)))-branch,tags/$(shell cd lorax && git tag -l lorax-$(VERSION).* --sort=creatordate | grep -v 'lorax-40\.5' | tail -n 1))
|
|
||||||
touch lorax/branch-$(VERSION)
|
|
||||||
|
|
||||||
install-deps:
|
|
||||||
# Used by external/fedora-lorax-templates/ostree-based-installer/lorax-embed-flatpaks.tmpl
|
|
||||||
$(install_pkg) flatpak dbus-daemon ostree
|
|
||||||
# Used to clone proper lorax branch
|
|
||||||
$(install_pkg) git
|
|
||||||
|
|
||||||
clean:
|
|
||||||
|
|
@ -1,43 +0,0 @@
|
||||||
IMAGE := $(IMAGE_REPO)/$(IMAGE_NAME):$(IMAGE_TAG)
|
|
||||||
FLATPAK_DIR := $(if $(GITHUB_WORKSPACE),$(shell mktemp -d -p $(GITHUB_WORKSPACE) flatpak.XXX),$(PWD)/flatpaks)
|
|
||||||
|
|
||||||
.PHONY: full_list
|
|
||||||
full_list: repo
|
|
||||||
cat $(FLATPAK_DIR)/list.txt >&2
|
|
||||||
|
|
||||||
|
|
||||||
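# Runs script.sh inside the source container image to install the requested flatpak refs and export them into a local OSTree repo at $(FLATPAK_DIR)/repo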
repo: script.sh
|
|
||||||
$(if $(GITHUB_WORKSPACE),cp script.sh $(FLATPAK_DIR)/)
|
|
||||||
docker run --rm --privileged --entrypoint bash -e FLATPAK_SYSTEM_DIR=/flatpak/flatpak -e FLATPAK_TRIGGERSDIR=/flatpak/triggers --volume $(FLATPAK_DIR):/flatpak_dir $(IMAGE) /flatpak_dir/script.sh
|
|
||||||
$(if $(GITHUB_OUTPUT),echo "flatpak_dir=$(subst $(GITHUB_WORKSPACE)/,,$(FLATPAK_DIR))" >> $(GITHUB_OUTPUT))
|
|
||||||
docker rmi $(IMAGE)
|
|
||||||
|
|
||||||
script.sh:
|
|
||||||
cat << EOF > script.sh
|
|
||||||
which flatpak &> /dev/null || dnf install -y flatpak
|
|
||||||
mkdir -p /flatpak/flatpak /flatpak/triggers
|
|
||||||
mkdir /var/tmp || true
|
|
||||||
chmod -R 1777 /var/tmp
|
|
||||||
flatpak config --system --set languages "*"
|
|
||||||
flatpak remote-add --system $(FLATPAK_REMOTE_NAME) $(FLATPAK_REMOTE_URL)
|
|
||||||
flatpak install --system -y $(FLATPAK_REMOTE_REFS)
|
|
||||||
ostree init --repo=/flatpak_dir/repo --mode=archive-z2
|
|
||||||
for i in \$$(ostree refs --repo=\$${FLATPAK_SYSTEM_DIR}/repo | grep '^deploy/' | sed 's/^deploy\///g')
|
|
||||||
do
|
|
||||||
echo "Copying \$${i}..."
|
|
||||||
ostree --repo=/flatpak_dir/repo pull-local \$${FLATPAK_SYSTEM_DIR}/repo \$$(ostree --repo=\$${FLATPAK_SYSTEM_DIR}/repo rev-parse $(FLATPAK_REMOTE_NAME)/\$${i})
|
|
||||||
mkdir -p \$$(dirname /flatpak_dir/repo/refs/heads/\$${i})
|
|
||||||
ostree --repo=\$${FLATPAK_SYSTEM_DIR}/repo rev-parse $(FLATPAK_REMOTE_NAME)/\$${i} > /flatpak_dir/repo/refs/heads/\$${i}
|
|
||||||
done
|
|
||||||
flatpak build-update-repo /flatpak_dir/repo
|
|
||||||
ostree refs --repo=/flatpak_dir/repo | tee /flatpak_dir/list.txt
|
|
||||||
EOF
|
|
||||||
|
|
||||||
install-deps:
|
|
||||||
|
|
||||||
clean:
|
|
||||||
$(if $(wildcard script.sh),rm script.sh)
|
|
||||||
$(if $(wildcard repo),rm -Rf repo)
|
|
||||||
$(if $(wildcard list.txt),rm list.txt)
|
|
||||||
|
|
||||||
.ONESHELL:
|
|
||||||
|
|
@ -1,52 +0,0 @@
|
||||||
# Converts a post script to a template
|
|
||||||
# $1 = script to convert
|
|
||||||
# $2 = file on ISO to write
|
|
||||||
# $3 = whether to copy the '<%' lines to the template
|
|
||||||
define convert_post_to_tmpl
|
|
||||||
header=0; \
|
|
||||||
skip=0; \
|
|
||||||
while read -r line; \
|
|
||||||
do \
|
|
||||||
if [[ $$line =~ ^\<\% ]]; \
|
|
||||||
then \
|
|
||||||
if [[ '$(3)' == 'true' ]]; \
|
|
||||||
then \
|
|
||||||
echo $$line >> post_$(1).tmpl; \
|
|
||||||
fi; \
|
|
||||||
echo >> post_$(1).tmpl; \
|
|
||||||
else \
|
|
||||||
if [[ $$header == 0 ]]; \
|
|
||||||
then \
|
|
||||||
if [[ $$line =~ ^\#\#\ (.*)$$ ]]; \
|
|
||||||
then \
|
|
||||||
echo "append $(2) \"%post --erroronfail $${BASH_REMATCH[1]}\"" >> post_$(1).tmpl; \
|
|
||||||
skip=1; \
|
|
||||||
else \
|
|
||||||
echo "append $(2) \"%post --erroronfail\"" >> post_$(1).tmpl; \
|
|
||||||
fi; \
|
|
||||||
header=1; \
|
|
||||||
fi; \
|
|
||||||
if [[ $$skip == 0 ]]; \
|
|
||||||
then \
|
|
||||||
echo "append $(2) \"$${line//\"/\\\"}\"" >> post_$(1).tmpl; \
|
|
||||||
fi; \
|
|
||||||
skip=0; \
|
|
||||||
fi; \
|
|
||||||
done < scripts/post/$(1); \
|
|
||||||
echo "append $(2) \"%end\"" >> post_$(1).tmpl
|
|
||||||
endef
|
|
||||||
|
|
||||||
post_%.tmpl: scripts/post/%
|
|
||||||
$(call convert_post_to_tmpl,$*,usr/share/anaconda/post-scripts/$*.ks,true)
|
|
||||||
|
|
||||||
install_include_post.tmpl:
|
|
||||||
echo '<%page />' > install_include_post.tmpl
|
|
||||||
for file in $(patsubst post_%.tmpl, %, $(filter post_%, $(notdir $(_LORAX_TEMPLATES)))); do echo "append usr/share/anaconda/interactive-defaults.ks \"%include /usr/share/anaconda/post-scripts/$${file}.ks\"" >> install_include_post.tmpl; done
|
|
||||||
|
|
||||||
install-deps:
|
|
||||||
|
|
||||||
FILES=$(wildcard post_*) install_include_post.tmpl
|
|
||||||
clean:
|
|
||||||
ifneq ($(FILES),)
|
|
||||||
rm -Rf $(FILES)
|
|
||||||
endif
|
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
<%page args="flatpak_remote_name, _flatpak_repo_url, version"/>
|
<%page args="flatpak_remote_name, _flatpak_repo_url, version"/>
|
||||||
% if int(version) >= 41:
|
% if int(version) >= 41:
|
||||||
append etc/anaconda/conf.d/anaconda.conf "[Payload]"
|
|
||||||
append etc/anaconda/conf.d/anaconda.conf "flatpak_remote = ${flatpak_remote_name} ${_flatpak_repo_url}"
|
append etc/anaconda/conf.d/anaconda.conf "flatpak_remote = ${flatpak_remote_name} ${_flatpak_repo_url}"
|
||||||
% else:
|
% else:
|
||||||
replace "flatpak_manager\.add_remote\(\".*\", \".*\"\)" "flatpak_manager.add_remote(\"${flatpak_remote_name}\", \"${_flatpak_repo_url}\")" /usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py
|
replace "flatpak_manager\.add_remote\(\".*\", \".*\"\)" "flatpak_manager.add_remote(\"${flatpak_remote_name}\", \"${_flatpak_repo_url}\")" /usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py
|
||||||
replace "flatpak_manager\.replace_installed_refs_remote\(\".*\"\)" "flatpak_manager.replace_installed_refs_remote(\"${flatpak_remote_name}\")" /usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py
|
replace "flatpak_manager\.replace_installed_refs_remote\(\".*\"\)" "flatpak_manager.replace_installed_refs_remote(\"${flatpak_remote_name}\")" /usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py
|
||||||
% endif
|
% endif
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,7 @@
|
||||||
<%page args="image_repo, _image_repo_double_escaped, image_name, image_signed, image_tag, _rhel, version"/>
|
<%page args="image_repo, _image_repo_double_escaped, image_name, image_tag, _rhel, version"/>
|
||||||
|
|
||||||
if (which bootc &> /dev/null) && [ ${_rhel} == 'false' ] && [ ${version} -ge 39 ]
|
if (which bootc &> /dev/null) && [ ${_rhel} == 'false' ] && [ ${version} -ge 39 ]
|
||||||
then
|
then
|
||||||
if [ ${image_signed} == 'true' ]
|
|
||||||
then
|
|
||||||
bootc switch --mutate-in-place --enforce-container-sigpolicy --transport registry ${image_repo}/${image_name}:${image_tag}
|
bootc switch --mutate-in-place --enforce-container-sigpolicy --transport registry ${image_repo}/${image_name}:${image_tag}
|
||||||
else
|
|
||||||
bootc switch --mutate-in-place --transport registry ${image_repo}/${image_name}:${image_tag}
|
|
||||||
fi
|
|
||||||
else
|
else
|
||||||
if [ ${image_signed} == 'true' ]
|
|
||||||
then
|
|
||||||
sed -i 's/container-image-reference=.*/container-image-reference=ostree-image-signed:docker:\/\/${_image_repo_double_escaped}\/${image_name}:${image_tag}/' /ostree/deploy/default/deploy/*.origin
|
sed -i 's/container-image-reference=.*/container-image-reference=ostree-image-signed:docker:\/\/${_image_repo_double_escaped}\/${image_name}:${image_tag}/' /ostree/deploy/default/deploy/*.origin
|
||||||
else
|
|
||||||
sed -i 's/container-image-reference=.*/container-image-reference=ostree-unverified-image:docker:\/\/${_image_repo_double_escaped}\/${image_name}:${image_tag}/' /ostree/deploy/default/deploy/*.origin
|
|
||||||
fi
|
|
||||||
fi
|
fi
|
||||||
|
|
|
||||||
|
|
@ -1,15 +0,0 @@
|
||||||
repos: $(_REPO_FILES)
|
|
||||||
|
|
||||||
# Step 2: Replace vars in repo files
|
|
||||||
%.repo: /etc/yum.repos.d/%.repo
|
|
||||||
cp /etc/yum.repos.d/$*.repo $*.repo
|
|
||||||
sed -i "s/\$$releasever/$(VERSION)/g" $*.repo
|
|
||||||
sed -i "s/\$$basearch/$(ARCH)/g" $*.repo
|
|
||||||
|
|
||||||
install-deps:
|
|
||||||
|
|
||||||
FILES=$(wildcard *.repo)
|
|
||||||
clean:
|
|
||||||
ifneq ($(FILES),)
|
|
||||||
rm -Rf $(FILES)
|
|
||||||
endif
|
|
||||||
|
|
@@ -1,23 +0,0 @@
all: $(filter-out README.md Makefile,$(wildcard *))

$(filter-out README.md Makefile,$(wildcard *)):
    $(eval DIR=$(firstword $(subst /, ,$@)))
    $(MAKE) -w -C $(DIR)

$(filter-out README.md Makefile,$(wildcard */*)):
    $(eval DIR=$(firstword $(subst /, ,$@)))
    $(eval TARGET=$(subst $(DIR)/,,$@))
    $(MAKE) -w -C $(DIR) $(TARGET)

.DEFAULT:
    $(eval DIR=$(firstword $(subst /, ,$@)))
    $(if $(filter-out $(DIR),$@), $(eval TARGET=$(subst $(DIR)/,,$@)),$(eval TARGET=))
    $(MAKE) -w -C $(DIR) $(TARGET)

install-deps:
    $(foreach DIR,$(filter-out README.md Makefile,$(wildcard *)),$(MAKE) -w -C $(DIR) install-deps;)

clean:
    $(foreach DIR,$(filter-out README.md Makefile,$(wildcard *)),$(MAKE) -w -C $(DIR) clean;)

.PHONY: all $(filter-out README.md Makefile,$(wildcard *)) $(filter-out README.md Makefile,$(wildcard */*))
@@ -1,25 +0,0 @@
ISO_NAME=deploy.iso
ISO_TESTS=$(wildcard install_*) $(if $(FLATPAK_REMOTE_REFS),$(wildcard flatpak_*))$(if $(FLATPAK_DIR),$(wildcard flatpak_*))

all: $(ISO_TESTS) clean

$(ISO_TESTS): mnt/iso
    $(eval _VARS = ISO_NAME VERSION FLATPAK_REMOTE_NAME _FLATPAK_REPO_URL)
    chmod +x $@
    $(foreach var,$(_VARS),$(var)=$($(var))) ./$@

mnt/iso:
    sudo modprobe loop
    sudo mkdir -p mnt/iso mnt/install
    sudo mount -o loop ../../$(ISO_NAME) mnt/iso
    sudo mount -t squashfs -o loop mnt/iso/images/install.img mnt/install

clean:
    sudo umount mnt/install || true
    sudo umount mnt/iso || true
    sudo rmdir mnt/install mnt/iso

install-deps:
    $(install_pkg) isomd5sum coreutils squashfs-tools curl

.PHONY: all $(ISO_TESTS) clean
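A rough sketch of driving this (now removed) ISO test harness from its own directory, with the built ISO two levels up; the package-manager command and variable values are assumptions for illustration only:

    # Hypothetical invocation; install_pkg is normally supplied by the calling Makefile
    make install_pkg="sudo dnf install -y" install-deps
    sudo make ISO_NAME=deploy.iso VERSION=40 \
        FLATPAK_REMOTE_NAME=fedora _FLATPAK_REPO_URL=https://flatpaks.example.org/repo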
@@ -1,54 +0,0 @@
#!/bin/bash

if [[ ${VERSION} -ge 41 ]]
then
    result=0
    grep "^\[Payload\]" mnt/install/etc/anaconda/conf.d/anaconda.conf > /dev/null || {
        echo "Missing [Payload] header"
        result=1
    }
    grep "^flatpak_remote = ${FLATPAK_REMOTE_NAME} ${_FLATPAK_REPO_URL}" mnt/install/etc/anaconda/conf.d/anaconda.conf > /dev/null || {
        echo "Missing flatpak_remote option"
        result=1
    }
    exit ${result}
fi

add_line=$(grep flatpak_manager.add_remote mnt/install/usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py)

add_line_repo=$(echo "${add_line}" | grep "${FLATPAK_REMOTE_NAME}")
add_line_url=$(echo "${add_line}" | grep "${_FLATPAK_REPO_URL}")

result=0
if [ -z "${add_line_repo}" ]
then
    echo "Repo name not updated on add_remote line"
    echo "${add_line}"
    result=1
else
    echo "Repo name found on add_remote line"
fi

if [ -z "${add_line_url}" ]
then
    echo "Repo url not updated on add_remote line"
    echo "${add_line}"
    result=1
else
    echo "Repo url found on add_remote line"
fi

replace_line=$(grep flatpak_manager.replace_installed_refs_remote mnt/install/usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py)

replace_line_repo=$(echo "${replace_line}" | grep "${FLATPAK_REMOTE_NAME}")

if [ -z "${replace_line_repo}" ]
then
    echo "Repo name not updated on replace_installed_refs line"
    echo "${replace_line}"
    result=1
else
    echo "Repo name found on replace_installed_refs line"
fi

exit ${result}
@@ -1,14 +0,0 @@
#!/bin/bash

#set -ex

checkisomd5 "../../${ISO_NAME}"
if [[ $? != 0 ]]
then
    echo "Found:"
    checkisomd5 --md5sumonly "../../${ISO_NAME}"
    echo "Expected:"
    implantisomd5 --force "../../${ISO_NAME}"
fi

cd "$(dirname "../../${ISO_NAME}")" && sha256sum -c "$(basename "${ISO_NAME}")-CHECKSUM"
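For reference, the isomd5sum/sha256sum round trip this check relies on looks roughly like this; my.iso is a placeholder name, not a file from this project:

    implantisomd5 my.iso                  # embed an md5 in the ISO's application data area
    checkisomd5 my.iso                    # exits 0 while the ISO still matches the implanted md5
    sha256sum my.iso > my.iso-CHECKSUM    # companion file that the last line of the test verifies
    sha256sum -c my.iso-CHECKSUM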
@@ -1,11 +0,0 @@
REPO_TESTS=$(filter-out README.md Makefile,$(wildcard *))

all: $(REPO_TESTS)

$(REPO_TESTS):
    chmod +x $@
    ./$@

install-deps:

.PHONY: $(REPO_TESTS)
@@ -1,122 +0,0 @@
#!/usr/bin/env python

makefile = open('../../Makefile.inputs', 'r')
makefile_lines = makefile.readlines()

inputs = {}
outputs = {}
errors = 0

for line in makefile_lines:
    if line.startswith('#'):
        makefile_lines.remove(line)
        continue
    parts = line.split('=', 1)
    if parts[0].startswith('export'):
        var_name = parts[0].strip().split(' ')[1].lower()
    else:
        var_name = parts[0].strip().lower()
    inputs[var_name] = {'default_value': parts[1].strip(), 'makefile': True}

action = open('../../action.yml', 'r')
action_lines = action.readlines()

at_inputs = False
at_outputs = False
for line in action_lines:
    if not at_inputs:
        if line.strip() == 'inputs:':
            at_inputs = True
            continue
    else:
        if line.startswith('    '):
            parts = line.strip().split(':', 1)
            if parts[0] == 'description':
                inputs[var_name]['description'] = parts[1].strip()
            if parts[0] == 'deprecationMessage':
                inputs[var_name]['deprecated'] = True
            if parts[0] == 'default':
                if 'default' in inputs[var_name]:
                    if inputs[var_name]['default_value'] != parts[1].strip().strip('"'):
                        print("ERROR: Default value for " + var_name + " in action.yml does not match Makefile")
                        errors += 1
                else:
                    inputs[var_name]['default_value'] = parts[1].strip().strip('"')
        elif line.startswith('  '):
            var_name = line.strip().strip(':').lower()
            if not var_name in inputs:
                inputs[var_name] = {}
            inputs[var_name]['action'] = True
        else:
            at_inputs = False

    if not at_outputs:
        if line.strip() == 'outputs:':
            at_outputs = True
            continue
    else:
        if line.startswith('    '):
            parts = line.strip().split(':', 1)
            if parts[0] == 'description':
                outputs[var_name]['description'] = parts[1].strip()
            if parts[0] == 'deprecationMessage':
                outputs[var_name]['deprecated'] = True
            if parts[0] == 'default':
                outputs[var_name]['default_value'] = parts[1].strip().strip('"')
        elif line.startswith('  '):
            var_name = line.strip().strip(':').lower()
            outputs[var_name] = {}
        else:
            at_outputs = False

readme = open('../../README.md', 'r')
readme_lines = readme.readlines()

at_inputs = False
skip_header = True
at_outputs = False
for line in readme_lines:
    if not at_inputs:
        if line.strip() == '### Inputs':
            at_inputs = True
            continue
    else:
        if skip_header:
            if line.startswith('| -----'):
                skip_header = False
            continue
        else:
            if not line.startswith('|'):
                at_inputs = False
                continue
            parts = line.split('|')
            var_name = parts[1].strip().lower()
            if not var_name in inputs:
                print("ERROR: " + var_name + " is not listed in action.yml or Makefile")
                errors += 1
                continue
            if 'description' in inputs[var_name]:
                if parts[2].strip().strip('\*') != inputs[var_name]['description']:
                    print("WARNING: " + var_name + " description in README.md does not match action.yml")
            if 'default_value' in inputs[var_name]:
                if not parts[3].strip().strip('"<>').startswith('*'):
                    if inputs[var_name]['default_value'] == "":
                        if parts[3].strip().strip('"') != '\\[empty\\]':
                            print("ERROR: " + var_name + " default value in README.md does not match action.yml")
                            print("Found " + parts[3].strip().strip('"<>'))
                            print("Expected " + inputs[var_name]['default_value'])
                            errors += 1
                    elif parts[3].strip().strip('"<>') != inputs[var_name]['default_value']:
                        print("ERROR: " + var_name + " default value in README.md does not match action.yml")
                        print("Found " + parts[3].strip().strip('"<>'))
                        print("Expected " + inputs[var_name]['default_value'])
                        errors += 1
            if 'action' in inputs[var_name] and inputs[var_name]['action']:
                if parts[4].strip() != ':white_check_mark:':
                    print("WARNING: " + var_name + " not labeled as in action.yml in the README.md")
            if 'makefile' in inputs[var_name] and inputs[var_name]['makefile']:
                if parts[4].strip() != ':white_check_mark:':
                    print("WARNING: " + var_name + " not labeled as in Makefile in the README.md")

exit(errors)
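A short sketch of running this documentation-consistency check locally; the script name and working directory are assumptions (the '../../' paths inside it expect to run two levels below the repository root):

    # Hypothetical run; check_vars.py stands in for this script's real filename
    cd tests/repo
    python3 check_vars.py
    echo "documentation errors: $?"   # the script exits with its error count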
100 test/vm/Makefile
@@ -1,100 +0,0 @@
VM_TESTS=$(wildcard install_*) $(if $(FLATPAK_REMOTE_REFS),$(wildcard flatpak_*))$(if $(FLATPAK_DIR),$(wildcard flatpak_*))

all: $(VM_TESTS) clean

$(VM_TESTS): start_vm ansible_inventory
    $(eval _VARS = IMAGE_REPO IMAGE_NAME IMAGE_TAG)

    ansible -i ansible_inventory -m ansible.builtin.wait_for_connection vm

    chmod +x $@
    $(foreach var,$(_VARS),$(var)=$($(var))) ./$@

ansible_inventory:
    echo "ungrouped:" > ansible_inventory
    echo "  hosts:" >> ansible_inventory
    echo "    vm:" >> ansible_inventory
    echo "      ansible_host: $(VM_IP)" >> ansible_inventory
    echo "      ansible_port: $(VM_PORT)" >> ansible_inventory
    echo "      ansible_user: $(VM_USER)" >> ansible_inventory
    echo "      ansible_password: $(VM_PASS)" >> ansible_inventory
    echo "      ansible_become_pass: $(VM_PASS)" >> ansible_inventory
    echo "      ansible_ssh_common_args: '-o StrictHostKeyChecking=no'" >> ansible_inventory

.PHONY: $(VM_TESTS) install-deps

install-deps:
    $(install_pkg) qemu-system qemu-utils xorriso qemu-system-x86 ncat socat jq ansible curl

files/mnt/iso:
    $(if $(wildcard files/mnt),,mkdir files/mnt)
    $(if $(wildcard files/mnt/iso),,mkdir files/mnt/iso)
    sudo mount -o loop ../../$(ISO_NAME) files/mnt/iso

files/grub.cfg: files/mnt/iso
    cp files/mnt/iso/$(if $(_RHEL),isolinux/grub.conf,boot/grub2/grub.cfg) files/grub.cfg
    sed -i 's/quiet/console=ttyS0,115200n8 inst.ks=cdrom:\/ks.cfg/' files/grub.cfg
    sed -i 's/set default="1"/set default="0"/' files/grub.cfg
    sed -i 's/set timeout=60/set timeout=1/' files/grub.cfg

.PHONY: clean
clean:
    $(if $(wildcard start_vm), kill "$(shell cat start_vm)")
    $(if $(wildcard files/mnt/iso),sudo umount files/mnt/iso)
    $(if $(wildcard files/mnt/iso),rmdir files/mnt/iso)
    $(if $(wildcard ansible_inventory),rm ansible_inventory)
    $(if $(wildcard files/install.iso),rm files/install.iso)
    $(if $(wildcard files/disk.qcow2),rm files/disk.qcow2)
    $(if $(wildcard install_os),rm install_os)
    $(if $(wildcard start_vm),rm start_vm)

files/install.iso: files/grub.cfg
    xorriso -dialog on << EOF
    -indev ../../$(ISO_NAME)
    -outdev files/install.iso
    -boot_image any replay
    -joliet on
    -compliance joliet_long_names
    -map files/ks.cfg ks.cfg
    -chmod 0444 ks.cfg
    -map files/grub.cfg $(if $(_RHEL),isolinux/grub.conf,boot/grub2/grub.cfg)
    -end
    EOF

files/disk.qcow2:
    qemu-img create -f qcow2 files/disk.qcow2 50G

install_os: files/install.iso files/disk.qcow2
    timeout 1h qemu-system-x86_64 -name "Anaconda" -boot d -m 4096 -cpu qemu64 -display none -cdrom files/install.iso -smp 2 -hda files/disk.qcow2 -serial telnet:localhost:4321,server=on,wait=off & QEMU_PID=$$!
    echo "PID: $$QEMU_PID"
    timeout 1m bash -c "while ! (echo > /dev/tcp/127.0.0.1/4321); do sleep 0.1; done"
    (nc localhost 4321 | tee vm.stdout) &
    wait $$QEMU_PID
    touch install_os

.ONESHELL:

start_vm: install_os
    mkfifo vm.stdin
    qemu-system-x86_64 -name "Anaconda" \
        -m 4096 -cpu qemu64 -display none -smp 2 \
        -chardev socket,path=/tmp/qga.sock,server=on,wait=off,id=qga0 \
        -device e1000,netdev=net0 \
        -netdev user,id=net0,hostfwd=tcp::$(VM_PORT)-:22 \
        -device virtio-serial \
        -device virtserialport,chardev=qga0,name=org.qemu.guest_agent.0 \
        -boot c -hda files/disk.qcow2 -serial telnet:localhost:4321,server=on,wait=off & export QEMU_PID=$$!
    echo "PID: $$QEMU_PID"

    timeout 1m bash -c "while ! (echo > /dev/tcp/127.0.0.1/4321); do sleep 0.1; done"
    (tail -f vm.stdin | nc localhost 4321 | tee vm.stdout) &

    timeout 30m bash -c "while ! (echo > /dev/tcp/$(VM_IP)/$(VM_PORT)); do sleep 1; done"

    if ! (echo > /dev/tcp/$(VM_IP)/$(VM_PORT))
    then
        echo "SSH must be installed and enabled inside the container"
    fi

    echo "VM ready for tests at IP $(VM_IP):$(VM_PORT)"
    echo $$QEMU_PID > start_vm
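A rough sketch of driving these (now removed) VM deployment tests; the values are illustrative assumptions, except that the user and password match the test kickstart below:

    # Hypothetical invocation; install_pkg is normally supplied by the calling Makefile
    make install_pkg="sudo apt-get install -y" install-deps
    make ISO_NAME=deploy.iso VM_IP=127.0.0.1 VM_PORT=2222 VM_USER=core VM_PASS=foobar \
         IMAGE_REPO=ghcr.io/example IMAGE_NAME=my-image IMAGE_TAG=latest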
@@ -1,9 +0,0 @@
lang en_US.UTF-8
keyboard us
timezone Americas/New_York
zerombr
clearpart --all --initlabel
autopart
poweroff
user --name=core --groups=wheel --password=foobar
%include /usr/share/anaconda/interactive-defaults.ks
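If you want to sanity-check a kickstart like this one before a build, pykickstart ships a validator; the filename here is a placeholder:

    ksvalidator ks.cfg   # reports kickstart syntax problems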
37 tests/iso/flatpak_repo_updated.sh Normal file
@@ -0,0 +1,37 @@
#!/bin/bash

add_line=$(grep flatpak_manager.add_remote /mnt/install/usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py)

add_line_repo=$(echo ${add_line} | grep ${FLATPAK_REMOTE_NAME})
add_line_url=$(echo ${add_line} | grep ${_FLATPAK_REPO_URL})

result=0
if [ -z "${add_line_repo}" ]
then
    echo "Repo name not updated on add_remote line"
    result=1
else
    echo "Repo name found on add_remote line"
fi

if [ -z "${add_line_url}" ]
then
    echo "Repo url not updated on add_remote line"
    result=1
else
    echo "Repo url found on add_remote line"
fi

replace_line=$(grep flatpak_manager.replace_installed_refs_remote /mnt/install/usr/lib64/python*/site-packages/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_installation.py)

replace_line_repo=$(echo ${replace_line} | grep ${FLATPAK_REMOTE_NAME})

if [ -z "${replace_line_repo}" ]
then
    echo "Repo name not updated on replace_installed_refs line"
    result=1
else
    echo "Repo name found on replace_installed_refs line"
fi

exit ${result}
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-FOUND_VERSION=$(grep VERSION_ID mnt/install/etc/os-release | cut -d= -f2 | tr -d '"')
+FOUND_VERSION=$(cat /mnt/install/etc/os-release | grep VERSION_ID | cut -d= -f2)
 
 if [[ ${FOUND_VERSION} != ${VERSION} ]]
 then
39 tests/repo/vars.sh Normal file
@@ -0,0 +1,39 @@
#!/bin/bash

vars=()

while read -r line
do
    if ! [[ $line =~ ^# ]]
    then
        vars+=$(echo $line | cut -d= -f1 | tr [:upper:] [:lower:])
    fi
    if [[ $line =~ ^########## ]]
    then
        break
    fi
done < Makefile

result=0

for var in $vars
do
    grep "^| ${var}" README.md > /dev/null
    if [[ $? != 0 ]]
    then
        echo "$var not found in README.md"
        result=1
    fi
done

for var in $vars
do
    grep "^  ${var}:" action.yml > /dev/null
    if [[ $? != 0 ]]
    then
        echo "$var not found in action.yml"
        result=1
    fi
done

exit ${result}
@@ -1,6 +1,6 @@
 #!/usr/bin/env -S ansible-playbook -i ./ansible_inventory
 ---
-- name: Test fedora flatpak repo wasn't enabled
+- name: Test for installed flatpaks
   hosts: vm
   gather_facts: no
 
@ -10,9 +10,7 @@
|
||||||
register: services_state
|
register: services_state
|
||||||
|
|
||||||
- name: Check that flatpak-add-fedora-repos is disabled
|
- name: Check that flatpak-add-fedora-repos is disabled
|
||||||
when: services_state['ansible_facts']['services']['flatpak-add-fedora-repos.service'] is defined
|
|
||||||
ansible.builtin.assert:
|
ansible.builtin.assert:
|
||||||
that:
|
that:
|
||||||
- services_state['ansible_facts']['services']['flatpak-add-fedora-repos.service']['status'] == 'disabled'
|
- services_state['ansible_facts']['services']['flatpak-add-fedora-repos.service']['status'] == 'disabled'
|
||||||
fail_msg: 'flatpak-add-fedora-repos.service is not disabled'
|
fail_msg: 'flatpak-add-fedora-repos.service is not disabled'
|
||||||
success_msg: 'flatpak-add-fedora-repos.service is correctly disabled'
|
|
||||||
|
|
@@ -1,6 +1,6 @@
 #!/usr/bin/env -S ansible-playbook -i ./ansible_inventory
 ---
-- name: Test flatpak update
+- name: Test for flatpaks
   hosts: vm
   gather_facts: no
 
@@ -1,14 +0,0 @@
input.txt: gen_input.sh
    find
    $(if $(wildcard ../results/boot/grub2/grub.cfg),sed -i 's/quiet/quiet $(EXTRA_BOOT_PARAMS)/g' ../results/boot/grub2/grub.cfg)
    sed -i 's/quiet/quiet $(EXTRA_BOOT_PARAMS)/g' ../results/EFI/BOOT/grub.cfg
    $(eval _VARS = ARCH FLATPAK_DIR IMAGE_NAME IMAGE_TAG ISO_NAME VERSION)
    $(foreach var,$(_VARS),$(var)=$($(var))) bash gen_input.sh | tee input.txt

install-deps:

FILES=$(wildcard input.txt)
clean:
ifneq ($(FILES),)
    rm -Rf $(FILES)
endif
@@ -1,53 +0,0 @@
#!/bin/bash

echo "-report_about WARNING"
echo "-indev ${PWD}/../results/images/boot.iso"
echo "-outdev ${ISO_NAME}"
echo "-boot_image any replay"
echo "-joliet on"
echo "-compliance joliet_long_names"
pushd "${PWD}/../results" > /dev/null
#for file in $(find .)
for file in ./boot/grub2/grub.cfg ./EFI/BOOT/grub.cfg
do
    if [[ "$file" == "./images/boot.iso" ]]
    then
        continue
    fi
    if [[ -f ${PWD}/${file} ]]
    then
        echo "-map ${PWD}/${file} ${file:2}"
        echo "-chmod 0444 ${file:2}"
    fi
done
popd > /dev/null

if [[ -n "${FLATPAK_DIR}" ]]
then
    pushd "${FLATPAK_DIR}" > /dev/null
    for file in $(find repo)
    do
        if [[ "${file}" == "repo/.lock" ]]
        then
            continue
        fi
        echo "-map ${PWD}/${file} flatpak/${file}"
        echo "-chmod 0444 flatpak/${file}"
    done
    popd > /dev/null
fi

if [ -f "${PWD}/../sb_pubkey.der" ]
then
    echo "-map ${PWD}/../sb_pubkey.der sb_pubkey.der"
    echo "-chmod 0444 /sb_pubkey.der"
fi

pushd "${PWD}/../container" > /dev/null
for file in $(find "${IMAGE_NAME}-${IMAGE_TAG}" -type f)
do
    echo "-map ${PWD}/${file} ${file}"
    echo "-chmod 0444 ${file}"
done
popd > /dev/null
echo "-end"
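The script above only emits an xorriso command list; a minimal sketch of how such a list is consumed, with example variable values that are assumptions rather than project defaults:

    # Hypothetical driver: render the command list, then feed it to xorriso's dialog mode
    ISO_NAME=deploy.iso IMAGE_NAME=my-image IMAGE_TAG=latest bash gen_input.sh > input.txt
    xorriso -dialog on < input.txt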
37 xorriso/gen_input.sh.in Normal file
@@ -0,0 +1,37 @@
#!/bin/bash

echo "-indev $(pwd)/boot.iso"
echo "-outdev $(pwd)/build/deploy.iso"
echo "-boot_image any replay"
echo "-joliet on"
echo "-compliance joliet_long_names"
echo "-map $(pwd)/results/boot/grub2/grub.cfg boot/grub2/grub.cfg"
echo "-chmod 0444 boot/grub2/grub.cfg"
echo "-map $(pwd)/results/EFI/BOOT/grub.cfg EFI/BOOT/grub.cfg"
echo "-chmod 0444 EFI/BOOT/grub.cfg"

if [[ -n "${FLATPAK_DIR}" ]]
then
    pushd ${FLATPAK_DIR} > /dev/null
    for file in $(find *)
    do
        echo "-map $(pwd)/${file} flatpak/${file}"
        echo "-chmod 0444 flatpak/${file}"
    done
    popd > /dev/null
fi

if [ -f $(pwd)/sb_pubkey.der ]
then
    echo "-map $(pwd)/sb_pubkey.der sb_pubkey.der"
    echo "-chmod 0444 /sb_pubkey.der"
fi

pushd container > /dev/null
for file in $(find ${IMAGE_NAME}-${IMAGE_TAG})
do
    echo "-map $(pwd)/${file} ${file}"
    echo "-chmod 0444 ${file}"
done
popd > /dev/null
echo "-end"