Initial commit
Some checks failed
Check Case Normalization / Build image using Buildah (push) Failing after 1s
CI checks / Run ESLint (push) Failing after 1s
CI checks / Check Distribution (push) Failing after 0s
CI checks / Check Input and Output enums (push) Failing after 0s
Build with docker/metadata-action / Build image with Containerfile (push) Failing after 2s
Build with docker/metadata-action / Build image without Containerfile (push) Failing after 1s
Link checker / Check links in markdown (push) Failing after 1s
Multiarch build / Build multi-platform image using Containerfile (push) Has been cancelled
Multiarch build / Build multi-architecture image from scratch (push) Has been cancelled
Multiarch build / Build multi-architecture image using Containerfile (push) Has been cancelled
Build / Build image using Buildah (push) Failing after 1s
Build from containerfile / Build image using Buildah (push) Failing after 1s
This commit is contained in:
parent b7b1ae7dbe
commit 75282d0c68
20 changed files with 1897 additions and 0 deletions
15 .editorconfig Normal file
@@ -0,0 +1,15 @@
root = true

[*]
charset = utf-8
tab_width = 4
indent_size = 4
end_of_line = lf
indent_style = space
max_line_length = 120
insert_final_newline = true
trim_trailing_whitespace = true

[*.{yml,yaml}]
tab_width = 2
indent_size = 2
3 .github/install_latest_buildah.sh vendored Normal file
@@ -0,0 +1,3 @@
sudo apt-key add - < Release.key
sudo apt-get update -qq
sudo apt-get -qq -y install buildah
66 .github/workflows/check-lowercase.yaml vendored Normal file
@@ -0,0 +1,66 @@
# This workflow will perform a test whenever there
# is some change in code done to ensure that the changes
# are not buggy and we are getting the desired output.
name: Check Case Normalization
on:
  push:
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

env:
  IMAGE_NAME: ImageCaseTest
  IMAGE_TAGS: v1 TagCaseTest

jobs:
  build:
    name: Build image using Buildah
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      - name: Create Dockerfile
        run: |
          cat > Containerfile<<EOF
          FROM busybox
          RUN echo "hello world"
          EOF

      # Build image using Buildah action
      - name: Build Image
        id: build_image
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          layers: false
          tags: ${{ env.IMAGE_TAGS }}
          containerfiles: |
            ./Containerfile
          extra-args: |
            --pull

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image.outputs.image }}"
          echo "Tags: ${{ steps.build_image.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}"

      # Check if image is built
      - name: Check images created
        run: buildah images
48 .github/workflows/ci.yml vendored Normal file
@@ -0,0 +1,48 @@
name: CI checks
on:
  push:
  pull_request:

jobs:
  lint:
    name: Run ESLint
    runs-on: container

    steps:
      - uses: actions/checkout@v4
      - run: npm ci
      - run: npm run lint

  check-dist:
    name: Check Distribution
    runs-on: container
    env:
      BUNDLE_FILE: "dist/index.js"
      BUNDLE_COMMAND: "npm run bundle"
    steps:
      - uses: actions/checkout@v4

      - name: Install
        run: npm ci

      - name: Verify Latest Bundle
        uses: redhat-actions/common/bundle-verifier@v1
        with:
          bundle_file: ${{ env.BUNDLE_FILE }}
          bundle_command: ${{ env.BUNDLE_COMMAND }}

  check-inputs-outputs:
    name: Check Input and Output enums
    runs-on: container
    env:
      IO_FILE: ./src/generated/inputs-outputs.ts
    steps:
      - uses: actions/checkout@v4

      - name: Install dependencies
        run: npm ci

      - name: Verify Input and Output enums
        uses: redhat-actions/common/action-io-generator@v1
        with:
          io_file: ${{ env.IO_FILE }}
65 .github/workflows/containerfile_build.yml vendored Normal file
@@ -0,0 +1,65 @@
# This workflow will perform a test whenever there
# is some change in code done to ensure that the changes
# are not buggy and we are getting the desired output.
name: Build from containerfile
on:
  push:
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

env:
  IMAGE_NAME: "hello-world"

jobs:
  build:
    name: Build image using Buildah
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      - name: Create Dockerfile
        run: |
          cat > Containerfile<<EOF
          FROM busybox
          RUN echo "hello world"
          EOF

      # Build image using Buildah action
      - name: Build Image
        id: build_image
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          layers: false
          tags: 'latest ${{ github.sha }}'
          containerfiles: |
            ./Containerfile
          extra-args: |
            --pull

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image.outputs.image }}"
          echo "Tags: ${{ steps.build_image.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}"

      # Check if image is built
      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'
185 .github/workflows/docker_metadata_action.yml vendored Normal file
@@ -0,0 +1,185 @@
# This workflow will perform a test whenever there
# is some change in code done to ensure that the changes
# are not buggy and we are getting the desired output.
name: Build with docker/metadata-action
on:
  push:
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

jobs:
  build-containerfile:
    name: Build image with Containerfile
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    env:
      IMAGE_NAME: "hello-world"

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4

      - name: Docker Metadata
        id: docker-metadata
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.IMAGE_NAME }}
          tags: |
            type=edge
            type=sha
            type=ref,event=branch
            type=ref,event=pr
            type=schedule
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}},enable=${{ !startsWith(github.ref, 'refs/tags/v0.') }}

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash .github/install_latest_buildah.sh

      - name: Create Dockerfile
        run: |
          cat > Containerfile<<EOF
          FROM busybox
          RUN echo "hello world"
          EOF

      # Build image using Buildah action
      - name: Build Image
        id: build_image
        uses: ./
        with:
          layers: false
          tags: ${{ steps.docker-metadata.outputs.tags }}
          labels: ${{ steps.docker-metadata.outputs.labels }}
          containerfiles: |
            ./Containerfile
          extra-args: |
            --pull

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image.outputs.image }}"
          echo "Tags: ${{ steps.build_image.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}"

      # Check if image is built
      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'

      - name: Check image metadata
        run: |
          set -x
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.OCIv1.config.Labels."org.opencontainers.image.title"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.OCIv1.config.Labels."org.opencontainers.image.description"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.Docker.config.Labels."org.opencontainers.image.title"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.Docker.config.Labels."org.opencontainers.image.description"'

  build-scratch:
    name: Build image without Containerfile
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    env:
      PROJECT_DIR: spring-petclinic
      IMAGE_NAME: spring-petclinic
      MVN_REPO_DIR: ~/.m2/repository

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4

      - name: Docker Metadata
        id: docker-metadata
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.IMAGE_NAME }}
          tags: |
            type=edge
            type=sha
            type=ref,event=branch
            type=ref,event=pr
            type=schedule
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}},enable=${{ !startsWith(github.ref, 'refs/tags/v0.') }}

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash .github/install_latest_buildah.sh

      # Checkout spring-petclinic github repository
      - name: Checkout spring-petclinic project
        uses: actions/checkout@v4
        with:
          repository: "spring-projects/spring-petclinic"
          path: ${{ env.PROJECT_DIR }}

      # Setup java.
      - name: Setup Java
        uses: actions/setup-java@v3
        with:
          distribution: 'temurin'
          java-version: '17'
          cache: 'maven'

      # Run maven to build the project
      - name: Maven
        working-directory: ${{ env.PROJECT_DIR }}
        run: |
          mvn package -ntp -B

      # Build image using Buildah action
      - name: Build Image
        id: build_image
        uses: ./
        with:
          tags: ${{ steps.docker-metadata.outputs.tags }}
          labels: ${{ steps.docker-metadata.outputs.labels }}
          base-image: 'registry.access.redhat.com/openjdk/openjdk-11-rhel7'
          # To avoid hardcoding a particular version of the binary.
          content: |
            ./spring-petclinic/target/spring-petclinic-*.jar
          entrypoint: |
            java
            -jar
            spring-petclinic-*.jar
          port: 8080
          arch: amd64
          workdir: "."

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image.outputs.image }}"
          echo "Tags: ${{ steps.build_image.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}"

      # Check if image is built
      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'

      - name: Check image metadata
        run: |
          set -x
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.OCIv1.config.Labels."org.opencontainers.image.title"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.OCIv1.config.Labels."org.opencontainers.image.description"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.Docker.config.Labels."org.opencontainers.image.title"'
          buildah inspect ${{ steps.build_image.outputs.image-with-tag }} | jq '.Docker.config.Labels."org.opencontainers.image.description"'
20 .github/workflows/link_check.yml vendored Normal file
@@ -0,0 +1,20 @@
name: Link checker
on:
  push:
    paths:
      - '**.md'
  pull_request:
    paths:
      - '**.md'
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

jobs:
  markdown-link-check:
    name: Check links in markdown
    runs-on: container
    steps:
      - uses: actions/checkout@v4
      - uses: gaurav-nelson/github-action-markdown-link-check@v1
        with:
          use-verbose-mode: true
229 .github/workflows/multiarch.yml vendored Normal file
@@ -0,0 +1,229 @@
name: Multiarch build
on:
  push:
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

env:
  PROJECT_DIR: spring-petclinic
  MVN_REPO_DIR: ~/.m2/repository
  IMAGE_TAG: latest

jobs:
  build-multiarch-containerfile:
    name: Build multi-architecture image using Containerfile
    env:
      IMAGE_NAME: hello-world-multiarch
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      - name: Install qemu dependency
        run: |
          sudo apt-get update
          sudo apt-get install -y qemu-user-static

      - name: Create Containerfile
        run: |
          cat > Containerfile<<EOF

          FROM docker.io/alpine:3.14

          RUN echo "hello world"

          ENTRYPOINT [ "sh", "-c", "echo -n 'Machine: ' && uname -m && echo -n 'Bits: ' && getconf LONG_BIT && echo 'goodbye world'" ]
          EOF

      - name: Build Image
        id: build_image_multiarch
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          tags: latest v1
          archs: amd64 # Single arch testcase
          containerfiles: |
            ./Containerfile

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image_multiarch.outputs.image }}"
          echo "Tags: ${{ steps.build_image_multiarch.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image_multiarch.outputs.image-with-tag }}"

      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'

      - name: Check image metadata
        run: |
          set -x
          buildah inspect ${{ steps.build_image_multiarch.outputs.image }}:${{ env.IMAGE_TAG }} | jq ".OCIv1.architecture"
          buildah inspect ${{ steps.build_image_multiarch.outputs.image }}:${{ env.IMAGE_TAG }} | jq ".Docker.architecture"

      - name: Run image
        run: |
          podman run --rm ${{ steps.build_image_multiarch.outputs.image }}:${{ env.IMAGE_TAG }}

  build-multiplatform-containerfile:
    name: Build multi-platform image using Containerfile
    env:
      IMAGE_NAME: hello-world-multiplatform
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      - name: Install qemu dependency
        run: |
          sudo apt-get update
          sudo apt-get install -y qemu-user-static

      - name: Create Containerfile
        run: |
          cat > Containerfile<<EOF

          FROM docker.io/alpine:3.16

          RUN echo "hello world"

          ENTRYPOINT [ "sh", "-c", "echo -n 'Machine: ' && uname -m && echo -n 'Bits: ' && getconf LONG_BIT && echo 'goodbye world'" ]
          EOF

      - name: Build Image
        id: build_image_multiplatform
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAG }}
          platforms: linux/amd64, linux/ppc64le
          containerfiles: |
            ./Containerfile

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image_multiplatform.outputs.image }}"
          echo "Tags: ${{ steps.build_image_multiplatform.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image_multiplatform.outputs.image-with-tag }}"

      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'

      - name: Check manifest
        run: |
          set -x
          buildah manifest inspect ${{ steps.build_image_multiplatform.outputs.image }}:${{ env.IMAGE_TAG }}

      - name: Run image
        run: |
          podman run --rm ${{ steps.build_image_multiplatform.outputs.image }}:${{ env.IMAGE_TAG }}

  build-multiarch-scratch:
    name: Build multi-architecture image from scratch
    env:
      IMAGE_NAME: spring-petclinic-multiarch
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      - name: Install qemu dependency
        run: |
          sudo apt-get update
          sudo apt-get install -y qemu-user-static

      # Checkout spring-petclinic github repository
      - name: Checkout spring-petclinic project
        uses: actions/checkout@v4
        with:
          repository: "spring-projects/spring-petclinic"
          path: ${{ env.PROJECT_DIR }}

      # Setup java.
      - name: Setup Java
        uses: actions/setup-java@v3
        with:
          distribution: 'temurin'
          java-version: '17'
          cache: 'maven'

      # Run maven to build the project
      - name: Maven
        working-directory: ${{ env.PROJECT_DIR }}
        run: |
          mvn package -ntp -B

      - name: Build Image
        id: build_image_multiarch
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAG }}
          base-image: 'registry.access.redhat.com/openjdk/openjdk-11-rhel7'
          archs: amd64, i386, ppc64le
          # To avoid hardcoding a particular version of the binary.
          content: |
            ./spring-petclinic/target/spring-petclinic-*.jar
          entrypoint: |
            java
            -jar
            spring-petclinic-*.jar
          port: 8080
          workdir: "."

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image_multiarch.outputs.image }}"
          echo "Tags: ${{ steps.build_image_multiarch.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image_multiarch.outputs.image-with-tag }}"

      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'

      - name: Check manifest
        run: |
          set -x
          buildah manifest inspect ${{ steps.build_image_multiarch.outputs.image }}:${{ env.IMAGE_TAG }}
87 .github/workflows/scratch_build.yml vendored Normal file
@@ -0,0 +1,87 @@
# This workflow will perform a test whenever there
# is some change in code done to ensure that the changes
# are not buggy and we are getting the desired output.
name: Build
on:
  push:
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # every day at midnight

env:
  PROJECT_DIR: spring-petclinic
  IMAGE_NAME: spring-petclinic
  MVN_REPO_DIR: ~/.m2/repository

jobs:
  build:
    name: Build image using Buildah
    runs-on: container
    strategy:
      fail-fast: false
      matrix:
        install_latest: [ false ]

    steps:

      # Checkout buildah action github repository
      - name: Checkout Buildah action
        uses: actions/checkout@v4
        with:
          path: "buildah-build"

      - name: Install latest buildah
        if: matrix.install_latest
        run: |
          bash buildah-build/.github/install_latest_buildah.sh

      # Checkout spring-petclinic github repository
      - name: Checkout spring-petclinic project
        uses: actions/checkout@v4
        with:
          repository: "spring-projects/spring-petclinic"
          path: ${{ env.PROJECT_DIR }}

      # Setup java.
      - name: Setup Java
        uses: actions/setup-java@v3
        with:
          distribution: 'temurin'
          java-version: '17'
          cache: 'maven'

      # Run maven to build the project
      - name: Maven
        working-directory: ${{ env.PROJECT_DIR }}
        run: |
          mvn package -ntp -B

      # Build image using Buildah action
      - name: Build Image
        id: build_image
        uses: ./buildah-build/
        with:
          image: ${{ env.IMAGE_NAME }}
          tags: 'latest ${{ github.sha }}'
          base-image: 'registry.access.redhat.com/openjdk/openjdk-11-rhel7'
          # To avoid hardcoding a particular version of the binary.
          content: |
            ./spring-petclinic/target/spring-petclinic-*.jar
          entrypoint: |
            java
            -jar
            spring-petclinic-*.jar
          port: 8080
          arch: amd64
          workdir: "."

      - name: Echo Outputs
        run: |
          echo "Image: ${{ steps.build_image.outputs.image }}"
          echo "Tags: ${{ steps.build_image.outputs.tags }}"
          echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}"

      # Check if image is built
      - name: Check images created
        run: buildah images | grep '${{ env.IMAGE_NAME }}'
36 .github/workflows/security_scan.yml vendored Normal file
@@ -0,0 +1,36 @@
name: Vulnerability Scan with CRDA
on:
  # push:
  workflow_dispatch:
  # pull_request_target:
  #   types: [ assigned, opened, synchronize, reopened, labeled, edited ]
  # schedule:
  #   - cron: '0 0 * * *' # every day at midnight

jobs:
  crda-scan:
    runs-on: container
    name: Scan project vulnerability with CRDA
    steps:

      - uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install CRDA
        uses: redhat-actions/openshift-tools-installer@v1
        with:
          source: github
          github_pat: ${{ github.token }}
          crda: "latest"

      - name: CRDA Scan
        id: scan
        uses: redhat-actions/crda@v1
        with:
          crda_key: ${{ secrets.CRDA_KEY }}
          fail_on: never
21 LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2020 Red Hat. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
16 README.md
@@ -0,0 +1,16 @@
# buildah-build
[](https://github.com/redhat-actions/buildah-build/actions?query=workflow%3A%22CI+checks%22)
[](https://github.com/redhat-actions/buildah-build/actions?query=workflow%3ABuild)
[](https://github.com/redhat-actions/buildah-build/actions?query=workflow%3A%22Build+from+containerfile%22)
[](https://github.com/redhat-actions/buildah-build/actions?query=workflow%3A%22Link+checker%22)
<br>
<br>
[](https://github.com/redhat-actions/buildah-build/tags)
[](./LICENSE)
[](./dist)

Buildah Build is a Gitea/Forgejo Action for building Docker-, Podman-, and Kubernetes-compatible images quickly and easily.

[Buildah](https://github.com/containers/buildah/tree/master/docs) only works on Linux.

This code is a heavily stripped-down version of the [Red Hat action](https://github.com/redhat-actions/buildah-build), adapted for Forgejo.
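A minimal usage sketch for this action, modelled on the test workflows added in this commit. It assumes a Forgejo runner labelled `container` with buildah installed; the `uses:` reference and image name are placeholders to adjust for wherever the action is hosted.

name: Example build
on:
  push:

jobs:
  build:
    runs-on: container
    steps:
      - uses: actions/checkout@v4

      # Build from a Containerfile in the repository root.
      - name: Build Image
        id: build_image
        uses: Philome/buildah-build@main   # placeholder reference to this repository
        with:
          image: my-app
          tags: latest ${{ github.sha }}
          containerfiles: |
            ./Containerfile

      - name: Show result
        run: echo "Built ${{ steps.build_image.outputs.image-with-tag }}"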
94 action.yml Normal file
@@ -0,0 +1,94 @@
name: 'Buildah Build'
description: 'Build a container image, with or without a Containerfile'
author: 'Phil and Red Hat'
branding:
  icon: circle
  color: red
inputs:
  image:
    description: 'The name (reference) of the image to build'
    required: false
  tags:
    description: 'The tags of the image to build. For multiple tags, separate by whitespace. For example, "latest v1".'
    required: false
    default: latest
  labels:
    description: 'The labels of the image to build. Separate by newline. For example, "io.containers.capabilities=sys_admin,mknod".'
    required: false
  base-image:
    description: 'The base image to use to create a new container image'
    required: false
  containerfiles:
    description: 'List of Containerfile paths (eg: ./Containerfile)'
    required: false
  dockerfiles:
    description: 'Alias for "containerfiles". "containerfiles" takes precedence if both are set.'
    required: false
  context:
    description: 'Path of the directory to use as context (default: .)'
    required: false
    default: '.'
  content:
    description: 'List of files/directories to copy inside the base image'
    required: false
  entrypoint:
    description: 'The entry point to set for containers based on image'
    required: false
  layers:
    description: 'Set to true to cache intermediate layers during build process'
    required: false
  port:
    description: 'The port to expose when running containers based on image'
    required: false
  workdir:
    description: 'The working directory to use within the container'
    required: false
  envs:
    description: 'List of environment variables to be set when running containers based on image'
    required: false
  build-args:
    description: 'List of --build-args to pass to buildah'
    required: false
  oci:
    description: 'Set to true to build using the OCI image format instead of the Docker image format'
    default: 'false'
    required: false
  arch:
    description:
      'Label the image with this ARCH, instead of defaulting to the host architecture'
    required: false
  archs:
    description: |
      'Same as input 'arch', use this for multiple architectures.
      Separate them by a comma'
    required: false
  platform:
    description: |
      Label the image with this PLATFORM, instead of defaulting to the host platform.
      Only supported for containerfile builds.
    required: false
  platforms:
    description: |
      'Same as input 'platform', use this for multiple platforms.
      Separate them by a comma'
    required: false
  extra-args:
    description: |
      Extra args to be passed to buildah bud and buildah from.
      Separate arguments by newline. Do not use quotes - @actions/exec will do the quoting for you.
    required: false
  tls-verify:
    description: |
      Require HTTPS and verify certificates when accessing the registry. Defaults to true.
    required: false
    default: 'true'
outputs:
  image:
    description: 'Name of the image built'
  tags:
    description: 'List of the tags that were created, separated by spaces'
  image-with-tag:
    description: 'Name of the image tagged with the first tag present'
runs:
  using: 'node20'
  main: 'dist/index.js'
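The `image` and `tags` inputs combine in two ways; a short sketch with placeholder registry and image names. Mixing fully-qualified and plain tags in one `tags` value is rejected, and `image` is required whenever the tags are not full names (see the checks in src/index.ts).

# Option 1: separate image name and plain tags
with:
  image: quay.io/example/my-app
  tags: latest v1

# Option 2: fully-qualified tags only; `image` can be omitted
with:
  tags: quay.io/example/my-app:latest quay.io/example/my-app:v1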
41 package.json Normal file
@@ -0,0 +1,41 @@
{
    "name": "buildah-build",
    "version": "1.0",
    "engines": {
        "node": "20"
    },
    "description": "Action for building OCI-compatible images using buildah",
    "repository": {
        "type": "git",
        "url": "https://philome.mooo.com/code/Philome/buildah-build"
    },
    "main": "dist/index.js",
    "scripts": {
        "compile": "tsc -p .",
        "bundle": "ncc build src/index.ts --source-map --minify",
        "clean": "rm -rf out/ dist/",
        "lint": "eslint . --max-warnings=0",
        "generate-ios": "npx action-io-generator -w -o ./src/generated/inputs-outputs.ts"
    },
    "keywords": [],
    "author": "Phil, based on Red Hat",
    "license": "MIT",
    "dependencies": {
        "@actions/core": "1.10.1",
        "@actions/exec": "1.1.1",
        "@actions/io": "1.1.3",
        "ini": "4.1.1"
    },
    "devDependencies": {
        "@redhat-actions/action-io-generator": "1.5.0",
        "@redhat-actions/eslint-config": "1.3.2",
        "@redhat-actions/tsconfig": "1.2.0",
        "@types/ini": "1.3.31",
        "@types/node": "^20.0",
        "@typescript-eslint/eslint-plugin": "6.7.3",
        "@typescript-eslint/parser": "6.7.3",
        "@vercel/ncc": "0.38.0",
        "eslint": "8.50.0",
        "typescript": "5.2.2"
    }
}
311 src/buildah.ts Normal file
@@ -0,0 +1,311 @@
/***************************************************************************************************
 * Copyright (c) Red Hat, Inc. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 **************************************************************************************************/

import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as path from "path";
import CommandResult from "./types";
import { isStorageDriverOverlay, findFuseOverlayfsPath, getFullImageName } from "./utils";

export interface BuildahConfigSettings {
    entrypoint?: string[];
    envs?: string[];
    port?: string;
    workingdir?: string;
    arch?: string;
    labels?: string[];
}

interface Buildah {
    buildUsingDocker(
        image: string, context: string, containerFiles: string[], buildArgs: string[],
        useOCI: boolean, labels: string[], layers: string,
        extraArgs: string[], tlsVerify: boolean, arch?: string, platform?: string,
    ): Promise<CommandResult>;
    from(baseImage: string, tlsVerify: boolean, extraArgs: string[]): Promise<CommandResult>;
    config(container: string, setting: BuildahConfigSettings): Promise<CommandResult>;
    copy(container: string, contentToCopy: string[]): Promise<CommandResult | undefined>;
    commit(container: string, newImageName: string, useOCI: boolean): Promise<CommandResult>;
    manifestCreate(manifest: string): Promise<void>;
    manifestAdd(manifest: string, imageName: string, tags: string[]): Promise<void>;
}

export class BuildahCli implements Buildah {
    private readonly executable: string;

    public storageOptsEnv = "";

    constructor(executable: string) {
        this.executable = executable;
    }

    // Checks for storage driver if found "overlay",
    // then checks if "fuse-overlayfs" is installed.
    // If yes, add mount program to use "fuse-overlayfs"
    async setStorageOptsEnv(): Promise<void> {
        if (await isStorageDriverOverlay()) {
            const fuseOverlayfsPath = await findFuseOverlayfsPath();
            if (fuseOverlayfsPath) {
                core.info(`Overriding storage mount_program with "fuse-overlayfs" in environment`);
                this.storageOptsEnv = `overlay.mount_program=${fuseOverlayfsPath}`;
            }
            else {
                core.warning(`"fuse-overlayfs" is not found. Install it before running this action. `
                    + `For more detail see https://github.com/redhat-actions/buildah-build/issues/45`);
            }
        }
        else {
            core.info("Storage driver is not 'overlay', so not overriding storage configuration");
        }
    }

    private static getImageFormatOption(useOCI: boolean): string[] {
        return [ "--format", useOCI ? "oci" : "docker" ];
    }

    async buildUsingDocker(
        image: string,
        context: string,
        containerFiles: string[],
        buildArgs: string[],
        useOCI: boolean,
        labels: string[],
        layers: string,
        extraArgs: string[],
        tlsVerify: boolean,
        arch?: string,
        platform?: string
    ): Promise<CommandResult> {
        const args: string[] = [ "bud" ];
        if (arch) {
            args.push("--arch");
            args.push(arch);
        }
        if (platform) {
            args.push("--platform");
            args.push(platform);
        }
        containerFiles.forEach((file) => {
            args.push("-f");
            args.push(file);
        });
        labels.forEach((label) => {
            args.push("--label");
            args.push(label);
        });
        buildArgs.forEach((buildArg) => {
            args.push("--build-arg");
            args.push(buildArg);
        });
        args.push(...BuildahCli.getImageFormatOption(useOCI));
        args.push(`--tls-verify=${tlsVerify}`);
        if (layers) {
            args.push(`--layers=${layers}`);
        }
        if (extraArgs.length > 0) {
            args.push(...extraArgs);
        }
        args.push("-t");
        args.push(image);
        args.push(context);
        return this.execute(args);
    }

    async from(baseImage: string, tlsVerify: boolean, extraArgs: string[]): Promise<CommandResult> {
        const args: string[] = [ "from" ];
        args.push(`--tls-verify=${tlsVerify}`);
        if (extraArgs.length > 0) {
            args.push(...extraArgs);
        }
        args.push(baseImage);
        return this.execute(args);
    }

    async copy(container: string, contentToCopy: string[], contentPath?: string): Promise<CommandResult | undefined> {
        if (contentToCopy.length === 0) {
            return undefined;
        }

        core.debug("copy");
        core.debug(container);
        core.debug("content: " + contentToCopy.join(" "));
        if (contentToCopy.length > 0) {
            const args: string[] = [ "copy", container ].concat(contentToCopy);
            if (contentPath) {
                args.push(contentPath);
            }
            return this.execute(args);
        }

        return undefined;
    }

    async config(container: string, settings: BuildahConfigSettings): Promise<CommandResult> {
        core.debug("config");
        core.debug(container);
        const args: string[] = [ "config" ];
        if (settings.entrypoint) {
            args.push("--entrypoint");
            args.push(BuildahCli.convertArrayToStringArg(settings.entrypoint));
        }
        if (settings.port) {
            args.push("--port");
            args.push(settings.port);
        }
        if (settings.envs) {
            settings.envs.forEach((env) => {
                args.push("--env");
                args.push(env);
            });
        }
        if (settings.arch) {
            args.push("--arch");
            args.push(settings.arch);
        }
        if (settings.workingdir) {
            args.push("--workingdir");
            args.push(settings.workingdir);
        }
        if (settings.labels) {
            settings.labels.forEach((label) => {
                args.push("--label");
                args.push(label);
            });
        }
        args.push(container);
        return this.execute(args);
    }

    async commit(container: string, newImageName: string, useOCI: boolean): Promise<CommandResult> {
        core.debug("commit");
        core.debug(container);
        core.debug(newImageName);
        const args: string[] = [
            "commit", ...BuildahCli.getImageFormatOption(useOCI),
            "--squash", container, newImageName,
        ];
        return this.execute(args);
    }

    async tag(imageName: string, tags: string[]): Promise<void> {
        const args: string[] = [ "tag" ];
        const builtImage = [];
        for (const tag of tags) {
            args.push(getFullImageName(imageName, tag));
            builtImage.push(getFullImageName(imageName, tag));
        }
        core.info(`Tagging the built image with tags ${tags.toString()}`);
        await this.execute(args);
        core.info(`✅ Successfully built image${builtImage.length !== 1 ? "s" : ""} "${builtImage.join(", ")}"`);
    }

    // Unfortunately buildah doesn't support the exists command yet
    // https://github.com/containers/buildah/issues/4217

    // async manifestExists(manifest: string): Promise<boolean> {
    //     const args: string[] = [ "manifest", "exists" ];
    //     args.push(manifest);
    //     const execOptions: exec.ExecOptions = {ignoreReturnCode: true};
    //     core.info(`Checking if manifest ${manifest} exists`);
    //     const {exitCode} = await this.execute(args, execOptions);
    //     return exitCode ? false : true;
    // }

    async manifestRm(manifest: string): Promise<void> {
        const execOptions: exec.ExecOptions = { ignoreReturnCode: true };
        const args: string[] = [ "manifest", "rm" ];
        args.push(manifest);
        core.info(`Removing existing manifest ${manifest}`);
        await this.execute(args, execOptions);
    }

    async manifestCreate(manifest: string): Promise<void> {
        const args: string[] = [ "manifest", "create" ];
        args.push(manifest);
        core.info(`Creating manifest ${manifest}`);
        await this.execute(args);
    }

    async manifestAdd(manifest: string, image: string): Promise<void> {
        const args: string[] = [ "manifest", "add" ];
        args.push(manifest);
        args.push(image);
        core.info(`Adding image "${image}" to the manifest.`);
        await this.execute(args);
    }

    private static convertArrayToStringArg(args: string[]): string {
        let arrayAsString = "[";
        args.forEach((arg) => {
            arrayAsString += `"${arg}",`;
        });
        return `${arrayAsString.slice(0, -1)}]`;
    }

    async execute(
        args: string[],
        execOptions: exec.ExecOptions & { group?: boolean } = {},
    ): Promise<CommandResult> {
        // ghCore.info(`${EXECUTABLE} ${args.join(" ")}`)

        let stdout = "";
        let stderr = "";

        const finalExecOptions = { ...execOptions };
        finalExecOptions.ignoreReturnCode = true; // the return code is processed below

        finalExecOptions.listeners = {
            stdline: (line): void => {
                stdout += line + "\n";
            },
            errline: (line): void => {
                stderr += line + "\n";
            },
        };

        if (execOptions.group) {
            const groupName = [ this.executable, ...args ].join(" ");
            core.startGroup(groupName);
        }

        // To solve https://github.com/redhat-actions/buildah-build/issues/45
        const execEnv: { [key: string]: string } = {};
        Object.entries(process.env).forEach(([ key, value ]) => {
            if (value != null) {
                execEnv[key] = value;
            }
        });

        if (this.storageOptsEnv) {
            execEnv.STORAGE_OPTS = this.storageOptsEnv;
        }

        finalExecOptions.env = execEnv;

        try {
            const exitCode = await exec.exec(this.executable, args, finalExecOptions);

            if (execOptions.ignoreReturnCode !== true && exitCode !== 0) {
                // Throwing the stderr as part of the Error makes the stderr
                // show up in the action outline, which saves some clicking when debugging.
                let error = `${path.basename(this.executable)} exited with code ${exitCode}`;
                if (stderr) {
                    error += `\n${stderr}`;
                }
                throw new Error(error);
            }

            return {
                exitCode, output: stdout, error: stderr,
            };
        }

        finally {
            if (execOptions.group) {
                core.endGroup();
            }
        }
    }
}
154 src/generated/inputs-outputs.ts Normal file
@@ -0,0 +1,154 @@
// This file was auto-generated by action-io-generator. Do not edit by hand!
export enum Inputs {
    /**
     * Label the image with this ARCH, instead of defaulting to the host architecture
     * Required: false
     * Default: None.
     */
    ARCH = "arch",
    /**
     * 'Same as input 'arch', use this for multiple architectures.
     * Separate them by a comma'
     * Required: false
     * Default: None.
     */
    ARCHS = "archs",
    /**
     * The base image to use to create a new container image
     * Required: false
     * Default: None.
     */
    BASE_IMAGE = "base-image",
    /**
     * List of --build-args to pass to buildah
     * Required: false
     * Default: None.
     */
    BUILD_ARGS = "build-args",
    /**
     * List of Containerfile paths (eg: ./Containerfile)
     * Required: false
     * Default: None.
     */
    CONTAINERFILES = "containerfiles",
    /**
     * List of files/directories to copy inside the base image
     * Required: false
     * Default: None.
     */
    CONTENT = "content",
    /**
     * Path of the directory to use as context (default: .)
     * Required: false
     * Default: "."
     */
    CONTEXT = "context",
    /**
     * Alias for "containerfiles". "containerfiles" takes precedence if both are set.
     * Required: false
     * Default: None.
     */
    DOCKERFILES = "dockerfiles",
    /**
     * The entry point to set for containers based on image
     * Required: false
     * Default: None.
     */
    ENTRYPOINT = "entrypoint",
    /**
     * List of environment variables to be set when running containers based on image
     * Required: false
     * Default: None.
     */
    ENVS = "envs",
    /**
     * Extra args to be passed to buildah bud and buildah from.
     * Separate arguments by newline. Do not use quotes - @actions/exec will do the quoting for you.
     * Required: false
     * Default: None.
     */
    EXTRA_ARGS = "extra-args",
    /**
     * The name (reference) of the image to build
     * Required: false
     * Default: None.
     */
    IMAGE = "image",
    /**
     * The labels of the image to build. Separate by newline. For example, "io.containers.capabilities=sys_admin,mknod".
     * Required: false
     * Default: None.
     */
    LABELS = "labels",
    /**
     * Set to true to cache intermediate layers during build process
     * Required: false
     * Default: None.
     */
    LAYERS = "layers",
    /**
     * Set to true to build using the OCI image format instead of the Docker image format
     * Required: false
     * Default: "false"
     */
    OCI = "oci",
    /**
     * Label the image with this PLATFORM, instead of defaulting to the host platform.
     * Only supported for containerfile builds.
     * Required: false
     * Default: None.
     */
    PLATFORM = "platform",
    /**
     * 'Same as input 'platform', use this for multiple platforms.
     * Separate them by a comma'
     * Required: false
     * Default: None.
     */
    PLATFORMS = "platforms",
    /**
     * The port to expose when running containers based on image
     * Required: false
     * Default: None.
     */
    PORT = "port",
    /**
     * The tags of the image to build. For multiple tags, separate by whitespace. For example, "latest v1".
     * Required: false
     * Default: "latest"
     */
    TAGS = "tags",
    /**
     * Require HTTPS and verify certificates when accessing the registry. Defaults to true.
     * Required: false
     * Default: "true"
     */
    TLS_VERIFY = "tls-verify",
    /**
     * The working directory to use within the container
     * Required: false
     * Default: None.
     */
    WORKDIR = "workdir",
}

export enum Outputs {
    /**
     * Name of the image built
     * Required: false
     * Default: None.
     */
    IMAGE = "image",
    /**
     * Name of the image tagged with the first tag present
     * Required: false
     * Default: None.
     */
    IMAGE_WITH_TAG = "image-with-tag",
    /**
     * List of the tags that were created, separated by spaces
     * Required: false
     * Default: None.
     */
    TAGS = "tags",
}
311
src/index.ts
Normal file
311
src/index.ts
Normal file
|
@ -0,0 +1,311 @@
|
||||||
|
/***************************************************************************************************
|
||||||
|
* Copyright (c) Red Hat, Inc. All rights reserved.
|
||||||
|
* Licensed under the MIT License. See LICENSE file in the project root for license information.
|
||||||
|
**************************************************************************************************/
|
||||||
|
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
import * as io from "@actions/io";
|
||||||
|
import * as path from "path";
|
||||||
|
import { Inputs, Outputs } from "./generated/inputs-outputs";
|
||||||
|
import { BuildahCli, BuildahConfigSettings } from "./buildah";
|
||||||
|
import {
|
||||||
|
getArch, getPlatform, getContainerfiles, getInputList, splitByNewline,
|
||||||
|
isFullImageName, getFullImageName, removeIllegalCharacters,
|
||||||
|
} from "./utils";
|
||||||
|
|
||||||
|
export async function run(): Promise<void> {
|
||||||
|
//if (process.env.RUNNER_OS !== "Linux") {
|
||||||
|
//throw new Error("buildah, and therefore this action, only works on Linux. Please use a Linux runner.");
|
||||||
|
//}
|
||||||
|
|
||||||
|
// get buildah cli
|
||||||
|
const buildahPath = await io.which("buildah", true);
|
||||||
|
const cli: BuildahCli = new BuildahCli(buildahPath);
|
||||||
|
|
||||||
|
// print buildah version
|
||||||
|
await cli.execute([ "version" ], { group: true });
|
||||||
|
|
||||||
|
// Check if fuse-overlayfs exists and find the storage driver
|
||||||
|
await cli.setStorageOptsEnv();
|
||||||
|
|
||||||
|
const DEFAULT_TAG = "latest";
|
||||||
|
const workspace = process.env.GITHUB_WORKSPACE || process.cwd();
|
||||||
|
const containerFiles = getContainerfiles();
|
||||||
|
const image = core.getInput(Inputs.IMAGE);
|
||||||
|
const tags = core.getInput(Inputs.TAGS);
|
||||||
|
const tagsList: string[] = tags.trim().split(/\s+/);
|
||||||
|
const labels = core.getInput(Inputs.LABELS);
|
||||||
|
const labelsList: string[] = labels ? splitByNewline(labels) : [];
|
||||||
|
|
||||||
|
const normalizedTagsList: string[] = [];
|
||||||
|
let isNormalized = false;
|
||||||
|
for (const tag of tagsList) {
|
||||||
|
normalizedTagsList.push(tag.toLowerCase());
|
||||||
|
if (tag.toLowerCase() !== tag) {
|
||||||
|
isNormalized = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const normalizedImage = image.toLowerCase();
|
||||||
|
if (isNormalized || image !== normalizedImage) {
|
||||||
|
core.warning(`Reference to image and/or tag must be lowercase.`
|
||||||
|
+ ` Reference has been converted to be compliant with standard.`);
|
||||||
|
}
|
||||||
|

    // info message if the user doesn't provide any tag
    if (tagsList.length === 0) {
        core.info(`Input "${Inputs.TAGS}" is not provided, using default tag "${DEFAULT_TAG}"`);
        tagsList.push(DEFAULT_TAG);
    }

    const inputExtraArgsStr = core.getInput(Inputs.EXTRA_ARGS);
    let buildahExtraArgs: string[] = [];
    if (inputExtraArgsStr) {
        // transform the array of lines into an array of arguments
        // by splitting over lines, then over spaces, then trimming.
        const lines = splitByNewline(inputExtraArgsStr);
        buildahExtraArgs = lines.flatMap((line) => line.split(" ")).map((arg) => arg.trim());
    }
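    // Illustrative example (hypothetical input): a value for Inputs.EXTRA_ARGS such as
    //     --squash --rm
    //     --no-cache
    // becomes [ "--squash", "--rm", "--no-cache" ] after the newline/space split and trim above.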

    // check if all tags provided are in `image:tag` format
    const isFullImageNameTag = isFullImageName(normalizedTagsList[0]);
    if (normalizedTagsList.some((tag) => isFullImageName(tag) !== isFullImageNameTag)) {
        throw new Error(`Input "${Inputs.TAGS}" cannot have a mix of full name and non full name tags. Refer to https://github.com/redhat-actions/buildah-build#image-tag-inputs`);
    }
    if (!isFullImageNameTag && !normalizedImage) {
        throw new Error(`Input "${Inputs.IMAGE}" must be provided when not using full image name tags. Refer to https://github.com/redhat-actions/buildah-build#image-tag-inputs`);
    }

    const newImage = getFullImageName(normalizedImage, normalizedTagsList[0]);
    const useOCI = core.getInput(Inputs.OCI) === "true";

    const archs = getArch();
    const platforms = getPlatform();

    if ((archs.length > 0) && (platforms.length > 0)) {
        throw new Error("The --platform option may not be used in combination with the --arch option.");
    }

    const builtImage = [];
    if (containerFiles.length !== 0) {
        builtImage.push(...await doBuildUsingContainerFiles(
            cli,
            newImage,
            workspace,
            containerFiles,
            useOCI,
            archs,
            platforms,
            labelsList,
            buildahExtraArgs
        ));
    }
    else {
        if (platforms.length > 0) {
            throw new Error("The --platform option is not supported for builds without containerfiles.");
        }
        builtImage.push(...await doBuildFromScratch(cli, newImage, useOCI, archs, labelsList, buildahExtraArgs));
    }

    if ((archs.length > 1) || (platforms.length > 1)) {
        core.info(`Creating manifest with tag${normalizedTagsList.length !== 1 ? "s" : ""} `
            + `"${normalizedTagsList.join(", ")}"`);
        const builtManifest = [];
        for (const tag of normalizedTagsList) {
            const manifestName = getFullImageName(normalizedImage, tag);
            // Force-remove the existing manifest to prevent errors on recurring builds on the same machine
            await cli.manifestRm(manifestName);
            await cli.manifestCreate(manifestName);
            builtManifest.push(manifestName);

            for (const arch of archs) {
                const tagSuffix = removeIllegalCharacters(arch);
                await cli.manifestAdd(manifestName, `${newImage}-${tagSuffix}`);
            }

            for (const platform of platforms) {
                const tagSuffix = removeIllegalCharacters(platform);
                await cli.manifestAdd(manifestName, `${newImage}-${tagSuffix}`);
            }
        }
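        // Rough illustration (hypothetical values; assumes the manifest* helpers in ./buildah wrap the
        // corresponding "buildah manifest" subcommands): for image "my-app", tags [ "v1" ] and
        // archs [ "amd64", "arm64" ], the loop above is roughly equivalent to:
        //     buildah manifest rm my-app:v1
        //     buildah manifest create my-app:v1
        //     buildah manifest add my-app:v1 my-app:v1-amd64
        //     buildah manifest add my-app:v1 my-app:v1-arm64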

        core.info(`✅ Successfully built image${builtImage.length !== 1 ? "s" : ""} "${builtImage.join(", ")}" `
            + `and manifest${builtManifest.length !== 1 ? "s" : ""} "${builtManifest.join(", ")}"`);
    }
    else if (normalizedTagsList.length > 1) {
        await cli.tag(normalizedImage, normalizedTagsList);
    }
    else if (normalizedTagsList.length === 1) {
        core.info(`✅ Successfully built image "${getFullImageName(normalizedImage, normalizedTagsList[0])}"`);
    }

    core.setOutput(Outputs.IMAGE, normalizedImage);
    core.setOutput(Outputs.TAGS, tags);
    core.setOutput(Outputs.IMAGE_WITH_TAG, newImage);
}

async function doBuildUsingContainerFiles(
    cli: BuildahCli,
    newImage: string,
    workspace: string,
    containerFiles: string[],
    useOCI: boolean,
    archs: string[],
    platforms: string[],
    labels: string[],
    extraArgs: string[]
): Promise<string[]> {
    if (containerFiles.length === 1) {
        core.info(`Performing build from Containerfile`);
    }
    else {
        core.info(`Performing build from ${containerFiles.length} Containerfiles`);
    }

    const context = path.join(workspace, core.getInput(Inputs.CONTEXT));
    const buildArgs = getInputList(Inputs.BUILD_ARGS);
    const containerFileAbsPaths = containerFiles.map((file) => path.join(workspace, file));
    const layers = core.getInput(Inputs.LAYERS);
    const tlsVerify = core.getInput(Inputs.TLS_VERIFY) === "true";

    const builtImage = [];
    // since a multi-arch image cannot reuse a single tag,
    // the arch/platform is appended to the tag
    if (archs.length > 0 || platforms.length > 0) {
        for (const arch of archs) {
            // handled separately because no tagSuffix is needed
            // if only one image has to be built
            let tagSuffix = "";
            if (archs.length > 1) {
                tagSuffix = `-${removeIllegalCharacters(arch)}`;
            }
            await cli.buildUsingDocker(
                `${newImage}${tagSuffix}`,
                context,
                containerFileAbsPaths,
                buildArgs,
                useOCI,
                labels,
                layers,
                extraArgs,
                tlsVerify,
                arch
            );
            builtImage.push(`${newImage}${tagSuffix}`);
        }

        for (const platform of platforms) {
            let tagSuffix = "";
            if (platforms.length > 1) {
                tagSuffix = `-${removeIllegalCharacters(platform)}`;
            }
            await cli.buildUsingDocker(
                `${newImage}${tagSuffix}`,
                context,
                containerFileAbsPaths,
                buildArgs,
                useOCI,
                labels,
                layers,
                extraArgs,
                tlsVerify,
                undefined,
                platform
            );
            builtImage.push(`${newImage}${tagSuffix}`);
        }
    }
    else if (archs.length === 1 || platforms.length === 1) {
        await cli.buildUsingDocker(
            newImage,
            context,
            containerFileAbsPaths,
            buildArgs,
            useOCI,
            labels,
            layers,
            extraArgs,
            tlsVerify,
            archs[0],
            platforms[0]
        );
        builtImage.push(newImage);
    }
    else {
        await cli.buildUsingDocker(
            newImage,
            context,
            containerFileAbsPaths,
            buildArgs,
            useOCI,
            labels,
            layers,
            extraArgs,
            tlsVerify
        );
        builtImage.push(newImage);
    }

    return builtImage;
}

async function doBuildFromScratch(
    cli: BuildahCli,
    newImage: string,
    useOCI: boolean,
    archs: string[],
    labels: string[],
    extraArgs: string[]
): Promise<string[]> {
    core.info(`Performing build from scratch`);

    const baseImage = core.getInput(Inputs.BASE_IMAGE, { required: true });
    const content = getInputList(Inputs.CONTENT);
    const entrypoint = getInputList(Inputs.ENTRYPOINT);
    const port = core.getInput(Inputs.PORT);
    const workingDir = core.getInput(Inputs.WORKDIR);
    const envs = getInputList(Inputs.ENVS);
    const tlsVerify = core.getInput(Inputs.TLS_VERIFY) === "true";

    const container = await cli.from(baseImage, tlsVerify, extraArgs);
    const containerId = container.output.replace("\n", "");

    const builtImage = [];
    if (archs.length > 0) {
        for (const arch of archs) {
            let tagSuffix = "";
            if (archs.length > 1) {
                tagSuffix = `-${removeIllegalCharacters(arch)}`;
            }
            const newImageConfig: BuildahConfigSettings = {
                entrypoint,
                port,
                workingdir: workingDir,
                envs,
                arch,
                labels,
            };
            await cli.config(containerId, newImageConfig);
            await cli.copy(containerId, content);
            await cli.commit(containerId, `${newImage}${tagSuffix}`, useOCI);
            builtImage.push(`${newImage}${tagSuffix}`);
        }
    }
    else {
        const newImageConfig: BuildahConfigSettings = {
            entrypoint,
            port,
            workingdir: workingDir,
            envs,
            labels,
        };
        await cli.config(containerId, newImageConfig);
        await cli.copy(containerId, content);
        await cli.commit(containerId, newImage, useOCI);
        builtImage.push(newImage);
    }

    return builtImage;
}

run().catch(core.setFailed);
12
src/types.ts
Normal file
@@ -0,0 +1,12 @@
/***************************************************************************************************
 * Copyright (c) Red Hat, Inc. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 **************************************************************************************************/

type CommandResult = {
    exitCode: number
    output: string
    error: string
};

export default CommandResult;
173
src/utils.ts
Normal file
@@ -0,0 +1,173 @@
/***************************************************************************************************
 * Copyright (c) Red Hat, Inc. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 **************************************************************************************************/

import * as ini from "ini";
import { promises as fs } from "fs";
import * as core from "@actions/core";
import * as path from "path";
import * as io from "@actions/io";
import * as os from "os";
import { Inputs } from "./generated/inputs-outputs";

async function findStorageDriver(filePaths: string[]): Promise<string> {
    let storageDriver = "";
    for (const filePath of filePaths) {
        core.debug(`Checking if the storage file exists at ${filePath}`);
        if (await fileExists(filePath)) {
            core.debug(`Storage file exists at ${filePath}`);
            const fileContent = ini.parse(await fs.readFile(filePath, "utf-8"));
            if (fileContent.storage.driver) {
                storageDriver = fileContent.storage.driver;
            }
        }
    }
    return storageDriver;
}

export async function isStorageDriverOverlay(): Promise<boolean> {
    let xdgConfigHome = path.join(os.homedir(), ".config");
    if (process.env.XDG_CONFIG_HOME) {
        xdgConfigHome = process.env.XDG_CONFIG_HOME;
    }
    const filePaths: string[] = [
        "/etc/containers/storage.conf",
        path.join(xdgConfigHome, "containers/storage.conf"),
    ];
    const storageDriver = await findStorageDriver(filePaths);
    return (storageDriver === "overlay");
}
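// Illustrative example (hypothetical file contents): a storage.conf containing
//     [storage]
//     driver = "overlay"
// is parsed by ini.parse() into an object like { storage: { driver: "overlay" } }, so
// isStorageDriverOverlay() resolves to true for that configuration.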

async function fileExists(filePath: string): Promise<boolean> {
    try {
        await fs.access(filePath);
        return true;
    }
    catch (err) {
        return false;
    }
}

export async function findFuseOverlayfsPath(): Promise<string | undefined> {
    let fuseOverlayfsPath;
    try {
        fuseOverlayfsPath = await io.which("fuse-overlayfs");
    }
    catch (err) {
        if (err instanceof Error) {
            core.debug(err.message);
        }
    }

    return fuseOverlayfsPath;
}

export function splitByNewline(s: string): string[] {
    return s.split(/\r?\n/);
}

export function getArch(): string[] {
    const archs = getCommaSeperatedInput(Inputs.ARCHS);

    const arch = core.getInput(Inputs.ARCH);

    if (arch && archs.length > 0) {
        core.warning(
            `Both "${Inputs.ARCH}" and "${Inputs.ARCHS}" inputs are set. `
            + `Please use "${Inputs.ARCHS}" if you want to provide multiple `
            + `ARCH else use "${Inputs.ARCH}". "${Inputs.ARCHS}" takes preference.`
        );
    }

    if (archs.length > 0) {
        return archs;
    }
    else if (arch) {
        return [ arch ];
    }
    return [];
}
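// Illustrative example (hypothetical values): with Inputs.ARCHS set to "amd64, arm64" and
// Inputs.ARCH set to "s390x", the warning above is emitted and getArch() returns
// [ "amd64", "arm64" ], since the ARCHS input takes preference.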

export function getPlatform(): string[] {
    const platform = core.getInput(Inputs.PLATFORM);
    const platforms = getCommaSeperatedInput(Inputs.PLATFORMS);

    if (platform && platforms.length > 0) {
        core.warning(
            `Both "${Inputs.PLATFORM}" and "${Inputs.PLATFORMS}" inputs are set. `
            + `Please use "${Inputs.PLATFORMS}" if you want to provide multiple `
            + `PLATFORM else use "${Inputs.PLATFORM}". "${Inputs.PLATFORMS}" takes preference.`
        );
    }

    if (platforms.length > 0) {
        core.debug("return platforms");
        return platforms;
    }
    else if (platform) {
        core.debug("return platform");
        return [ platform ];
    }
    core.debug("return empty");
    return [];
}

export function getContainerfiles(): string[] {
    // 'containerfile' should be used over 'dockerfile',
    // see https://github.com/redhat-actions/buildah-build/issues/57
    const containerfiles = getInputList(Inputs.CONTAINERFILES);
    const dockerfiles = getInputList(Inputs.DOCKERFILES);

    if (containerfiles.length !== 0 && dockerfiles.length !== 0) {
        core.warning(
            `Both "${Inputs.CONTAINERFILES}" and "${Inputs.DOCKERFILES}" inputs are set. `
            + `Please use only one of these two inputs, as they are aliases of one another. `
            + `"${Inputs.CONTAINERFILES}" takes precedence.`
        );
    }

    return containerfiles.length !== 0 ? containerfiles : dockerfiles;
}

export function getInputList(name: string): string[] {
    const items = core.getInput(name);
    if (!items) {
        return [];
    }
    const splitItems = splitByNewline(items);
    return splitItems
        .reduce<string[]>(
            (acc, line) => acc.concat(line).map((item) => item.trim()),
            [],
        );
}
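// Illustrative example (hypothetical input): a newline-separated value such as
//     VERSION=1.0
//     COMMIT=abc123
// is returned by getInputList() as [ "VERSION=1.0", "COMMIT=abc123" ].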

export function getCommaSeperatedInput(name: string): string[] {
    const items = core.getInput(name);
    if (items.length === 0) {
        core.debug("empty");
        return [];
    }
    const splitItems = items.split(",");
    return splitItems
        .reduce<string[]>(
            (acc, line) => acc.concat(line).map((item) => item.trim()),
            [],
        );
}
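// Illustrative example (hypothetical input): a value of "amd64, arm64" is returned by
// getCommaSeperatedInput() as [ "amd64", "arm64" ] after splitting on commas and trimming each item.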

export function isFullImageName(image: string): boolean {
    return image.indexOf(":") > 0;
}

export function getFullImageName(image: string, tag: string): string {
    if (isFullImageName(tag)) {
        return tag;
    }
    return `${image}:${tag}`;
}
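// Illustrative examples (hypothetical values): isFullImageName("quay.io/org/app:v1") is true,
// isFullImageName("v1") is false; getFullImageName("my-app", "v1") returns "my-app:v1", while
// getFullImageName("my-app", "other:v2") returns "other:v2" because the tag is already a full name.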

export function removeIllegalCharacters(item: string): string {
    return item.replace(/[^a-zA-Z0-9 ]/g, "");
}
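// Illustrative example (hypothetical value): removeIllegalCharacters("linux/amd64") returns
// "linuxamd64", since every character other than letters, digits and spaces is stripped.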
10
tsconfig.json
Normal file
@@ -0,0 +1,10 @@
{
    "extends": "@redhat-actions/tsconfig",
    "compilerOptions": {
        "rootDir": "src/",
        "outDir": "out/"
    },
    "include": [
        "src/"
    ],
}