9 changed files with 247 additions and 705 deletions
@@ -1,222 +1,28 @@
name: Build
permissions: {}

name: Build and Push Docker Image
on:
  push:
    paths:
      - ".github/workflows/build.yml"
      - "src/**"
      - "migrations/**"
      - "Cargo.*"
      - "build.rs"
      - "rust-toolchain.toml"
      - "rustfmt.toml"
      - "diesel.toml"
      - "docker/Dockerfile.j2"
      - "docker/DockerSettings.yaml"

  pull_request:
    paths:
      - ".github/workflows/build.yml"
      - "src/**"
      - "migrations/**"
      - "Cargo.*"
      - "build.rs"
      - "rust-toolchain.toml"
      - "rustfmt.toml"
      - "diesel.toml"
      - "docker/Dockerfile.j2"
      - "docker/DockerSettings.yaml"

    branches-ignore:
      - 'main'
jobs:
  build:
    name: Build and Test ${{ matrix.channel }}
    permissions:
      actions: write
      contents: read
    # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
    runs-on: ubuntu-22.04
    timeout-minutes: 120
    # Make warnings errors; this is to prevent warnings from slipping through.
    # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
    env:
      RUSTFLAGS: "-Dwarnings"
    strategy:
      fail-fast: false
      matrix:
        channel:
          - "rust-toolchain" # The version defined in rust-toolchain
          - "msrv" # The supported MSRV

  docker:
    runs-on: ubuntu-latest
    steps:
      # Install dependencies
      - name: "Install dependencies Ubuntu"
        run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
      # End Install dependencies

      # Checkout the repo
      - name: "Checkout"
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
      -
        name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          persist-credentials: false
          fetch-depth: 0
      # End Checkout the repo

      # Determine rust-toolchain version
      - name: Init Variables
        id: toolchain
        shell: bash
        run: |
          if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then
            RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
          elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then
            RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
          else
            RUST_TOOLCHAIN="${{ matrix.channel }}"
          fi
          echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
      # End Determine rust-toolchain version
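A quick illustration of what the two `grep -oP` calls above extract, using hypothetical sample TOML contents (not the repository's actual pins): `\K` discards everything matched so far and the lookahead `(?=")` stops before the closing quote, so only the version string is printed and then exported via `GITHUB_OUTPUT` as `steps.toolchain.outputs.RUST_TOOLCHAIN`.

```bash
# Illustrative only: sample files and the same grep patterns as in the step above.
printf 'channel = "1.86.0"\n' > /tmp/rust-toolchain.toml
printf 'rust-version = "1.84.0"\n' > /tmp/Cargo.toml

grep -oP 'channel.*"(\K.*?)(?=")' /tmp/rust-toolchain.toml   # prints: 1.86.0
grep -oP 'rust-version.*"(\K.*?)(?=")' /tmp/Cargo.toml       # prints: 1.84.0
```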


      # Only install the clippy and rustfmt components on the default rust-toolchain
      - name: "Install rust-toolchain version"
        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master @ Mar 18, 2025, 8:14 PM GMT+1
        if: ${{ matrix.channel == 'rust-toolchain' }}
          username: ${{ vars.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      -
        name: Build and push
        uses: docker/build-push-action@v6
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
          components: clippy, rustfmt
      # End Uses the rust-toolchain file to determine version


      # Install any other channel to be used, for which we do not execute clippy and rustfmt
      - name: "Install MSRV version"
        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master @ Mar 18, 2025, 8:14 PM GMT+1
        if: ${{ matrix.channel != 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
      # End Install the MSRV channel to be used

      # Set the current matrix toolchain version as default
      - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
        env:
          RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
        run: |
          # Remove the rust-toolchain.toml
          rm rust-toolchain.toml
          # Set the default
          rustup default "${RUST_TOOLCHAIN}"

      # Show environment
      - name: "Show environment"
        run: |
          rustc -vV
          cargo -vV
      # End Show environment

      # Enable Rust Caching
      - name: Rust Caching
        uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
        with:
          # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
          # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
          # Only update when really needed! Use a <year>.<month>[.<inc>] format.
          prefix-key: "v2023.07-rust"
      # End Enable Rust Caching

      # Run cargo tests
      # First test all features together, afterwards test them separately.
      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
        id: test_sqlite_mysql_postgresql_mimalloc_logger
        if: ${{ !cancelled() }}
        run: |
          cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger

      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
        id: test_sqlite_mysql_postgresql_mimalloc
        if: ${{ !cancelled() }}
        run: |
          cargo test --features sqlite,mysql,postgresql,enable_mimalloc

      - name: "test features: sqlite,mysql,postgresql"
        id: test_sqlite_mysql_postgresql
        if: ${{ !cancelled() }}
        run: |
          cargo test --features sqlite,mysql,postgresql

      - name: "test features: sqlite"
        id: test_sqlite
        if: ${{ !cancelled() }}
        run: |
          cargo test --features sqlite

      - name: "test features: mysql"
        id: test_mysql
        if: ${{ !cancelled() }}
        run: |
          cargo test --features mysql

      - name: "test features: postgresql"
        id: test_postgresql
        if: ${{ !cancelled() }}
        run: |
          cargo test --features postgresql
      # End Run cargo tests


      # Run cargo clippy, and fail on warnings
      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
        id: clippy
        if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
        run: |
          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
      # End Run cargo clippy


      # Run cargo fmt (Only run on rust-toolchain defined version)
      - name: "check formatting"
        id: formatting
        if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
        run: |
          cargo fmt --all -- --check
      # End Run cargo fmt


      # Check for any previous failures; if there are, stop, else continue.
      # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
      - name: "Some checks failed"
        if: ${{ failure() }}
        env:
          TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
          TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
          TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
          TEST_MYSQL: ${{ steps.test_mysql.outcome }}
          TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
          CLIPPY: ${{ steps.clippy.outcome }}
          FMT: ${{ steps.formatting.outcome }}
        run: |
          echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          exit 1


      # Check for any previous failures; if there are, stop, else continue.
      # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
      - name: "All checks passed"
        if: ${{ success() }}
        run: |
          echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          context: .
          push: true
          tags: jacklull/vaultwarden:${{ github.sha }}

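The added workflow replaces the test matrix with a single Docker build that pushes one commit-tagged image. A rough local sketch of what the `docker/build-push-action@v6` step ends up doing, assuming you are logged in to Docker Hub and using `git rev-parse HEAD` to stand in for `github.sha` (the action gets both from the runner context):

```bash
# Sketch of the "Build and push" step, not the action's exact behavior.
GITHUB_SHA="$(git rev-parse HEAD)"
docker login -u "$DOCKERHUB_USERNAME"        # token is read interactively or from the credential store
docker buildx build \
  --push \
  --tag "jacklull/vaultwarden:${GITHUB_SHA}" \
  .
```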
@@ -1,28 +0,0 @@
name: Check templates
permissions: {}

on: [ push, pull_request ]

jobs:
  docker-templates:
    permissions:
      contents: read
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      # Checkout the repo
      - name: "Checkout"
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false
      # End Checkout the repo

      - name: Run make to rebuild templates
        working-directory: docker
        run: make

      - name: Check for unstaged changes
        working-directory: docker
        run: git diff --exit-code
        continue-on-error: false
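The last two steps of this removed workflow are the usual "generated files must be committed" gate: regenerate, then fail if the working tree changed. A minimal sketch of the same check:

```bash
# Sketch of the template check this workflow performed.
cd docker
make                     # regenerate Dockerfile.debian / Dockerfile.alpine from the Jinja2 template
git diff --exit-code     # exits non-zero, and therefore fails the job, if the regenerated files differ from what is committed
```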
@@ -0,0 +1,23 @@
# name: golangci-lint
# on:
#   pull_request:
#     branches-ignore:
#       - main
#
# permissions:
#   contents: read
#
# jobs:
#   golangci:
#     name: lint
#     runs-on: ubuntu-latest
#     steps:
#       - uses: actions/checkout@v4
#       - uses: actions/setup-go@v5
#         with:
#           go-version: stable
#       - name: golangci-lint
#         uses: golangci/golangci-lint-action@v8
#         with:
#           version: v2.1
#
@@ -1,57 +0,0 @@
name: Hadolint
permissions: {}

on: [ push, pull_request ]

jobs:
  hadolint:
    name: Validate Dockerfile syntax
    permissions:
      contents: read
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      # Start Docker Buildx
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
        # https://github.com/moby/buildkit/issues/3969
        # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
        with:
          buildkitd-config-inline: |
            [worker.oci]
            max-parallelism = 2
          driver-opts: |
            network=host

      # Download hadolint - https://github.com/hadolint/hadolint/releases
      - name: Download hadolint
        shell: bash
        run: |
          sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
          sudo chmod +x /usr/local/bin/hadolint
        env:
          HADOLINT_VERSION: 2.12.0
      # End Download hadolint
      # Checkout the repo
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false
      # End Checkout the repo

      # Test Dockerfiles with hadolint
      - name: Run hadolint
        shell: bash
        run: hadolint docker/Dockerfile.{debian,alpine}
      # End Test Dockerfiles with hadolint

      # Test Dockerfiles with docker build checks
      - name: Run docker build check
        shell: bash
        run: |
          echo "Checking docker/Dockerfile.debian"
          docker build --check . -f docker/Dockerfile.debian
          echo "Checking docker/Dockerfile.alpine"
          docker build --check . -f docker/Dockerfile.alpine
      # End Test Dockerfiles with docker build checks
@@ -1,320 +0,0 @@
name: Release
permissions: {}

on:
  push:
    branches:
      - main

    tags:
      # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
      - '[1-2].[0-9]+.[0-9]+'

jobs:
  # https://github.com/marketplace/actions/skip-duplicate-actions
  # Some checks to determine if we need to continue with building a new docker image.
  # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
  skip_check:
    # Only run this in the upstream repo and not on forks
    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
    name: Cancel older jobs when running
    permissions:
      actions: write
    runs-on: ubuntu-24.04
    outputs:
      should_skip: ${{ steps.skip_check.outputs.should_skip }}

    steps:
      - name: Skip Duplicates Actions
        id: skip_check
        uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
        with:
          cancel_others: 'true'
        # Only run this when not creating a tag
        if: ${{ github.ref_type == 'branch' }}

  docker-build:
    needs: skip_check
    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
    name: Build Vaultwarden containers
    permissions:
      packages: write
      contents: read
      attestations: write
      id-token: write
    runs-on: ubuntu-24.04
    timeout-minutes: 120
    # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
    services:
      registry:
        image: registry:2
        ports:
          - 5000:5000
    env:
      SOURCE_COMMIT: ${{ github.sha }}
      SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
      # The *_REPO variables need to be configured as repository variables
      # Append `/settings/variables/actions` to your repo url
      # DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
      # Check for Docker hub credentials in secrets
      HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
      # GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
      # Check for Github credentials in secrets
      HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
      # QUAY_REPO needs to be 'quay.io/<user>/<repo>'
      # Check for Quay.io credentials in secrets
      HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
    strategy:
      matrix:
        base_image: ["debian","alpine"]

    steps:
      - name: Initialize QEMU binfmt support
        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
        with:
          platforms: "arm64,arm"

      # Start Docker Buildx
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
        # https://github.com/moby/buildkit/issues/3969
        # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
        with:
          cache-binary: false
          buildkitd-config-inline: |
            [worker.oci]
            max-parallelism = 2
          driver-opts: |
            network=host

      # Checkout the repo
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        # We need fetch-depth of 0 so we also get all the tag metadata
        with:
          persist-credentials: false
          fetch-depth: 0

      # Determine Base Tags and Source Version
      - name: Determine Base Tags and Source Version
        shell: bash
        env:
          REF_TYPE: ${{ github.ref_type }}
        run: |
          # Check which main tag we are going to build determined by ref_type
          if [[ "${REF_TYPE}" == "tag" ]]; then
            echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
          elif [[ "${REF_TYPE}" == "branch" ]]; then
            echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
          fi

          # Get the Source Version for this release
          GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || true)"
          if [[ -n "${GIT_EXACT_TAG}" ]]; then
            echo "SOURCE_VERSION=${GIT_EXACT_TAG}" | tee -a "${GITHUB_ENV}"
          else
            GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
            echo "SOURCE_VERSION=${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}" | tee -a "${GITHUB_ENV}"
          fi
      # End Determine Base Tags
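A minimal sketch of the `tee -a "${GITHUB_ENV}"` pattern this job leans on: appending `KEY=value` lines to the `GITHUB_ENV` file exports them to all later steps, while `tee` also echoes them into the job log. Values below are placeholders, and the local fallback path is only there so the sketch runs outside a runner:

```bash
# Sketch of the GITHUB_ENV pattern used above.
GITHUB_ENV="${GITHUB_ENV:-/tmp/github_env}"      # provided by the runner; fallback for local runs

REF_TYPE="branch"                                # the workflow reads github.ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
  echo "BASE_TAGS=latest,1.33.0" | tee -a "${GITHUB_ENV}"   # hypothetical tag name
else
  echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
fi
# In a real job, any later step now sees ${BASE_TAGS} in its environment.
```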

      # Login to Docker Hub
      - name: Login to Docker Hub
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}

      - name: Add registry for DockerHub
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
        shell: bash
        env:
          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to GitHub Container Registry
      - name: Login to GitHub Container Registry
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}

      - name: Add registry for ghcr.io
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
        shell: bash
        env:
          GHCR_REPO: ${{ vars.GHCR_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to Quay.io
      - name: Login to Quay.io
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_TOKEN }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}

      - name: Add registry for Quay.io
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
        shell: bash
        env:
          QUAY_REPO: ${{ vars.QUAY_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"

      - name: Configure build cache from/to
        shell: bash
        env:
          GHCR_REPO: ${{ vars.GHCR_REPO }}
          BASE_IMAGE: ${{ matrix.base_image }}
        run: |
          #
          # Check if there is a GitHub Container Registry Login and use it for caching
          if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
          else
            echo "BAKE_CACHE_FROM="
            echo "BAKE_CACHE_TO="
          fi
          #

      - name: Add localhost registry
        shell: bash
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"

      - name: Bake ${{ matrix.base_image }} containers
        id: bake_vw
        uses: docker/bake-action@4ba453fbc2db7735392b93edf935aaf9b1e8f747 # v6.5.0
        env:
          BASE_TAGS: "${{ env.BASE_TAGS }}"
          SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
          SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
          SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
          CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
        with:
          pull: true
          push: true
          source: .
          files: docker/docker-bake.hcl
          targets: "${{ matrix.base_image }}-multi"
          set: |
            *.cache-from=${{ env.BAKE_CACHE_FROM }}
            *.cache-to=${{ env.BAKE_CACHE_TO }}

      - name: Extract digest SHA
        shell: bash
        env:
          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
        run: |
          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
          echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

      # Attest container images
      - name: Attest - docker.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.DOCKERHUB_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - ghcr.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.GHCR_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - quay.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.QUAY_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true


      # Extract the Alpine binaries from the containers
      - name: Extract binaries
        shell: bash
        env:
          REF_TYPE: ${{ github.ref_type }}
        run: |
          # Check which main tag we are going to build determined by ref_type
          if [[ "${REF_TYPE}" == "tag" ]]; then
            EXTRACT_TAG="latest"
          elif [[ "${REF_TYPE}" == "branch" ]]; then
            EXTRACT_TAG="testing"
          fi

          # Check which base_image was used and append -alpine if needed
          if [[ "${{ matrix.base_image }}" == "alpine" ]]; then
            EXTRACT_TAG="${EXTRACT_TAG}-alpine"
          fi

          # After each extraction the image is removed.
          # This is needed because using different platforms doesn't trigger a new pull/download

          # Extract amd64 binary
          docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp amd64:/vaultwarden vaultwarden-amd64-${{ matrix.base_image }}
          docker rm --force amd64
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract arm64 binary
          docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp arm64:/vaultwarden vaultwarden-arm64-${{ matrix.base_image }}
          docker rm --force arm64
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract armv7 binary
          docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp armv7:/vaultwarden vaultwarden-armv7-${{ matrix.base_image }}
          docker rm --force armv7
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract armv6 binary
          docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp armv6:/vaultwarden vaultwarden-armv6-${{ matrix.base_image }}
          docker rm --force armv6
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
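The four per-architecture blocks above differ only in the platform and the container name, so the same extraction can be expressed as a loop. This is a sketch only, assuming `EXTRACT_TAG` and a shell variable `BASE_IMAGE` (standing in for `matrix.base_image`) are set as in the step above:

```bash
# Loop form of the per-architecture extraction (same commands, illustrative restructuring).
for PLATFORM in linux/amd64 linux/arm64 linux/arm/v7 linux/arm/v6; do
  NAME="$(echo "${PLATFORM#linux/}" | tr -d '/')"    # amd64, arm64, armv7, armv6
  docker create --name "${NAME}" --platform="${PLATFORM}" "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
  docker cp "${NAME}:/vaultwarden" "vaultwarden-${NAME}-${BASE_IMAGE}"
  docker rm --force "${NAME}"
  # Remove the image so the next platform triggers a fresh pull from the local registry
  docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
done
```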

      # Upload artifacts to Github Actions and Attest the binaries
      - name: "Upload amd64 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
          path: vaultwarden-amd64-${{ matrix.base_image }}

      - name: "Upload arm64 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
          path: vaultwarden-arm64-${{ matrix.base_image }}

      - name: "Upload armv7 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
          path: vaultwarden-armv7-${{ matrix.base_image }}

      - name: "Upload armv6 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
          path: vaultwarden-armv6-${{ matrix.base_image }}

      - name: "Attest artifacts ${{ matrix.base_image }}"
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-path: vaultwarden-*
      # End Upload artifacts to Github Actions
@@ -1,30 +0,0 @@
name: Cleanup
permissions: {}

on:
  workflow_dispatch:
    inputs:
      manual_trigger:
        description: "Manual trigger buildcache cleanup"
        required: false
        default: ""

  schedule:
    - cron: '0 1 * * FRI'

jobs:
  releasecache-cleanup:
    name: Releasecache Cleanup
    permissions:
      packages: write
    runs-on: ubuntu-24.04
    continue-on-error: true
    timeout-minutes: 30
    steps:
      - name: Delete vaultwarden-buildcache containers
        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0
        with:
          package-name: 'vaultwarden-buildcache'
          package-type: 'container'
          min-versions-to-keep: 0
          delete-only-untagged-versions: 'false'
@@ -1,53 +0,0 @@
name: Trivy
permissions: {}

on:
  push:
    branches:
      - main

    tags:
      - '*'

  pull_request:
    branches:
      - main

  schedule:
    - cron: '08 11 * * *'

jobs:
  trivy-scan:
    # Only run this in the upstream repo and not on forks
    # When all forks run this at the same time, it causes `Too Many Requests` issues
    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
    name: Trivy Scan
    permissions:
      contents: read
      actions: read
      security-events: write
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@6c175e9c4083a92bbca2f9724c8a5e33bc2d97a5 # v0.30.0
        env:
          TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
          TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
        with:
          scan-type: repo
          ignore-unfixed: true
          format: sarif
          output: trivy-results.sarif
          severity: CRITICAL,HIGH

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5
        with:
          sarif_file: 'trivy-results.sarif'
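For reference, an approximate local equivalent of this removed Trivy configuration, assuming `trivy fs` over the checkout is close enough to the action's `scan-type: repo` (the action may pass additional defaults):

```bash
# Rough local equivalent of the scan above (sketch, not the action's exact invocation).
export TRIVY_DB_REPOSITORY="docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2"
trivy fs \
  --ignore-unfixed \
  --severity CRITICAL,HIGH \
  --format sarif \
  --output trivy-results.sarif \
  .
```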
@@ -1 +0,0 @@
docker/Dockerfile.debian
@@ -0,0 +1,202 @@
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then run `make`.
# This will generate two Dockerfiles: `Dockerfile.debian` and `Dockerfile.alpine`.

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:v2025.3.1
#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.3.1
#     [docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd
#     [docker.io/vaultwarden/web-vault:v2025.3.1]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd AS vault

########################## Cross Compile Docker Helper Scripts ##########################
## We use linux/amd64 no matter which build platform, since these are all bash scripts
## and the scripts differ very little, if at all, between platforms.
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.86.0-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"

# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
# Install the libc cross packages based upon the debian-arch
RUN apt-get update && \
    apt-get install -y \
        --no-install-recommends \
        clang \
        pkg-config \
        git \
        "libc6-$(xx-info debian-arch)-cross" \
        "libc6-dev-$(xx-info debian-arch)-cross" \
        "linux-libc-dev-$(xx-info debian-arch)-cross" && \
    xx-apt-get install -y \
        --no-install-recommends \
        gcc \
        libmariadb3 \
        libpq-dev \
        libpq5 \
        libssl-dev \
        zlib1g-dev && \
    # Force install arch-dependent mariadb dev packages
    # Installing them the normal way breaks several other packages (again)
    apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
    dpkg --force-all -i ./libmariadb-dev*.deb && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo
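For a cross build, the generated `/env-cargo` ends up exporting a GCC cross-compiler, a matching Cargo linker override, and a pkg-config path per target triple. An illustrative result for a hypothetical arm64 build follows (example values showing the shape of the file, not taken from an actual run):

```bash
# Example of what /env-cargo could contain after the step above, for an arm64 cross build.
export CARGO_TARGET=aarch64-unknown-linux-gnu
export CC_aarch64_unknown_linux_gnu=/usr/bin/aarch64-linux-gnu-gcc
export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=/usr/bin/aarch64-linux-gnu-gcc
export CROSS_COMPILE=1
export PKG_CONFIG_ALLOW_CROSS=1
export PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig
# End of env-cargo
```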

RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80 \
    DEBIAN_FRONTEND=noninteractive

# Create data folder and install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb-dev-compat \
        libpq5 \
        openssl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]