---
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Azure Pipelines CI build configuration
# Documentation at https://aka.ms/yaml
# Pipeline-wide variables, consumed by the build scripts below.
variables:
  # Quoted: an unquoted 4.104 would be parsed as a YAML float.
  VERILATOR_VERSION: "4.104"
  TOOLCHAIN_PATH: /opt/buildcache/riscv
  VERIBLE_VERSION: v0.0-808-g1e17daa
  # Release tag from https://github.com/lowRISC/lowrisc-toolchains/releases
  # if you update this, update the definition in util/container/Dockerfile
  TOOLCHAIN_VERSION: 20200904-1
  # This controls where builds happen, and gets picked up by build_consts.sh.
  BUILD_ROOT: $(Build.ArtifactStagingDirectory)
  VIVADO_VERSION: "2020.1"

# CI runs on pushes to every branch and every tag.
trigger:
  batch: true
  branches:
    include:
      - "*"
  tags:
    include:
      - "*"

# CI also validates pull requests targeting any branch.
pr:
  branches:
    include:
      - "*"
jobs:
  # Quick lint: code-quality checks that gate most downstream jobs. The
  # DetermineBuildType step also publishes output variables (onlyDocChanges,
  # hasOTBNChanges) that later jobs use in their conditions.
  - job: lint
    displayName: Run code quality checks (quick lint)
    pool:
      vmImage: ubuntu-18.04
    steps:
      - bash: |
          sudo apt-get remove -y clang-6.0 libclang-common-6.0-dev libclang1-6.0 libllvm6.0
        displayName: Remove existing Clang installation
      - template: ci/install-package-dependencies.yml
      - bash: |
          set -x
          python3 --version
          yapf --version
          isort --version
          clang-format -version
          flake8 --version
          ninja --version
          meson --version
          doxygen --version
          verible-verilog-lint --version
          echo "PATH=$PATH"
          printenv
        displayName: Display environment information
      - bash: |
          fork_origin="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)"
          # Note: the extension match must escape the dot; '.py$' would also
          # match files merely ending in "py" (e.g. "foo.xpy").
          changed_files="$(git diff --name-only --diff-filter=ACMRTUXB "$fork_origin" | grep -v /vendor/ | grep -v /lowrisc_misc-linters/ | grep -E '\.py$')"
          if [[ -n "$changed_files" ]]; then
            set -e
            xargs util/lintpy.py --tools flake8 -f <<< "$changed_files" | tee lintpy-output
          fi
        displayName: Run Python lint
        continueOnError: true
      - bash: |
          make -C hw regs && git diff --exit-code
          if [[ $? != 0 ]]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Register headers not up-to-date. Regenerate them with 'make -C hw regs'."
            exit 1
          fi
          make -C hw top && git diff --exit-code
          if [[ $? != 0 ]]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Autogenerated tops not up-to-date. Regenerate with 'make -C hw top'."
            exit 1
          fi
          make -C hw otp-mmap && git diff --exit-code
          if [[ $? != 0 ]]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Autogenerated OTP memory map files not up-to-date. Regenerate with 'make -C hw otp-mmap'."
            exit 1
          fi
        condition: always()
        displayName: Ensure all generated files are clean and up-to-date
      - bash: |
          set -e
          util/build_docs.py
          # Upload Doxygen Warnings if Present
          if [[ -f "build/docs-generated/sw/doxygen_warnings.log" ]]; then
            echo -n "##vso[task.uploadfile]"
            echo "${PWD}/build/docs-generated/sw/doxygen_warnings.log"
            # Doxygen currently generates lots of warnings.
            # echo -n "##vso[task.issue type=warning]"
            # echo "Doxygen generated warnings. Use 'util/build_docs.py' to generate warning logfile."
          fi
        condition: always()
        displayName: Render documentation
      - bash: |
          cd site/landing
          ../../build/docs-hugo/hugo
        condition: always()
        displayName: Render landing site
      - bash: |
          # XXX: As of today, task.logissue comments with 'sourcepath' set don't
          # get reported to GitHub Checks annotations. Upstream bug report:
          # https://developercommunity.visualstudio.com/content/problem/689794/pipelines-logging-command-logissue-does-not-report.html
          #echo "##vso[task.issue type=error;sourcepath=/azure-pipelines.yml;linenumber=45;columnnumber=1;code=100;]Found something that could be a problem."
          fork_origin="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)"
          changed_files="$(git diff --name-only "$fork_origin" | grep -v /vendor/ | grep -E '\.(cpp|cc|c|h)$')"
          if [[ -n "$changed_files" ]]; then
            xargs git diff -U0 "$fork_origin" <<< "$changed_files" \
              | clang-format-diff -p1 \
              | tee clang-format-output
            if [[ -s clang-format-output ]]; then
              echo -n "##vso[task.logissue type=error]"
              echo "C/C++ lint failed. Use 'git clang-format' with appropriate options to reformat the changed code."
              exit 1
            fi
          fi
        # This check is not idempotent, but checks changes to a base branch.
        # Run it only on pull requests. Note that the git diff filter (ACMRTUXB) omits
        # files that are deleted in a PR, since those break the checker script below.
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Use clang-format to check C/C++ coding style
      - bash: |
          fork_origin="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)"
          changed_files="$(git diff --name-only --diff-filter=ACMRTUXB "$fork_origin")"
          if [[ -n "$changed_files" ]]; then
            xargs util/fix_include_guard.py --dry-run <<< "$changed_files" | tee fix-include-guard-output
            if [[ -s fix-include-guard-output ]]; then
              echo -n "##vso[task.logissue type=error]"
              echo "Include guard check failed. Please run util/fix_include_guard.py on the above files."
              exit 1
            fi
          fi
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Check formatting on header guards
      - bash: |
          util/dvsim/dvsim.py hw/top_earlgrey/lint/top_earlgrey_lint_cfgs.hjson \
            --tool=veriblelint
          if [ $? != 0 ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Verilog style lint of RTL sources with Verible failed. Run 'util/dvsim/dvsim.py -t veriblelint hw/top_earlgrey/lint/top_earlgrey_lint_cfgs.hjson' to check and fix all errors."
            exit 1
          fi
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Style-Lint RTL Verilog source files with Verible
      - bash: |
          util/dvsim/dvsim.py hw/top_earlgrey/lint/top_earlgrey_dv_lint_cfgs.hjson \
            --tool=veriblelint
          if [ $? != 0 ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Verilog style lint of DV sources with Verible failed. Run 'util/dvsim/dvsim.py -t veriblelint hw/top_earlgrey/lint/top_earlgrey_dv_lint_cfgs.hjson' to check and fix all errors."
            exit 1
          fi
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Style-Lint DV Verilog source files with Verible
      - bash: |
          commit_range="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)..HEAD"
          # Notes:
          # * Merge commits are not checked. We always use rebases instead of
          #   merges to keep a linear history, which makes merge commits disappear
          #   ultimately, making them only a CI artifact which should not be
          #   checked.
          # * 'type=error' is used even for warnings. Only "errors" are shown in
          #   the GitHub checks API. However, warnings don't return a non-zero
          #   error code and don't fail the build step.
          util/lint_commits.py \
            --no-merges \
            --error-msg-prefix="##vso[task.logissue type=error]" \
            --warning-msg-prefix="##vso[task.logissue type=error]" \
            "$commit_range"
        # Only run on pull requests to check new commits only
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Check commit metadata
      - bash: |
          set -e
          only_doc_changes=0
          has_otbn_changes=0
          if [[ "$(Build.Reason)" = "PullRequest" ]]; then
            # Conservative way of checking for documentation-only and OTBN changes.
            # Only relevant for pipelines triggered from pull requests
            fork_origin="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)" || {
              echo >&2 Failed to find merge base with origin/$SYSTEM_PULLREQUEST_TARGETBRANCH
              echo >&2 HEAD is $(git rev-parse HEAD)
              exit 1
            }
            echo "Using $fork_origin as fork point from the target branch."
            echo "Checking for doc-only changes in this pull request"
            only_doc_changes="$(git diff --name-only "$fork_origin" | grep -v '\.md$' -q; echo $?)"
            if [[ $only_doc_changes -eq 1 ]]; then
              echo "PR is only doc changes"
            else
              echo "PR contains non doc changes"
            fi
            # Check if any changes were made to OTBN-related files (hardware, software or tooling)
            echo "Checking if any OTBN changes are in this pull request"
            has_otbn_changes="$(! git diff --name-only "$fork_origin" | grep '/otbn/' -q; echo $?)"
            if [[ $has_otbn_changes -eq 1 ]]; then
              echo "PR contains OTBN changes"
            else
              echo "PR doesn't contain OTBN changes"
            fi
          else
            # For non PR pipeline runs always run OTBN related CI
            has_otbn_changes=1
          fi
          echo "##vso[task.setvariable variable=onlyDocChanges;isOutput=true]${only_doc_changes}"
          echo "##vso[task.setvariable variable=hasOTBNChanges;isOutput=true]${has_otbn_changes}"
        displayName: Check what kinds of changes the PR contains
        name: DetermineBuildType
      - bash: |
          fork_origin="$(git merge-base --fork-point origin/$SYSTEM_PULLREQUEST_TARGETBRANCH)"
          changed_files="$(git diff --name-only --diff-filter=ACMRTUXB "$fork_origin")"
          licence_checker="util/lowrisc_misc-linters/licence-checker/licence-checker.py --config util/licence-checker.hjson"
          if [[ -n "$changed_files" ]]; then
            set -o pipefail
            xargs $licence_checker <<< "$changed_files" | tee licence-checker-output
            if [[ $? != 0 ]]; then
              echo -n "##vso[task.logissue type=error]"
              echo "Licence header check failed. Please check output of $licence_checker on the noted failures."
              exit 1
            fi
          fi
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: Check Licence Headers
- job: slow_lints
displayName: Run code quality checks (in-depth lint)
dependsOn: lint
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
# Here we look for all *.vendor.hjson files in the repo to re-vendor them.
# We exclude the following:
# - Any in 'hw/vendor/lowrisc_ibex', because that directory is vendored.
find . \
-not \( -path './hw/vendor/lowrisc_ibex' -prune \) \
-name '*.vendor.hjson' \
| xargs -n1 util/vendor.py --verbose \
&& git diff --exit-code
if [[ $? != 0 ]]; then
echo -n "##vso[task.logissue type=error]"
echo "Vendored repositories not up-to-date. Run util/vendor.py to fix."
exit 1
fi
condition: always()
displayName: Check vendored directories are up-to-date
- job: sw_build
displayName: Build Software for Earl Grey toplevel design
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
set -x
sudo util/get-toolchain.py \
--install-dir="$TOOLCHAIN_PATH" \
--release-version="$TOOLCHAIN_VERSION" \
--update
displayName: Install toolchain
- bash: |
. util/build_consts.sh
./meson_init.sh -A
ninja -C "$OBJ_DIR" all
displayName: Build embedded targets
- bash: |
. util/build_consts.sh
ninja -C "$OBJ_DIR" test
displayName: Run unit tests
- template: ci/upload-artifacts-template.yml
parameters:
artifact: sw_build
- job: sw_build_nexysvideo
displayName: Build Software for Earl Grey toplevel design targeting the Nexys Video board
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
set -x
sudo util/get-toolchain.py \
--install-dir="$TOOLCHAIN_PATH" \
--release-version="$TOOLCHAIN_VERSION" \
--update
displayName: Install toolchain
- bash: |
. util/build_consts.sh
./hw/top_earlgrey/util/top_earlgrey_reduce.py
./meson_init.sh -A
ninja -C "$OBJ_DIR" all
displayName: Build embedded targets
- bash: |
. util/build_consts.sh
ninja -C "$OBJ_DIR" test
displayName: Run unit tests
- template: ci/upload-artifacts-template.yml
parameters:
artifact: sw_build_nexysvideo
# We continue building with GCC, despite defaulting to Clang. This is a copy of
# `sw_build` with `meson_init.sh` configured with the GCC toolchain, instead of
# the default toolchain.
- job: sw_build_gcc
displayName: Build Software for Earl Grey toplevel design (with GCC)
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
set -x
sudo util/get-toolchain.py \
--install-dir="$TOOLCHAIN_PATH" \
--release-version="$TOOLCHAIN_VERSION" \
--update
displayName: Install toolchain
- bash: |
. util/build_consts.sh
./meson_init.sh -A \
-t "$TOOLCHAIN_PATH/meson-riscv32-unknown-elf-gcc.txt"
ninja -C "$OBJ_DIR" all
displayName: Build embedded targets
- bash: |
. util/build_consts.sh
ninja -C "$OBJ_DIR" test
displayName: Run unit tests
- template: ci/upload-artifacts-template.yml
parameters:
artifact: sw_build_gcc
- job: top_earlgrey_verilator
displayName: Build Verilator simulation of the Earl Grey toplevel design
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
python3 --version
fusesoc --version
verilator --version
verible-verilog-lint --version
displayName: Display environment
- bash: |
. util/build_consts.sh
mkdir -p "$OBJ_DIR/hw"
mkdir -p "$BIN_DIR/hw/top_earlgrey"
# Compile the simulation without threading; the runners provided by
# Azure provide two virtual CPUs, which seems to equal one physical
# CPU (at most); the use of threading slows down the simulation.
fusesoc --cores-root=. \
run --flag=fileset_top --target=sim --setup --build \
--build-root="$OBJ_DIR/hw" \
lowrisc:systems:top_earlgrey_verilator \
--verilator_options="--no-threads"
cp "$OBJ_DIR/hw/sim-verilator/Vtop_earlgrey_verilator" \
"$BIN_DIR/hw/top_earlgrey"
displayName: Build simulation with Verilator
- template: ci/upload-artifacts-template.yml
parameters:
artifact: top_earlgrey_verilator
- job: top_englishbreakfast_verilator
displayName: Build Verilator simulation of the English Breakfast toplevel design
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool:
vmImage: ubuntu-18.04
steps:
- template: ci/install-package-dependencies.yml
- bash: |
python3 --version
fusesoc --version
verilator --version
verible-verilog-lint --version
displayName: Display environment
- bash: |
. util/build_consts.sh
mkdir -p "$OBJ_DIR/hw"
mkdir -p "$BIN_DIR/hw/top_englishbreakfast"
# Compile the simulation without threading; the runners provided by
# Azure provide two virtual CPUs, which seems to equal one physical
# CPU (at most); the use of threading slows down the simulation.
fusesoc --cores-root=. \
run --flag=fileset_topgen --target=sim --setup --build \
--build-root="$OBJ_DIR/hw" \
lowrisc:systems:top_englishbreakfast_verilator \
--verilator_options="--no-threads"
cp "$OBJ_DIR/hw/sim-verilator/Vtop_englishbreakfast_verilator" \
"$BIN_DIR/hw/top_englishbreakfast"
displayName: Build simulation with Verilator
- template: ci/upload-artifacts-template.yml
parameters:
artifact: top_englishbreakfast_verilator
- job: execute_verilated_tests
displayName: Execute tests on the Verilated system
pool:
vmImage: ubuntu-18.04
dependsOn:
- top_earlgrey_verilator
- sw_build
steps:
- template: ci/install-package-dependencies.yml
- template: ci/download-artifacts-template.yml
- bash: |
# Install an additional pytest dependency for result upload.
pip3 install pytest-azurepipelines
. util/build_consts.sh
pytest --version
pytest test/systemtest/earlgrey/test_sim_verilator.py \
--log-cli-level=DEBUG \
--test-run-title="Run system tests with Verilator simulation" \
--napoleon-docstrings
displayName: Execute tests
- template: ci/run-riscv-compliance.yml
parameters:
rvc_test_suites:
- rv32i
- template: ci/run-riscv-compliance.yml
parameters:
rvc_test_suites:
- rv32im
- rv32imc
- rv32Zicsr
- job: otbn_standalone_tests
displayName: Run OTBN Smoke Test
dependsOn: lint
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.hasOTBNChanges'], '1'))
pool:
vmImage: ubuntu-18.04
timeoutInMinutes: 10
steps:
- template: ci/install-package-dependencies.yml
- bash: |
set -x
sudo util/get-toolchain.py \
--install-dir="$TOOLCHAIN_PATH" \
--release-version="$TOOLCHAIN_VERSION" \
--update
echo "##vso[task.prependpath]$TOOLCHAIN_PATH/bin"
displayName: Install toolchain
- bash: |
python3 --version
fusesoc --version
verilator --version
displayName: Display environment
- bash: |
make -C hw/ip/otbn/dv/otbnsim test
displayName: OTBN ISS Test
- bash: |
./hw/ip/otbn/dv/smoke/run_smoke.sh
displayName: OTBN Smoke Test
- bash: |
make -C hw/ip/otbn/util asm-check
displayName: Assemble and link code snippets
- job: top_earlgrey_nexysvideo
displayName: Build NexysVideo variant of the Earl Grey toplevel design using Vivado
dependsOn:
- lint
# The bootrom is built into the FPGA image at synthesis time.
- sw_build_nexysvideo
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool: ci-public
timeoutInMinutes: 120 # 2 hours
steps:
- template: ci/install-package-dependencies.yml
- template: ci/download-artifacts-template.yml
- bash: |
set -e
. util/build_consts.sh
module load "xilinx/vivado/$(VIVADO_VERSION)"
mkdir -p "$OBJ_DIR/hw"
mkdir -p "$BIN_DIR/hw/top_earlgrey"
./hw/top_earlgrey/util/top_earlgrey_reduce.py
BOOTROM_VMEM="$BIN_DIR/sw/device/boot_rom/boot_rom_fpga_nexysvideo.32.vmem"
test -f "$BOOTROM_VMEM"
fusesoc --cores-root=. \
run --flag=fileset_top --target=synth --setup --build \
--build-root="$OBJ_DIR/hw" \
lowrisc:systems:top_earlgrey_nexysvideo \
--BootRomInitFile="$BOOTROM_VMEM"
cp "$OBJ_DIR/hw/synth-vivado/lowrisc_systems_top_earlgrey_nexysvideo_0.1.bit" \
"$BIN_DIR/hw/top_earlgrey"
displayName: Build bitstream with Vivado
- bash: |
. util/build_consts.sh
echo Synthesis log
cat $OBJ_DIR/hw/synth-vivado/lowrisc_systems_top_earlgrey_nexysvideo_0.1.runs/synth_1/runme.log || true
echo Implementation log
cat $OBJ_DIR/hw/synth-vivado/lowrisc_systems_top_earlgrey_nexysvideo_0.1.runs/impl_1/runme.log || true
condition: always()
displayName: Display synthesis and implementation logs
- template: ci/upload-artifacts-template.yml
parameters:
artifact: top_earlgrey_nexysvideo
- job: top_englishbreakfast_cw305
displayName: Build CW305 variant of the English Breakfast toplevel design using Vivado
dependsOn:
- lint
# The bootrom is built into the FPGA image at synthesis time.
# Currently, we can't have different versions of binaries in $BIN_DIR. Consequently, we are
# using the NexysVideo bootrom here and the resulting CW305 bitstream is not functional.
# By generating the CW305 bootrom binary we would break execute_fpga_tests executed on the
# NexysVideo.
- sw_build
condition: and(succeeded(), eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0'))
pool: ci-public
timeoutInMinutes: 120 # 2 hours
steps:
- template: ci/install-package-dependencies.yml
- template: ci/download-artifacts-template.yml
- bash: |
set -e
. util/build_consts.sh
module load "xilinx/vivado/$(VIVADO_VERSION)"
mkdir -p "$OBJ_DIR/hw"
mkdir -p "$BIN_DIR/hw/top_englishbreakfast"
BOOTROM_VMEM="$BIN_DIR/sw/device/boot_rom/boot_rom_fpga_nexysvideo.32.vmem"
test -f "$BOOTROM_VMEM"
fusesoc --cores-root=. \
run --flag=fileset_topgen --target=synth --setup --build \
--build-root="$OBJ_DIR/hw" \
lowrisc:systems:top_englishbreakfast_cw305 \
--BootRomInitFile="$BOOTROM_VMEM"
cp "$OBJ_DIR/hw/synth-vivado/lowrisc_systems_top_englishbreakfast_cw305_0.1.bit" \
"$BIN_DIR/hw/top_englishbreakfast"
displayName: Build bitstream with Vivado
- template: ci/upload-artifacts-template.yml
parameters:
artifact: top_englishbreakfast_cw305
- job: execute_fpga_tests
displayName: Execute tests on FPGA
pool: FPGA
timeoutInMinutes: 30
dependsOn:
- top_earlgrey_nexysvideo
- sw_build_nexysvideo
steps:
- template: ci/install-package-dependencies.yml
- template: ci/download-artifacts-template.yml
- bash: |
set -e
module load "xilinx/vivado/$(VIVADO_VERSION)"
# Install an additional pytest dependency for result upload.
pip3 install pytest-azurepipelines
. util/build_consts.sh
pytest --version
pytest test/systemtest/earlgrey/test_fpga_nexysvideo.py \
--log-cli-level=DEBUG \
--test-run-title="Run system tests on Nexys Video FPGA board" \
--napoleon-docstrings
displayName: Execute tests
- job: deploy_release_artifacts
displayName: Package and deploy release distribution
pool:
vmImage: ubuntu-18.04
dependsOn:
- lint
- sw_build
- top_earlgrey_verilator
- top_earlgrey_nexysvideo
condition: eq(dependencies.lint.outputs['DetermineBuildType.onlyDocChanges'], '0')
steps:
- template: ci/install-package-dependencies.yml
- template: ci/download-artifacts-template.yml
- bash: |
. util/build_consts.sh
util/make_distribution.sh
tar --list -f $BIN_DIR/opentitan-*.tar.xz
# Put the resulting tar file into a directory the |publish| step below can reference.
mkdir "$BUILD_ROOT/dist-final"
mv $BIN_DIR/opentitan-*.tar.xz "$BUILD_ROOT/dist-final"
displayName: Create final dist directory out of partial ones
- publish: $(Build.ArtifactStagingDirectory)/dist-final
artifact: opentitan-dist
displayName: Upload release artifacts as Azure artifact
- task: GithubRelease@0
displayName: Upload to GitHub releases (only tags)
condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
inputs:
gitHubConnection: opentitan-release-upload
repositoryName: lowrisc/opentitan
addChangeLog: false
assets: |
$(Build.ArtifactStagingDirectory)/dist-final/*
- job: build_docker_containers
displayName: "Build Docker Containers"
pool:
vmImage: ubuntu-18.04
dependsOn:
- lint
steps:
- task: Docker@2
displayName: Build Developer Utility Container
inputs:
command: build
Dockerfile: ./util/container/Dockerfile
buildContext: .
- task: Docker@2
displayName: Build Documentation Builder Container
inputs:
command: build
tags: gcr.io/active-premise-257318/builder
Dockerfile: ./site/docs/builder.Dockerfile
buildContext: .
- task: Docker@2
displayName: Build Documentation Redirector Container
inputs:
command: build
Dockerfile: ./site/redirector/Dockerfile
buildContext: ./site/redirector