# Copyright 2022 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
name: CI
# A few notes:
#
# Variables:
# GitHub Actions doesn't have variables and doesn't even support normal YAML
# anchors (they are specifically disabled because...reasons?):
# See https://github.com/github-community/community/discussions/4501
# https://github.community/t/support-for-yaml-anchors/16128/92
# https://github.com/actions/runner/issues/1182
# Nor does it have any context that is available everywhere. The top-level
# `env` field is available in many places, but not all. We already have a
# "setup" job that every other job depends on, so we use its outputs for the
# variables every other job needs, since the `needs` context *is* available in
# all sub-fields of those jobs.
# See https://docs.github.com/en/actions/learn-github-actions/contexts#context-availability
# and https://github.com/community/community/discussions/27370
#
# Runner label ordering:
# - self-hosted always has to be listed first in a runs-on block:
# https://docs.github.com/en/actions/hosting-your-own-runners/using-self-hosted-runners-in-a-workflow
#
# Pseudo-ternary hack:
# - We have to do a weird hack to get a pseudo-ternary operator. See
# https://github.com/actions/runner/issues/409, hence patterns like:
# `github.event_name == 'pull_request' && 'presubmit' || 'postsubmit'`
# to mean `'presubmit' if github.event_name == 'pull_request' else 'postsubmit'`
# Note that this only works if the true-branch value is truthy. If it is falsey
# then we always get the false-branch value (`p && false` is always false).
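#
# For example (hypothetical, not used in this workflow):
#   ${{ github.event_name == 'pull_request' && 0 || 1 }}
# always evaluates to 1, because the true branch (0) is falsey and the
# expression falls through to the `||` branch.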
on:
workflow_dispatch:
pull_request:
push:
branches:
- main
concurrency:
# The PR number for pull requests, otherwise the commit hash. This cancels
# queued and in-progress runs for the same PR (presubmit) or commit
# (postsubmit).
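# For example, every presubmit run of a hypothetical PR #1234 would share the
# group "1234", while a postsubmit run's group is the SHA of the pushed commit.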
group: ${{ github.event.number || github.sha }}
cancel-in-progress: true
env:
# This is mostly here so we can compute it once and use it in outputs below.
# See note above regarding lack of proper variables. Also see note about
# pseudo-ternary hack.
_CI_STAGE: ${{ github.event_name == 'pull_request' && 'presubmit' || 'postsubmit' }}
# This needs to be in env rather than in the outputs of the setup job because
# it contains the run attempt and we want that to be the *current* attempt, not
# whatever attempt the setup job last ran in.
GCS_DIR: gs://iree-github-actions-${{ github.event_name == 'pull_request' && 'presubmit' || 'postsubmit' }}-artifacts/${{ github.run_id }}/${{ github.run_attempt }}
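# For a presubmit run this resolves to something like
# gs://iree-github-actions-presubmit-artifacts/<run_id>/<run_attempt>.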
# Jobs are organized into groups and topologically sorted by dependencies
jobs:
setup:
runs-on: ubuntu-20.04
env:
# The commit being checked out is the merge commit for the PR. Its first
# parent will be the tip of main.
BASE_REF: HEAD^
PR_DESCRIPTION: ${{ github.event.pull_request.body }}
outputs:
should-run: ${{ steps.should-run.outputs.should-run }}
ci-stage: ${{ env._CI_STAGE }}
# Variables for dependent jobs. See comment at top.
runner-env: prod
runner-group: ${{ env._CI_STAGE }}
# Note that we can't flip the condition here because 0 is falsey. See
# comment at top.
write-caches: ${{ github.event_name != 'pull_request' && 1 || 0 }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
# We need the parent commit to do a diff
fetch-depth: 2
- name: "Computing whether CI should run"
id: should-run
run: |
# Just informative logging. There should only be two commits in the
# history here, but limiting the depth helps when copying from a local
# repo instead of using checkout, e.g. with
# https://github.com/nektos/act where there will be more.
git log --oneline --graph --max-count=3
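# should_ci_run.py signals its decision through its exit code: 0 means CI
# should run, 2 means it should be skipped (presumably based on the diff
# against BASE_REF and the PR_DESCRIPTION env var set above), and anything
# else is a real error that should fail this job.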
ret=0
./build_tools/github_actions/should_ci_run.py || ret=$?
if (( ret==0 )); then
echo "::set-output name=should-run::true"
exit 0
elif (( ret==2 )); then
echo "::set-output name=should-run::false"
exit 0
fi
exit "${ret}"
################################### Basic ####################################
# Jobs that build all of IREE "normally"
##############################################################################
build_all:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: full-build-dir
outputs:
# Pass through the build directory as output so it's available to
# dependent jobs.
build-dir: ${{ env.BUILD_DIR }}
build-dir-archive: ${{ steps.archive.outputs.build-dir-archive }}
build-dir-gcs-artifact: ${{ steps.upload.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building IREE"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:5d43683c6b50aebe1fca6c85f2012f3b0fa153bf4dd268e8767b619b1891423a \
./build_tools/cmake/build_all.sh \
"${BUILD_DIR}"
# Things get more complicated here than when we're just building the runtime.
# The build directory is much bigger and we're running on our own GCE-hosted
# runners, so uploading to GitHub Actions artifact storage (hosted on Azure) is
# dirt slow. We drop static libraries and object files, which aren't needed for
# testing, then do some minimal compression locally *in parallel* and upload to
# GCS. This could be optimized further; decompression in particular is still
# pretty slow. See #9881.
- name: "Creating build dir archive"
id: archive
env:
BUILD_DIR_ARCHIVE: ${{ env.BUILD_DIR }}.tar.zst
run: |
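# -I 'zstd -T0' pipes the archive through zstd using all available cores;
# static libraries and object files are excluded since tests don't need them.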
tar --exclude '*.a' --exclude '*.o' \
-I 'zstd -T0' \
-cf ${BUILD_DIR_ARCHIVE} ${BUILD_DIR}
echo "::set-output name=build-dir-archive::${BUILD_DIR_ARCHIVE}"
- name: "Uploading build dir archive"
id: upload
env:
BUILD_DIR_ARCHIVE: ${{ steps.archive.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ env.GCS_DIR }}/${{ steps.archive.outputs.build-dir-archive }}
run: |
gcloud alpha storage cp "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR_GCS_ARTIFACT}"
echo "::set-output name=build-dir-gcs-artifact::${BUILD_DIR_GCS_ARTIFACT}"
build_test_all_bazel:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building with Bazel"
env:
IREE_BAZEL_WRITE_REMOTE_CACHE: ${{ needs.setup.outputs.write-caches }}
# This doesn't really need everything in the frontends image, but we
# want the cache to be shared with the integrations build (no point
# building LLVM twice) and the cache key is the docker container it's
# run in (to ensure correct cache hits).
run: |
./build_tools/github_actions/docker_run.sh \
--env "IREE_BAZEL_WRITE_REMOTE_CACHE=${IREE_BAZEL_WRITE_REMOTE_CACHE}" \
gcr.io/iree-oss/frontends-swiftshader@sha256:3d5b879672d7f302124ab3d1aa533a6949bd0adfc176884177844ac6767e23e9 \
./build_tools/bazel/build_core.sh
test_all:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Testing all"
run: |
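# This job runs on CPU-only runners, so CUDA tests are disabled here; the
# test_gpu job below covers them.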
./build_tools/github_actions/docker_run.sh \
--env IREE_CUDA_DISABLE=1 \
gcr.io/iree-oss/swiftshader@sha256:5027d56cdfee743d956bffd035668f7784166a486c48c74b42e5882cb0c289bf \
./build_tools/cmake/ctest_all.sh \
"${BUILD_DIR}"
test_gpu:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: Querying GPU information
run: |
./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Testing with GPU"
run: |
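# Re-check GPU visibility inside the container, then run the full test suite
# with CUDA and Vulkan (including f16) enabled.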
./build_tools/github_actions/docker_run.sh \
--env IREE_VULKAN_F16_DISABLE=0 \
--env IREE_CUDA_DISABLE=0 \
--env CTEST_PARALLEL_LEVEL=2 \
--gpus all \
--env NVIDIA_DRIVER_CAPABILITIES=all \
gcr.io/iree-oss/nvidia@sha256:7c2f56db65e656c15e6c96b5812a8275dd53c82bf41221192f9ba8a451aad870 \
bash -euo pipefail -c \
"./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
./build_tools/cmake/ctest_all.sh ${BUILD_DIR}"
################################## Subsets ###################################
# Jobs that build some subset of IREE
##############################################################################
build_runtime:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on: ubuntu-20.04
env:
BUILD_DIR: build-runtime
outputs:
# Pass through the build directory as output so it's available to
# dependent jobs.
build-dir: ${{ env.BUILD_DIR }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building runtime"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:5d43683c6b50aebe1fca6c85f2012f3b0fa153bf4dd268e8767b619b1891423a \
./build_tools/cmake/build_runtime.sh \
"${BUILD_DIR}"
# Using a tar archive is necessary to preserve file permissions. See
# https://github.com/actions/upload-artifact#maintaining-file-permissions-and-case-sensitive-files
# The upload action already does its own gzip compression, so no need to
# do our own.
- name: "Create build dir archive"
run: tar -cf ${BUILD_DIR}.tar ${BUILD_DIR}
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
with:
name: "${{ env.BUILD_DIR }}.tar"
path: "${{ env.BUILD_DIR }}.tar"
test_runtime:
needs: [setup, build_runtime]
if: needs.setup.outputs.should-run == 'true'
runs-on: ubuntu-20.04
env:
BUILD_DIR: ${{ needs.build_runtime.outputs.build-dir }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading runtime build dir archive"
uses: actions/download-artifact@fb598a63ae348fa914e94cd0ff38f362e927b741 # v3.0.0
with:
name: "${{ env.BUILD_DIR }}.tar"
- name: "Extracting runtime build dir archive"
run: tar -xf ${BUILD_DIR}.tar
- name: "Testing runtime"
run: |
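# This runs on a GitHub-hosted runner with no GPU or Vulkan driver, so
# Vulkan tests are disabled.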
./build_tools/github_actions/docker_run.sh \
--env IREE_VULKAN_DISABLE=1 \
gcr.io/iree-oss/base@sha256:5d43683c6b50aebe1fca6c85f2012f3b0fa153bf4dd268e8767b619b1891423a \
./build_tools/cmake/ctest_all.sh \
"${BUILD_DIR}"
################################# Tensorflow #################################
# Jobs that build the IREE-Tensorflow integrations
##############################################################################
build_tf_integrations:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
outputs:
binaries-dir: ${{ steps.build.outputs.binaries-dir }}
binaries-archive: ${{ steps.archive.outputs.binaries-archive }}
binaries-gcs-artifact: ${{ steps.upload.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building TF binaries"
id: build
env:
IREE_TF_BINARIES_OUTPUT_DIR: iree-tf-binaries
IREE_BAZEL_WRITE_REMOTE_CACHE: ${{ needs.setup.outputs.write-caches }}
run: |
./build_tools/github_actions/docker_run.sh \
--env "IREE_BAZEL_WRITE_REMOTE_CACHE=${IREE_BAZEL_WRITE_REMOTE_CACHE}" \
--env "IREE_TF_BINARIES_OUTPUT_DIR=${IREE_TF_BINARIES_OUTPUT_DIR}" \
gcr.io/iree-oss/frontends-swiftshader@sha256:3d5b879672d7f302124ab3d1aa533a6949bd0adfc176884177844ac6767e23e9 \
build_tools/cmake/build_tf_binaries.sh
echo "::set-output name=binaries-dir::${IREE_TF_BINARIES_OUTPUT_DIR}"
- name: "Creating archive of binaries"
id: archive
env:
BINARIES_ARCHIVE: tf-binaries.tar
BINARIES_DIR: ${{ steps.build.outputs.binaries-dir }}
run: |
tar -cf "${BINARIES_ARCHIVE}" "${BINARIES_DIR}"
echo "::set-output name=binaries-archive::${BINARIES_ARCHIVE}"
- name: "Uploading binaries archive"
id: upload
env:
BINARIES_ARCHIVE: ${{ steps.archive.outputs.binaries-archive }}
BINARIES_GCS_ARTIFACT: ${{ env.GCS_DIR }}/${{ steps.archive.outputs.binaries-archive }}
run: |
gcloud alpha storage cp "${BINARIES_ARCHIVE}" "${BINARIES_GCS_ARTIFACT}"
echo "::set-output name=binaries-gcs-artifact::${BINARIES_GCS_ARTIFACT}"
test_tf_integrations:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xvf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "${TF_BINARIES_DIR}"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Running TF integrations tests"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/frontends-swiftshader@sha256:3d5b879672d7f302124ab3d1aa533a6949bd0adfc176884177844ac6767e23e9 \
build_tools/cmake/run_tf_tests.sh \
"${BUILD_DIR}"
test_tf_integrations_gpu:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xvf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "${TF_BINARIES_DIR}"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Running TF integrations tests"
run: |
./build_tools/github_actions/docker_run.sh \
--env IREE_LLVM_CPU_DISABLE=1 \
--gpus all \
--env NVIDIA_DRIVER_CAPABILITIES=all \
gcr.io/iree-oss/frontends-nvidia@sha256:28cd43f36b1ca0633bbd915911abe6d22b4aa16093f074e87016305322a0eba1 \
bash -euo pipefail -c \
"./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
build_tools/cmake/run_tf_tests.sh ${BUILD_DIR}"
######################## Community Model Coverage ############################
# Jobs that test IREE behavior on a set of models.
##############################################################################
test_shark_model_suite:
needs: [setup, build_all, build_tf_integrations]
# Disabled while failing, see https://github.com/nod-ai/SHARK/issues/373
if: false && needs.setup.outputs.should-run == 'true' && needs.setup.outputs.ci-stage == 'postsubmit'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "$(realpath "${TF_BINARIES_DIR}")"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Downloading SHARK"
id: download_shark
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
repository: nod-ai/SHARK
path: ${{ github.workspace }}/SHARK
- name: "Testing Shark Model Suite"
run: |
cd "${GITHUB_WORKSPACE}/SHARK"
NO_BACKEND=1 ./setup_venv.sh
source shark.venv/bin/activate
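# Point Python at the freshly built IREE compiler/runtime bindings and the
# TF/TFLite importer projects rather than released packages.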
export TFPYBASE="${GITHUB_WORKSPACE}/integrations/tensorflow/python_projects"
export PYTHONPATH="${PYTHONPATH}:${BUILD_DIR}/compiler/bindings/python:${BUILD_DIR}/runtime/bindings/python"
export PYTHONPATH="${PYTHONPATH}:$TFPYBASE/iree_tf:$TFPYBASE/iree_tflite"
export SHARK_SKIP_TESTS="--ignore=shark/tests/test_shark_importer.py \
--ignore=benchmarks/tests/test_hf_benchmark.py \
--ignore=benchmarks/tests/test_benchmark.py"
export MODEL_LIST="bert_base_cased or mobilebert_uncased or MiniLM_L12_H384_uncased or module_resnet50 or mobilenet_v3 or squeezenet1_0 or vit_base_patch16_224"
pytest tank/test_models.py -k "cpu and ($MODEL_LIST)" $SHARK_SKIP_TESTS
pytest tank/test_models.py -k "vulkan and ($MODEL_LIST)" $SHARK_SKIP_TESTS
pytest tank/test_models.py -k "cuda and ($MODEL_LIST)" $SHARK_SKIP_TESTS
############################### Configurations ###############################
# Jobs that build IREE in some non-default configuration
##############################################################################
asan:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building and testing with AddressSanitizer"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/swiftshader@sha256:5027d56cdfee743d956bffd035668f7784166a486c48c74b42e5882cb0c289bf \
./build_tools/cmake/build_and_test_asan.sh
tsan:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building and testing with ThreadSanitizer"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:5d43683c6b50aebe1fca6c85f2012f3b0fa153bf4dd268e8767b619b1891423a \
./build_tools/cmake/build_and_test_tsan.sh
benchmarks:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
uses: ./.github/workflows/benchmarks.yml
with:
# env.GCS_DIR is duplicated in the called benchmarks workflow. See the note
# there on why this is.
runner-group: ${{ needs.setup.outputs.runner-group }}
runner-env: ${{ needs.setup.outputs.runner-env }}
build-dir: ${{ needs.build_all.outputs.build-dir }}
build-dir-archive: ${{ needs.build_all.outputs.build-dir-archive }}
build-dir-gcs-artifact: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
tf-binaries-dir: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
tf-binaries-archive: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
tf-binaries-gcs-artifact: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
############################## Crosscompilation ##############################
# Jobs that cross-compile IREE for other platforms
##############################################################################
android_arm64:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
ANDROID_ABI: "arm64-v8a"
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
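# Only the install/ tree (host build tools) is needed to cross-compile, so
# extract just that subtree from the archive.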
- name: "Extracting install from build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Building for Android"
run: |
build_tools/github_actions/docker_run.sh \
--env "ANDROID_ABI=${ANDROID_ABI}" \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
gcr.io/iree-oss/android@sha256:76c2a52dcd6d07601227b965ac87d021c1d2d5e2d01f46ad58da28c89267f2ab \
build_tools/cmake/build_android.sh
baremetal_riscv32:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_RISCV_DIR: "build-riscv32-baremetal"
BUILD_ARCH: "rv32-baremetal"
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting install from build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Cross-compiling and testing riscv32 baremetal"
run: |
./build_tools/github_actions/docker_run.sh \
--env "RISCV_ARCH=${BUILD_ARCH}" \
--env "BUILD_RISCV_DIR=${BUILD_RISCV_DIR}" \
--env "BUILD_PRESET=test" \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
gcr.io/iree-oss/riscv@sha256:d6f0e293a50faf5abbd564c1d1bb9dc6456d7ce93d07b131c381fa64c1daed62 \
bash -euo pipefail -c \
"./build_tools/cmake/build_riscv.sh && tests/riscv32/smoke.sh"
linux_riscv:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
strategy:
matrix:
architecture: [rv64, rv32-linux]
env:
BUILD_RISCV_DIR: build-${{ matrix.architecture }}
BUILD_ARCH: ${{ matrix.architecture }}
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xf "${TF_BINARIES_ARCHIVE}"
- name: "Cross-compiling and testing RISC-V Linux"
run: |
./build_tools/github_actions/docker_run.sh \
--env "RISCV_ARCH=${BUILD_ARCH}" \
--env "BUILD_RISCV_DIR=${BUILD_RISCV_DIR}" \
--env "BUILD_PRESET=test" \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
--env "IREE_IMPORT_TFLITE_BIN=${TF_BINARIES_DIR}/iree-import-tflite" \
--env "LLVM_BIN_DIR=${BUILD_DIR}/third_party/llvm-project/llvm/bin" \
gcr.io/iree-oss/riscv@sha256:d6f0e293a50faf5abbd564c1d1bb9dc6456d7ce93d07b131c381fa64c1daed62 \
bash -euo pipefail -c \
"./build_tools/cmake/build_riscv.sh && ./build_tools/cmake/test_riscv.sh"
emscripten:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true' && needs.setup.outputs.ci-stage == 'postsubmit'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting install from build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Building the runtime for the web using Emscripten"
run: |
build_tools/github_actions/docker_run.sh \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
gcr.io/iree-oss/emscripten@sha256:8ccc1c8de11919faf23aeaa585b13e5c5050952db76c101d6f61367280a3546f \
build_tools/cmake/build_runtime_emscripten.sh
##############################################################################
# Depends on all the other jobs to provide a single anchor that indicates the
# final status. Status reporting will become more sophisticated in the future
# and we can hopefully avoid the need to explicitly list every single job...
summary:
# Even if you have an explicit if condition, you still need to override
# GitHub's default behavior of not running if any dependencies failed.
if: always()
runs-on: ubuntu-20.04
needs:
# Basic
- build_all
- build_test_all_bazel
- test_all
- test_gpu
# Subsets
- build_runtime
- test_runtime
# Tensorflow
- build_tf_integrations
- test_tf_integrations
- test_tf_integrations_gpu
# Model Coverage
- test_shark_model_suite
# Configurations
- asan
- tsan
# Crosscompilation
- android_arm64
- baremetal_riscv32
- linux_riscv
- emscripten
# Benchmark pipeline
- benchmarks
steps:
- name: Getting combined job status
run: |
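# Succeeds only if every job listed in `needs` ended as "success" or
# "skipped"; any "failure" or "cancelled" result makes jq exit non-zero and
# fails this step.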
echo '${{ toJson(needs.*.result) }}' \
| jq --exit-status 'all(.=="success" or .=="skipped")' > /dev/null