# Copyright 2022 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
name: CI
# A few notes:
#
# Variables:
# GitHub Actions doesn't have variables or even support normal YAML anchors (they
# are specifically disabled because...reasons?):
# See https://github.com/github-community/community/discussions/4501
# https://github.community/t/support-for-yaml-anchors/16128/92
# https://github.com/actions/runner/issues/1182
# Neither does it have any contexts that are available everywhere. The
# top-level `env` field is available in many places, but not all. We already
# have a `setup` job that every other job depends on, so we leverage its
# outputs for variables that every other job can use, since the `needs` context
# *is* available in all sub-fields of dependent jobs.
# See https://docs.github.com/en/actions/learn-github-actions/contexts#context-availability
# and https://github.com/community/community/discussions/27370
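#
# As a sketch of that pattern (illustrative only; the real jobs below follow
# the same shape), a dependent job consumes those pseudo-variables through the
# `needs` context:
#
#   example_job:
#     needs: setup
#     if: needs.setup.outputs.should-run == 'true'
#     runs-on:
#       - self-hosted  # must come first
#       - environment=${{ needs.setup.outputs.runner-env }}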
#
# Runner label ordering:
# - self-hosted always has to be listed first in a runs-on block:
# https://docs.github.com/en/actions/hosting-your-own-runners/using-self-hosted-runners-in-a-workflow
#
# Pseudo-ternary hack:
# - We have to do a weird hack to get a pseudo-ternary operator. See
# https://github.com/actions/runner/issues/409, hence patterns like:
# `github.event_name == 'pull_request' && 'presubmit' || 'postsubmit'`
# to mean `'presubmit' if github.event_name == 'pull_request' else 'postsubmit'`
# Note that this only works if the true-branch value is truthy. If it is falsy,
# then we would always get the false-branch value (`p && false` is always
# false).
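#
# For illustration:
#   ${{ github.event_name == 'pull_request' && 'presubmit' || 'postsubmit' }}
# (used for GCS_DIR below) yields 'presubmit' on pull requests and
# 'postsubmit' otherwise, whereas a hypothetical
#   ${{ github.event_name == 'pull_request' && '' || 'postsubmit' }}
# would always yield 'postsubmit', because the empty string is falsy.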
on:
workflow_dispatch:
pull_request:
push:
branches:
- main
concurrency:
# The PR number if this is a pull request, otherwise the commit hash. This cancels
# queued and in-progress runs for the same PR (presubmit) or commit
# (postsubmit).
group: ${{ github.event.number || github.sha }}
cancel-in-progress: true
env:
# This needs to be in env instead of the outputs of setup because it contains
# the run attempt and we want that to be the current attempt, not whatever
# attempt the setup step last ran in.
GCS_DIR: gs://iree-github-actions-${{ github.event_name == 'pull_request' && 'presubmit' || 'postsubmit' }}-artifacts/${{ github.run_id }}/${{ github.run_attempt }}
# Jobs are organized into groups and topologically sorted by dependencies
jobs:
setup:
runs-on: ubuntu-20.04
env:
# The commit being checked out is the merge commit for the PR. Its first
# parent will be the tip of main.
BASE_REF: HEAD^
PR_TITLE: ${{ github.event.pull_request.title }}
PR_BODY: ${{ github.event.pull_request.body }}
outputs:
should-run: ${{ steps.configure.outputs.should-run }}
ci-stage: ${{ steps.configure.outputs.ci-stage }}
runner-env: ${{ steps.configure.outputs.runner-env }}
runner-group: ${{ steps.configure.outputs.runner-group }}
write-caches: ${{ steps.configure.outputs.write-caches }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
# We need the parent commit to do a diff
fetch-depth: 2
- name: "Configuring CI options"
id: configure
run: |
# Just informative logging. There should only be two commits in the
# history here, but limiting the depth helps when copying from a local
# repo instead of using checkout, e.g. with
# https://github.com/nektos/act where there will be more.
git log --oneline --graph --max-count=3
./build_tools/github_actions/configure_ci.py
################################### Basic ####################################
# Jobs that build all of IREE "normally"
##############################################################################
build_all:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: full-build-dir
outputs:
# Pass through the build directory as output so it's available to
# dependent jobs.
build-dir: ${{ env.BUILD_DIR }}
build-dir-archive: ${{ steps.archive.outputs.build-dir-archive }}
build-dir-gcs-artifact: ${{ steps.upload.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building IREE"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:7c3027c48b94fc38e64488987fc7893c100526c57308d25cef0c6b76a2dfe117 \
./build_tools/cmake/build_all.sh \
"${BUILD_DIR}"
# The archive step below doesn't include these files. Remove them first to
# save disk space.
# TODO(#10739): This step can be removed once we enlarge the disk space.
- name: "Removing unused files"
run: |
find "${BUILD_DIR}" -type f -name "*.a" -o -type f -name "*.o" \
-print \
-delete
# Things get more complicated here than when we're just building the
# runtime. The build directory is way bigger. We're also running on our own
# runners on GCE, so uploading to GitHub Actions artifact storage hosted
# on Azure is dirt slow. We drop static libraries and object files, which
# aren't needed for testing. Then we do some minimal compression locally
# *in parallel* and upload to GCS. This can be further optimized; in
# particular, decompression is still pretty slow. See #9881.
- name: "Creating build dir archive"
id: archive
env:
BUILD_DIR_ARCHIVE: ${{ env.BUILD_DIR }}.tar.zst
run: |
tar -I 'zstd -T0' \
-cf ${BUILD_DIR_ARCHIVE} ${BUILD_DIR}
echo "::set-output name=build-dir-archive::${BUILD_DIR_ARCHIVE}"
- name: "Uploading build dir archive"
id: upload
env:
BUILD_DIR_ARCHIVE: ${{ steps.archive.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ env.GCS_DIR }}/${{ steps.archive.outputs.build-dir-archive }}
run: |
gcloud alpha storage cp "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR_GCS_ARTIFACT}"
echo "::set-output name=build-dir-gcs-artifact::${BUILD_DIR_GCS_ARTIFACT}"
build_test_all_windows:
needs: setup
if: needs.setup.outputs.should-run == 'true' && needs.setup.outputs.ci-stage == 'postsubmit'
runs-on: managed-windows-cpu
defaults:
run:
shell: bash
env:
BUILD_DIR: build-windows
IREE_VULKAN_DISABLE: 1
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Setting up Python"
uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # v2
with:
python-version: "3.10" # Needs pybind >= 2.10.1 for Python >= 3.11
- name: "Installing Python packages"
run: |
python3 -m venv .venv
source .venv/Scripts/activate  # activate.bat would not persist in this bash step
python3 -m pip install -r runtime/bindings/python/iree/runtime/build_requirements.txt
- name: "Configuring MSVC"
uses: ilammy/msvc-dev-cmd@7315a94840631165970262a99c72cfb48a65d25d # v1.12.0
- name: "Building IREE"
run: ./build_tools/cmake/build_all.sh "${BUILD_DIR}"
- name: "Testing IREE"
run: ./build_tools/cmake/ctest_all.sh "${BUILD_DIR}"
build_test_all_bazel:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building with Bazel"
env:
IREE_BAZEL_WRITE_REMOTE_CACHE: ${{ needs.setup.outputs.write-caches }}
# This doesn't really need everything in the frontends image, but we
# want the cache to be shared with the integrations build (no point
# building LLVM twice) and the cache key is the docker container it's
# run in (to ensure correct cache hits).
run: |
./build_tools/github_actions/docker_run.sh \
--env "IREE_BAZEL_WRITE_REMOTE_CACHE=${IREE_BAZEL_WRITE_REMOTE_CACHE}" \
gcr.io/iree-oss/frontends-swiftshader@sha256:1e9adddc2823b4ba6492b07a271e76b6e2cf50f8aca1164bdbb9b8809ee6c338 \
./build_tools/bazel/build_core.sh
test_all:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Testing all"
run: |
./build_tools/github_actions/docker_run.sh \
--env IREE_CUDA_DISABLE=1 \
gcr.io/iree-oss/swiftshader@sha256:e36550924e269fedd68b638cce9bd389b6bda58afeaac68b3146dbb6e9a91d35 \
./build_tools/cmake/ctest_all.sh \
"${BUILD_DIR}"
test_gpu:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Querying GPU information"
run: |
./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Testing with GPU"
run: |
./build_tools/github_actions/docker_run.sh \
--env IREE_VULKAN_F16_DISABLE=0 \
--env IREE_CUDA_DISABLE=0 \
--env CTEST_PARALLEL_LEVEL=2 \
--gpus all \
--env NVIDIA_DRIVER_CAPABILITIES=all \
gcr.io/iree-oss/nvidia@sha256:c26464423e6878f52a6d7705d5cd9ef0ac878699ef380f109793172159d9749b \
bash -euo pipefail -c \
"./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
./build_tools/cmake/ctest_all.sh ${BUILD_DIR}"
################################## Subsets ###################################
# Jobs that build some subset of IREE
##############################################################################
build_test_runtime:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on: ubuntu-20.04
env:
BUILD_DIR: build-runtime
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Building runtime"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:7c3027c48b94fc38e64488987fc7893c100526c57308d25cef0c6b76a2dfe117 \
./build_tools/cmake/build_runtime.sh \
"${BUILD_DIR}"
- name: "Testing runtime"
run: |
./build_tools/github_actions/docker_run.sh \
--env IREE_VULKAN_DISABLE=1 \
gcr.io/iree-oss/base@sha256:7c3027c48b94fc38e64488987fc7893c100526c57308d25cef0c6b76a2dfe117 \
./build_tools/cmake/ctest_all.sh \
"${BUILD_DIR}"
build_test_runtime_windows:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on: managed-windows-cpu
defaults:
run:
shell: bash
env:
BUILD_DIR: build-runtime-windows
IREE_VULKAN_DISABLE: 1
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Configuring MSVC"
uses: ilammy/msvc-dev-cmd@7315a94840631165970262a99c72cfb48a65d25d # v1.12.0
- name: "Building runtime"
run: ./build_tools/cmake/build_runtime.sh "${BUILD_DIR}"
- name: "Testing runtime"
run: ./build_tools/cmake/ctest_all.sh "${BUILD_DIR}"
################################# Tensorflow #################################
# Jobs that build the IREE-Tensorflow integrations
##############################################################################
build_tf_integrations:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
outputs:
binaries-dir: ${{ steps.build.outputs.binaries-dir }}
binaries-archive: ${{ steps.archive.outputs.binaries-archive }}
binaries-gcs-artifact: ${{ steps.upload.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building TF binaries"
id: build
env:
IREE_TF_BINARIES_OUTPUT_DIR: iree-tf-binaries
IREE_BAZEL_WRITE_REMOTE_CACHE: ${{ needs.setup.outputs.write-caches }}
run: |
./build_tools/github_actions/docker_run.sh \
--env "IREE_BAZEL_WRITE_REMOTE_CACHE=${IREE_BAZEL_WRITE_REMOTE_CACHE}" \
--env "IREE_TF_BINARIES_OUTPUT_DIR=${IREE_TF_BINARIES_OUTPUT_DIR}" \
gcr.io/iree-oss/frontends-swiftshader@sha256:1e9adddc2823b4ba6492b07a271e76b6e2cf50f8aca1164bdbb9b8809ee6c338 \
build_tools/cmake/build_tf_binaries.sh
echo "::set-output name=binaries-dir::${IREE_TF_BINARIES_OUTPUT_DIR}"
- name: "Creating archive of binaries"
id: archive
env:
BINARIES_ARCHIVE: tf-binaries.tar
BINARIES_DIR: ${{ steps.build.outputs.binaries-dir }}
run: |
tar -cf "${BINARIES_ARCHIVE}" "${BINARIES_DIR}"
echo "::set-output name=binaries-archive::${BINARIES_ARCHIVE}"
- name: "Uploading binaries archive"
id: upload
env:
BINARIES_ARCHIVE: ${{ steps.archive.outputs.binaries-archive }}
BINARIES_GCS_ARTIFACT: ${{ env.GCS_DIR }}/${{ steps.archive.outputs.binaries-archive }}
run: |
gcloud alpha storage cp "${BINARIES_ARCHIVE}" "${BINARIES_GCS_ARTIFACT}"
echo "::set-output name=binaries-gcs-artifact::${BINARIES_GCS_ARTIFACT}"
test_tf_integrations:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xvf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "${TF_BINARIES_DIR}"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Running TF integrations tests"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/frontends-swiftshader@sha256:1e9adddc2823b4ba6492b07a271e76b6e2cf50f8aca1164bdbb9b8809ee6c338 \
build_tools/cmake/run_tf_tests.sh \
"${BUILD_DIR}"
test_tf_integrations_gpu:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xvf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "${TF_BINARIES_DIR}"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Running TF integrations tests"
run: |
./build_tools/github_actions/docker_run.sh \
--env IREE_LLVM_CPU_DISABLE=1 \
--gpus all \
--env NVIDIA_DRIVER_CAPABILITIES=all \
gcr.io/iree-oss/frontends-nvidia@sha256:8c724d50a9f4ed2acfa2b137720acb6c83a95351c20b5c930ba3e56d603a1312 \
bash -euo pipefail -c \
"./build_tools/scripts/check_cuda.sh
./build_tools/scripts/check_vulkan.sh
build_tools/cmake/run_tf_tests.sh ${BUILD_DIR}"
######################## Community Model Coverage ############################
# Jobs that test IREE behavior on a set of models.
##############################################################################
test_shark_model_suite:
needs: [setup, build_all, build_tf_integrations]
# Disabled while failing. See https://github.com/iree-org/iree/issues/10653
if: false && needs.setup.outputs.should-run == 'true' && needs.setup.outputs.ci-stage == 'postsubmit'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- gpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
TF_BINARIES_DIR: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
TF_BINARIES_ARCHIVE: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
TF_BINARIES_GCS_ARTIFACT: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Downloading TF binaries archive"
run: gcloud alpha storage cp "${TF_BINARIES_GCS_ARTIFACT}" "${TF_BINARIES_ARCHIVE}"
- name: "Extracting TF binaries archive"
run: tar -xf "${TF_BINARIES_ARCHIVE}"
- name: "Symlinking TF binaries"
run: |
./integrations/tensorflow/symlink_binaries.sh "$(realpath "${TF_BINARIES_DIR}")"
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}"
- name: "Downloading SHARK"
id: download_shark
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
repository: nod-ai/SHARK
path: ${{ github.workspace }}/SHARK
- name: "Testing Shark Model Suite"
run: |
cd "${GITHUB_WORKSPACE}/SHARK"
NO_BACKEND=1 ./setup_venv.sh
source shark.venv/bin/activate
export TFPYBASE="${GITHUB_WORKSPACE}/integrations/tensorflow/python_projects"
export PYTHONPATH="${PYTHONPATH}:${BUILD_DIR}/compiler/bindings/python:${BUILD_DIR}/runtime/bindings/python"
export PYTHONPATH="${PYTHONPATH}:$TFPYBASE/iree_tf:$TFPYBASE/iree_tflite"
export SHARK_SKIP_TESTS="--ignore=shark/tests/test_shark_importer.py \
--ignore=benchmarks/tests/test_hf_benchmark.py \
--ignore=benchmarks/tests/test_benchmark.py"
export MODEL_LIST="bert_base_cased or mobilebert_uncased or MiniLM_L12_H384_uncased or module_resnet50 or mobilenet_v3 or squeezenet1_0 or vit_base_patch16_224"
pytest tank/test_models.py -k "cpu and ($MODEL_LIST)" $SHARK_SKIP_TESTS
pytest tank/test_models.py -k "vulkan and ($MODEL_LIST)" $SHARK_SKIP_TESTS
pytest tank/test_models.py -k "cuda and ($MODEL_LIST)" $SHARK_SKIP_TESTS
############################### Configurations ###############################
# Jobs that build IREE in some non-default configuration
##############################################################################
asan:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building and testing with AddressSanitizer"
run: |
# Note that this uses the latest version of the clang compiler, etc.
# This gives us access to the latest features and validates that IREE
# builds using the latest versions.
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/swiftshader-bleeding-edge@sha256:d335b0885871356ebe697a8f3eac7ce7cbc3d84a458652e7cd95291a0aecfead \
./build_tools/cmake/build_and_test_asan.sh
tsan:
needs: setup
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
with:
submodules: true
- name: "Building and testing with ThreadSanitizer"
run: |
./build_tools/github_actions/docker_run.sh \
gcr.io/iree-oss/base@sha256:7c3027c48b94fc38e64488987fc7893c100526c57308d25cef0c6b76a2dfe117 \
./build_tools/cmake/build_and_test_tsan.sh
benchmarks:
needs: [setup, build_all, build_tf_integrations]
if: needs.setup.outputs.should-run == 'true'
uses: ./.github/workflows/benchmarks.yml
with:
# env.GCS_DIR is also duplicated in the benchmarks workflow. See the note
# there on why this is.
runner-group: ${{ needs.setup.outputs.runner-group }}
runner-env: ${{ needs.setup.outputs.runner-env }}
build-dir: ${{ needs.build_all.outputs.build-dir }}
build-dir-archive: ${{ needs.build_all.outputs.build-dir-archive }}
build-dir-gcs-artifact: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
tf-binaries-dir: ${{ needs.build_tf_integrations.outputs.binaries-dir }}
tf-binaries-archive: ${{ needs.build_tf_integrations.outputs.binaries-archive }}
tf-binaries-gcs-artifact: ${{ needs.build_tf_integrations.outputs.binaries-gcs-artifact }}
############################## Crosscompilation ##############################
# Jobs that cross-compile IREE for other platforms
##############################################################################
# emscripten is not in the test matrix because it only runs in postsubmit.
cross_compile_and_test:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
strategy:
matrix:
target:
- platform: android
architecture: arm64-v8a
abi: arm64-v8a
docker_image: "gcr.io/iree-oss/android@sha256:99a14dfc482dded1f03460b0ff0a818ecc22688a018a2ccee24c5f21baf09e7b"
test_script: "echo 'bypass tests'"
- platform: riscv
architecture: rv64
abi: lp64d
docker_image: "gcr.io/iree-oss/riscv@sha256:cd2ee29950737f44b5ec54a32c37746e951fc74ef29a586e6d2559b113cdbb69"
test_script: "./build_tools/cmake/test_riscv.sh"
- platform: riscv
architecture: rv32-linux
abi: ilp32d
docker_image: "gcr.io/iree-oss/riscv@sha256:cd2ee29950737f44b5ec54a32c37746e951fc74ef29a586e6d2559b113cdbb69"
test_script: "./build_tools/cmake/test_riscv.sh"
- platform: riscv
architecture: rv32-baremetal
abi: ilp32
docker_image: "gcr.io/iree-oss/riscv@sha256:cd2ee29950737f44b5ec54a32c37746e951fc74ef29a586e6d2559b113cdbb69"
test_script: "./tests/riscv32/smoke.sh"
env:
PLATFORM: ${{ matrix.target.platform }}
ARCHITECTURE: ${{ matrix.target.architecture }}
ABI: ${{ matrix.target.abi }}
DOCKER_IMAGE: ${{ matrix.target.docker_image }}
TEST_SCRIPT: ${{ matrix.target.test_script }}
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Build cross-compiling target"
run: |
./build_tools/github_actions/docker_run.sh \
--env "${PLATFORM^^}_ARCH=${ARCHITECTURE}" \
--env "BUILD_${PLATFORM^^}_DIR=build-${PLATFORM}-${ARCHITECTURE}" \
--env "${PLATFORM^^}_ABI=${ABI}" \
--env "BUILD_PRESET=test" \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
"${DOCKER_IMAGE}" \
./build_tools/cmake/build_${PLATFORM}.sh
- name: "Test cross-compiling target"
run: |
./build_tools/github_actions/docker_run.sh \
--env "${PLATFORM^^}_ARCH=${ARCHITECTURE}" \
--env "BUILD_${PLATFORM^^}_DIR=build-${PLATFORM}-${ARCHITECTURE}" \
--env "BUILD_PRESET=test" \
"${DOCKER_IMAGE}" \
bash -euo pipefail -c \
"${TEST_SCRIPT}"
emscripten:
needs: [setup, build_all]
if: needs.setup.outputs.should-run == 'true' && needs.setup.outputs.ci-stage == 'postsubmit'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
env:
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting install from build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Building the runtime for the web using Emscripten"
run: |
build_tools/github_actions/docker_run.sh \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
gcr.io/iree-oss/emscripten@sha256:b576c4c695143a089066fe753259e9ec3c003658a85b40bf1a09236abc653ed6 \
build_tools/cmake/build_runtime_emscripten.sh
# TODO(#10391): Update the benchmark dependency with the new benchmark
# framework. This step only requires the compiled benchmark modules, not the
# results from the benchmark runs.
test_benchmark_suites:
needs: [setup, build_all, benchmarks]
if: needs.setup.outputs.should-run == 'true'
runs-on:
- self-hosted # must come first
- runner-group=${{ needs.setup.outputs.runner-group }}
- environment=${{ needs.setup.outputs.runner-env }}
- cpu
- os-family=Linux
strategy:
matrix:
target:
- platform: riscv
architecture: rv64
docker_image: "gcr.io/iree-oss/riscv@sha256:cd2ee29950737f44b5ec54a32c37746e951fc74ef29a586e6d2559b113cdbb69"
run_scripts: "./build_tools/cmake/build_riscv.sh && ./build_tools/cmake/test_riscv.sh"
- platform: riscv
architecture: rv32-linux
docker_image: "gcr.io/iree-oss/riscv@sha256:cd2ee29950737f44b5ec54a32c37746e951fc74ef29a586e6d2559b113cdbb69"
run_scripts: "./build_tools/cmake/build_riscv.sh && ./build_tools/cmake/test_riscv.sh"
- platform: host
architecture: x86_64
docker_image: "gcr.io/iree-oss/base@sha256:7c3027c48b94fc38e64488987fc7893c100526c57308d25cef0c6b76a2dfe117"
run_scripts: "./build_tools/cmake/test_benchmark_suites_host.sh"
env:
PLATFORM: ${{ matrix.target.platform }}
ARCHITECTURE: ${{ matrix.target.architecture }}
DOCKER_IMAGE: ${{ matrix.target.docker_image }}
RUN_SCRIPTS: ${{ matrix.target.run_scripts }}
BUILD_DIR: ${{ needs.build_all.outputs.build-dir }}
BUILD_DIR_ARCHIVE: ${{ needs.build_all.outputs.build-dir-archive }}
BUILD_DIR_GCS_ARTIFACT: ${{ needs.build_all.outputs.build-dir-gcs-artifact }}
BENCHMARKS_ARCHIVE: ${{ needs.benchmarks.outputs.benchmarks-archive }}
BENCHMARKS_GCS_ARTIFACT: ${{ needs.benchmarks.outputs.benchmarks-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Downloading build dir archive"
run: gcloud alpha storage cp "${BUILD_DIR_GCS_ARTIFACT}" "${BUILD_DIR_ARCHIVE}"
- name: "Extracting build dir archive"
run: tar -xf "${BUILD_DIR_ARCHIVE}" "${BUILD_DIR}/install"
- name: "Downloading benchmarks archive"
id: download_benchmarks
run: gcloud alpha storage cp "${BENCHMARKS_GCS_ARTIFACT}" "${BENCHMARKS_ARCHIVE}"
- name: "Extracting benchmarks archive"
run: tar -xf "${BENCHMARKS_ARCHIVE}"
# We may need to use a tar archive later, but for now we only download one npy file.
- name: "Download benchmark suite module expected output"
id: download_expected_output
env:
BUILD_BENCHMARK_SUITE_DIR: ${{ needs.benchmarks.outputs.benchmarks-dir }}/benchmark_suites
EXPECTED_OUTPUT_GCS_ARTIFACT: gs://iree-model-artifacts/deeplab_v3_fp32_input_0_expected_output.npy
EXPECTED_OUTPUT_FILE: deeplab_v3_fp32_input_0_expected_output.npy
run: |
gcloud alpha storage cp "${EXPECTED_OUTPUT_GSC_ARTIFACT}" "${BUILD_BENCHMARK_SUITE_DIR}/${EXPECTED_OUTPUT_FILE}"
echo "::set-output name=benchmark-suite-dir::${BUILD_BENCHMARK_SUITE_DIR}"
- name: "Build iree-run-module and test benchmark suite modules"
env:
BUILD_BENCHMARK_SUITE_DIR: ${{ steps.download_expected_output.outputs.benchmark-suite-dir }}
run: |
./build_tools/github_actions/docker_run.sh \
--env "${PLATFORM^^}_ARCH=${ARCHITECTURE}" \
--env "BUILD_${PLATFORM^^}_DIR=build-${PLATFORM}-${ARCHITECTURE}" \
--env "BUILD_PRESET=benchmark-suite-test" \
--env "IREE_HOST_BINARY_ROOT=${BUILD_DIR}/install" \
--env "BUILD_BENCHMARK_SUITE_DIR=${BUILD_BENCHMARK_SUITE_DIR}" \
"${DOCKER_IMAGE}" \
bash -euo pipefail -c \
"${RUN_SCRIPTS}"
##############################################################################
# Depends on all the other jobs to provide a single anchor that indicates the
# final status. Status reporting will become more sophisticated in the future
# and we can hopefully avoid the need to explicitly list every single job...
summary:
# Even if you have an explicit if condition, you still need to override
# GitHub's default behavior of not running if any dependencies failed.
if: always()
runs-on: ubuntu-20.04
needs:
# Basic
- build_all
- build_test_all_windows
- build_test_all_bazel
- test_all
- test_gpu
# Subsets
- build_test_runtime
- build_test_runtime_windows
# Tensorflow
- build_tf_integrations
- test_tf_integrations
- test_tf_integrations_gpu
# Model Coverage
- test_shark_model_suite
# Configurations
- asan
- tsan
# Crosscompilation
- cross_compile_and_test
- emscripten
# Benchmark pipeline
- benchmarks
# Test modules from benchmark pipeline
- test_benchmark_suites
steps:
- name: Getting combined job status
run: |
echo '${{ toJson(needs.*.result) }}' \
| jq --exit-status 'all(.=="success" or .=="skipped")' > /dev/null