# Copyright 2022 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# Workflow for execution-benchmark-related jobs. It is designed to be called
# from the main workflow ci.yml. The concurrency of this workflow is controlled
# by the caller's job.
name: Benchmark execution
on:
workflow_call:
inputs:
runner-group:
required: true
type: string
runner-env:
required: true
type: string
e2e-test-artifacts-dir:
required: true
type: string
e2e-test-artifacts-gcs-artifact-dir:
required: true
type: string
benchmark-tools-gcs-artifact-dir:
required: true
type: string
benchmark-presets:
required: true
type: string
outputs:
benchmark-results-dir:
description: "Local path that stores all benchmark results."
value: ${{ jobs.run_benchmarks.outputs.benchmark-results-dir }}
benchmark-results-gcs-artifact-dir:
description: "GCS path that stores all benchmark results."
value: ${{ jobs.run_benchmarks.outputs.benchmark-results-gcs-artifact-dir }}
env:
# This duplicates the variable from ci.yml. The variable needs to be in env
# rather than in the outputs of setup because it contains the run attempt and
# we want that to be the current attempt, not whichever attempt the setup job
# last ran in. It also can't be passed in via inputs because the env context
# isn't available there.
GCS_DIR: gs://iree-github-actions-${{ github.event_name == 'pull_request' && 'presubmit' || 'postsubmit' }}-artifacts/${{ github.run_id }}/${{ github.run_attempt }}
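# For illustration only (the run ID and attempt below are hypothetical), a
# postsubmit run resolves this to something like:
#   gs://iree-github-actions-postsubmit-artifacts/4567890123/1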
jobs:
export_benchmark_config:
runs-on:
- self-hosted # must come first
- runner-group=${{ inputs.runner-group }}
- environment=${{ inputs.runner-env }}
- cpu
- os-family=Linux
outputs:
benchmark-matrix: ${{ steps.export.outputs.benchmark-matrix }}
benchmark-config: ${{ steps.export.outputs.benchmark-config }}
benchmark-config-gcs-artifact: ${{ steps.upload.outputs.benchmark-config-gcs-artifact }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Exporting benchmark run config"
id: export
env:
BENCHMARK_CONFIG: "benchmark-config.json"
BENCHMARK_PRESETS: ${{ inputs.benchmark-presets }}
run: |
./build_tools/benchmarks/export_benchmark_config.py \
execution \
--benchmark_presets="${BENCHMARK_PRESETS}" \
--output="${BENCHMARK_CONFIG}"
echo "benchmark-config=${BENCHMARK_CONFIG}" >> "${GITHUB_OUTPUT}"
echo benchmark-matrix=$(jq \
'to_entries | map({"device_name": .key, "host_environment": .value.host_environment})' \
"${BENCHMARK_CONFIG}") \
>> "${GITHUB_OUTPUT}"
- name: "Uploading benchmark config"
id: upload
env:
BENCHMARK_CONFIG: ${{ steps.export.outputs.benchmark-config }}
BENCHMARK_CONFIG_GCS_ARTIFACT: ${{ env.GCS_DIR }}/${{ steps.export.outputs.benchmark-config }}
run: |
gcloud alpha storage cp \
"${BENCHMARK_CONFIG}" \
"${BENCHMARK_CONFIG_GCS_ARTIFACT}"
echo "benchmark-config-gcs-artifact=${BENCHMARK_CONFIG_GCS_ARTIFACT}" >> "${GITHUB_OUTPUT}"
run_benchmarks:
needs: [export_benchmark_config]
strategy:
# The matrix is dynamically generated by the export_benchmark_config job, so
# we only run the benchmarks specified in inputs.benchmark-presets.
# All matrix tasks are seen as a single job by GitHub CI, and that job can
# only output a single set of values.
matrix:
benchmark: ${{ fromJSON(needs.export_benchmark_config.outputs.benchmark-matrix) }}
runs-on:
- self-hosted # must come first
- runner-group=${{ inputs.runner-group }}
- environment=${{ inputs.runner-env }}
- machine-type=${{ matrix.benchmark.device_name }}
env:
DEVICE_NAME: ${{ matrix.benchmark.device_name }}
PLATFORM_ARCH: ${{ matrix.benchmark.host_environment.platform }}-${{ matrix.benchmark.host_environment.architecture }}
E2E_TEST_ARTIFACTS_GCS_ARTIFACT_DIR: ${{ inputs.e2e-test-artifacts-gcs-artifact-dir }}
E2E_TEST_ARTIFACTS_DIR: ${{ inputs.e2e-test-artifacts-dir }}
BENCHMARK_RESULTS_DIR: benchmark-results
outputs:
benchmark-results-dir: ${{ env.BENCHMARK_RESULTS_DIR }}
# Ideally this would be defined in env so it could be reused in the upload
# step, but GitHub CI doesn't allow referencing env.GCS_DIR from within env.
benchmark-results-gcs-artifact-dir: ${{ env.GCS_DIR }}/${{ env.BENCHMARK_RESULTS_DIR }}
steps:
- name: "Checking out repository"
uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # v2
- name: "Checking out runtime submodules"
run: ./build_tools/scripts/git/update_runtime_submodules.sh
- name: "Downloading benchmark tools"
id: download-tools
env:
# See `build_benchmark_tools` step in ci.yml for the name format of
# benchmark tools artifacts.
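# For example (platform and architecture values are illustrative), this
# resolves to something like "linux-x86_64-benchmark-tools.tar".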
BENCHMARK_TOOLS_ARCHIVE: ${{ env.PLATFORM_ARCH }}-benchmark-tools.tar
BENCHMARK_TOOLS_GCS_ARTIFACT: ${{ inputs.benchmark-tools-gcs-artifact-dir }}/${{ env.PLATFORM_ARCH }}-benchmark-tools.tar
run: |
gcloud alpha storage cp "${BENCHMARK_TOOLS_GCS_ARTIFACT}" "${BENCHMARK_TOOLS_ARCHIVE}"
echo "benchmark-tools-archive=${BENCHMARK_TOOLS_ARCHIVE}" >> "${GITHUB_OUTPUT}"
- name: "Downloading benchmark assets"
id: download-assets
env:
BENCHMARK_CONFIG: ${{ needs.export_benchmark_config.outputs.benchmark-config }}
BENCHMARK_CONFIG_GCS_ARTIFACT: ${{ needs.export_benchmark_config.outputs.benchmark-config-gcs-artifact }}
run: |
gcloud alpha storage cp "${BENCHMARK_CONFIG_GCS_ARTIFACT}" "${BENCHMARK_CONFIG}"
mkdir -p "${E2E_TEST_ARTIFACTS_DIR}"
jq -r \
--arg DEVICE_NAME "${DEVICE_NAME}" \
--arg GCS_ARTIFACT_DIR "${E2E_TEST_ARTIFACTS_GCS_ARTIFACT_DIR}" \
'.[$DEVICE_NAME] | .module_dir_paths | map("\($GCS_ARTIFACT_DIR)/\(.)") | join("\n")' \
"${BENCHMARK_CONFIG}" | \
gcloud alpha storage cp -r --read-paths-from-stdin "${E2E_TEST_ARTIFACTS_DIR}"
echo "benchmark-config=${BENCHMARK_CONFIG}" >> "${GITHUB_OUTPUT}"
- name: "Unpacking benchmark tools"
id: unpack-tools
env:
BENCHMARK_TOOLS_ARCHIVE: ${{ steps.download-tools.outputs.benchmark-tools-archive }}
# See `build_benchmark_tools` step in ci.yml for the name format of
# benchmark tools directory.
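# For example (platform and architecture values are illustrative), something
# like "linux-x86_64-benchmark-tools-dir", which is expected to contain the
# build/tools and build-traced/tools subdirectories referenced below.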
BENCHMARK_TOOLS_DIR: ${{ env.PLATFORM_ARCH }}-benchmark-tools-dir
run: |
tar -xf "${BENCHMARK_TOOLS_ARCHIVE}"
echo "normal-benchmark-tools-dir=${BENCHMARK_TOOLS_DIR}/build/tools" >> "${GITHUB_OUTPUT}"
echo "traced-benchmark-tools-dir=${BENCHMARK_TOOLS_DIR}/build-traced/tools" >> "${GITHUB_OUTPUT}"
- name: "Running benchmarks"
id: run
env:
BENCHMARK_CONFIG: ${{ steps.download-assets.outputs.benchmark-config }}
IREE_DOCKER_WRAPPER: ./build_tools/github_actions/docker_run.sh
IREE_NORMAL_BENCHMARK_TOOLS_DIR: ${{ steps.unpack-tools.outputs.normal-benchmark-tools-dir }}
IREE_TRACED_BENCHMARK_TOOLS_DIR: ${{ steps.unpack-tools.outputs.traced-benchmark-tools-dir }}
IREE_DEVICE_NAME: ${{ env.DEVICE_NAME }}
IREE_E2E_TEST_ARTIFACTS_DIR: ${{ env.E2E_TEST_ARTIFACTS_DIR }}
IREE_RUN_CONFIG: run-config.json
IREE_BENCHMARK_RESULTS: ${{ env.BENCHMARK_RESULTS_DIR }}/benchmark-results-${{ matrix.benchmark.device_name }}.json
run: |
mkdir -p "${BENCHMARK_RESULTS_DIR}"
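# Extract only this device's run configs from the full benchmark config.
# run_benchmarks.sh is expected to pick them up through the IREE_* environment
# variables set above (IREE_RUN_CONFIG in particular).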
jq --arg DEVICE_NAME "${IREE_DEVICE_NAME}" \
'.[$DEVICE_NAME] | .run_configs' \
"${BENCHMARK_CONFIG}" > "${IREE_RUN_CONFIG}"
./build_tools/benchmarks/run_benchmarks.sh
echo "benchmark-results=${IREE_BENCHMARK_RESULTS}" >> "${GITHUB_OUTPUT}"
- name: "Uploading benchmark results"
id: upload
env:
BENCHMARK_RESULTS: ${{ steps.run.outputs.benchmark-results }}
run: |
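# This lands the results at "${GCS_DIR}/benchmark-results/<results file>",
# i.e. under the benchmark-results-gcs-artifact-dir exported by this workflow.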
gcloud alpha storage cp \
"${BENCHMARK_RESULTS}" \
"${GCS_DIR}/${BENCHMARK_RESULTS}"