name: SHARK

on:
  schedule:
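    # Runs daily at 10:00 UTC (GitHub Actions cron schedules use UTC).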
    - cron: '0 10 * * *'
  workflow_dispatch:

jobs:
  setup:
    runs-on: ubuntu-20.04
    env:
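      # These values are read by the configure_ci.py step below to decide
      # whether and where the benchmark jobs should run.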
      # The commit being checked out is the merge commit for the PR. Its first
      # parent will be the tip of main.
      BASE_REF: HEAD^
      PR_TITLE: ${{ github.event.pull_request.title }}
      PR_BODY: ${{ github.event.pull_request.body }}
    outputs:
      should-run: ${{ steps.configure.outputs.should-run }}
      ci-stage: ${{ steps.configure.outputs.ci-stage }}
      runner-env: ${{ steps.configure.outputs.runner-env }}
      runner-group: ${{ steps.configure.outputs.runner-group }}
      write-caches: ${{ steps.configure.outputs.write-caches }}
      shark-sha: ${{ steps.shark.outputs.shark-sha }}
      artifact-upload-dir: ${{ steps.shark.outputs.artifact-upload-dir }}
    steps:
      - name: "Checking out repository"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
        with:
          # We need the parent commit to do a diff
          fetch-depth: 2
      - name: "Configuring CI options"
        id: configure
        run: |
          # Just informative logging. There should only be two commits in the
          # history here, but limiting the depth helps when copying from a local
          # repo instead of using checkout, e.g. with
          # https://github.com/nektos/act where there will be more.
          git log --oneline --graph --max-count=3
          ./build_tools/github_actions/configure_ci.py
      - name: "Checking out SHARK tank"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
        with:
          repository: nod-ai/SHARK
          path: ${{ github.workspace }}/SHARK
      - name: "Calculating version info"
        id: shark
        run: |
          cd ${{ github.workspace }}/SHARK
          export SHARK_SHA="$(git rev-parse --short=4 HEAD)"
          echo "shark-sha=${SHARK_SHA}" >> $GITHUB_OUTPUT
          export DIR_NAME="$(date +'%Y-%m-%d').sha_${SHARK_SHA}.timestamp_$(date +'%s')"
          export GCS_ARTIFACT_DIR="gs://shark-benchmark-artifacts/${DIR_NAME}"
          echo "artifact-upload-dir=${GCS_ARTIFACT_DIR}" >> $GITHUB_OUTPUT

  benchmark_cpu:
    needs: setup
    if: needs.setup.outputs.should-run == 'true'
    runs-on:
      - self-hosted # must come first
      - runner-group=${{ needs.setup.outputs.runner-group }}
      - environment=${{ needs.setup.outputs.runner-env }}
      - cpu
      - os-family=Linux
    env:
      SHARK_SHA: ${{ needs.setup.outputs.shark-sha }}
      GCS_UPLOAD_DIR: ${{ needs.setup.outputs.artifact-upload-dir }}
      SHARK_OUTPUT_DIR: shark-output-dir
    steps:
      - name: "Checking out repository"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
        with:
          submodules: true
| - name: "Benchmarking SHARK tank on CPU" |
| # Only Tensorflow is working at the moment so limit benchmarking to TF models. |
| run: | |
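          # docker_run.sh runs the given command inside the pinned SHARK
          # benchmark image.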
          ./build_tools/github_actions/docker_run.sh \
            gcr.io/iree-oss/shark@sha256:764dd0af58955eaa46f785bac84c9dbc0f4301fa2cd2fc84c1a23996090b539a \
            ./build_tools/benchmarks/shark/run_shark.sh "${SHARK_SHA}" "cpu" "cpu" "${SHARK_OUTPUT_DIR}"
      - name: "Uploading artifacts"
        run: |
          gcloud alpha storage cp "${SHARK_OUTPUT_DIR}/**" "${GCS_UPLOAD_DIR}/"

  benchmark_cuda:
    needs: setup
    if: needs.setup.outputs.should-run == 'true'
    runs-on:
      - self-hosted # must come first
      - runner-group=${{ needs.setup.outputs.runner-group }}
      - environment=${{ needs.setup.outputs.runner-env }}
      - gpu
      - os-family=Linux
    env:
      SHARK_SHA: ${{ needs.setup.outputs.shark-sha }}
      GCS_UPLOAD_DIR: ${{ needs.setup.outputs.artifact-upload-dir }}
      SHARK_OUTPUT_DIR: shark-output-dir
    steps:
      - name: "Checking out repository"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
        with:
          submodules: true
| - name: "Benchmarking SHARK tank on CUDA" |
| # Only Tensorflow is working at the moment so limit benchmarking to TF models. |
| run: | |
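          # --gpus all exposes the host GPUs to the container so the CUDA
          # benchmarks can use them.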
          ./build_tools/github_actions/docker_run.sh \
            --gpus all \
            gcr.io/iree-oss/shark@sha256:764dd0af58955eaa46f785bac84c9dbc0f4301fa2cd2fc84c1a23996090b539a \
            ./build_tools/benchmarks/shark/run_shark.sh "${SHARK_SHA}" "cuda" "cuda" "${SHARK_OUTPUT_DIR}"
      - name: "Uploading artifacts"
        run: |
          gcloud alpha storage cp "${SHARK_OUTPUT_DIR}/**" "${GCS_UPLOAD_DIR}/"

  generate_report:
    needs: [setup, benchmark_cpu, benchmark_cuda]
    if: needs.setup.outputs.should-run == 'true'
    runs-on:
      - self-hosted # must come first
      - runner-group=${{ needs.setup.outputs.runner-group }}
      - environment=${{ needs.setup.outputs.runner-env }}
      - cpu
      - os-family=Linux
    env:
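      # Baseline results to compare against, plus a fixed location that always
      # holds the most recently generated summary.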
      GCS_BASELINES_DIR: "gs://shark-benchmark-artifacts/baselines"
      GCS_LATEST_DIR: "gs://shark-benchmark-artifacts/latest"
      GCS_UPLOAD_DIR: ${{ needs.setup.outputs.artifact-upload-dir }}
      BENCHMARK_RESULTS_DIR: benchmark-results
    steps:
      - name: "Checking out repository"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
        with:
          submodules: true
| - name: "Download benchmark results" |
| run: | |
| mkdir "${BENCHMARK_RESULTS_DIR}" |
| gcloud alpha storage cp "${GCS_UPLOAD_DIR}/**" "${BENCHMARK_RESULTS_DIR}/" |
| - name: "Downloading baselines" |
| run: | |
| gcloud alpha storage cp "${GCS_BASELINES_DIR}/**" "${BENCHMARK_RESULTS_DIR}/" |
| - name: "Generating report" |
| run: | |
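          # run_report.sh builds summary.html from the freshly downloaded
          # results and the CPU/CUDA baseline CSVs.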
          ./build_tools/github_actions/docker_run.sh \
            gcr.io/iree-oss/shark@sha256:764dd0af58955eaa46f785bac84c9dbc0f4301fa2cd2fc84c1a23996090b539a \
            ./build_tools/benchmarks/shark/run_report.sh "${BENCHMARK_RESULTS_DIR}" \
              "${BENCHMARK_RESULTS_DIR}/cpu_baseline.csv" \
              "${BENCHMARK_RESULTS_DIR}/cuda_baseline.csv" \
              "${BENCHMARK_RESULTS_DIR}/summary.html"
      - name: "Uploading summary"
        run: |
          gcloud alpha storage cp "${BENCHMARK_RESULTS_DIR}/summary.html" "${GCS_UPLOAD_DIR}/"
      - name: "Updating latest"
        run: |
          gcloud alpha storage cp "${BENCHMARK_RESULTS_DIR}/summary.html" "${GCS_LATEST_DIR}/"