# .github/workflows/Benchmark.yml
# NOTE(review): SOURCE was a whitespace-mangled unified diff; this is the
# reconstructed post-patch ("+") side, re-indented as valid workflow YAML.
# Label-gated benchmark job: runs JSOBenchmarks on a self-hosted macOS runner
# and posts the resulting report back to the PR.
name: Run benchmarks

# NOTE(review): `pull_request_target` runs with repository secrets
# (GIST_TOKEN, GITHUB_TOKEN) while `actions/checkout` may fetch PR code onto a
# self-hosted runner. The 'benchmarks' label gate below is the only safeguard —
# confirm only trusted maintainers can apply that label.
on:
  pull_request_target:
    types:
      - labeled

jobs:
  bmark:
    name: Julia ${{ matrix.version }} - macOS - ${{ matrix.arch }} - ${{ github.event_name }}
    # Only run when the PR carries the 'benchmarks' label.
    if: contains(github.event.pull_request.labels.*.name, 'benchmarks')
    runs-on: [self-hosted, macOS]
    strategy:
      fail-fast: false
      matrix:
        version:
          - 1
        arch:
          - aarch64
    steps:
      - uses: actions/checkout@v3
      - uses: julia-actions/setup-julia@v1
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}
      - uses: julia-actions/julia-buildpkg@v1
      - name: Installing non-registered dependencies
        # This step runs as a Julia script, not bash — see `shell:` below.
        run: |
          using Pkg
          pkg1 = PackageSpec(url = "https://github.com/JuliaSmoothOptimizers/JSOBenchmarks.jl.git", rev = "main")
          pkg_list = [pkg1]
          Pkg.add(pkg_list)
        shell: julia --project=benchmark --color=yes {0}
      - name: Install benchmark dependencies
        run: julia --project=benchmark -e 'using Pkg; Pkg.instantiate()'
      - name: Sanitize project name
        id: sanitize
        # Strip a trailing ".jl" from the repository name (e.g. Foo.jl -> Foo)
        # and expose it to later steps as steps.sanitize.outputs.REPONAME.
        run: echo "REPONAME=${{ github.event.repository.name }}" | sed -e 's/\.jl$//' >> $GITHUB_OUTPUT
      - name: Run benchmarks
        run: julia --project=benchmark -e 'using JSOBenchmarks; run_benchmarks("${{ steps.sanitize.outputs.REPONAME }}", "benchmark", reference_branch = "bmark-workflow")'
        env:
          # Token used by JSOBenchmarks to publish gists.
          GITHUB_AUTH: ${{ secrets.GIST_TOKEN }}
      # - name: Post benchmark results in PR
      #   uses: thollander/actions-comment-pull-request@v2
      #   with:
      #     GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      #     message: "Full benchmark results stored as artifacts. Summary: ${{ secrets.BMARK_GIST_URL }}"
      - name: Build comment
        id: build-comment
        uses: actions/github-script@v6
        with:
          github-token: ${{ github.token }}
          result-encoding: string
          # Read the markdown report that run_benchmarks wrote for this SHA.
          script: |
            const fs = require('fs');
            return fs.readFileSync("${{ github.workspace }}/${{ github.sha }}.md", "utf8").toString();
      - name: Comment in PR
        uses: thollander/actions-comment-pull-request@v2
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          message: ${{ steps.build-comment.outputs.result }}
      - name: Upload benchmark plots
        uses: edunad/actions-image@v2.0.0
        with:
          path: './*.png'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          title: 'Benchmarks at a Glance'
          annotationLevel: 'notice'
      - name: Upload artifacts
        # Bumped from upload-artifact@v2 (deprecated and disabled by GitHub in
        # 2024); inputs used here (`name`, multi-line `path`) are unchanged in v4.
        uses: actions/upload-artifact@v4
        with:
          name: benchmarks
          path: |
            profiles_commit_vs_main_*.svg
            *_vs_main_*.jld2
            main.md
            ${{ github.sha }}.md
            judgement_${{ github.sha }}.md