add benchmark results in README #8

Workflow file for this run

name: build
on: [push]
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        go-version: [1.23]
    steps:
      - name: Check out Source
        uses: actions/checkout@v4
      - name: Set up Go ${{ matrix.go-version }}
        uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}
          cache: true
      - name: Display Go version
        run: go version
      - name: Test
        run: go test -v -coverprofile=cover.out ./...
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          slug: jub0bs/errutil
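  # Compare benchmark results for the incoming changes ("new") against the
  # base branch ("previous"), then against the standard library's errors package.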
  benchmark:
    needs: test
    strategy:
      matrix:
        os: [ubuntu-latest]
        go-version: [1.23]
    name: Benchmark comparison ${{ matrix.os }} @ Go ${{ matrix.go-version }}
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out Code (previous)
        uses: actions/checkout@v4
        with:
          ref: ${{ github.base_ref }}
          path: previous
      - name: Check out Code (new)
        uses: actions/checkout@v4
        with:
          path: new
      - name: Set up Go ${{ matrix.go-version }}
        uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}
      - name: Install benchstat
        run: go install golang.org/x/perf/cmd/benchstat@latest
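      # In the benchmark steps below, -run '^$' skips all tests, -count 10
      # collects ten samples per benchmark for benchstat, and -benchtime 10000x
      # pins the iteration count so that samples are comparable across runs.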
      - name: Run Benchmark (previous)
        run: |
          cd previous
          go test -run '^$' -bench '^(BenchmarkAs|BenchmarkFind)$' -count 10 . -benchtime 10000x > benchmark.txt
      - name: Run Benchmark (new)
        run: |
          cd new
          go test -run '^$' -bench '^(BenchmarkAs|BenchmarkFind)$' -count 10 . -benchtime 10000x > benchmark.txt
      - name: Run benchstat
        # Mostly to compare allocations;
        # measurements of execution speed in GitHub Actions are unreliable.
        run: |
          benchstat previous/benchmark.txt new/benchmark.txt
      - name: Run Benchmark (against standard library)
        run: |
          cd new
          go test -run '^$' -bench '^(BenchmarkAsAgainstErrorsPkg|BenchmarkFindAgainstErrorsPkg)$' -count 10 . -benchtime 10000x > benchmark.txt
      - name: Run benchstat (against standard library)
        # Mostly to compare allocations;
        # measurements of execution speed in GitHub Actions are unreliable.
        run: |
          benchstat -col "/v@(errors errutil)" new/benchmark.txt
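The -col "/v@(errors errutil)" projection in the final step relies on the comparison benchmarks publishing each variant under a "v=..." sub-benchmark name, which benchstat splits into side-by-side columns. The sketch below only illustrates that naming convention; it is not the actual jub0bs/errutil benchmark code (the package's own As is not shown on this page, so errors.As stands in for both variants), and the type and identifier names are invented for the example.

package errutil_bench_test

import (
	"errors"
	"fmt"
	"testing"
)

// sentinelError is a hypothetical error type used only for this sketch.
type sentinelError struct{ msg string }

func (e *sentinelError) Error() string { return e.msg }

// BenchmarkAsAgainstErrorsPkg demonstrates the sub-benchmark naming that the
// -col "/v@(errors errutil)" projection expects: each variant runs under a
// "v=<name>" component, and benchstat renders one column per value of v.
func BenchmarkAsAgainstErrorsPkg(b *testing.B) {
	err := fmt.Errorf("wrapped: %w", &sentinelError{msg: "boom"})
	for _, v := range []string{"errors", "errutil"} {
		b.Run("v="+v, func(b *testing.B) {
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				// The errutil variant would call the package's own As here;
				// errors.As stands in because the real call isn't shown above.
				var target *sentinelError
				if !errors.As(err, &target) {
					b.Fatal("expected errors.As to match")
				}
			}
		})
	}
}

With names of that shape (for example BenchmarkAsAgainstErrorsPkg/v=errutil-4), the final step needs only the single new/benchmark.txt file to produce the errors-vs-errutil comparison table.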