-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add benchmarks for cpu crunching (#30)
* add benchmark for cpu crunching * verbose running and tuning * fix crunch plot, self tune benchmarks * nicer plot styling * format ticks with sprintf * stronger typing and more tags in benchmark results * renamed suite for compatibility with PkgBenchmark * update .gitignore with pkgbenchmark artifacts * add benchmark readme * fix formatting * fix plot ticks * create suite subdirectory for benchmarks * make results printing info log * Update benchmark/README.md Co-authored-by: Josh Ott <[email protected]> --------- Co-authored-by: Josh Ott <[email protected]>
- Loading branch information
Showing
5 changed files
with
124 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -6,6 +6,7 @@ authors = ["SmalRat <[email protected]>", | |
version = "0.1.0" | ||
|
||
[deps] | ||
BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" | ||
Cairo = "159f3aea-2a34-519c-b102-8c37f9878175" | ||
Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" | ||
Dagger = "d58978e5-989f-55fb-8d15-ea34adc7bf54" | ||
|
@@ -20,6 +21,7 @@ Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" | |
LightGraphs = "093fc24a-ae57-5d10-9952-331d41423f4d" | ||
MetaGraphs = "626554b9-1ddb-594c-aa3c-2596fe9399a5" | ||
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" | ||
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" | ||
TimespanLogging = "a526e669-04d3-4846-9525-c66122c55f63" | ||
|
||
[compat] | ||
|
@@ -29,4 +31,4 @@ Dagger = "0.18.11" | |
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" | ||
|
||
[targets] | ||
test = ["Test"] | ||
test = ["Test"] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
# FrameworkDemo benchmarks | ||
|
||
Run benchmarks from the project's main directory | ||
|
||
## Usage | ||
|
||
Run benchmark script | ||
|
||
``` | ||
julia --project benchmark/benchmarks.jl | ||
``` | ||
|
||
or benchmark with [PkgBenchmark](https://github.com/JuliaCI/PkgBenchmark.jl) | ||
|
||
```julia | ||
using PkgBenchmark | ||
import FrameworkDemo | ||
|
||
benchmarkpkg(FrameworkDemo) | ||
``` | ||
|
||
|
||
## Developing benchmarks | ||
|
||
The benchmarks are based on [BenchmarkTools](https://github.com/JuliaCI/BenchmarkTools.jl) and try to follow a standard benchmark structure with `BenchmarkTools.BenchmarkGroup` | ||
|
||
Add new benchmarks: | ||
|
||
```julia | ||
SUITE["new_benchmark"] = BenchmarkGroup(["tag1", "tag2", "etc"]) | ||
SUITE["new_benchmark"]["foo"] = @benchmarkable foo($bar) | ||
``` | ||
|
||
Add result processing function (e.g. for visualization) | ||
|
||
```julia | ||
function plot_foo(results::BenchmarkGroup) | ||
foo_results = results["new_benchmark"]["foo"] | ||
do_something(foo_results) | ||
end | ||
|
||
push!(result_processors, plot_foo) # register function | ||
``` | ||
|
||
The functions added to `result_processors` will be called automatically when executing the `benchmark/benchmarks.jl` script. Alternatively the functions can be added with the `postprocess` argument of `PkgBenchmark.benchmarkpkg`. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
using FrameworkDemo
using BenchmarkTools
using Plots

# Top-level benchmark suite. PkgBenchmark discovers this constant by the
# conventional name `SUITE`.
SUITE = BenchmarkGroup()

# Result-processing callbacks (e.g. plotting). Each is called with the
# finished results when this file is executed as a standalone script.
result_processors = Function[]

include("suite/cpu_crunching.jl")
||
# Standalone entry point: tune, run, then hand the results to every
# registered processor. Skipped when the file is `include`d (e.g. by
# PkgBenchmark, which drives SUITE itself).
if abspath(PROGRAM_FILE) == @__FILE__
    @info "tuning benchmark suite"
    tune!(SUITE, verbose = true)
    @info "running benchmark suite"
    results = run(SUITE, verbose = true)
    # Fix: this message previously duplicated "running benchmark suite",
    # making the log misleading when the results were printed.
    @info "benchmark results" results
    for processor in result_processors
        processor(results)
    end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
import Printf

SUITE["cpu_crunching"] = BenchmarkGroup(["cpu_crunching"])

SUITE["cpu_crunching"]["find_primes"] = BenchmarkGroup(["find_primes"])
# Problem sizes: 10 log-spaced values of n from 1 to 1e6.
for x in exp10.(range(0, stop = 6, length = 10))
    n = ceil(Int, x)
    # One evaluation, one sample: each run is long enough to stand on its own.
    SUITE["cpu_crunching"]["find_primes"][n] =
        @benchmarkable FrameworkDemo.find_primes($n) evals = 1 samples = 1
end

SUITE["cpu_crunching"]["crunch_for_seconds"] = BenchmarkGroup(["crunch_for_seconds"])
coef = FrameworkDemo.calculate_coefficients()
# Target durations: 10 log-spaced values between 1e-6 s and 10^1.5 s.
for t in exp10.(range(-6, stop = 1.5, length = 10))
    SUITE["cpu_crunching"]["crunch_for_seconds"][t] =
        @benchmarkable FrameworkDemo.crunch_for_seconds($t, $coef) evals = 1 samples = 1
end
|
||
# Decade tick positions (powers of ten) covering the full extent of `values`:
# from the decade at or below the minimum up to the decade at or above the
# maximum.
function log_ticks(values)
    vmin, vmax = extrema(values)
    return 10.0 .^ (floor(log10(vmin)):ceil(log10(vmax)))
end
|
||
# Plot minimum run time of find_primes(n) against n on log-log axes and save
# the figure to `bench_find_primes.png`.
function plot_find_primes(results::BenchmarkGroup)
    # Entries are keyed by n; sort by key so the x-axis is monotone.
    entries = sort(collect(results["cpu_crunching"]["find_primes"]), by = first)
    ns = first.(entries)
    # Minimum trial time is reported in nanoseconds; convert to seconds.
    times = [time(minimum(last(e))) * 1e-9 for e in entries]
    p = plot(ns, times,
             xaxis = :log10, yaxis = :log10,
             xlabel = "n", ylabel = "time [s]",
             title = "find_primes(n)", label = "find_primes",
             marker = (:circle, 5), linewidth = 3,
             xticks = log_ticks(ns), yticks = log_ticks(times),
             xguidefonthalign = :right, yguidefontvalign = :top,
             legend = :topleft)
    filename = "bench_find_primes.png"
    savefig(p, filename)
    @info "Results of benchmark cpu_crunching/find_primes written to $filename"
end

push!(result_processors, plot_find_primes)
|
||
# Plot the relative error between the requested and the measured duration of
# crunch_for_seconds(t) and save the figure to `bench_crunch_for_seconds.png`.
function plot_crunch_for_seconds(results::BenchmarkGroup)
    # Entries are keyed by the requested duration t; sort by key.
    entries = sort(collect(results["cpu_crunching"]["crunch_for_seconds"]), by = first)
    requested = first.(entries)
    # Minimum trial time is reported in nanoseconds; convert to seconds.
    measured = [time(minimum(last(e))) * 1e-9 for e in entries]
    relative_error = (measured .- requested) ./ requested
    p = plot(requested, relative_error,
             xaxis = :log10,
             xlabel = "t [s]", ylabel = "Time relative error",
             yformatter = v -> Printf.@sprintf("%.1f%%", 100 * v),
             xticks = log_ticks(requested),
             title = "crunch_for_seconds(t)", label = "crunch_for_seconds",
             marker = (:circle, 5), linewidth = 3,
             xguidefonthalign = :right, yguidefontvalign = :top,
             legend = :bottomright)
    filename = "bench_crunch_for_seconds.png"
    savefig(p, filename)
    @info "Results of benchmark cpu_crunching/crunch_for_seconds written to $filename"
end

push!(result_processors, plot_crunch_for_seconds)