From f9d1ee97ff257c6abc434c00ece612588a54a9b4 Mon Sep 17 00:00:00 2001
From: Oscar Dowson
Date: Thu, 29 Jun 2023 21:34:44 +1200
Subject: [PATCH] Refactor test/Benchmarks (#2234)

---
 test/Benchmarks/Benchmarks.jl | 80 ++++++++++++++++++++---------------
 test/runtests.jl              |  1 +
 2 files changed, 46 insertions(+), 35 deletions(-)

diff --git a/test/Benchmarks/Benchmarks.jl b/test/Benchmarks/Benchmarks.jl
index 29d4a8483a..9a08f1399e 100644
--- a/test/Benchmarks/Benchmarks.jl
+++ b/test/Benchmarks/Benchmarks.jl
@@ -4,62 +4,72 @@
 # Use of this source code is governed by an MIT-style license that can be found
 # in the LICENSE.md file or at https://opensource.org/licenses/MIT.
 
+module TestBenchmarks
+
 using Test
+
 import MathOptInterface as MOI
-import MathOptInterface.Utilities as MOIU
 
-const NUM_BENCHMARKS = length(MOI.Benchmarks.BENCHMARKS)
+function runtests()
+    for name in names(@__MODULE__; all = true)
+        if startswith("$(name)", "test_")
+            @testset "$(name)" begin
+                getfield(@__MODULE__, name)()
+            end
+        end
+    end
+    return
+end
 
-@testset "suite" begin
+function test_suite()
     suite = MOI.Benchmarks.suite() do
-        return MOIU.MockOptimizer(MOIU.Model{Float64}())
+        return MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}())
     end
-    @test length(suite.data) == NUM_BENCHMARKS
-
+    @test length(suite.data) == length(MOI.Benchmarks.BENCHMARKS)
     suite = MOI.Benchmarks.suite(exclude = [r"delete_"]) do
-        return MOIU.MockOptimizer(MOIU.Model{Float64}())
+        return MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}())
     end
     # Note: update this value whenever more benchmarks are added to
     # `src/Benchmarks/Benchmarks.jl`.
-    @test 6 <= length(suite.data) <= NUM_BENCHMARKS - 3
+    @test 6 <= length(suite.data) <= length(MOI.Benchmarks.BENCHMARKS) - 3
+    return
 end
 
-@testset "Perform benchmark" begin
+function test_baseline()
     params = joinpath(@__DIR__, "baseline_params.json")
     baseline = joinpath(@__DIR__, "baseline_baseline.json")
     @test !isfile(params)
     @test !isfile(baseline)
-    @testset "create_baseline" begin
-        suite = MOI.Benchmarks.suite() do
-            return MOIU.MockOptimizer(MOIU.Model{Float64}())
-        end
-        MOI.Benchmarks.create_baseline(
-            suite,
-            "baseline";
-            directory = @__DIR__,
-            seconds = 2,
-            verbose = true,
-        )
+    suite = MOI.Benchmarks.suite() do
+        return MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}())
     end
+    MOI.Benchmarks.create_baseline(
+        suite,
+        "baseline";
+        directory = @__DIR__,
+        samples = 1,
+        verbose = true,
+    )
     @test isfile(params)
     @test isfile(baseline)
-    @testset "compare_against_baseline" begin
-        suite = MOI.Benchmarks.suite() do
-            return MOIU.MockOptimizer(MOIU.Model{Float64}())
-        end
-        MOI.Benchmarks.compare_against_baseline(
-            suite,
-            "baseline";
-            directory = @__DIR__,
-            seconds = 2,
-            verbose = true,
-        )
+    suite = MOI.Benchmarks.suite() do
+        return MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}())
     end
+    MOI.Benchmarks.compare_against_baseline(
+        suite,
+        "baseline";
+        directory = @__DIR__,
+        samples = 1,
+        verbose = true,
+    )
     rm(params)
     rm(baseline)
-    @testset "Report" begin
-        report = read(joinpath(@__DIR__, "report.txt"), String)
-        @test occursin("=> invariant", report)
-    end
+    report = read(joinpath(@__DIR__, "report.txt"), String)
+    @test occursin("=> invariant", report)
     rm(joinpath(@__DIR__, "report.txt"))
+    return
 end
+
+end
+
+TestBenchmarks.runtests()
diff --git a/test/runtests.jl b/test/runtests.jl
index 523f3f1cc4..5aade447bb 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -23,6 +23,7 @@ end
 for submodule in ["Nonlinear", "Bridges",
     "FileFormats", "Test", "Utilities", "Benchmarks"]
     include("$(submodule)/$(submodule).jl")
+    GC.gc()  # Force GC run here to reduce memory pressure
 end
 
 # Test hygiene of @model macro