fix: package versions and some test fixes
test: try fixing load order

revert: load order change
avik-pal committed Dec 30, 2024
1 parent 8ca0808 commit 919da19
Showing 12 changed files with 22 additions and 28 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -106,7 +106,7 @@ MPI = "0.20.19"
MacroTools = "0.5.13"
Markdown = "1.10"
NCCL = "0.1.1"
NNlib = "0.9.24"
NNlib = "0.9.26"
Optimisers = "0.4.1"
Preferences = "1.4.3"
Random = "1.10"
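For readers skimming the compat bumps: a bare version in a Julia [compat] table follows semver caret semantics, so this change only raises the allowed floor within the 0.9 series. A minimal sketch (Pkg.compat requires Julia ≥ 1.8; shown here purely for illustration):

using Pkg
# Caret semantics: "0.9.26" admits any 0.9.x with x ≥ 26 but excludes 0.10,
# so the bump forces NNlib ≥ 0.9.26 without allowing a breaking release.
Pkg.compat("NNlib", "0.9.26")   # edits the active project's [compat] entry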
2 changes: 1 addition & 1 deletion docs/Project.toml
@@ -51,7 +51,7 @@ LuxCore = "1.2"
LuxLib = "1.3.4"
LuxTestUtils = "1.5"
MLDataDevices = "1.6"
NNlib = "0.9.24"
NNlib = "0.9.26"
Optimisers = "0.4.1"
Pkg = "1.10"
Printf = "1.10"
2 changes: 1 addition & 1 deletion lib/LuxCore/Project.toml
@@ -40,7 +40,7 @@ EnzymeCore = "0.8.6"
Functors = "0.5"
MLDataDevices = "1.6"
Random = "1.10"
Reactant = "0.2.11"
Reactant = "0.2.6"
ReverseDiff = "1.15"
Setfield = "1"
Tracker = "0.2.36"
2 changes: 1 addition & 1 deletion lib/LuxLib/Project.toml
@@ -77,7 +77,7 @@ LuxCore = "1.2"
MKL = "0.7"
MLDataDevices = "1.6"
Markdown = "1.10"
NNlib = "0.9.24"
NNlib = "0.9.26"
Octavian = "0.3.28"
Preferences = "1.4.3"
Polyester = "0.7.15"
2 changes: 1 addition & 1 deletion lib/LuxLib/test/Project.toml
@@ -49,7 +49,7 @@ LoopVectorization = "0.12.171"
LuxTestUtils = "1.5"
MKL = "0.7"
MLDataDevices = "1.6"
NNlib = "0.9.21"
NNlib = "0.9.26"
Octavian = "0.3.28"
Pkg = "1.10"
Random = "1.10"
2 changes: 1 addition & 1 deletion lib/MLDataDevices/Project.toml
@@ -66,7 +66,7 @@ Metal = "1"
OneHotArrays = "0.2.5"
Preferences = "1.4"
Random = "1.10"
Reactant = "0.2.11"
Reactant = "0.2.6"
RecursiveArrayTools = "3.8"
ReverseDiff = "1.15"
SparseArrays = "1.10"
2 changes: 1 addition & 1 deletion src/helpers/training.jl
@@ -11,7 +11,7 @@ using Static: StaticBool, Static, False, True

using ..Lux: Lux, Utils, ReactantCompatibleOptimisers
using LuxCore: LuxCore, AbstractLuxLayer
-using MLDataDevices: MLDataDevices, ReactantDevice, get_device_type, cpu_device
+using MLDataDevices: MLDataDevices, ReactantDevice, get_device_type

"""
TrainState
2 changes: 1 addition & 1 deletion test/Project.toml
@@ -62,7 +62,7 @@ LuxLib = "1.3.4"
LuxTestUtils = "1.5"
MLDataDevices = "1.6"
MLUtils = "0.4.3"
NNlib = "0.9.24"
NNlib = "0.9.26"
Octavian = "0.3.28"
OneHotArrays = "0.2.5"
Optimisers = "0.4.1"
26 changes: 11 additions & 15 deletions test/helpers/loss_tests.jl
@@ -46,12 +46,12 @@

@testset "$mode" for (mode, aType, dev, ongpu) in MODES
x = rand(10) |> aType
-__f = sum ∘ Broadcast.BroadcastFunction(LuxOps.xlogx)
-@test_gradients(__f, x; atol=1.0f-3, rtol=1.0f-3, soft_fail=[AutoFiniteDiff()])
+@test_gradients(sum ∘ Broadcast.BroadcastFunction(LuxOps.xlogx),
+    x; atol=1.0f-3, rtol=1.0f-3, soft_fail=[AutoFiniteDiff()])

y = rand(10) |> aType
-__f = sum ∘ Broadcast.BroadcastFunction(LuxOps.xlogy)
-@test_gradients(__f, x, y; atol=1.0f-3, rtol=1.0f-3, soft_fail=[AutoFiniteDiff()])
+@test_gradients(sum ∘ Broadcast.BroadcastFunction(LuxOps.xlogy),
+    x, y; atol=1.0f-3, rtol=1.0f-3, soft_fail=[AutoFiniteDiff()])
end
end
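The rewritten calls inline the composed function instead of binding it to __f first. A self-contained sketch of what sum ∘ Broadcast.BroadcastFunction(f) computes (both are Base features, available since Julia 1.6):

using Test
# Broadcast.BroadcastFunction(f) is a callable (args...) -> f.(args...),
# so composing with sum gives x -> sum(f.(x)) without writing a closure:
g = sum ∘ Broadcast.BroadcastFunction(abs2)
@test g([1, 2, 3]) == sum(abs2.([1, 2, 3])) == 14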

@@ -79,8 +79,7 @@ end
@jet loss_mean(ŷ, y)
@jet loss_sum(ŷ, y)

-__f = Base.Fix2(loss_mean, y)
-@test_gradients(__f, ŷ; atol=1.0f-3, rtol=1.0f-3)
+@test_gradients(Base.Fix2(loss_mean, y), ŷ; atol=1.0f-3, rtol=1.0f-3)
end

@testset "MSLE" begin
@@ -93,8 +92,7 @@

@test @inferred(Zygote.gradient(MSLELoss(), ŷ, y)) isa Any broken=ongpu

-__f = Base.Fix2(MSLELoss(), y)
-@test_gradients(__f, ŷ; atol=1.0f-3, rtol=1.0f-3)
+@test_gradients(Base.Fix2(MSLELoss(), y), ŷ; atol=1.0f-3, rtol=1.0f-3)
end
end
end
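The hunks in this file swap the temporary __f bindings for inline Base.Fix2 calls. A minimal sketch of the pattern, since it recurs below:

# Base.Fix2(f, y) pins f's second argument, yielding a one-argument callable;
# convenient for differentiating a loss w.r.t. predictions with targets fixed.
f(a, b) = a - b
g = Base.Fix2(f, 10)
g(25) == f(25, 10) == 15   # true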
@@ -203,9 +201,8 @@ end

@test @inferred(Zygote.gradient(bceloss, σ.(logŷ), y)) isa Any

-__f = Base.Fix2(bceloss, y)
-σlogŷ = σ.(logŷ)
-@test_gradients(__f, σlogŷ; atol=1.0f-3, rtol=1.0f-3)
+@test_gradients(Base.Fix2(bceloss, y), σ.(logŷ); atol=1.0f-3, rtol=1.0f-3,
+    enzyme_set_runtime_activity=true)
end

@testset "Logit BinaryCrossEntropyLoss" begin
@@ -225,8 +222,8 @@ end

@test @inferred(Zygote.gradient(logitbceloss, logŷ, y)) isa Any

-__f = Base.Fix2(logitbceloss, y)
-@test_gradients(__f, logŷ; atol=1.0f-3, rtol=1.0f-3)
+@test_gradients(Base.Fix2(logitbceloss, y), logŷ; atol=1.0f-3, rtol=1.0f-3,
+    enzyme_set_runtime_activity=true)
end

@testset "BinaryFocalLoss" begin
@@ -248,8 +245,7 @@ end

@test @inferred(Zygote.gradient(BinaryFocalLoss(), ŷ, y)) isa Any broken=ongpu

-__f = Base.Fix2(BinaryFocalLoss(), y)
-@test_gradients(__f, ŷ; atol=1.0f-3, rtol=1.0f-3)
+@test_gradients(Base.Fix2(BinaryFocalLoss(), y), ŷ; atol=1.0f-3, rtol=1.0f-3)
end

@testset "FocalLoss" begin
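The new enzyme_set_runtime_activity=true keyword presumably enables Enzyme's runtime-activity mode for these gradient checks, deferring the active/inactive analysis of memory to runtime rather than erroring when it is ambiguous at compile time (e.g. for the broadcasted σ.(logŷ) input constructed inline above). A hedged sketch of the underlying Enzyme API, assuming Enzyme ≥ 0.12; the exact plumbing inside LuxTestUtils may differ:

using Enzyme
# set_runtime_activity(Reverse) returns a reverse mode with runtime activity
# enabled; recent Enzyme versions return the gradients as a tuple.
mode = set_runtime_activity(Reverse)
grads = Enzyme.gradient(mode, x -> sum(abs2, x), [1.0, 2.0, 3.0])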
2 changes: 1 addition & 1 deletion test/layers/normalize_tests.jl
@@ -56,7 +56,7 @@

@jet m(x, ps, Lux.testmode(st))
@test_gradients(sumabs2first, m, x, ps, st; atol=1.0f-3,
-    rtol=1.0f-3, skip_backends=[AutoFiniteDiff()], broken_backends)
+    rtol=1.0f-3, skip_backends=[AutoFiniteDiff()])

# with activation function
m = BatchNorm(2, sigmoid; affine)
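Dropping broken_backends here means those backends are expected to pass again. For context, a hedged sketch of the two keywords as they appear at the call sites in this suite (backend choices below are illustrative; assuming LuxTestUtils ≥ 1.5 as pinned in the compat tables):

using LuxTestUtils, ADTypes
f(x) = sum(abs2, x)
x = randn(Float32, 4)
# skip_backends never runs a backend; broken_backends runs it but records an
# expected failure, turning into an error once the backend starts passing.
@test_gradients(f, x; atol=1.0f-3, rtol=1.0f-3,
    skip_backends=[AutoFiniteDiff()], broken_backends=[AutoEnzyme()])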
4 changes: 1 addition & 3 deletions test/runtests.jl
@@ -127,9 +127,7 @@ const RETESTITEMS_NWORKER_THREADS = parse(
string(max(Hwloc.num_virtual_cores() ÷ RETESTITEMS_NWORKERS, 1))))

@testset "Lux.jl Tests" begin
-    for (i, tag) in enumerate(LUX_TEST_GROUP)
-        @info "Running tests for group: [$(i)/$(length(LUX_TEST_GROUP))] $tag"
-
+    @testset "[$(tag)] [$(i)/$(length(LUX_TEST_GROUP))]" for (i, tag) in enumerate(LUX_TEST_GROUP)
nworkers = (tag == "reactant") || (BACKEND_GROUP == "amdgpu") ? 0 :
RETESTITEMS_NWORKERS

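The rewritten loop folds the @info banner into a @testset-for, so each group reports as its own named, nested testset. A self-contained sketch of the construct (group names are hypothetical):

using Test
groups = ["core_layers", "reactant"]
@testset "[$(tag)] [$(i)/$(length(groups))]" for (i, tag) in enumerate(groups)
    @test !isempty(tag)   # one nested testset per iteration
end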
2 changes: 1 addition & 1 deletion test/setup_modes.jl
@@ -1,4 +1,4 @@
-using Lux, MLDataDevices
+using Lux, MLDataDevices, Pkg

if !@isdefined(BACKEND_GROUP)
const BACKEND_GROUP = lowercase(get(ENV, "BACKEND_GROUP", "all"))
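setup_modes.jl now also loads Pkg, presumably so backend packages can be installed on demand. The BACKEND_GROUP line in the surrounding context is plain ENV-driven configuration; a minimal sketch (the group names are illustrative):

# get(ENV, key, default) falls back to "all" when the variable is unset.
backend = lowercase(get(ENV, "BACKEND_GROUP", "all"))
backend in ("all", "cpu", "cuda", "amdgpu", "reactant") ||
    error("unknown BACKEND_GROUP: $backend")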
