More lazy strings #2402

Merged: 1 commit, Mar 19, 2024
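Context for the diff below: lazy"..." is Base's @lazy_str macro (available since Julia 1.8). It builds a LazyString, which records the literal pieces and interpolated values and only assembles the final String when the message is actually printed. For error messages this means the call site no longer lowers to a string(...) call that the compiler must infer and specialize for each interpolated type, which is the usual latency/invalidation motivation for this kind of change. A minimal sketch of the before/after pattern (illustrative names, not Flux code):

# Eager: "$p" lowers to string("expected 0 ≤ p ≤ 1, got p = ", p) at this call
# site, so the compiler specializes that string call for the type of p.
check_eager(p) = 0 ≤ p ≤ 1 || throw(ArgumentError("expected 0 ≤ p ≤ 1, got p = $p"))

# Lazy: lazy"..." just captures the pieces in a LazyString; the text is only
# assembled when the ArgumentError is displayed.
check_lazy(p) = 0 ≤ p ≤ 1 || throw(ArgumentError(lazy"expected 0 ≤ p ≤ 1, got p = $p"))

Either way the thrown error reads the same; the difference is compile-time work at the call site, not runtime behavior.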
4 changes: 2 additions & 2 deletions src/functor.jl
@@ -64,12 +64,12 @@
"""
function testmode!(m, mode)
inactive = if mode isa Symbol
-mode === :auto || throw(ArgumentError("testmode! accepts only the symbol :auto, got :$mode"))
+mode === :auto || throw(ArgumentError(lazy"testmode! accepts only the symbol :auto, got :$mode"))
nothing
elseif mode isa Union{Bool,Nothing}
mode
else
throw(ArgumentError("testmode! does not accept $(repr(mode)) as the 2nd argument"))
throw(ArgumentError(lazy"testmode! does not accept $(repr(mode)) as the 2nd argument"))

end
foreach(x -> testmode!(x, inactive), trainable(m))
m
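Since LazyString is a subtype of AbstractString, it slots into ArgumentError (and the other exception types touched below) without changing anything downstream. A quick standalone check, assuming plain Julia ≥ 1.8 and using :train only as an example value:

msg = LazyString("testmode! accepts only the symbol :auto, got :", :train)
msg isa AbstractString                                                            # true
ArgumentError(msg).msg == "testmode! accepts only the symbol :auto, got :train"  # true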
6 changes: 3 additions & 3 deletions src/layers/basic.jl
@@ -75,7 +75,7 @@
end

_show_layers(io, layers::Tuple) = join(io, layers, ", ")
-_show_layers(io, layers::NamedTuple) = join(io, ["$k = $v" for (k, v) in pairs(layers)], ", ")
+_show_layers(io, layers::NamedTuple) = join(io, [lazy"$k = $v" for (k, v) in pairs(layers)], ", ")
_show_layers(io, layers::AbstractVector) = (print(io, "["); join(io, layers, ", "); print(io, "]"))

# This is a temporary and naive implementation
@@ -531,7 +531,7 @@
nl = length(layers)
nx = length(xs)
if (nl != nx)
throw(ArgumentError("Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
throw(ArgumentError(lazy"Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
end
end
ChainRulesCore.@non_differentiable _parallel_check(nl, nx)
@@ -616,7 +616,7 @@
lx = length(x)
N = length(layers)
if T <: Tuple && lx != N
throw(ArgumentError("PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
throw(ArgumentError(lazy"PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))

end
end
ChainRulesCore.@non_differentiable _pairwise_check(lx, N, T)
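The _show_layers change above is the one place here where the LazyStrings are actually printed on the normal path rather than thrown: join writes each element with print, and print on a LazyString renders its pieces, so the displayed layer names are unchanged. A standalone sketch (not Flux code) of that behaviour:

nt = (σ = identity, bias = false)
join(stdout, [lazy"$k = $v" for (k, v) in pairs(nt)], ", ")
# prints: σ = identity, bias = false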
4 changes: 2 additions & 2 deletions src/layers/macro.jl
@@ -70,7 +70,7 @@

for j in 1:length(rest)
ex = rest[j]
-Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got $ex")
+Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got ", ex)

name = if ex.args[1] == :trainable
:(Optimisers.trainable)
@@ -153,4 +153,4 @@

_noquotenode(s::Symbol) = s
_noquotenode(q::QuoteNode) = q.value # lets you write trainable=(:x,:y) instead of (x,y)
-_noquotenode(ex) = error("expected a symbol here, as a field name, but got $ex")
+_noquotenode(ex) = error("expected a symbol here, as a field name, but got ", ex)
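The two error calls in this file (and the one in src/outputsize.jl further down) switch from interpolating $ex into the message to passing ex as an extra argument. Both spellings end up calling string on the same pieces, so the thrown ErrorException text is identical; the varargs form just moves that string call from the macro's call site into error itself, which is in the same spirit as the lazy"..." changes. A small illustration with a made-up expression:

ex = :(trainable = (:weight,))   # illustrative value only

# Interpolated: this call site lowers to error(string("... got ", ex)).
err_interp() = error("expected `keyword = (fields...,)`, got $ex")

# Varargs: the pieces are handed over as-is; error calls string(...) internally
# when it actually builds the ErrorException.
err_varargs() = error("expected `keyword = (fields...,)`, got ", ex)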

6 changes: 3 additions & 3 deletions src/layers/normalise.jl
@@ -4,7 +4,7 @@
# Internal function, used only in this file.
_tidy_active(mode::Bool) = mode
_tidy_active(::Nothing) = nothing
-_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError("active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
+_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError(lazy"active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))

"""
Dropout(p; [dims, rng, active])
@@ -74,7 +74,7 @@
Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng())

function Dropout(p::Real; dims=:, active::Union{Bool,Nothing} = nothing, rng = default_rng())
-0 ≤ p ≤ 1 || throw(ArgumentError("Dropout expects 0 ≤ p ≤ 1, got p = $p"))
+0 ≤ p ≤ 1 || throw(ArgumentError(lazy"Dropout expects 0 ≤ p ≤ 1, got p = $p"))
Dropout(p, dims, active, rng)
end

@@ -126,7 +126,7 @@

AlphaDropout(p, active) = AlphaDropout(p, active, default_rng())
function AlphaDropout(p; rng = default_rng(), active::Union{Bool,Nothing} = nothing)
-0 ≤ p ≤ 1 || throw(ArgumentError("AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
+0 ≤ p ≤ 1 || throw(ArgumentError(lazy"AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
AlphaDropout(p, active, rng)
end
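From the user's side the rendered message is unchanged, only its construction is deferred; the constructor check should still surface roughly like this at the REPL (stacktrace elided):

julia> using Flux

julia> Dropout(1.5)
ERROR: ArgumentError: Dropout expects 0 ≤ p ≤ 1, got p = 1.5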

2 changes: 1 addition & 1 deletion src/optimise/train.jl
@@ -92,7 +92,7 @@ function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
loss(batchmemaybe(d)...)
end
if !isfinite(l)
-throw(DomainError("Loss is $l on data item $i, stopping training"))
+throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
end
update!(opt, ps, gs)
cb()
2 changes: 1 addition & 1 deletion src/outputsize.jl
@@ -230,7 +230,7 @@ end
function _makelazy(ex::Expr)
n = _underscoredepth(ex)
n == 0 && return ex
n == 1 && error("@autosize doesn't expect an underscore here: $ex")
n == 1 && error("@autosize doesn't expect an underscore here: ", ex)
n == 2 && return :($LazyLayer($(string(ex)), $(_makefun(ex)), nothing))
n > 2 && return Expr(ex.head, map(_makelazy, ex.args)...)
end
2 changes: 1 addition & 1 deletion src/train.jl
@@ -106,7 +106,7 @@ function train!(loss, model, data, opt; cb = nothing)
d_splat = d isa Tuple ? d : (d,)
l, gs = Zygote.withgradient(m -> loss(m, d_splat...), model)
if !isfinite(l)
-throw(DomainError("Loss is $l on data item $i, stopping training"))
+throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
end
opt, model = Optimisers.update!(opt, model, gs[1])
@logprogress Base.haslength(data) ? i/length(data) : nothing