Merge pull request #34 from MartinuzziFrancesco/fm/srnn
Adding StackedRNN
MartinuzziFrancesco authored Dec 22, 2024
2 parents ff95400 + 36c585e commit c529f08
Showing 19 changed files with 108 additions and 12 deletions.
1 change: 1 addition & 0 deletions docs/pages.jl
@@ -3,6 +3,7 @@ pages=[
    "API Documentation" => [
        "Cells" => "api/cells.md",
        "Layers" => "api/layers.md",
        "Wrappers" => "api/wrappers.md",
    ],
    "Roadmap" => "roadmap.md"
]
2 changes: 1 addition & 1 deletion docs/src/api/layers.md
@@ -1,4 +1,4 @@
# Cell wrappers
# Layers

```@docs
RAN
5 changes: 5 additions & 0 deletions docs/src/api/wrappers.md
@@ -0,0 +1,5 @@
# Wrappers

```@docs
StackedRNN
```
27 changes: 16 additions & 11 deletions src/RecurrentLayers.jl
@@ -45,21 +45,26 @@ end
export MGUCell, LiGRUCell, IndRNNCell, RANCell, LightRUCell, RHNCell,
    RHNCellUnit, NASCell, MUT1Cell, MUT2Cell, MUT3Cell, SCRNCell, PeepholeLSTMCell,
    FastRNNCell, FastGRNNCell

export MGU, LiGRU, IndRNN, RAN, LightRU, NAS, RHN, MUT1, MUT2, MUT3,
    SCRN, PeepholeLSTM, FastRNN, FastGRNN

export StackedRNN

@compat(public, (initialstates))

include("mgu_cell.jl")
include("ligru_cell.jl")
include("indrnn_cell.jl")
include("ran_cell.jl")
include("lightru_cell.jl")
include("rhn_cell.jl")
include("nas_cell.jl")
include("mut_cell.jl")
include("scrn_cell.jl")
include("peepholelstm_cell.jl")
include("fastrnn_cell.jl")
include("cells/mgu_cell.jl")
include("cells/ligru_cell.jl")
include("cells/indrnn_cell.jl")
include("cells/ran_cell.jl")
include("cells/lightru_cell.jl")
include("cells/rhn_cell.jl")
include("cells/nas_cell.jl")
include("cells/mut_cell.jl")
include("cells/scrn_cell.jl")
include("cells/peepholelstm_cell.jl")
include("cells/fastrnn_cell.jl")

include("wrappers/stackedrnn.jl")

end #module
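
The module declares `initialstates` as public but not exported; the new wrapper's constructor in `src/wrappers/stackedrnn.jl` (below) uses it to collect one initial state per stacked layer. A minimal sketch of calling it from outside the module, with an illustrative `MGU(2 => 4)` layer:

```julia
using RecurrentLayers

# `initialstates` is public but not exported, so qualify the call.
layer = MGU(2 => 4)                           # a single recurrent layer, 2 -> 4
state = RecurrentLayers.initialstates(layer)  # initial hidden state for that layer
```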
12 files renamed without changes.
61 changes: 61 additions & 0 deletions src/wrappers/stackedrnn.jl
@@ -0,0 +1,61 @@
# based on https://fluxml.ai/Flux.jl/stable/guide/models/recurrence/
struct StackedRNN{L,D,S}
    layers::L
    dropout::D
    states::S
end

Flux.@layer StackedRNN trainable=(layers)

@doc raw"""
    StackedRNN(rlayer, input_size => hidden_size, args...;
        num_layers = 1, kwargs...)

Constructs a stack of recurrent layers given the recurrent layer type.

Arguments:
- `rlayer`: Any recurrent layer such as [MGU](@ref), [RHN](@ref), etc., or
  [`Flux.RNN`](@extref), [`Flux.LSTM`](@extref), etc.
- `input_size`: Defines the input dimension for the first layer.
- `hidden_size`: Defines the dimension of the hidden state in each layer.
- `num_layers`: The number of layers to stack. Default is 1.
- `dropout`: Dropout probability applied between stacked layers. Default is 0.0.
- `args...`: Additional positional arguments passed to the recurrent layer.
- `kwargs...`: Additional keyword arguments passed to the recurrent layers.

Returns:
A `StackedRNN` instance containing the specified number of recurrent layers and their initial states.
"""
function StackedRNN(rlayer, (input_size, hidden_size)::Pair, args...;
        num_layers::Int = 1,
        dropout::Number = 0.0,
        dims = :,
        active::Union{Bool,Nothing} = nothing,
        rng = Flux.default_rng(),
        kwargs...)
    # build the container of stacked layers
    layers = []
    # dropout is only applied between layers, so it has no effect with a single layer
    if num_layers == 1 && dropout != 0.0
        @warn("Dropout is not applied when num_layers is 1.")
    end

    for idx in 1:num_layers
        in_size = idx == 1 ? input_size : hidden_size
        push!(layers, rlayer(in_size => hidden_size, args...; kwargs...))
    end
    states = [initialstates(layer) for layer in layers]

    return StackedRNN(layers,
        Dropout(dropout; dims = dims, active = active, rng = rng),
        states)
end

# run the input through each layer, applying dropout between consecutive layers
function (stackedrnn::StackedRNN)(inp::AbstractArray)
    for (idx, (layer, state)) in enumerate(zip(stackedrnn.layers, stackedrnn.states))
        inp = layer(inp, state)
        if idx != length(stackedrnn.layers)
            inp = stackedrnn.dropout(inp)
        end
    end
    return inp
end
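
For reference, a minimal usage sketch of the new wrapper; the input shape mirrors `test/test_wrappers.jl`, while the two-layer and `dropout = 0.1` settings are illustrative choices not taken from this commit:

```julia
using RecurrentLayers
using Flux

# Stack two MGU layers mapping 2 input features to a hidden size of 4;
# dropout is only applied between stacked layers, never after the last one.
wrap = StackedRNN(MGU, 2 => 4; num_layers = 2, dropout = 0.1)

inp = rand(Float32, 2, 3, 1)   # same (2, 3, 1) input shape as in the new tests
out = wrap(inp)
size(out)                      # (4, 3, 1): hidden_size replaces the input dimension
```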
4 changes: 4 additions & 0 deletions test/runtests.jl
@@ -11,4 +11,8 @@ end

@safetestset "Layers" begin
include("test_layers.jl")
end

@safetestset "Wrappers" begin
include("test_wrappers.jl")
end
20 changes: 20 additions & 0 deletions test/test_wrappers.jl
@@ -0,0 +1,20 @@
using RecurrentLayers
using Flux
using Test

layers = [RNN, GRU, GRUv3, LSTM, MGU, LiGRU, RAN, LightRU, NAS, MUT1, MUT2, MUT3,
    SCRN, PeepholeLSTM, FastRNN, FastGRNN]

@testset "Sizes for StackedRNN with layer: $layer" for layer in layers
    wrap = StackedRNN(layer, 2 => 4)

    inp = rand(Float32, 2, 3, 1)
    output = wrap(inp)
    @test output isa Array{Float32, 3}
    @test size(output) == (4, 3, 1)

    inp = rand(Float32, 2, 3)
    output = wrap(inp)
    @test output isa Array{Float32, 2}
    @test size(output) == (4, 3)
end
