From 611cc06acf4abf8fbfba94171a085cb3ea53e30d Mon Sep 17 00:00:00 2001
From: MartinuzziFrancesco
Date: Fri, 8 Nov 2024 12:01:32 +0100
Subject: [PATCH] actually fixing docs

---
 docs/make.jl       |  2 +-
 src/indrnn_cell.jl | 24 ++++++++++++------------
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/docs/make.jl b/docs/make.jl
index 1ebc9b5..da31161 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -3,7 +3,7 @@ using Documenter
 include("pages.jl")
 DocMeta.setdocmeta!(RecurrentLayers, :DocTestSetup, :(using RecurrentLayers); recursive=true)
 
-mathengine = MathJax3()
+mathengine = Documenter.MathJax()
 
 makedocs(;
     modules=[RecurrentLayers],
diff --git a/src/indrnn_cell.jl b/src/indrnn_cell.jl
index c8c69e7..752f828 100644
--- a/src/indrnn_cell.jl
+++ b/src/indrnn_cell.jl
@@ -14,22 +14,22 @@ Flux.@layer IndRNNCell
         recurrent_kernel_init = glorot_uniform,
         bias = true)
 
-    # Arguments
+# Arguments
 
-    - `in => out`: input and inner dimension of the layer
-    - `σ`: activation function. Default is `tanh`
-    - `kernel_init`: initializer for the input to hidden weights
-    - `recurrent_kernel_init`: initializer for the hidden to hidden weights
-    - `bias`: include a bias or not. Default is `true`
+- `in => out`: input and inner dimension of the layer
+- `σ`: activation function. Default is `relu`
+- `kernel_init`: initializer for the input to hidden weights
+- `recurrent_kernel_init`: initializer for the hidden to hidden weights
+- `bias`: include a bias or not. Default is `true`
 
-    # Equations
-    ```math
-    \mathbf{h}_{t+1} = \sigma(\mathbf{W} \mathbf{x}_t + \mathbf{u} \odot \mathbf{h}_{t} + \mathbf{b})
-    ```
+# Equations
+```math
+\mathbf{h}_{t+1} = \sigma(\mathbf{W} \mathbf{x}_t + \mathbf{u} \odot \mathbf{h}_{t} + \mathbf{b})
+```
 
-    # Forward
+# Forward
 
-        rnncell(inp, [state])
+    rnncell(inp, [state])
 """
 function IndRNNCell((in, out)::Pair, σ=relu;
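
For context on the `docs/make.jl` change: the `mathengine` object only takes effect once it is handed to the HTML writer. Below is a minimal sketch of the usual wiring, assuming the standard Documenter pattern of passing it through `Documenter.HTML`; the `sitename` and surrounding options are illustrative, not taken from the repository.

```julia
using Documenter, RecurrentLayers

# The math engine chosen in the patch above; it controls how LaTeX in the
# docstrings (e.g. the ```math block in indrnn_cell.jl) is rendered in HTML.
mathengine = Documenter.MathJax()

makedocs(;
    modules=[RecurrentLayers],
    sitename="RecurrentLayers.jl",        # illustrative
    format=Documenter.HTML(; mathengine), # engine is passed to the HTML writer
)
```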
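And to make the docstring's forward convention `rnncell(inp, [state])` concrete, a minimal usage sketch. It assumes `RecurrentLayers` exports `IndRNNCell` with the signature shown in the patch and that calling the cell returns the updated hidden state (the return convention is not shown in this diff); dimensions and variable names are illustrative.

```julia
using Flux, RecurrentLayers

# 3 input features -> 5 hidden units; σ defaults to relu per the signature.
rnncell = IndRNNCell(3 => 5)

inp = rand(Float32, 3)    # input vector x_t
state = zeros(Float32, 5) # initial hidden state h_t

# Forward pass; per the docstring equation this computes
# h_{t+1} = σ.(Wi * x_t .+ u .* h_t .+ b), with u applied element-wise.
new_state = rnncell(inp, state)
```

The element-wise product ``\mathbf{u} \odot \mathbf{h}_t`` in the equation is the defining IndRNN design choice: each hidden unit receives recurrent feedback only from its own previous value, rather than from the full hidden state through a dense recurrent matrix as in a standard RNN.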