diff --git a/docs/src/.vitepress/config.mts b/docs/src/.vitepress/config.mts
index 171b55be5..dafa3b08e 100644
--- a/docs/src/.vitepress/config.mts
+++ b/docs/src/.vitepress/config.mts
@@ -89,7 +89,9 @@ export default defineConfig({
             text: 'Building Blocks',
             items: [
               { text: 'LuxCore', link: '/api/Building_Blocks/LuxCore' },
               { text: 'LuxLib', link: '/api/Building_Blocks/LuxLib' },
-              { text: 'WeightInitializers', link: '/api/Building_Blocks/WeightInitializers' }
+              { text: 'WeightInitializers', link: '/api/Building_Blocks/WeightInitializers' },
+              { text: 'NNlib', link: 'https://fluxml.ai/NNlib.jl/dev/' },
+              { text: 'Activation Functions', link: 'https://fluxml.ai/NNlib.jl/dev/reference/#Activation-Functions' }
             ]
           },
           {
@@ -203,7 +205,9 @@ export default defineConfig({
           text: 'Building Blocks',
           collapsed: false,
           items: [
             { text: 'LuxCore', link: '/api/Building_Blocks/LuxCore' },
             { text: 'LuxLib', link: '/api/Building_Blocks/LuxLib' },
-            { text: 'WeightInitializers', link: '/api/Building_Blocks/WeightInitializers' }]
+            { text: 'WeightInitializers', link: '/api/Building_Blocks/WeightInitializers' },
+            { text: 'NNlib', link: 'https://fluxml.ai/NNlib.jl/dev/' },
+            { text: 'Activation Functions', link: 'https://fluxml.ai/NNlib.jl/dev/reference/#Activation-Functions' }]
         },
         {
           text: 'Domain Specific Modeling',
           collapsed: false,
           items: [
diff --git a/src/layers/basic.jl b/src/layers/basic.jl
index 7c9a30f88..9513fd3b4 100644
--- a/src/layers/basic.jl
+++ b/src/layers/basic.jl
@@ -15,6 +15,22 @@ Reshapes the passed array to have a size of `(dims..., :)`
 
   - AbstractArray of size `(dims..., size(x, ndims(x)))`
   - Empty `NamedTuple()`
+
+## Example
+
+```jldoctest
+julia> model = ReshapeLayer((2, 2))
+ReshapeLayer(output_dims = (2, 2, :))
+
+julia> rng = Random.default_rng();
+       Random.seed!(rng, 0);
+       ps, st = Lux.setup(rng, model);
+       x = randn(rng, Float32, (4, 1, 3));
+
+julia> y, st_new = model(x, ps, st);
+       size(y)
+(2, 2, 3)
+```
 """
 struct ReshapeLayer{N} <: AbstractExplicitLayer
     dims::NTuple{N, Int}
@@ -48,6 +64,22 @@ Flattens the passed array into a matrix.
 
   - AbstractMatrix of size `(:, size(x, ndims(x)))`
   - Empty `NamedTuple()`
+
+## Example
+
+```jldoctest
+julia> model = FlattenLayer()
+FlattenLayer()
+
+julia> rng = Random.default_rng();
+       Random.seed!(rng, 0);
+       ps, st = Lux.setup(rng, model);
+       x = randn(rng, Float32, (2, 2, 2, 2));
+
+julia> y, st_new = model(x, ps, st);
+       size(y)
+(8, 2)
+```
 """
 @kwdef @concrete struct FlattenLayer <: AbstractExplicitLayer
     N = nothing
@@ -100,6 +132,22 @@ end
 As the name suggests does nothing but allows pretty printing of layers. Whatever input
 is passed is returned.
+
+## Example
+
+```jldoctest
+julia> model = NoOpLayer()
+NoOpLayer()
+
+julia> rng = Random.default_rng();
+       Random.seed!(rng, 0);
+       ps, st = Lux.setup(rng, model);
+       x = 1
+1
+
+julia> y, st_new = model(x, ps, st)
+(1, NamedTuple())
+```
 """
 struct NoOpLayer <: AbstractExplicitLayer end
diff --git a/src/layers/containers.jl b/src/layers/containers.jl
index 9a59c3fa7..a11434a7e 100644
--- a/src/layers/containers.jl
+++ b/src/layers/containers.jl
@@ -122,6 +122,26 @@ with `connection`.
     `fields = layer_1, layer_2, ..., layer_N` (naming changes if using the kwargs API)
 
 See also [`SkipConnection`](@ref) which is `Parallel` with one identity.
+
+## Example
+
+```jldoctest
+julia> model = Parallel(nothing, Dense(2, 1), Dense(2, 1))
+Parallel(
+    layer_1 = Dense(2 => 1),            # 3 parameters
+    layer_2 = Dense(2 => 1),            # 3 parameters
+)         # Total: 6 parameters,
+          #        plus 0 states.
+
+julia> using Random;
+       rng = Random.seed!(123);
+       ps, st = Lux.setup(rng, model);
+       x1 = randn(rng, Float32, 2);
+       x2 = randn(rng, Float32, 2);
+
+julia> size.(first(model((x1, x2), ps, st)))
+((1,), (1,))
+```
 """
 @concrete struct Parallel{T <: NamedTuple} <: AbstractExplicitContainerLayer{(:layers,)}
     connection
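The `Parallel` doctest above uses `connection = nothing`, so a tuple of inputs maps branch-wise and a tuple of outputs comes back. A minimal sketch of the `SkipConnection` relationship the docstring mentions, assuming only `Lux` and `Random`; the reducing `+` connection and `Dense(2 => 2)` here are illustrative choices, not part of this patch:

```julia
using Lux, Random

# With a reducing `connection` such as `+`, Parallel combines the branch
# outputs instead of returning them as a tuple. Pairing a layer with
# NoOpLayer() reproduces a skip connection: y = dense(x) + x.
model = Parallel(+, Dense(2 => 2), NoOpLayer())

rng = Random.default_rng()
Random.seed!(rng, 0)
ps, st = Lux.setup(rng, model)

x = randn(rng, Float32, 2)
y, _ = model(x, ps, st)  # a single (non-tuple) input is fed to both branches
size(y)                  # (2,), same shape as x
```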