From 986be93a635a2d8209496762a52c9a78172245b8 Mon Sep 17 00:00:00 2001
From: minsoo
Date: Mon, 24 Jun 2024 13:49:59 -0700
Subject: [PATCH] docstrings + readme

---
 README.md                                   | 55 ++++++++++++---------
 src/MultiResponseVarianceComponentModels.jl | 12 ++++-
 src/eigen.jl                                | 42 +++++++++++++++-
 src/multivariate_calculus.jl                | 12 +++--
 src/parse.jl                                |  6 +--
 5 files changed, 94 insertions(+), 33 deletions(-)

diff --git a/README.md b/README.md
index 18e9822..b8b56ea 100644
--- a/README.md
+++ b/README.md
@@ -21,39 +21,48 @@ pkg> add https://github.com/Hua-Zhou/MultiResponseVarianceComponentModels.jl.git
 ## Examples
 ```julia
 using MultiResponseVarianceComponentModels, LinearAlgebra, Random
+
 # simulation
-Random.seed!(1234)
-n = 1_000 # n of observations
-d = 4 # n of responses
-p = 10 # n of covariates
-m = 3 # n of variance components
-X = rand(n, p)
-B = rand(p, d)
-V = [zeros(n, n) for _ in 1:m] # kernel matrices
-Σ = [zeros(d, d) for _ in 1:m] # variance components
-for i in 1:m
-    Vi = randn(n, n)
-    copy!(V[i], Vi' * Vi)
-    Σi = randn(d, d)
-    copy!(Σ[i], Σi' * Σi)
-end
-Ω = zeros(n * d, n * d) # overall nd-by-nd covariance matrix Ω
-for i = 1:m
-    Ω += kron(Σ[i], V[i])
+begin
+    Random.seed!(1234)
+    n = 1_000 # n of observations
+    d = 4 # n of responses
+    p = 10 # n of covariates
+    m = 3 # n of variance components
+    X = rand(n, p)
+    B = rand(p, d)
+    V = [zeros(n, n) for _ in 1:m] # kernel matrices
+    Σ = [zeros(d, d) for _ in 1:m] # variance components
+    for i in 1:m
+        Vi = randn(n, n)
+        copy!(V[i], Vi' * Vi)
+        Σi = randn(d, d)
+        copy!(Σ[i], Σi' * Σi)
+    end
+    Ω = zeros(n * d, n * d) # overall nd-by-nd covariance matrix Ω
+    for i = 1:m
+        Ω += kron(Σ[i], V[i])
+    end
+    Ωchol = cholesky(Ω)
+    Y = X * B + reshape(Ωchol.L * randn(n * d), n, d)
 end
-Ωchol = cholesky(Ω)
-Y = X * B + reshape(Ωchol.L * randn(n * d), n, d)
+
 # maximum likelihood estimation
 model = MRVCModel(Y, X, V)
 @timev fit!(model) # ~ 30 seconds
+
 # residual maximum likelihood estimation
 model = MRVCModel(Y, X, V; reml = true)
 @timev fit!(model) # ~ 30 seconds
+# variance components estimates
 model.Σ
-reduce(hcat, [hcat(vec(Σ[i]), vec(model.Σ[i])) for i in 1:m])
-model.Σcov # sampling variance by inverse of Fisher information matrix
-diag(model.Σcov) # (binomial(d, 2) + d) * m variance/covariance parameters
+# comparison of true values and estimates
+reduce(hcat, [hcat(vech(Σ[i]), vech(model.Σ[i])) for i in 1:m])
+# sampling variance by inverse of Fisher information matrix
+model.Σcov
+diag(model.Σcov) # m * (binomial(d, 2) + d) parameters
+# log-likelihood
 model.logl
 ```
 
 ## References
diff --git a/src/MultiResponseVarianceComponentModels.jl b/src/MultiResponseVarianceComponentModels.jl
index fce3fe1..f4caaab 100644
--- a/src/MultiResponseVarianceComponentModels.jl
+++ b/src/MultiResponseVarianceComponentModels.jl
@@ -11,11 +11,17 @@ export VCModel,
     MultiResponseVarianceComponentModel,
     MRVCModel,
     MRTVCModel,
-    # fit.jl
+    # fit.jl or eigen.jl
    fit!,
    loglikelihood!,
+    loglikelihood,
+    loglikelihood_reml,
    update_res!,
    update_Ω!,
+    update_B!,
+    update_B_reml!,
+    fisher_B!,
+    fisher_B_reml!,
    fisher_Σ!,
    # missing.jl
    permute,
@@ -27,7 +33,9 @@ export VCModel,
    kron_axpy!,
    kron_reduction!,
    vech,
-    ◺
+    ◺,
+    duplication,
+    commutation
 
 abstract type VCModel end
 
diff --git a/src/eigen.jl b/src/eigen.jl
index 8a41b13..495c96e 100644
--- a/src/eigen.jl
+++ b/src/eigen.jl
@@ -99,7 +99,7 @@ function fit!(
         update_B_reml!(model)
         update_res_reml!(model)
         mul!(model.R̃Φ_reml, model.R̃_reml, model.Φ)
-        copyto!(model.logl_reml, loglikelihood_reml!(model))
+        copyto!(model.logl_reml, loglikelihood_reml(model))
         model.se ? fisher_B_reml!(model) : nothing
     end
     log && IterativeSolvers.shrink!(history)
@@ -201,6 +201,11 @@ function update_res_reml!(
     model.R̃
 end
 
+"""
+    loglikelihood(model::MRTVCModel)
+
+Return the log-likelihood, assuming `model.Λ`, `model.Φ`, `model.logdetΣ2`, and `model.R̃Φ` are updated.
+"""
 function loglikelihood(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
@@ -217,7 +222,12 @@ function loglikelihood(
     logl /= -2
 end
 
-function loglikelihood_reml!(
+"""
+    loglikelihood_reml(model::MRTVCModel)
+
+Return the log-likelihood at the REML estimates, assuming `model.Λ`, `model.Φ`, `model.logdetΣ2`, and `model.R̃Φ_reml` are updated.
+"""
+function loglikelihood_reml(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
     n, d = size(model.Ỹ_reml, 1), size(model.Ỹ_reml, 2)
@@ -233,6 +243,11 @@ function loglikelihood_reml!(
     logl /= -2
 end
 
+"""
+    update_B!(model::MRTVCModel)
+
+Update the regression coefficients `model.B`, assuming `model.Λ` and `model.Φ` are updated.
+"""
 function update_B!(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
@@ -260,6 +275,11 @@ function update_B!(
     model.B
 end
 
+"""
+    update_B_reml!(model::MRTVCModel)
+
+Update the regression coefficients `model.B_reml`, assuming `model.Λ` and `model.Φ` are updated.
+"""
 function update_B_reml!(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
@@ -287,6 +307,12 @@ function update_B_reml!(
     model.B_reml
 end
 
+"""
+    fisher_B!(model::MRTVCModel)
+
+Compute the sampling variance-covariance `model.Bcov` of regression coefficients `model.B`,
+assuming `model.Λ` and `model.Φ` are updated.
+"""
 function fisher_B!(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
@@ -302,6 +328,12 @@ function fisher_B!(
     copyto!(model.Bcov, pinv(model.storage_pd_pd))
 end
 
+"""
+    fisher_B_reml!(model::MRTVCModel)
+
+Compute the sampling variance-covariance `model.Bcov_reml` of regression coefficients `model.B_reml`,
+assuming `model.Λ` and `model.Φ` are updated.
+"""
 function fisher_B_reml!(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
@@ -317,6 +349,12 @@ function fisher_B_reml!(
     copyto!(model.Bcov_reml, pinv(model.storage_pd_pd_reml))
 end
 
+"""
+    fisher_Σ!(model::MRTVCModel)
+
+Compute the sampling variance-covariance `model.Σcov` of variance component estimates `model.Σ`,
+assuming `model.Λ` and `model.Φ` are updated.
+"""
 function fisher_Σ!(
     model :: MRTVCModel{T}
     ) where T <: BlasReal
diff --git a/src/multivariate_calculus.jl b/src/multivariate_calculus.jl
index c599034..1377c4a 100644
--- a/src/multivariate_calculus.jl
+++ b/src/multivariate_calculus.jl
@@ -61,7 +61,7 @@ indicates `A` and `B` are symmetric.
     C
 end
 
-function duplication(n)
+function duplication(n::Int)
     D = zeros(Int, abs2(n), ◺(n))
     vechidx = 1
     for j in 1:n
@@ -82,8 +82,14 @@ Return lower triangular part of `A`.
vech(A::AbstractVecOrMat) = [A[i, j] for i in 1:size(A, 1), j in 1:size(A, 2) if i ≥ j] function commutation(m::Int, n::Int) - mn = m * n - reshape(kron(vec(Matrix{Float64}(I, m, m)), Matrix{Float64}(I, n, n)), mn, mn) + K = zeros(Int, m * n, m * n) + colK = 1 + @inbounds for j in 1:n, i in 1:m + rowK = n * (i - 1) + j + K[rowK, colK] = 1 + colK += 1 + end + K end commutation(m::Int) = commutation(m, m) diff --git a/src/parse.jl b/src/parse.jl index dec3c9f..2f9e8d3 100644 --- a/src/parse.jl +++ b/src/parse.jl @@ -33,7 +33,7 @@ function h2( ses = zeros(T, m, d) tot = sum([model.Σ[l] for l in 1:m]) idx = findvar(d) - s = binomial(d, 2) + d + s = ◺(d) for j in 1:d for i in 1:m w = [idx[j] + s * (l - 1) for l in 1:m] @@ -55,7 +55,7 @@ function h2( end function findvar(d::Int) - s, r = binomial(d, 2) + d, d + s, r = ◺(d), d idx = ones(Int, d) for j in 2:length(idx) idx[j] = idx[j - 1] + r @@ -77,7 +77,7 @@ function rg( rgs = [zeros(T, d, d) for _ in 1:m] ses = [ones(T, d, d) for _ in 1:m] idx = findvar(d) - s = binomial(d, 2) + d + s = ◺(d) for i in 1:m D = Diagonal(model.Σ[i]) for j in 1:d