Ce serveur Gitlab sera éteint le 30 juin 2020, pensez à migrer vos projets vers les serveurs gitlab-research.centralesupelec.fr et gitlab-student.centralesupelec.fr !

Commit ae194338 authored by Dahua Lin's avatar Dahua Lin

UnivariateMixture tested

parent bcd1cdc1
......@@ -39,6 +39,8 @@ export
ContinuousMatrixDistribution,
SufficientStats,
AbstractMvNormal,
UnivariateMixture,
MultivariateMixture,
# distribution types
Arcsine,
......@@ -196,6 +198,7 @@ export
ncategories, # the number of categories in a Categorical distribution
pdf, # probability density function (ContinuousDistribution)
pmf, # probability mass function (DiscreteDistribution)
priorprobs, # prior probabilities
quantile, # inverse of cdf (defined for p in (0,1))
qqbuild, # build a paired quantiles data structure for qqplots
sampler, # create a Sampler object for efficient samples
......
......@@ -13,7 +13,7 @@ typealias MatrixvariateMixture MixtureModel{Matrixvariate}
#### Constructors
function MixtureModel{C<:Distribution}(components::Vector{C}, prior::Categorical)
length(complements) == ncategories(prior) ||
length(components) == ncategories(prior) ||
error("Inconsistent sizes of components and prior.")
VF = variate_form(C)
VS = value_support(C)
......@@ -34,10 +34,11 @@ length(d::MultivariateMixture) = dim(d.components[1])
size(d::MatrixvariateMixture) = size(d.components[1])
components(d::MixtureModel) = d.components
priorprobs(d::MixtureModel) = d.prior.prob
function mean(d::UnivariateMixture)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
m = 0.0
for i = 1:length(cs)
pi = p[i]
......@@ -49,8 +50,8 @@ function mean(d::UnivariateMixture)
end
function mean(d::MultivariateMixture)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
m = zeros(length(d))
for i = 1:length(cs)
pi = p[i]
......@@ -62,8 +63,8 @@ function mean(d::MultivariateMixture)
end
function var(d::UnivariateMixture)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
K = length(cs)
means = Array(Float64, K)
m = 0.0
......@@ -90,8 +91,8 @@ end
#### Evaluation
function _mixpdf1(d::MixtureModel, x)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
v = 0.0
for i = 1:length(cs)
pi = p[i]
......@@ -103,8 +104,8 @@ function _mixpdf1(d::MixtureModel, x)
end
function _mixpdf!(r::DenseArray, d::MixtureModel, x)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
fill!(r, 0.0)
t = Array(Float64, size(r))
for i = 1:length(cs)
......@@ -132,8 +133,8 @@ function _mixlogpdf1(d::MixtureModel, x)
# such that the argument of exp is in a reasonable range
#
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
K = length(cs)
lp = Array(Float64, K)
m = -Inf # m <- the maximum of log(p(cs[i], x)) + log(pri[i])
......@@ -157,8 +158,8 @@ function _mixlogpdf1(d::MixtureModel, x)
end
function _mixlogpdf!(r::DenseArray, d::MixtureModel, x)
cs = d.components
p = d.prior
cs = components(d)
p = priorprobs(d)
K = length(cs)
n = length(r)
Lp = Array(Float64, n, K)
......@@ -184,6 +185,7 @@ function _mixlogpdf!(r::DenseArray, d::MixtureModel, x)
fill!(r, 0.0)
for i = 1:K
if p[i] > 0.0
lp_i = view(Lp, :, i)
for j = 1:n
r[j] += exp(lp_i[j] - m[j])
end
......@@ -219,8 +221,16 @@ function MixtureSampler{VF,VS}(d::MixtureModel{VF,VS})
MixtureSampler{VF,VS,eltype(csamplers)}(csamplers, psampler)
end
# Draw a single sample from the mixture: first pick a component index
# from the prior Categorical, then sample that component distribution.
function rand(d::MixtureModel)
    k = rand(d.prior)
    return rand(d.components[k])
end
# Sampler counterpart of rand(d::MixtureModel): pick a component via the
# prior's sampler, then draw from the corresponding component sampler.
function rand(s::MixtureSampler)
    k = rand(s.psampler)
    return rand(s.csamplers[k])
end
# In-place sampling for a multivariate mixture: select a component via the
# prior's sampler, then fill `x` by sampling from that component's sampler.
# Fix: the previous unconstrained method read a nonexistent `s.csampler`
# field (typo for `csamplers`) and is removed; the method is restricted to
# MixtureSampler{Multivariate}, matching the DenseVector output shape.
_rand!(s::MixtureSampler{Multivariate}, x::DenseVector) = _rand!(s.csamplers[rand(s.psampler)], x)
# Build an efficient sampler object for repeated draws from `d`.
function sampler(d::MixtureModel)
    return MixtureSampler(d)
end
using Distributions
using Base.Test
# Univariate Mixture
# Validate a univariate mixture `g` against ground truth computed directly
# from its components and prior probabilities.
#   n  : number of evaluation points (drawn from g itself)
#   ns : number of samples used to check the empirical mean
function test_univariate_mixture(g::UnivariateMixture, n::Int, ns::Int)
    # Draw n evaluation points from the mixture.
    # Fix: was `for i = 1:10`, which left X[11:n] at zero and made most of
    # the pdf/logpdf checks below evaluate the same degenerate point x = 0.
    X = zeros(n)
    for i = 1:n
        X[i] = rand(g)
    end

    cs = components(g)
    pr = priorprobs(g)
    @assert length(cs) == length(pr)

    # mean: the mixture mean is the prior-weighted average of component means
    mu = 0.0
    for k = 1:length(cs)
        mu += pr[k] * mean(cs[k])
    end
    @test_approx_eq mean(g) mu

    # ground-truth densities: p0[i] = sum_k pr[k] * pdf(cs[k], X[i])
    p0 = zeros(n)
    for i = 1:n
        p0_i = 0.0
        x_i = X[i]
        for k = 1:length(cs)
            p0_i += pr[k] * pdf(cs[k], x_i)
        end
        p0[i] = p0_i
    end
    lp0 = log(p0)

    # scalar evaluation must match ground truth pointwise
    for i = 1:n
        @test_approx_eq pdf(g, X[i]) p0[i]
        @test_approx_eq logpdf(g, X[i]) lp0[i]
    end

    # vectorized evaluation must agree in type, length, and values
    p_e = pdf(g, X)
    lp_e = logpdf(g, X)
    @assert isa(p_e, Vector{Float64}) && length(p_e) == n
    @assert isa(lp_e, Vector{Float64}) && length(lp_e) == n
    @test_approx_eq p_e p0
    @test_approx_eq lp_e lp0

    # sampling: empirical mean over ns draws should approach the analytic mean
    Xs = rand(g, ns)
    @test isa(Xs, Vector{Float64})
    @test_approx_eq_eps mean(Xs) mean(g) 0.01
end
println(" testing UnivariateMixture")

# A three-component Gaussian mixture with prior weights (0.2, 0.5, 0.3).
gmix = MixtureModel(
    Normal[Normal(0.0, 1.0), Normal(2.0, 1.0), Normal(-4.0, 1.5)],
    [0.2, 0.5, 0.3])

# The constructor should infer a Univariate/Continuous mixture of Normals.
@test isa(gmix, MixtureModel{Univariate, Continuous, Normal})

# 1000 evaluation points, 10^6 samples for the empirical-mean check.
test_univariate_mixture(gmix, 1000, 10^6)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment