📄 sampling.cpp
#include <cmath>
#include <boost/numeric/ublas/matrix.hpp>
#include "cholesky.hpp"
#include "lufactor.hpp"
#include "matrix_types.hpp"
#include "sampling.hpp"
using namespace std;
using namespace boost::numeric::ublas;
using namespace ulapack;
const double GaussLikelihood::PI = 3.14159265358979;
// Precompute the likelihood normaliser and the inverse-factored covariance.
GaussLikelihood::GaussLikelihood(const Vector &p, const Matrix &P) : gmean(p)
{
    int dim = static_cast<int>(p.size());
    Pfi = chol(P, false);                    // lower-triangular Cholesky factor of P
    C = pow(2.*PI, 0.5*dim) * det_chol(Pfi); // 0.5*dim avoids integer division
    inv_inplace(Pfi);
}
double GaussLikelihood::likelihood(const Vector &x) const
{
    return exp(compute_numerator(x)) / C;
}
double GaussLikelihood::loglikelihood(const Vector &x) const
{
    return compute_numerator(x) - log(C);
}
double GaussLikelihood::compute_numerator(const Vector &x) const
{
    Vector M = prod(Pfi, x-gmean);           // normalised innovation
    return -0.5*inner_prod(M, M);
}
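// Illustrative usage (a sketch; it assumes Vector/Matrix are the uBLAS
// vector<double>/matrix<double> typedefs from matrix_types.hpp). Kept disabled,
// like the prototypes below.
#if 0
double example_density()
{
    Vector mu(2);
    mu(0) = 0.; mu(1) = 0.;
    Matrix P = identity_matrix<double>(2);   // unit covariance
    GaussLikelihood g(mu, P);
    Vector x(2);
    x(0) = 1.; x(1) = -1.;
    return g.likelihood(x);                  // exp(-1) / (2*pi), about 0.0586
}
#endif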
#if 0
// TODOs: MATLAB prototypes still to be ported to C++; a draft C++ sketch follows each one.
void sample_gaussian(Vector &s, const Vector &x, const Matrix &P)
{
    len = length(x);
    S = chol(P)';              % lower-triangular factor, S*S' = P
    X = randn(len, n);         % n standard-normal columns
    s = S*X + x*ones(1, n);
}
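// Draft C++ port of the prototype above (a sketch, not final code). It draws a single
// sample to match the declared signature, whereas the MATLAB prototype draws n columns
// at once. It assumes chol(P, false) returns the lower-triangular factor L with
// L*L' = P, as used in the GaussLikelihood constructor, and that Vector/Matrix are the
// uBLAS typedefs from matrix_types.hpp. The standard-normal draws use <random>, which
// would need to be included; the seeding policy is a placeholder.
void sample_gaussian(Vector &s, const Vector &x, const Matrix &P)
{
    static std::mt19937 gen(0);              // placeholder RNG/seed
    std::normal_distribution<double> randn(0., 1.);
    Matrix L = chol(P, false);               // lower-triangular Cholesky factor
    Vector z(x.size());
    for (std::size_t i = 0; i < z.size(); ++i)
        z(i) = randn(gen);                   // z ~ N(0, I)
    s = x + prod(L, z);                      // s ~ N(x, P) because L*L' = P
}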
void sample_mean(Vector &xm, Matrix &P, const Matrix &x, const Vector &w)
{
    if abs(1-sum(w)) > eps, warning('Weights should be normalised'), end
    D = size(x,1);             % sample dimension
    w = repmat(w, D, 1);
    xw = w .* x;               % weight each sample (column)
    xm = sum(xw, 2);           % weighted mean
    P = xw*x' - xm*xm';        % weighted covariance
}
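// Draft C++ port of the prototype above (a sketch, not final code). It assumes the
// samples are the columns of x, that w holds one weight per column summing to one,
// and that Vector/Matrix are the uBLAS typedefs from matrix_types.hpp. column() and
// outer_prod() come from uBLAS (matrix_proxy.hpp would need to be included for column()).
void sample_mean(Vector &xm, Matrix &P, const Matrix &x, const Vector &w)
{
    xm = zero_vector<double>(x.size1());
    P  = zero_matrix<double>(x.size1(), x.size1());
    for (std::size_t j = 0; j < x.size2(); ++j) {
        const Vector xj = column(x, j);
        xm += w(j) * xj;                     // weighted sample mean
        P  += w(j) * outer_prod(xj, xj);     // weighted second moment
    }
    P -= outer_prod(xm, xm);                 // covariance = E[x x'] - xm xm'
}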
void resample_stratified(std::vector<std::size_t> &keep, const Vector &w)
{
    w = w / sum(w);            % normalise
    Neff = 1 / sum(w .^ 2);    % effective sample size (diagnostic only)
    len = length(w);
    keep = zeros(1, len);
    select = stratified_random(len);
    w = cumsum(w);
    ctr = 1;
    for i = 1:len
        while ctr <= len && select(ctr) < w(i)   % && short-circuits to keep ctr in range
            keep(ctr) = i;
            ctr = ctr + 1;
        end
    end
}
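// Draft C++ port of the prototype above (a sketch, not final code). keep receives, for
// each of the len strata, the index of the particle selected for it; the select points
// are one ordered uniform per stratum, standing in for the stratified_random() helper
// of the prototype. Needs <random> and <vector>; the effective-sample-size diagnostic
// Neff of the prototype is omitted here.
void resample_stratified(std::vector<std::size_t> &keep, const Vector &w)
{
    static std::mt19937 gen(0);              // placeholder RNG/seed
    std::uniform_real_distribution<double> uni(0., 1.);
    const std::size_t len = w.size();
    const double wsum = sum(w);              // normalise weights on the fly
    std::vector<double> select(len);
    for (std::size_t k = 0; k < len; ++k)
        select[k] = (k + uni(gen)) / len;    // one ordered point per stratum
    keep.resize(len);
    double cum = 0.;
    std::size_t ctr = 0;
    for (std::size_t i = 0; i < len; ++i) {
        cum += w(i) / wsum;                  // cumulative normalised weight
        while (ctr < len && select[ctr] < cum) {
            keep[ctr] = i;                   // stratum ctr keeps particle i
            ++ctr;
        }
    }
}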
#endif