// Comparison functor used to rank data-point indices by DESCENDING
// covariance with a prediction location (see its use in GP::inferSD).
// NOTE(review): this extraction has dropped braces/blank lines and fused
// original line numbers into the text; code is kept byte-identical.
27struct covariance_ordering
// Stores a non-owning reference to the covariance vector; the caller
// must keep the vector alive for the functor's lifetime.
29 covariance_ordering(Eigen::VectorXd
const &cov) : covariance_(cov) {}
// strict-weak-ordering predicate: true when index a has the larger
// covariance, so std::sort puts the most-relevant points first.
30 bool operator()(
int a,
int b)
const
32 return (covariance_[a] > covariance_[b]);
// referenced covariance values (not owned)
34 Eigen::VectorXd
const &covariance_;
// Default constructor: a GP with no covariance function and no data.
// All Eigen members are value-initialized to empty.
// NOTE(review): the extraction dropped at least one initializer line here
// (log_noise_sd_ is initialized in the other constructors) — confirm
// against the full file.
37GP::GP() : covFunc_(nullptr),
38 covFuncProj_(nullptr),
39 data_loc_(Eigen::VectorXd()),
40 data_out_(Eigen::VectorXd()),
41 data_var_(Eigen::VectorXd()),
42 gram_matrix_(Eigen::MatrixXd()),
43 alpha_(Eigen::VectorXd()),
44 chol_gram_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
46 use_explicit_trend_(false),
47 feature_vectors_(Eigen::MatrixXd()),
48 feature_matrix_(Eigen::MatrixXd()),
49 chol_feature_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
50 beta_(Eigen::VectorXd())
// Constructor taking a covariance function (signature line was lost in
// this extraction). The GP takes OWNERSHIP of a clone() of the supplied
// covariance function; ~GP deletes it. All data members start empty.
54 covFunc_(covFunc.clone()),
55 covFuncProj_(nullptr),
56 data_loc_(Eigen::VectorXd()),
57 data_out_(Eigen::VectorXd()),
58 data_var_(Eigen::VectorXd()),
59 gram_matrix_(Eigen::MatrixXd()),
60 alpha_(Eigen::VectorXd()),
61 chol_gram_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
63 use_explicit_trend_(false),
64 feature_vectors_(Eigen::MatrixXd()),
65 feature_matrix_(Eigen::MatrixXd()),
66 chol_feature_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
67 beta_(Eigen::VectorXd())
// Constructor taking a noise variance and a covariance function.
// Owns a clone() of covFunc (released in ~GP).
// NOTE(review): log_noise_sd_ is set to std::log(noise_variance) — i.e.
// the log of a VARIANCE is stored in a field named "noise_sd". Elsewhere
// the code uses std::exp(2 * log_noise_sd_) as the noise variance, which
// would square it again. Possible naming/semantics mismatch — verify
// against callers before changing anything.
70GP::GP(
const double noise_variance,
72 covFunc_(covFunc.clone()),
73 covFuncProj_(nullptr),
74 data_loc_(Eigen::VectorXd()),
75 data_out_(Eigen::VectorXd()),
76 data_var_(Eigen::VectorXd()),
77 gram_matrix_(Eigen::MatrixXd()),
78 alpha_(Eigen::VectorXd()),
79 chol_gram_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
80 log_noise_sd_(std::log(noise_variance)),
81 use_explicit_trend_(false),
82 feature_vectors_(Eigen::MatrixXd()),
83 feature_matrix_(Eigen::MatrixXd()),
84 chol_feature_matrix_(Eigen::LDLT<Eigen::MatrixXd>()),
85 beta_(Eigen::VectorXd())
// Destructor body: releases the raw owning pointers produced by clone().
// delete on nullptr is safe, so the default-constructed case is fine.
90 delete this->covFunc_;
91 delete this->covFuncProj_;
// Copy constructor: deep-copies all data members and clones both
// covariance functions so each GP owns its own instances.
94GP::GP(
const GP &that) :
96 covFuncProj_(nullptr),
97 data_loc_(that.data_loc_),
98 data_out_(that.data_out_),
99 data_var_(that.data_var_),
100 gram_matrix_(that.gram_matrix_),
102 chol_gram_matrix_(that.chol_gram_matrix_),
103 log_noise_sd_(that.log_noise_sd_),
104 use_explicit_trend_(that.use_explicit_trend_),
105 feature_vectors_(that.feature_vectors_),
106 feature_matrix_(that.feature_matrix_),
107 chol_feature_matrix_(that.chol_feature_matrix_),
// Deep-copy the owned covariance functions.
110 covFunc_ = that.covFunc_->
clone();
// NOTE(review): that.covFuncProj_ can legitimately be nullptr (see the
// default constructor); dereferencing it here without a null check looks
// like a potential crash — confirm a guard wasn't lost in extraction.
111 covFuncProj_ = that.covFuncProj_->
clone();
// Fragments of the covariance-function setters (intervening lines were
// dropped by the extraction). The guard presumably rejects replacing the
// covariance function once data has been inferred — TODO confirm.
117 if (data_loc_.size() != 0 || data_out_.size() != 0)
// Takes ownership of a clone of the supplied covariance function.
// NOTE(review): no visible delete of the previous covFunc_ — verify the
// original frees it before reassigning, otherwise this leaks.
120 covFunc_ = covFunc.
clone();
// Same ownership pattern for the output-projection covariance function.
128 covFuncProj_ = covFuncProj.
clone();
// Disables the output projection by clearing the projected covariance
// function pointer.
// NOTE(review): the visible code only nulls the pointer; if the original
// does not delete covFuncProj_ first (line lost in extraction), this
// would leak the clone — verify.
131void GP::disableOutputProjection()
134 covFuncProj_ =
nullptr;
// Copy assignment: clones the covariance function and copies the data
// and cached decompositions. (Self-assignment guard and deletion of the
// previously owned covFunc_ are not visible here — lines appear to have
// been dropped by the extraction; verify against the full file.)
137GP &GP::operator=(
const GP &that)
142 covFunc_ = that.covFunc_->
clone();
146 data_loc_ = that.data_loc_;
147 data_out_ = that.data_out_;
148 data_var_ = that.data_var_;
149 gram_matrix_ = that.gram_matrix_;
150 alpha_ = that.alpha_;
151 chol_gram_matrix_ = that.chol_gram_matrix_;
152 log_noise_sd_ = that.log_noise_sd_;
// Convenience overload: draws one GP sample at the given locations using
// a freshly generated standard-normal random vector, by delegating to
// the two-argument drawSample() below.
157Eigen::VectorXd GP::drawSample(
const Eigen::VectorXd &locations)
const
159 return drawSample(locations,
160 math_tools::generate_normal_random_matrix(locations.rows(), locations.cols()));
// Draws a sample from the GP at the given locations, using the supplied
// random vector (assumed standard normal — allows reproducible draws).
// If no data has been inferred, samples from the PRIOR; otherwise from
// the POSTERIOR conditioned on the stored data.
163Eigen::VectorXd GP::drawSample(
const Eigen::VectorXd &locations,
164 const Eigen::VectorXd &random_vector)
const
166 Eigen::MatrixXd prior_covariance;
167 Eigen::MatrixXd kernel_matrix;
168 Eigen::LLT<Eigen::MatrixXd> chol_kernel_matrix;
169 Eigen::MatrixXd samples;
// k(X*, X*): prior covariance between the query locations.
172 prior_covariance = covFunc_->
evaluate(locations, locations);
173 kernel_matrix = prior_covariance;
// No Gram matrix yet => no data inferred => sample from the prior.
175 if (gram_matrix_.cols() == 0)
// JITTER on the diagonal keeps the Cholesky numerically stable.
177 kernel_matrix = prior_covariance + JITTER * Eigen::MatrixXd::Identity(
178 prior_covariance.rows(), prior_covariance.cols());
// Posterior covariance: k(X*,X*) - k(X*,X) K^-1 k(X,X*), using the
// cached LDLT of the Gram matrix.
182 Eigen::MatrixXd mixed_covariance;
183 mixed_covariance = covFunc_->
evaluate(locations, data_loc_);
184 Eigen::MatrixXd posterior_covariance;
185 posterior_covariance = prior_covariance - mixed_covariance *
186 (chol_gram_matrix_.solve(mixed_covariance.transpose()));
187 kernel_matrix = posterior_covariance + JITTER * Eigen::MatrixXd::Identity(
188 posterior_covariance.rows(), posterior_covariance.cols());
// Transform the standard-normal vector through the Cholesky factor L
// so that samples ~ N(0, kernel_matrix).
190 chol_kernel_matrix = kernel_matrix.llt();
193 samples = chol_kernel_matrix.matrixL() * random_vector;
// Add observation noise (the rest of this expression was lost in the
// extraction — presumably an independent normal noise vector).
196 return samples + std::exp(log_noise_sd_) *
// Core inference step: (re)builds the Gram matrix from the stored data,
// factors it, and precomputes alpha_ = K^-1 y for later predictions.
// Requires that data has been set via infer(loc, out, ...) first.
202 assert(data_loc_.rows() > 0 &&
"Error: the GP is not yet initialized!");
// k(X, X): covariance between all pairs of data locations.
205 Eigen::MatrixXd data_cov = covFunc_->
evaluate(data_loc_, data_loc_);
// swap() avoids copying the (potentially large) covariance matrix.
208 gram_matrix_.swap(data_cov);
// No per-point variances supplied: use the global noise level
// (plus JITTER for numerical stability) on the diagonal.
209 if (data_var_.rows() == 0)
211 gram_matrix_ += (std::exp(2 * log_noise_sd_) + JITTER) *
212 Eigen::MatrixXd::Identity(gram_matrix_.rows(), gram_matrix_.cols());
// Otherwise: heteroscedastic noise, one variance per data point.
216 gram_matrix_ += data_var_.asDiagonal();
// Cache the LDLT factorization; reused by predict()/drawSample().
220 chol_gram_matrix_ = gram_matrix_.ldlt();
// alpha_ = K^-1 y, the weight vector for the predictive mean.
223 alpha_ = chol_gram_matrix_.solve(data_out_);
// Optional explicit (linear) trend: basis phi(x) = [1, x]^T, with
// beta_ the generalized-least-squares trend coefficients.
225 if (use_explicit_trend_)
227 feature_vectors_ = Eigen::MatrixXd(2, data_loc_.rows());
229 feature_vectors_.row(0) = Eigen::MatrixXd::Ones(1, data_loc_.rows());
230 feature_vectors_.row(1) = data_loc_.array();
232 feature_matrix_ = feature_vectors_ * chol_gram_matrix_.solve(feature_vectors_.transpose());
233 chol_feature_matrix_ = feature_matrix_.ldlt();
235 beta_ = chol_feature_matrix_.solve(feature_vectors_) * alpha_;
// Stores the training data (locations, outputs, optional per-point
// variances) and then runs the core inference (call lost at the end of
// this extraction fragment — presumably infer()).
239void GP::infer(
const Eigen::VectorXd &data_loc,
240 const Eigen::VectorXd &data_out,
241 const Eigen::VectorXd &data_var )
243 data_loc_ = data_loc;
244 data_out_ = data_out;
// Only store variances if the caller actually supplied them; an empty
// vector means "use the global noise level" in infer().
245 if (data_var.rows() > 0)
247 data_var_ = data_var;
// Sparse/subset inference: keeps only the n data points whose covariance
// with a prediction location is largest, then infers on that subset.
// If prediction_point is NaN, the last data location is used instead.
252void GP::inferSD(
const Eigen::VectorXd &data_loc,
253 const Eigen::VectorXd &data_out,
254 const int n,
const Eigen::VectorXd &data_var ,
255 const double prediction_point )
257 Eigen::VectorXd covariance;
259 Eigen::VectorXd prediction_loc(1);
// NaN sentinel => default to the most recent data location.
260 if ( math_tools::isNaN(prediction_point) )
263 prediction_loc = data_loc.tail(1);
267 prediction_loc << prediction_point;
// Covariance of every data point with the prediction location; used as
// the relevance score for subset selection.
271 covariance = covFunc_->
evaluate(data_loc, prediction_loc);
// Build an index permutation and sort it by descending covariance.
// NOTE(review): the loop body that fills index (presumably index[i]=i)
// was lost in this extraction.
274 std::vector<int> index(covariance.size(), 0);
275 for (
size_t i = 0 ; i != index.size() ; i++)
281 std::sort(index.begin(), index.end(),
282 covariance_ordering(covariance)
285 bool use_var = data_var.rows() > 0;
// Only subsample when we actually have more than n points.
287 if (n < data_loc.rows())
289 std::vector<double> loc_arr(n);
290 std::vector<double> out_arr(n);
291 std::vector<double> var_arr(n);
// Gather the n most relevant points (var_arr only if use_var —
// the guard appears to have been dropped by the extraction).
293 for (
int i = 0; i < n; ++i)
295 loc_arr[i] = data_loc[index[i]];
296 out_arr[i] = data_out[index[i]];
299 var_arr[i] = data_var[index[i]];
// Map the std::vector buffers back into Eigen vectors (copies on
// assignment, so the locals may safely go out of scope).
303 data_loc_ = Eigen::Map<Eigen::VectorXd>(loc_arr.data(), n, 1);
304 data_out_ = Eigen::Map<Eigen::VectorXd>(out_arr.data(), n, 1);
307 data_var_ = Eigen::Map<Eigen::VectorXd>(var_arr.data(), n, 1);
// Fewer than n points: keep everything.
312 data_loc_ = data_loc;
313 data_out_ = data_out;
316 data_var_ = data_var;
// Resets the GP to its untrained state: drops the Gram matrix, its
// factorization, and the stored data, releasing their memory.
324 gram_matrix_ = Eigen::MatrixXd();
325 chol_gram_matrix_ = Eigen::LDLT<Eigen::MatrixXd>();
326 data_loc_ = Eigen::VectorXd();
327 data_out_ = Eigen::VectorXd();
// Predicts the GP mean at the given locations; if variances is non-null,
// also writes the pointwise predictive variances into it.
// With no inferred data, returns the prior: zero mean, prior variances.
330Eigen::VectorXd GP::predict(
const Eigen::VectorXd &locations, Eigen::VectorXd* variances )
const
// k(X*, X*): prior covariance at the query locations.
333 Eigen::MatrixXd prior_cov = covFunc_->
evaluate(locations, locations);
335 if (data_loc_.rows() == 0)
337 if (variances !=
nullptr)
339 (*variances) = prior_cov.diagonal();
341 return Eigen::VectorXd::Zero(locations.size());
// k(X*, X): covariance between query and training locations.
346 Eigen::MatrixXd mixed_cov = covFunc_->
evaluate(locations, data_loc_);
// Explicit-trend basis phi(x) = [1, x]^T evaluated at the queries.
349 Eigen::MatrixXd phi(2, locations.rows());
350 if (use_explicit_trend_)
352 phi.row(0) = Eigen::MatrixXd::Ones(1, locations.rows());
353 phi.row(1) = locations.array();
355 return predict(prior_cov, mixed_cov, phi, variances);
// No trend: delegate with an empty phi.
357 return predict(prior_cov, mixed_cov, Eigen::MatrixXd(), variances);
// Like predict(), but evaluates covariances with the PROJECTED
// covariance function (covFuncProj_) when one is set; otherwise falls
// back (the fallback assignment is missing from this extraction — it
// presumably sets covFunc to covFunc_).
361Eigen::VectorXd GP::predictProjected(
const Eigen::VectorXd &locations, Eigen::VectorXd* variances )
const
366 if (covFuncProj_ ==
nullptr)
372 covFunc = covFuncProj_;
375 assert(covFunc !=
nullptr);
// Same flow as predict(): prior covariance first.
378 Eigen::MatrixXd prior_cov = covFunc->
evaluate(locations, locations);
// No data: prior mean (zero) and prior variances.
380 if (data_loc_.rows() == 0)
382 if (variances !=
nullptr)
384 (*variances) = prior_cov.diagonal();
386 return Eigen::VectorXd::Zero(locations.size());
391 Eigen::MatrixXd mixed_cov = covFunc->
evaluate(locations, data_loc_);
// Optional explicit linear trend, identical to predict().
393 Eigen::MatrixXd phi(2, locations.rows());
394 if (use_explicit_trend_)
397 phi.row(0) = Eigen::MatrixXd::Ones(1, locations.rows());
398 phi.row(1) = locations.array();
400 return predict(prior_cov, mixed_cov, phi, variances);
402 return predict(prior_cov, mixed_cov, Eigen::MatrixXd(), variances);
// Low-level predictive computation shared by predict()/predictProjected().
// Mean: m = k(X*,X) K^-1 y (+ trend correction R^T beta when enabled).
// Variance: diag(k(X*,X*) - k(X*,X) K^-1 k(X,X*) (+ trend uncertainty)).
406Eigen::VectorXd GP::predict(
const Eigen::MatrixXd &prior_cov,
const Eigen::MatrixXd &mixed_cov,
407 const Eigen::MatrixXd &phi , Eigen::VectorXd* variances )
// Predictive mean from the cached alpha_ = K^-1 y.
412 Eigen::VectorXd m = mixed_cov * alpha_;
// gamma = K^-1 k(X, X*), reused for both mean trend and variance.
415 Eigen::MatrixXd gamma = chol_gram_matrix_.solve(mixed_cov.transpose());
// Explicit-trend correction (Rasmussen & Williams-style basis-function
// treatment): R = phi - H gamma, mean += R^T beta.
420 if (use_explicit_trend_)
422 R = phi - feature_vectors_ * gamma;
424 m += R.transpose() * beta_;
// Variances requested: compute the full posterior covariance and take
// its diagonal.
427 if (variances !=
nullptr)
430 Eigen::MatrixXd v = prior_cov - mixed_cov * gamma;
// Extra uncertainty from estimating the trend coefficients.
433 if (use_explicit_trend_)
435 assert(R.size() > 0);
436 v += R.transpose() * chol_feature_matrix_.solve(R);
439 (*variances) = v.diagonal();
// Sets all hyperparameters from one stacked vector:
//   [ log_noise_sd, <covFunc params>, <covFunc extra params> ].
// NOTE(review): only the extra-parameter assignment is visible; the call
// setting the main covFunc_ parameters (original line ~449) appears to
// have been dropped by the extraction. Also note covFunc_'s extra count
// is used when sizing covFuncProj_'s tail — fine only if both functions
// always have the same extra-parameter count; verify.
444void GP::setHyperParameters(
const Eigen::VectorXd &hyperParameters)
446 assert(hyperParameters.rows() == covFunc_->
getParameterCount() + covFunc_->getExtraParameterCount() + 1 &&
447 "Wrong number of hyperparameters supplied to setHyperParameters()!");
// Element 0 is the (log) noise level.
448 log_noise_sd_ = hyperParameters[0];
450 covFunc_->setExtraParameters(hyperParameters.tail(covFunc_->getExtraParameterCount()));
// If data is already inferred, the Gram matrix must be rebuilt (the
// call doing so is not visible in this fragment).
451 if (data_loc_.rows() > 0)
// Keep the projected covariance function's extra parameters in sync.
457 if (covFuncProj_ !=
nullptr)
460 covFuncProj_->setExtraParameters(hyperParameters.tail(covFunc_->getExtraParameterCount()));
// Returns the stacked hyperparameter vector in the same layout that
// setHyperParameters() expects:
//   [ log_noise_sd, <covFunc params>, <covFunc extra params> ].
464Eigen::VectorXd GP::getHyperParameters()
const
466 Eigen::VectorXd hyperParameters(covFunc_->
getParameterCount() + covFunc_->getExtraParameterCount() + 1);
467 hyperParameters << log_noise_sd_, covFunc_->
getParameters(), covFunc_->getExtraParameters();
468 return hyperParameters;
// Enables the explicit linear trend model; takes effect on the next
// inference (when feature_vectors_/beta_ are rebuilt).
471void GP::enableExplicitTrend()
473 use_explicit_trend_ =
true;
// Disables the explicit linear trend model.
476void GP::disableExplicitTrend()
478 use_explicit_trend_ =
false;
Base class definition for covariance functions.
virtual Eigen::MatrixXd evaluate(const Eigen::VectorXd &x1, const Eigen::VectorXd &x2)=0
virtual void setParameters(const Eigen::VectorXd &params)=0
Method to set the hyper-parameters.
virtual int getParameterCount() const =0
Returns the number of hyper-parameters.
virtual const Eigen::VectorXd & getParameters() const =0
Returns the hyper-parameters.
virtual CovFunc * clone() const =0
Produces a clone to be able to copy the object.
The file holds the covariance functions that can be used with the GP class.
The GP class implements the Gaussian Process functionality.