
gaussian_process.h
/*
    SPDX-FileCopyrightText: 2014-2017 Max Planck Society.
    All rights reserved.

    SPDX-License-Identifier: BSD-3-Clause
*/

/**
 * @file
 * @date 2014-2017
 * @copyright Max Planck Society
 *
 * @author Edgar D. Klenske <edgar.klenske@tuebingen.mpg.de>
 * @author Stephan Wenninger <stephan.wenninger@tuebingen.mpg.de>
 * @author Raffi Enficiaud <raffi.enficiaud@tuebingen.mpg.de>
 *
 * @brief The GP class implements the Gaussian Process functionality.
 */

#ifndef GAUSSIAN_PROCESS_H
#define GAUSSIAN_PROCESS_H

#include <Eigen/Dense>
#include <vector>
#include <list>
#include <memory>
#include <utility>
#include <cstdint>
#include <cmath>
#include <limits>   // std::numeric_limits, used for the NaN default argument below

#include "covariance_functions.h"

// Constants

// Jitter is the minimal "noise" on the otherwise noiseless kernel matrices to
// make the Cholesky decomposition stable.
#define JITTER 1e-6

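/*
 * Illustrative sketch (not part of the original header): the jitter term is
 * typically added to the diagonal of an otherwise noiseless kernel matrix so
 * that the subsequent LDLT/Cholesky factorization stays numerically stable.
 * The toy matrix K below is rank-deficient on purpose.
 *
 *   Eigen::MatrixXd K = Eigen::MatrixXd::Ones(3, 3);  // rank-1, i.e. singular
 *   K.diagonal().array() += JITTER;                   // regularize the diagonal
 *   Eigen::LDLT<Eigen::MatrixXd> chol(K);             // factorization is now stable
 */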
class GP
{
private:
    covariance_functions::CovFunc* covFunc_;
    covariance_functions::CovFunc* covFuncProj_;
    Eigen::VectorXd data_loc_;
    Eigen::VectorXd data_out_;
    Eigen::VectorXd data_var_;
    Eigen::MatrixXd gram_matrix_;
    Eigen::VectorXd alpha_;
    Eigen::LDLT<Eigen::MatrixXd> chol_gram_matrix_;
    double log_noise_sd_;
    bool use_explicit_trend_;
    Eigen::MatrixXd feature_vectors_;
    Eigen::MatrixXd feature_matrix_;
    Eigen::LDLT<Eigen::MatrixXd> chol_feature_matrix_;
    Eigen::VectorXd beta_;

public:
    typedef std::pair<Eigen::VectorXd, Eigen::MatrixXd> VectorMatrixPair;

    GP(); // allowing the standard constructor makes the use so much easier!
    GP(const covariance_functions::CovFunc& covFunc);
    GP(const double noise_variance,
       const covariance_functions::CovFunc& covFunc);
    ~GP(); // Need to tidy up

    GP(const GP& that);
    GP& operator=(const GP& that);

    /*! Sets the covariance function
     *
     * This operation is possible only if there is no inference going on in the
     * current instance. This is useful after initialisation.
     */
    bool setCovarianceFunction(const covariance_functions::CovFunc& covFunc);

    /*!
     * Sets the output projection covariance function.
     */
    void enableOutputProjection(const covariance_functions::CovFunc& covFunc);

    /*!
     * Removes the output projection covariance function.
     */
    void disableOutputProjection();

    /*!
     * Returns a GP sample for the given locations.
     *
     * Returns a sample of the prior if the Gram matrix is empty.
     */
    Eigen::VectorXd drawSample(const Eigen::VectorXd& locations) const;

    /*!
     * Returns a sample of the GP based on a given vector of random numbers.
     */
    Eigen::VectorXd drawSample(const Eigen::VectorXd& locations,
                               const Eigen::VectorXd& random_vector) const;
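
    /*
     * Usage sketch (illustrative, not from the original sources), assuming a GP
     * instance `gp` that was constructed with some covariance function (see the
     * infer() sketch further below). The second overload expects a caller-provided
     * vector of random numbers (typically standard-normal draws), which makes the
     * sample reproducible.
     *
     *   Eigen::VectorXd grid = Eigen::VectorXd::LinSpaced(100, 0.0, 10.0);
     *   Eigen::VectorXd sample = gp.drawSample(grid);           // random draw
     *
     *   Eigen::VectorXd z(100);                                  // fill with N(0,1) draws
     *   Eigen::VectorXd fixed_sample = gp.drawSample(grid, z);   // reproducible draw
     */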

    /*!
     * Builds and inverts the Gram matrix for a given set of datapoints.
     *
     * This function works on the already stored data and doesn't return
     * anything. The work is done here; I/O happens somewhere else.
     */
    void infer();
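
    /*
     * Note (added for clarity, based on the member names gram_matrix_,
     * chol_gram_matrix_ and alpha_; the authoritative details live in
     * gaussian_process.cpp): inference amounts to the standard GP regression
     * algebra. With hypothetical local names X (data locations), y (outputs),
     * data_var (noise variances) and cov (the kernel):
     *
     *   Eigen::MatrixXd K = cov(X, X);          // prior covariance of the data
     *   K.diagonal() += data_var;               // per-point measurement noise
     *   K.diagonal().array() += JITTER;         // numerical stabilization
     *   Eigen::LDLT<Eigen::MatrixXd> chol(K);   // cached as chol_gram_matrix_
     *   Eigen::VectorXd alpha = chol.solve(y);  // alpha = K^{-1} y, cached as alpha_
     *
     * The posterior mean at a new location x* is then k(X, x*)^T * alpha, and the
     * posterior covariance is k(x*, x*) - k(X, x*)^T K^{-1} k(X, x*).
     */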

    /*!
     * Stores the given datapoints in the form of the data locations \a data_loc,
     * the output values \a data_out and the noise variance vector \a data_var.
     * Calls infer() every time so that the Gram matrix is rebuilt and the
     * Cholesky decomposition is computed.
     */
    void infer(const Eigen::VectorXd& data_loc,
               const Eigen::VectorXd& data_out,
               const Eigen::VectorXd& data_var = Eigen::VectorXd());
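
    /*
     * Usage sketch (illustrative only; the kernel type is assumed to come from
     * covariance_functions.h, and any available CovFunc implementation will do):
     *
     *   covariance_functions::PeriodicSquareExponential cov;  // assumed kernel type
     *   GP gp(1e-3, cov);                                     // GP with noise variance 1e-3
     *
     *   Eigen::VectorXd x(3), y(3);
     *   x << 0.0, 1.0, 2.0;                                   // data locations
     *   y << 0.1, 0.9, 2.1;                                   // measured outputs
     *   gp.infer(x, y);                                       // rebuilds Gram matrix + Cholesky
     */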

    /*!
     * Calculates the GP based on a subset-of-data (SD) approximation. The data
     * vector for the GP consists of a subset of the n most important data points,
     * where the importance is defined as the covariance to the prediction point.
     * If no prediction point is given, the last data point is used (extrapolation
     * mode).
     */
    void inferSD(const Eigen::VectorXd& data_loc,
                 const Eigen::VectorXd& data_out,
                 const int n,
                 const Eigen::VectorXd& data_var = Eigen::VectorXd(),
                 const double prediction_point = std::numeric_limits<double>::quiet_NaN());
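
    /*
     * Usage sketch, continuing the infer() example above: keep only the 2 data
     * points that correlate most strongly with a prediction point at location 1.5
     * (values are made up for illustration).
     *
     *   gp.inferSD(x, y, 2, Eigen::VectorXd(), 1.5);
     */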

    /*!
     * Sets the GP back to the prior:
     * removes the datapoints and empties the Gram matrix.
     */
    void clearData();

    /*!
     * Predicts the mean and covariance for a vector of locations.
     *
     * This function just builds the prior and mixed covariance matrices and
     * calls the other predict() overload afterwards.
     */
    Eigen::VectorXd predict(const Eigen::VectorXd& locations, Eigen::VectorXd* variances = nullptr) const;
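
    /*
     * Usage sketch, continuing the infer() example above: query the posterior
     * mean and (assuming `variances` is filled with the marginal variances) a
     * rough two-sigma band on a grid of locations.
     *
     *   Eigen::VectorXd grid = Eigen::VectorXd::LinSpaced(50, 0.0, 5.0);
     *   Eigen::VectorXd var;
     *   Eigen::VectorXd mean  = gp.predict(grid, &var);
     *   Eigen::VectorXd upper = mean + 2.0 * var.cwiseSqrt();
     *   Eigen::VectorXd lower = mean - 2.0 * var.cwiseSqrt();
     */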

    /*!
     * Predicts the mean and covariance for a vector of locations based on
     * the output projection.
     *
     * This function just builds the prior and mixed covariance matrices and
     * calls the other predict() overload afterwards.
     */
    Eigen::VectorXd predictProjected(const Eigen::VectorXd& locations, Eigen::VectorXd* variances = nullptr) const;
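
    /*
     * Usage sketch, continuing the example above: enable the output projection
     * with a second (assumed) kernel and query the projected prediction.
     *
     *   gp.enableOutputProjection(covariance_functions::PeriodicSquareExponential());
     *   Eigen::VectorXd proj_var;
     *   Eigen::VectorXd proj_mean = gp.predictProjected(grid, &proj_var);
     */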

    /*!
     * Does the real work for predict(). Solves the system via the Cholesky
     * decomposition for the given matrices. The Gram matrix and measurements
     * need to be cached already.
     */
    Eigen::VectorXd predict(const Eigen::MatrixXd& prior_cov, const Eigen::MatrixXd& mixed_cov,
                            const Eigen::MatrixXd& phi = Eigen::MatrixXd(), Eigen::VectorXd* variances = nullptr) const;

    /*!
     * Sets the hyperparameters to the given vector.
     */
    void setHyperParameters(const Eigen::VectorXd& hyperParameters);

    /*!
     * Returns the hyperparameters as a vector.
     */
    Eigen::VectorXd getHyperParameters() const;
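
    /*
     * Usage sketch: the layout and scale of the hyperparameter vector depend on
     * the configured covariance function, so a safe pattern is read-modify-write.
     *
     *   Eigen::VectorXd hyp = gp.getHyperParameters();
     *   // ... adjust individual entries according to the active CovFunc ...
     *   gp.setHyperParameters(hyp);
     */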

    /*!
     * Enables the use of an explicit linear basis function.
     */
    void enableExplicitTrend();

    /*!
     * Disables the use of an explicit linear basis function.
     */
    void disableExplicitTrend();
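
    /*
     * Usage sketch (assumption based on the member names feature_vectors_ and
     * beta_): with the explicit trend enabled, subsequent inference also fits a
     * linear basis on top of the GP.
     *
     *   gp.enableExplicitTrend();
     *   gp.infer(x, y);   // predictions now include the fitted linear trend
     */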

};

#endif  // ifndef GAUSSIAN_PROCESS_H