#ifndef SHARK_UNSUPERVISED_RBM_NEURONLAYERS_GAUSSIANLAYER_H
#define SHARK_UNSUPERVISED_RBM_NEURONLAYERS_GAUSSIANLAYER_H

///\brief Returns the bias values of the units.
const RealVector& bias()const{
	return m_bias;
}

///\brief Resizes this neuron layer to newSize neurons.
void resize(std::size_t newSize){
	m_bias.resize(newSize);
}
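// Note (editor's sketch, not part of the original header): minimal use of the accessors above,
// assuming the enclosing layer class (GaussianLayer in the original file) is complete.
// Names and values are illustrative only.
// \code
// GaussianLayer layer;
// layer.resize(5);                      // layer with five Gaussian units
// std::size_t n = layer.bias().size();  // n == 5; bias() exposes the per-unit bias terms
// \endcode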
///\brief Takes the input of the neurons and computes the sufficient statistics, i.e. the mean of the conditional Gaussian.
template<class Input, class BetaVector>
void sufficientStatistics(Input const& input, StatisticsBatch& statistics, BetaVector const& beta)const{
	SIZE_CHECK(input.size1() == statistics.size1());

	for(std::size_t i = 0; i != input.size1(); ++i){
		noalias(row(statistics,i)) = row(input,i)*beta(i)+m_bias;
	}
}
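// Note (editor's addition, not part of the original header): for unit-variance Gaussian units
// the sufficient statistic of unit i is its conditional mean. With input x_i, bias b_i and
// inverse temperature beta, the loop above stores, row by row,
//   mu_i = beta * x_i + b_i,
// so the conditional distribution of unit i given the other layer is N(mu_i, 1).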
///\brief Samples the states of the layer from the conditional Gaussian distribution given by the precomputed statistics.
///Flip-the-state sampling is not implemented for Gaussian units, so alpha is ignored.
template<class Matrix, class Rng>
void sample(StatisticsBatch const& statistics, Matrix& state, double alpha, Rng& rng)const{
	SIZE_CHECK(statistics.size1() == state.size1());
	SIZE_CHECK(statistics.size2() == state.size2());

	for(std::size_t i = 0; i != state.size1(); ++i){
		for(std::size_t j = 0; j != state.size2(); ++j){
			Normal<Rng> normal(rng, statistics(i,j), 1.0);
			state(i,j) = normal();
		}
	}
}
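// Note (editor's sketch, not part of the original header): drawing one batch of samples,
// assuming `statistics` was filled by sufficientStatistics() and `rng` is whatever generator
// type the RBM was instantiated with; `batchSize` and `layer` are illustrative names.
// \code
// RealMatrix states(batchSize, layer.size());
// layer.sample(statistics, states, 0.0, rng);  // each entry ~ N(statistics(i,j), 1)
// \endcode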
///\brief Computes the logarithm of the probability of the given states under the conditional distribution.
template<class Matrix>
RealVector logProbability(StatisticsBatch const& statistics, Matrix const& state)const{
	SIZE_CHECK(statistics.size1() == state.size1());
	SIZE_CHECK(statistics.size2() == state.size2());

	RealVector logProbabilities(state.size1(),1.0);
	for(std::size_t s = 0; s != state.size1(); ++s){
		for(std::size_t i = 0; i != state.size2(); ++i){
			logProbabilities(s) -= 0.5*sqr(statistics(s,i)-state(s,i));
		}
	}
	return logProbabilities;
}
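// Note (editor's addition, not part of the original header): per unit, the exact log-density
// of a unit-variance Gaussian is
//   log N(x; mu, 1) = -0.5*(x - mu)^2 - 0.5*log(2*pi).
// The loop above accumulates only the quadratic part, so the returned values are
// log-probabilities up to an additive constant.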
///\brief The phi-function used in the interaction term; for Gaussian neurons it is the identity.
template<class Matrix>
Matrix const& phi(Matrix const& state)const{
	return state;
}

///\brief Returns the mean of the conditional Gaussian distribution, which is the sufficient statistic itself.
RealMatrix const& mean(StatisticsBatch const& statistics)const{
	return statistics;
}
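// Note (editor's addition, not part of the original header): because Gaussian units enter the
// RBM's interaction term through their raw state, phi is the identity and can return its
// argument by reference; likewise the conditional mean is exactly the stored sufficient
// statistic, so mean() passes the statistics batch through unchanged.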
///\brief The energy term this neuron layer adds to the energy function for a batch of states.
template<class Matrix, class BetaVector>
RealVector energyTerm(Matrix const& state, BetaVector const& beta)const{
	//batch version of: inner_prod(m_bias,state) - norm_sqr(state)/2.0
	std::size_t batchSize = state.size1();
	RealVector energies = prod(state,m_bias);
	for(std::size_t i = 0; i != batchSize; ++i){
		energies(i) -= 0.5*norm_sqr(row(state,i));
	}
	return energies;
}
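// Note (editor's addition, not part of the original header): per sample v the code above
// evaluates
//   b^T v - 0.5 * ||v||^2,
// the linear bias term minus half the squared norm of the state, which is the Gaussian
// layer's contribution to the RBM's energy function (up to the sign convention applied when
// the full energy is assembled).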
///\brief Integrates over the terms of the energy function which depend on the state of this layer and returns the logarithm of the result.
template<class Input>
double logMarginalize(Input const& inputs, double beta)const{
	SIZE_CHECK(inputs.size() == size());
	double lnResult = 0;
	double logNormalizationTerm = std::log(SQRT_2_PI) - 0.5 * std::log(beta);

	for(std::size_t i = 0; i != size(); ++i){
		lnResult += 0.5 * sqr(inputs(i)+m_bias(i))*beta;
		lnResult += logNormalizationTerm;
	}
	return lnResult;
}
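// Note (editor's addition, not part of the original header): each summand is the closed-form
// Gaussian integral over one marginalized unit. Writing a_i = inputs(i) + m_bias(i),
//   integral over h of exp( beta * ( a_i*h - h^2/2 ) ) dh
//     = sqrt(2*pi/beta) * exp( 0.5 * beta * a_i^2 ),
// whose logarithm is exactly the two increments above: 0.5*beta*a_i^2 plus
// log(sqrt(2*pi)) - 0.5*log(beta).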
///\brief Expected derivative of the energy term with respect to the bias parameters, accumulated over the batch.
template<class Vector, class SampleBatch>
void expectedParameterDerivative(Vector& derivative, SampleBatch const& samples)const{
	sum_rows(samples.statistics,derivative);
}
///\brief Expected derivative of the energy term with respect to the bias parameters, weighted per sample.
template<class Vector, class SampleBatch, class Vector2>
void expectedParameterDerivative(Vector& derivative, SampleBatch const& samples, Vector2 const& weights)const{
	noalias(derivative) += prod(weights,samples.statistics);
}
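// Note (editor's addition, not part of the original header): the derivative of the energy term
// b^T v - 0.5*||v||^2 with respect to the bias b is the state v itself, so its conditional
// expectation is the mean stored in samples.statistics. The unweighted overload sums these
// means over the batch; the weighted overload forms the weighted sum prod(weights, statistics).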
///\brief Derivative of the energy term with respect to the bias parameters, summed over the sampled states.
template<class Vector, class SampleBatch>
void parameterDerivative(Vector& derivative, SampleBatch const& samples)const{
	sum_rows(samples.state,derivative);
}
///\brief Derivative of the energy term with respect to the bias parameters, weighted per sample.
template<class Vector, class SampleBatch, class WeightVector>
void parameterDerivative(Vector& derivative, SampleBatch const& samples, WeightVector const& weights)const{
	noalias(derivative) += prod(weights,samples.state);
}
///\brief Sets the parameters of the layer, i.e. the bias vector.
void setParameterVector(RealVector const& newParameters){
	m_bias = newParameters;
}
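// Note (editor's sketch, not part of the original header): the bias vector doubles as the
// layer's parameter vector, so parameters can be round-tripped through setParameterVector()
// and the matching parameterVector() accessor of the original class (assumed here, not shown
// above).
// \code
// RealVector params = layer.parameterVector();  // copy of the bias terms
// params(0) += 0.1;                             // nudge the first unit's bias
// layer.setParameterVector(params);
// \endcode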