// NOTE(review): extraction artifact — the original file's own line numbers
// ("28", "29", "59", "72") are fused into the text below and several interior
// lines are missing; the tokens are kept byte-identical.
28 #ifndef SHARK_UNSUPERVISED_RBM_NEURONLAYERS_BIPOLARLAYER_H 29 #define SHARK_UNSUPERVISED_RBM_NEURONLAYERS_BIPOLARLAYER_H 59 const RealVector&
// Read-only accessor for the layer's bias vector (the `return m_bias;` body
// is not visible in this chunk).
bias()
const{
// Fragment of a resize method: re-sizes the bias vector to the new layer size.
72 m_bias.resize(newSize);
// Fragment of sufficientStatistics: computes per-neuron activation statistics
// for a batch of inputs (the per-element computation inside the loop, which
// presumably also uses BetaVector beta, is missing from this chunk — verify
// against the original file).
86 template<
class Input,
class BetaVector>
// shape guard: one statistics row per input row
90 SIZE_CHECK(input.size1() == statistics.size1());
// iterate over the batch rows
92 for(std::size_t i = 0; i != input.size1(); ++i){
// sample(): draws bipolar (+1/-1) states for a whole batch from the activation
// probabilities in `statistics`, using `rng`. NOTE(review): several control-flow
// lines are missing from this chunk (the branch selecting between the two loops
// below — presumably on alpha — the conditions inside the second loop, and the
// coin toss using the adjusted probability); verify against the original file.
108 template<
class Matrix,
class Rng>
109 void sample(StatisticsBatch
const& statistics, Matrix& state,
double alpha, Rng& rng)
const{
111 SIZE_CHECK(statistics.size1() == state.size1());
112 SIZE_CHECK(statistics.size2() == state.size2());
// First loop: plain sampling — coin toss with success probability
// statistics(s,i), then mapping the 0 outcome to -1 so states are bipolar.
117 for(std::size_t s = 0; s != state.size1();++s){
118 for(std::size_t i = 0; i != state.size2();++i){
119 state(s,i) =
coinToss(statistics(s,i));
120 if(state(s,i)==0) state(s,i)=-1.;
// Second loop: alpha-dependent sampling that adjusts the flip probability based
// on the neuron's current state (looks like "flip-the-state" sampling — confirm).
125 for(
size_t s = 0; s != state.size1(); ++s){
126 for (
size_t i = 0; i != state.size2(); i++) {
127 double prob = statistics(s,i);
128 if (state(s,i) == -1) {
// each assignment mixes the model probability with an alpha-weighted
// correction; the guard lines choosing between the two assignments in each
// branch are not visible here
130 prob = (1. - alpha) * prob + alpha * prob / (1. - prob);
132 prob = (1. - alpha) * prob + alpha;
136 prob = (1. - alpha) * prob + alpha * (1. - (1. - prob) / prob);
138 prob = (1. - alpha) * prob;
// after the (missing) coin toss with the adjusted prob, map 0 back to -1
142 if(state(s,i)==0) state(s,i)=-1.;
155 template<
class Matrix>
156 RealVector
logProbability(StatisticsBatch
const& statistics, Matrix
const& state)
const{
158 SIZE_CHECK(statistics.size1() == state.size1());
159 SIZE_CHECK(statistics.size2() == state.size2());
161 RealVector logProbabilities(state.size1(),1.0);
162 for(std::size_t s = 0; s != state.size1();++s){
163 for(std::size_t i = 0; i != state.size2();++i){
164 logProbabilities(s) += (state(s,i) > 0.0)? std::log(statistics(s,i)) : std::log(1-statistics(s,i));
167 return logProbabilities;
// phi(): feature map of the layer — the signature returns the state batch by
// const reference, i.e. the identity map for this layer type.
176 template<
class Matrix>
177 Matrix
const&
phi(Matrix
const& state)
const{
// NOTE(review): this return references `statistics`, which is not a parameter
// of phi, and returning the expression by const& would dangle. The fused
// original line number jumps from 177 to 188, so this statement presumably
// belongs to a different, lost method (an expectation of phi, cf. mean() below
// which computes the same 2p-1 expression) — verify against the original file.
188 return 2*statistics - 1;
194 RealMatrix
mean(StatisticsBatch
const& statistics)
const{
197 return 2*statistics - 1;
// Fragment of energyTerm: the bias contribution of each state row to the RBM
// energy (the surrounding signature and return are missing from this chunk).
206 template<
class Matrix,
class BetaVector>
// energies(s) = beta(s) * <state row s, m_bias>; prod(state,m_bias) is a
// matrix-vector product yielding one inner product per batch row, scaled
// elementwise by the inverse temperatures in beta
210 RealVector energies = beta*
prod(state,m_bias);
// Fragment of a log-partition computation (signature missing from this chunk):
// accumulates the log of each neuron's marginalization factor.
225 template<
class Input>
228 long double logFactorization = 0;
229 for(std::size_t i = 0; i != inputs.size(); ++i){
// For a bipolar neuron the factor is e^a + e^{-a} with a = (input_i + bias_i)*beta;
// |a| is taken so the identity below stays overflow-safe for large |a|.
230 long double arg = std::abs((inputs(i)+m_bias(i))*beta);
// log(e^a + e^{-a}) = |a| + log(1 + e^{-2|a|}) = |a| + softPlus(-2|a|)
231 logFactorization +=
softPlus(-2*arg)+arg;
233 return logFactorization;
// Fragment of the expected bias derivative (signature missing from this chunk):
// sums the per-neuron expectations 2p - 1 over the batch.
243 template<
class Vector,
class SampleBatch>
// column sums of 2*statistics give the "2p" part, one entry per neuron
246 sumRows(2*samples.statistics,derivative);
// subtract 1 per sample from every component — the "-1" part of 2p - 1
247 derivative -= samples.size();
// Fragment of the weighted expected bias derivative (signature missing from
// this chunk): accumulates sum_s weights(s)*(2*statistics(s,.) - 1).
257 template<
class Vector,
class SampleBatch,
class WeightVector>
// written as 2*weights^T*statistics - sum(weights); noalias avoids a temporary
260 noalias(derivative) += 2*
prod(weights,samples.statistics) -
sum(weights);
// Fragment of the sampled bias derivative (signature missing from this chunk).
269 template<
class Vector,
class SampleBatch>
// column sums of the sampled state batch, one entry per neuron
272 sumRows(samples.state,derivative);
281 template<
class Vector,
class SampleBatch,
class WeightVector>
282 void parameterDerivative(Vector& derivative, SampleBatch
const& samples, WeightVector
const& weights)
const{
284 noalias(derivative) +=
prod(weights,samples.state);
// Fragment of setParameterVector: the biases are the layer's only parameters.
294 m_bias = newParameters;