KernelBasisDistance.h
/*!
 *
 *
 * \brief Objective function for approximating a kernel expansion by a smaller kernel basis.
 * \author O.Krause
 * \date 2014
 *
 *
 * \par Copyright 1995-2015 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <http://image.diku.dk/shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
#ifndef SHARK_OBJECTIVEFUNCTIONS_KERNELBASISDISTANCE_H
#define SHARK_OBJECTIVEFUNCTIONS_KERNELBASISDISTANCE_H

#include <shark/Core/DLLSupport.h>
#include <shark/ObjectiveFunctions/AbstractObjectiveFunction.h>
#include <shark/Models/Kernels/KernelExpansion.h>

namespace shark{

/// \brief Computes the squared distance between the optimal point in a basis and the point represented by a KernelExpansion.
///
/// Assume we are given a kernel expansion \f$ w = \sum_i \alpha_i k(x_i, \cdot) \f$. The KernelBasisDistance takes
/// a new set of basis vectors \f$ z_i \f$ and finds the linear combination in that space which is closest
/// to \f$ w \f$. More formally, the function measures the squared distance in the kernel-induced feature space:
/// \f[ f(z) = \min_{\beta} \frac 1 2 \| \sum_j \beta_j k(z_j, \cdot) - w \|^2 . \f]
/// In vector notation with \f$ (K_x)_{i,j} = k(x_i,x_j) \f$, \f$ (K_z)_{i,j} = k(z_i,z_j) \f$ and \f$ (K_{zx})_{i,j} = k(z_i,x_j) \f$ it computes:
/// \f[ f(z) = \min_{\beta} \frac 1 2 \beta^T K_z \beta - \beta^T K_{zx} \alpha + \frac 1 2 \alpha^T K_x \alpha . \f]
/// The last term is independent of \f$ z_i \f$ and is therefore omitted in the actual computation. That is, the returned value is offset by a constant and the minimum is not 0.
/// The input of the function is a vector which is the concatenation \f$ v=[z_1, z_2,\dots,z_k] \f$ of all basis vectors.
///
/// The target point \f$ w \f$ is set as a KernelExpansion in the constructor. If the kernel is differentiable
/// with respect to its input points, then this objective function is differentiable as well.
///
/// The kernel expansion can represent more than a single point; in this case the error is the sum of the approximation errors.
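///
/// A minimal usage sketch: approximating an existing expansion with a smaller basis by minimizing
/// this objective with a gradient-based optimizer. The expansion variable, the choice of optimizer
/// (IRpropPlus) and the iteration count are illustrative assumptions only, not part of this class:
/// \code
/// #include <shark/ObjectiveFunctions/KernelBasisDistance.h>
/// #include <shark/Algorithms/GradientDescent/Rprop.h>
///
/// // expansion: an already trained KernelExpansion<RealVector> (assumed to exist)
/// KernelBasisDistance distance(&expansion, 10);          // approximate w with a basis of 10 vectors
/// RealVector basis = distance.proposeStartingPoint();    // random subset of the original basis
/// IRpropPlus optimizer;
/// optimizer.init(distance, basis);
/// for(std::size_t i = 0; i != 100; ++i)
///     optimizer.step(distance);                          // refine the basis vectors by gradient steps
/// RealMatrix beta = distance.findOptimalBeta(optimizer.solution().point); // coefficients of the new basis
/// \endcode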
class KernelBasisDistance : public SingleObjectiveFunction
{
public:
    /// \brief Constructs the objective function.
    ///
    /// This function calls sparsify on the kernel expansion to save computation time in the case of sparse bases.
    ///
    /// \param kernelExpansion a pointer to the kernel expansion to approximate
    /// \param numApproximatingVectors the number of vectors used to approximate the point - the basis size
    SHARK_EXPORT_SYMBOL KernelBasisDistance(KernelExpansion<RealVector>* kernelExpansion, std::size_t numApproximatingVectors);

    /// \brief Returns the name of the class
    std::string name() const
    { return "KernelBasisDistance"; }

    /// \brief Returns the number of vectors used to approximate the point - the basis size
    std::size_t numApproximatingVectors() const{
        return m_numApproximatingVectors;
    }

    /// \brief Returns a reference to the number of vectors used to approximate the point - the basis size
    std::size_t& numApproximatingVectors(){
        return m_numApproximatingVectors;
    }
    /// \brief Returns a starting point of the algorithm
    ///
    /// Returns a random subset of the basis of the kernel expansion
    SHARK_EXPORT_SYMBOL SearchPointType proposeStartingPoint() const;

    /// \brief Returns the number of variables of the function.
    SHARK_EXPORT_SYMBOL std::size_t numberOfVariables()const;

    /// \brief Given an input basis, returns the coefficients \f$ \beta \f$ with the minimum error.
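    ///
    /// The optimal coefficients follow from setting the gradient of the objective with respect to \f$ \beta \f$ to zero,
    /// which (assuming \f$ K_z \f$ is invertible) leads to the linear system
    /// \f[ K_z \beta = K_{zx} \alpha . \f]
    /// If the expansion represents several points, \f$ \alpha \f$ and \f$ \beta \f$ are matrices and the system is solved for all of them at once.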
    SHARK_EXPORT_SYMBOL RealMatrix findOptimalBeta(RealVector const& input)const;

    /// \brief Evaluates the (sum of) squared distance(s) between the closest point in the basis and the point(s) represented by the kernel expansion.
    ///
    /// See the class description for more details on this computation.
    SHARK_EXPORT_SYMBOL double eval(RealVector const& input) const;

    /// \brief Computes the derivative of the function with respect to the supplied basis.
    ///
    /// Assume \f$ \beta \f$ to be the optimal value. Then the derivative with respect to the basis vectors is:
    /// \f[ \frac{ \partial f}{\partial z_l} = \beta_l \sum_i \beta_i \frac{ \partial k(z_l,z_i)}{\partial z_l} - \beta_l \sum_i \alpha_i \frac{ \partial k(z_l,x_i)}{\partial z_l} \f]
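    /// Because \f$ \beta \f$ is optimal, the gradient of the objective with respect to \f$ \beta \f$ vanishes, so only
    /// the explicit dependence of \f$ K_z \f$ and \f$ K_{zx} \f$ on \f$ z_l \f$ contributes to the derivative (envelope theorem).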
    SHARK_EXPORT_SYMBOL ResultType evalDerivative( SearchPointType const& input, FirstOrderDerivative& derivative ) const;

private:
    /// \brief Sets up and solves the regression problem for the basis z.
    ///
    /// Calculates K_z and the linear part of the system of equations, and solves for beta.
    SHARK_EXPORT_SYMBOL void setupAndSolve(RealMatrix& beta, RealVector const& input, RealMatrix& Kz, RealMatrix& linear)const;

    /// \brief Returns the error of the solution found.
    SHARK_EXPORT_SYMBOL double errorOfSolution(RealMatrix const& beta, RealMatrix const& Kz, RealMatrix const& linear)const;

    KernelExpansion<RealVector>* mep_expansion;     ///< kernel expansion to approximate
    std::size_t m_numApproximatingVectors; ///< number of vectors in the basis
};


}
#endif