Stan Math Library  2.20.0
log_softmax.hpp
#ifndef STAN_MATH_FWD_MAT_FUN_LOG_SOFTMAX_HPP
#define STAN_MATH_FWD_MAT_FUN_LOG_SOFTMAX_HPP

#include <stan/math/fwd/core.hpp>
#include <stan/math/fwd/mat/fun/softmax.hpp>
#include <stan/math/prim/mat/fun/Eigen.hpp>
#include <stan/math/prim/mat/fun/log_softmax.hpp>
#include <stan/math/prim/mat/fun/softmax.hpp>

namespace stan {
namespace math {

template <typename T>
inline Eigen::Matrix<fvar<T>, Eigen::Dynamic, 1> log_softmax(
    const Eigen::Matrix<fvar<T>, Eigen::Dynamic, 1>& alpha) {
  using Eigen::Dynamic;
  using Eigen::Matrix;

  // Strip the tangents off the input to work with plain values of type T.
  Matrix<T, Dynamic, 1> alpha_t(alpha.size());
  for (int k = 0; k < alpha.size(); ++k)
    alpha_t(k) = alpha(k).val_;

  Matrix<T, Dynamic, 1> softmax_alpha_t = softmax(alpha_t);
  Matrix<T, Dynamic, 1> log_softmax_alpha_t = log_softmax(alpha_t);

  // Values come from log_softmax of the stripped values; tangents start at 0.
  Matrix<fvar<T>, Dynamic, 1> log_softmax_alpha(alpha.size());
  for (int k = 0; k < alpha.size(); ++k) {
    log_softmax_alpha(k).val_ = log_softmax_alpha_t(k);
    log_softmax_alpha(k).d_ = 0;
  }

  // Accumulate tangents using
  //   d log_softmax(alpha)(k) / d alpha(m) = (k == m) - softmax(alpha)(m).
  for (int m = 0; m < alpha.size(); ++m) {
    T negative_alpha_m_d_times_softmax_alpha_t_m
        = -alpha(m).d_ * softmax_alpha_t(m);
    for (int k = 0; k < alpha.size(); ++k) {
      if (m == k)
        log_softmax_alpha(k).d_
            += alpha(m).d_ + negative_alpha_m_d_times_softmax_alpha_t_m;
      else
        log_softmax_alpha(k).d_ += negative_alpha_m_d_times_softmax_alpha_t_m;
    }
  }

  return log_softmax_alpha;
}

}  // namespace math
}  // namespace stan
#endif
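
The nested loop above propagates forward-mode tangents through log_softmax; the short derivation below is a sketch of the identity the loop relies on, stated in the notation of this file (alpha for the input, a dot for its directional derivative). With

    f_k(\alpha) = \log \operatorname{softmax}(\alpha)_k
                = \alpha_k - \log \sum_j \exp(\alpha_j),

the partial derivatives are

    \frac{\partial f_k}{\partial \alpha_m} = \delta_{km} - \operatorname{softmax}(\alpha)_m,

so a tangent vector \dot{\alpha} propagates as

    \dot{f}_k = \dot{\alpha}_k - \sum_m \dot{\alpha}_m \, \operatorname{softmax}(\alpha)_m.

This is exactly what the double loop accumulates: every m contributes -\dot{\alpha}_m \operatorname{softmax}(\alpha)_m to each component, and the m == k branch adds the extra \dot{\alpha}_k.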

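A minimal usage sketch, assuming the forward-mode matrix functions are available through the 2.x top-level header <stan/math/fwd/mat.hpp>; the input vector, the seeded tangent direction, and the expected-value comment are illustrative, not part of the library listing above.

#include <stan/math/fwd/mat.hpp>
#include <iostream>

int main() {
  using stan::math::fvar;

  // Three-component input; fvar tangents default to 0.
  Eigen::Matrix<fvar<double>, Eigen::Dynamic, 1> alpha(3);
  alpha << 1.0, 2.0, 3.0;
  alpha(1).d_ = 1.0;  // differentiate with respect to alpha(1)

  Eigen::Matrix<fvar<double>, Eigen::Dynamic, 1> y
      = stan::math::log_softmax(alpha);

  for (int k = 0; k < y.size(); ++k)
    std::cout << "log_softmax(alpha)(" << k << ") = " << y(k).val_
              << ", d/d alpha(1) = " << y(k).d_ << std::endl;
  // Expected tangents: (k == 1) - softmax(alpha)(1) for each component k.
  return 0;
}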