class Dense : public Layer {
 public:
  Dense(int nNeurons, ACTIVATION activation = ACTIVATION::SIGMOID,
        WEIGHT_INIT weightInit = WEIGHT_INIT::RANDOM, int bias = 0) {
    type = LayerType::DENSE;
    this->nNeurons = nNeurons;
    this->weightInit = weightInit;
    this->activation = activation;
    this->bias = bias;  // reconstructed: the listing elides storing `bias`,
                        // but computeOutputs() below reads it
    this->setActivation(activation);
  }
 
  Eigen::MatrixXd getBiases() const { return biases; }

  // Name assumed: only this method's body survives in the listing.
  void printWeights() const {
    std::cout << this->weights << "\n";
  }

  // Name assumed from the slug members declared below.
  std::string getSlug() const {
    return slug + std::to_string(nNeurons) + activationSlug;
  }
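
  // A construction sketch, hedged: the enum values come from the listing,
  // but the layer width (128) and the call site are made up for illustration.
  //
  //   Dense hidden(128, ACTIVATION::RELU, WEIGHT_INIT::HE);
  //   hidden.getSlug();  // "dns" + "128" + the activation's slug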
 
 
  virtual Eigen::MatrixXd feedInputs(Eigen::MatrixXd inputs,
                                     bool training = false) override {
    if (weights.rows() == 0 && weights.cols() == 0) {
      // ... (elided in the listing: weights are set up on first use)
    }

    // If the batch arrives transposed, flip it; the named temporary sidesteps
    // Eigen's aliasing rules for transpose-into-self assignment.
    if (inputs.cols() != weights.rows()) {
      Eigen::MatrixXd transposedMat = inputs.transpose();
      inputs = transposedMat;
    }

    assert(inputs.cols() == weights.rows());
    return this->computeOutputs(inputs, training);
  }
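
  // Worked shapes for the transpose guard above, assuming weights of size
  // 3x4 (3 input features, 4 neurons): a batch handed in as 3x2 fails the
  // cols-vs-rows check (2 != 3), is flipped to 2x3, and then 2x3 * 3x4
  // yields a 2x4 output, one row per sample.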
 
 
 private:
  friend class cereal::access;

  std::string slug = "dns";
  std::string activationSlug = "";
  Eigen::MatrixXd biases;
  WEIGHT_INIT weightInit;
  Eigen::MatrixXd weights;
  Eigen::MatrixXd cachedWeights;
  Eigen::MatrixXd cachedBiases;
  ACTIVATION activation;

  // Bound by setActivation(); function pointers cannot be serialized, so
  // load() has to rebind them after deserialization.
  Eigen::MatrixXd (*activate)(const Eigen::MatrixXd &);
  Eigen::MatrixXd (*diff)(const Eigen::MatrixXd &);
 
  template <class Archive>
  void save(Archive &ar) const {
    ar(cereal::base_class<Layer>(this), nNeurons, biases, weights, activation);
  }

  template <class Archive>
  void load(Archive &ar) {
    ar(cereal::base_class<Layer>(this), nNeurons, biases, weights, activation);
    // The activate/diff pointers are not part of the archive; rebind them
    // from the freshly loaded activation enum.
    setActivation(activation);
  }
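
  // A round-trip sketch, hedged: cereal's binary archives are real, but this
  // assumes the project provides Eigen serializers and Layer registration
  // elsewhere; `layer` and `ss` are illustrative names.
  //
  //   std::stringstream ss;
  //   { cereal::BinaryOutputArchive oar(ss); oar(layer); }  // calls save()
  //   { cereal::BinaryInputArchive iar(ss); iar(layer); }   // calls load(),
  //   // which rebinds activate/diff via setActivation(activation)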
 
  void init(int numRows) override {
    double mean = 0, stddev = 0;
    this->weights = Eigen::MatrixXd::Zero(numRows, nNeurons);

    if (this->weightInit == WEIGHT_INIT::CONSTANT) {
      this->weights = Eigen::MatrixXd::Constant(numRows, nNeurons, 1);
      return;  // reconstructed: without it, the ternary below would overwrite
               // the constant weights with a zero-stddev normal draw
    }

    switch (this->weightInit) {
      case WEIGHT_INIT::GLOROT:
        // Glorot/Xavier: stddev = sqrt(2 / (fanIn + fanOut)).
        stddev = sqrt(static_cast<double>(2) / (numRows + nNeurons));
        break;
      case WEIGHT_INIT::HE:
        // He: stddev = sqrt(2 / fanIn), the usual choice for ReLU layers.
        stddev = sqrt(2.0 / numRows);
        break;
      case WEIGHT_INIT::LECUN:
        // LeCun: stddev = sqrt(1 / fanIn).
        stddev = sqrt(1.0 / numRows);
        break;
      default:
        break;
    }

    // RANDOM samples uniformly from [-1, 1]; every other scheme samples a
    // normal distribution with the stddev chosen above.
    this->weightInit == WEIGHT_INIT::RANDOM
        ? randomWeightInit(&(this->weights), -1, 1)
        : randomDistMatrixInit(&(this->weights), mean, stddev);
  }
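
  // Worked numbers for the stddev formulas above, taking numRows (fan-in) =
  // 784 and nNeurons (fan-out) = 128 as an illustrative shape:
  //   GLOROT: sqrt(2 / (784 + 128)) = sqrt(2 / 912) ~= 0.0468
  //   HE:     sqrt(2 / 784)                         ~= 0.0505
  //   LECUN:  sqrt(1 / 784) = 1 / 28                ~= 0.0357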
 
  Eigen::MatrixXd computeOutputs(Eigen::MatrixXd inputs,
                                 bool training) override {
    // Build the bias row lazily on first use.
    if (biases.rows() == 0 && biases.cols() == 0) {
      biases = Eigen::MatrixXd::Constant(1, nNeurons, bias);
    }

    Eigen::MatrixXd wSum = inputs * weights;
    wSum.rowwise() += biases.row(0);

    Eigen::MatrixXd a = activate(wSum);

    // Keep the activations around for the backward pass.
    if (training) outputs = a;
    return a;  // reconstructed: the listing elides the return statement
  }
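
  // Shape walk for the forward pass above: inputs (batch x features) times
  // weights (features x nNeurons) gives wSum (batch x nNeurons); the single
  // bias row is broadcast across every batch row before the activation is
  // applied elementwise.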
 
  void setActivation(ACTIVATION activation) {
    if (type == LayerType::FLATTEN) {
      // ... (elided in the listing: flatten layers skip activation setup)
    }

    switch (activation) {
      case ACTIVATION::SIGMOID:
        this->activate = Sigmoid::activate;
        this->diff = Sigmoid::diff;
        this->activationSlug = Sigmoid::slug;
        break;
      case ACTIVATION::RELU:
        this->activate = Relu::activate;
        this->diff = Relu::diff;
        this->activationSlug = Relu::slug;
        break;
      case ACTIVATION::SOFTMAX:
        this->activate = Softmax::activate;
        this->diff = Softmax::diff;
        this->activationSlug = Softmax::slug;
        break;
      default:
        assert(false && "Activation not defined");
    }
  }
};