LCOV - code coverage report
Current view: top level - nntrainer/layers/loss - cross_entropy_sigmoid_loss_layer.cpp (source / functions) Coverage Total Hit
Test: coverage_filtered.info Lines: 96.2 % 26 25
Test Date: 2025-12-14 20:38:17 Functions: 100.0 % 2 2

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
       4              :  *
       5              :  * @file   cross_entropy_sigmoid_loss_layer.cpp
       6              :  * @date   24 June 2021
       7              :  * @brief  This is Cross Entropy Sigmoid Loss Layer Class of Neural Network
       8              :  * @see    https://github.com/nnstreamer/nntrainer
       9              :  * @author Parichay Kapoor <pk.kapoor@samsung.com>
      10              :  * @bug    No known bugs except for NYI items
      11              :  *
      12              :  */
      13              : 
      14              : #include <cmath>
      15              : 
      16              : #include <cross_entropy_sigmoid_loss_layer.h>
      17              : 
      18              : #include <acti_func.h>
      19              : #include <layer_context.h>
      20              : #include <lazy_tensor.h>
      21              : #include <util_func.h>
      22              : 
      23              : namespace nntrainer {
      24              : 
      25              : static constexpr size_t SINGLE_INOUT_IDX = 0;
      26              : 
      27         5000 : void CrossEntropySigmoidLossLayer::forwarding(RunLayerContext &context,
      28              :                                               bool training) {
      29         5000 :   Tensor &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
      30         5000 :   Tensor &y = context.getInput(SINGLE_INOUT_IDX);
      31              : 
      32              :   // fill the output
      33         5000 :   hidden_ = y.apply<float>(ActiFunc::sigmoid<float>, hidden_);
      34              : 
      35         5000 :   if (context.isLabelAvailable(SINGLE_INOUT_IDX)) {
      36         5000 :     Tensor &y2 = context.getLabel(SINGLE_INOUT_IDX);
      37              :     // @todo: change this to apply_i
      38              :     // @note: the output should be logit before applying sigmoid
      39              :     // log(1 + exp(-abs(y))) + max(y, 0)
      40         5000 :     Tensor mid_term = y.apply<float>(static_cast<float (*)(float)>(&std::fabs))
      41        10000 :                         .multiply(-1.0f)
      42        10000 :                         .apply<float>(static_cast<float (*)(float)>(&std::exp))
      43        15000 :                         .add(1.0f)
      44        10000 :                         .apply<float>(logFloat<float>);
      45        10000 :     mid_term = mid_term.add(y.apply<float>(ActiFunc::relu<float>));
      46              : 
      47              :     // y * y2
      48         5000 :     Tensor end_term = y2.chain().multiply_i(y).run();
      49              : 
      50              :     // loss = log(1 + exp(-abs(y))) + max(y, 0) - (y * y2)
      51        10000 :     l = mid_term.subtract(end_term).average();
      52              : 
      53              :     // update the loss value
      54         5000 :     LossLayer::updateLoss(context, l);
      55         5000 :   }
      56         5000 : }
      57              : 
      58         5000 : void CrossEntropySigmoidLossLayer::calcDerivative(RunLayerContext &context) {
      59         5000 :   Tensor &ret_derivative = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
      60         5000 :   const Tensor &y2 = context.getIncomingDerivative(SINGLE_INOUT_IDX);
      61         5000 :   Tensor &y = context.getInput(SINGLE_INOUT_IDX);
      62              : 
      63         5000 :   y.apply<float>(ActiFunc::sigmoid<float>, ret_derivative);
      64         5000 :   ret_derivative.subtract_i(y2);
      65         5000 :   if (ret_derivative.divide_i(static_cast<float>(ret_derivative.size())) !=
      66              :       ML_ERROR_NONE) {
      67              :     throw std::runtime_error("[CrossEntropySigmoidLossLayer::calcDerivative] "
      68            0 :                              "Error when calculating loss");
      69              :   }
      70         5000 : }
      71              : 
      72              : } // namespace nntrainer
        

Generated by: LCOV version 2.0-1