LCOV - code coverage report
Current view: top level - nntrainer/layers - activation_layer.cpp (source / functions)
Test:      coverage_filtered.info
Test Date: 2025-12-14 20:38:17

              Coverage    Total    Hit
Lines:          97.1 %       34     33
Functions:     100.0 %        6      6
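Note: the single miss behind the 97.1 % line rate (33 of 34 lines hit, 33/34 ≈ 97.1 %) is line 49 below, the guard that throws only when an FP16 activation data type is requested in a build without enable-fp16; all 6 functions were entered at least once.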

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
       4              :  *
       5              :  * @file   activation_layer.cpp
       6              :  * @date   17 June 2020
       7              :  * @see    https://github.com/nnstreamer/nntrainer
       8              :  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
       9              :  * @bug    No known bugs except for NYI items
      10              :  * @brief  This is Activation Layer Class for Neural Network
      11              :  *
      12              :  */
      13              : 
      14              : #include <algorithm>
      15              : #include <cmath>
      16              : #include <fstream>
      17              : #include <functional>
      18              : #include <iostream>
      19              : #include <stdexcept>
      20              : #include <vector>
      21              : 
      22              : #include <activation_layer.h>
      23              : #include <common_properties.h>
      24              : #include <cpu_backend.h>
      25              : #include <layer_context.h>
      26              : #include <nntrainer_error.h>
      27              : #include <nntrainer_log.h>
      28              : #include <node_exporter.h>
      29              : #include <tensor.h>
      30              : #include <tensor_wrap_specs.h>
      31              : #include <util_func.h>
      32              : 
      33              : namespace nntrainer {
      34          758 : ActivationLayer::ActivationLayer() :
      35          758 :   Layer(), activation_props(new PropTypes(props::Activation())) {
      36          758 :   acti_func.setActiFunc(ActivationType::ACT_NONE);
      37          758 : }
      38              : 
      39              : static constexpr size_t SINGLE_INOUT_IDX = 0;
      40              : 
      41          432 : void ActivationLayer::finalize(InitLayerContext &context) {
      42              :   auto &act = std::get<props::Activation>(*activation_props);
      43          432 :   NNTR_THROW_IF(act.empty(), std::invalid_argument)
      44              :     << "activation has not been set!";
      45          432 :   if (context.getActivationDataType() == TensorDim::DataType::FP16) {
      46              : #ifdef ENABLE_FP16
      47              :     acti_func.setActiFunc<_FP16>(act.get());
      48              : #else
      49            0 :     NNTR_THROW_IF(true, std::invalid_argument) << "enable-fp16 is not set!";
      50              : #endif
      51          432 :   } else if (context.getActivationDataType() == TensorDim::DataType::FP32) {
      52          432 :     acti_func.setActiFunc<float>(act.get());
      53              :   }
      54              : 
      55          432 :   NNTR_THROW_IF(context.getNumInputs() != 1, std::invalid_argument)
      56              :     << "activation layer, " << context.getName()
       57              :     << " requires exactly one input, but given: " << context.getNumInputs()
       58              :     << ", check that the graph connections are correct";
      59              : 
      60              :   /// @todo only certain activation types need the lifespan of
      61              :   /// forward_derivative order
      62              :   std::vector<VarGradSpecV2> out_specs;
      63              :   out_specs.push_back(
      64          432 :     InitLayerContext::outSpec(context.getInputDimensions()[0], "out",
      65              :                               TensorLifespan::FORWARD_DERIV_LIFESPAN));
      66          432 :   context.requestOutputs(std::move(out_specs));
      67          432 :   acti_func.setInPlace(context.getInPlace());
      68          432 : }
      69              : 
      70         1281 : void ActivationLayer::forwarding(RunLayerContext &context, bool training) {
      71         1281 :   Tensor &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
      72         1281 :   Tensor &input_ = context.getInput(SINGLE_INOUT_IDX);
      73              :   acti_func.run_fn(input_, hidden_);
      74         1281 : }
      75              : 
      76          693 : void ActivationLayer::calcDerivative(RunLayerContext &context) {
      77          693 :   const Tensor &deriv = context.getIncomingDerivative(SINGLE_INOUT_IDX);
      78          693 :   Tensor &ret = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
      79          693 :   Tensor &in = context.getInput(SINGLE_INOUT_IDX);
      80          693 :   Tensor &out = context.getOutput(SINGLE_INOUT_IDX);
      81              : 
      82              :   acti_func.run_prime_fn(in, out, ret, deriv);
      83          693 : }
      84              : 
      85          196 : void ActivationLayer::exportTo(Exporter &exporter,
      86              :                                const ml::train::ExportMethods &method) const {
      87          196 :   exporter.saveResult(*activation_props, method, this);
      88          196 : }
      89              : 
      90         6119 : void ActivationLayer::setProperty(const std::vector<std::string> &values) {
      91         6119 :   auto left = loadProperties(values, *activation_props);
      92         6126 :   NNTR_THROW_IF(!left.empty(), std::invalid_argument)
      93              :     << "Failed to set property";
      94              : 
      95              :   auto &act = std::get<props::Activation>(*activation_props);
      96         6098 :   if (!act.empty())
      97         4637 :     acti_func.setActiFunc(act.get());
      98         6112 : }
      99              : 
      100              : } // namespace nntrainer
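
Note on the forward/backward pair above: forwarding computes hidden = f(input) element-wise through acti_func.run_fn, and calcDerivative computes the outgoing gradient ret = f'(.) * incoming derivative through run_prime_fn, which receives both the cached input and the cached output because some activations differentiate more cheaply from one than from the other. Below is a minimal, self-contained sketch of that pattern for sigmoid; it is illustrative only, the names sigmoid_forward and sigmoid_backward are hypothetical, and this is not nntrainer's actual kernel.

    #include <cmath>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Forward pass: out[i] = 1 / (1 + exp(-in[i])), the element-wise job
    // that acti_func.run_fn(input_, hidden_) performs for a sigmoid layer.
    std::vector<float> sigmoid_forward(const std::vector<float> &in) {
      std::vector<float> out(in.size());
      for (std::size_t i = 0; i < in.size(); ++i)
        out[i] = 1.0f / (1.0f + std::exp(-in[i]));
      return out;
    }

    // Backward pass: sigmoid's derivative is recoverable from the cached
    // output alone, f'(x) = out * (1 - out), which is why calcDerivative
    // hands both the input and the output tensor to run_prime_fn; each
    // activation can use whichever is cheaper.
    std::vector<float> sigmoid_backward(const std::vector<float> &out,
                                        const std::vector<float> &deriv) {
      std::vector<float> ret(out.size());
      for (std::size_t i = 0; i < out.size(); ++i)
        ret[i] = out[i] * (1.0f - out[i]) * deriv[i];
      return ret;
    }

    int main() {
      std::vector<float> in = {-1.0f, 0.0f, 2.0f};
      std::vector<float> out = sigmoid_forward(in);
      std::vector<float> grad = sigmoid_backward(out, {1.0f, 1.0f, 1.0f});
      for (std::size_t i = 0; i < in.size(); ++i)
        std::cout << in[i] << " -> " << out[i] << " (grad " << grad[i] << ")\n";
      return 0;
    }

Because sigmoid's backward pass needs only the cached output, activations like it are natural candidates for in-place execution, which is what acti_func.setInPlace(context.getInPlace()) in finalize arranges.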
        

Generated by: LCOV version 2.0-1
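
A view like this is typically produced (assuming a gcc/clang build instrumented with --coverage) by capturing counters with "lcov --capture --directory <build dir> --output-file coverage.info", filtering the capture down to the files of interest to obtain coverage_filtered.info (e.g. via lcov --remove or --extract), and rendering HTML with "genhtml coverage_filtered.info --output-directory <out dir>". The exact commands used for this report are not shown here.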