LCOV - code coverage report
Current view: top level - nntrainer/layers - layer_normalization_layer.h
Test:         coverage_filtered.info
Test Date:    2025-12-14 20:38:17

               Coverage   Total   Hit
Lines:         100.0 %        7     7
Functions:     100.0 %        5     5

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2022 hyeonseok Lee <hs89.lee@samsung.com>
       4              :  *
       5              :  * @file   layer_normalization_layer.h
       6              :  * @date   25 July 2022
       7              :  * @see    https://github.com/nnstreamer/nntrainer
       8              :  *         https://arxiv.org/abs/1607.06450
       9              :  * @author hyeonseok Lee <hs89.lee@samsung.com>
      10              :  * @bug    No known bugs except for NYI items
      11              :  * @brief  This is Layer Normalization Layer Class for Neural Network
      12              :  *
      13              :  */
      14              : 
      15              : #ifndef __LAYER_NORMALIZATION_LAYER_H__
      16              : #define __LAYER_NORMALIZATION_LAYER_H__
      17              : #ifdef __cplusplus
      18              : 
      19              : #include <array>
      20              : #include <functional>
      21              : #include <vector>
      22              : 
      23              : #include <common_properties.h>
      24              : #include <layer_devel.h>
      25              : 
      26              : namespace nntrainer {
      27              : 
      28              : /**
      29              :  * @class   LayerNormalizationLayer
      30              :  * @brief   Layer Normalization Layer
      31              :  */
      32              : class LayerNormalizationLayer : public Layer {
      33              : public:
      34              :   /**
      35              :    * @brief     Constructor of LayerNormalizationLayer
      36              :    */
      37              :   LayerNormalizationLayer();
      38              : 
      39              :   /**
      40              :    * @brief     Destructor of LayerNormalizationLayer
      41              :    */
      42          567 :   ~LayerNormalizationLayer() {}
      43              : 
      44              :   /**
      45              :    * @brief  Move constructor of LayerNormalizationLayer
      46              :    * @param[in] rhs LayerNormalizationLayer to be moved
      47              :    */
      48              :   LayerNormalizationLayer(LayerNormalizationLayer &&rhs) noexcept = default;
      49              : 
      50              :   /**
      51              :    * @brief  Move assignment operator
      52              :    * @param[in] rhs LayerNormalizationLayer to be moved
      53              :    */
      54              :   LayerNormalizationLayer &operator=(LayerNormalizationLayer &&rhs) = default;
      55              : 
      56              :   /**
      57              :    * @copydoc Layer::finalize(InitLayerContext &context)
      58              :    */
      59              :   void finalize(InitLayerContext &context) override;
      60              : 
      61              :   /**
      62              :    * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
      63              :    */
      64              :   void forwarding(RunLayerContext &context, bool training) override;
      65              : 
      66              :   /**
      67              :    * @copydoc Layer::incremental_forwarding(RunLayerContext &context, unsigned
      68              :    * int from, unsigned int to, bool training)
      69              :    */
      70              :   void incremental_forwarding(RunLayerContext &context, unsigned int from,
      71              :                               unsigned int to, bool training) override;
      72              : 
      73              :   /**
      74              :    * @copydoc Layer::calcDerivative(RunLayerContext &context)
      75              :    */
      76              :   void calcDerivative(RunLayerContext &context) override;
      77              : 
      78              :   /**
      79              :    * @copydoc Layer::calcGradient(RunLayerContext &context)
      80              :    */
      81              :   void calcGradient(RunLayerContext &context) override;
      82              : 
      83              :   /**
      84              :    * @copydoc Layer::exportTo(Exporter &exporter, const ml::train::ExportMethods
      85              :    * method)
      86              :    */
      87              :   void exportTo(Exporter &exporter,
      88              :                 const ml::train::ExportMethods &method) const override;
      89              : 
      90              :   /**
      91              :    * @copydoc Layer::getType()
      92              :    */
      93         4409 :   const std::string getType() const override {
      94         4409 :     return LayerNormalizationLayer::type;
      95              :   };
      96              : 
      97              :   /**
      98              :    * @copydoc Layer::supportBackwarding()
      99              :    */
     100          412 :   bool supportBackwarding() const override { return true; }
     101              : 
     102              :   /**
     103              :    * @brief Initialize the in-place settings of the layer
     104              :    * @return InPlaceType
     105              :    */
     106           82 :   InPlaceType initializeInPlace() final {
     107           82 :     is_inplace = true;
     108           82 :     return InPlaceType::NON_RESTRICTING;
     109              :   }
     110              : 
     111              :   using Layer::setProperty;
     112              : 
     113              :   /**
     114              :    * @copydoc Layer::setProperty(const std::vector<std::string> &values)
     115              :    */
     116              :   void setProperty(const std::vector<std::string> &values) override;
     117              : 
     118              :   /**
     119              :    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
     120              :    */
     121              :   void setBatch(RunLayerContext &context, unsigned int batch) override;
     122              : 
     123              :   static constexpr const char *type = "layer_normalization";
     124              : 
     125              : private:
     126              :   std::vector<unsigned int> normalize_axes; /**< normalize axes */
     127              :   std::vector<unsigned int>
     128              :     remain_axes; /**< remaining axes (exclusive with normalize axes) */
     129              : 
     130              :   std::array<unsigned int, 7> wt_idx;
     131              :   std::tuple<std::vector<props::Axis>, props::Epsilon, props::GammaInitializer,
     132              :              props::BetaInitializer, props::WeightDecay, props::BiasDecay>
     133              :     layer_normalization_props;
     134              : };
     135              : 
     136              : } // namespace nntrainer
     137              : 
     138              : #endif /* __cplusplus */
     139              : #endif /* __LAYER_NORMALIZATION_LAYER_H__ */

Generated by: LCOV version 2.0-1
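
For reference, the operation declared in this header follows the layer normalization paper linked in the file comment (arXiv:1607.06450): the input is normalized with the mean and variance taken over the configured normalize axes, then scaled and shifted by the learned gamma and beta parameters (see props::GammaInitializer and props::BetaInitializer), with props::Epsilon added for numerical stability:

    y = \gamma \odot \frac{x - \mu}{\sqrt{\sigma^{2} + \epsilon}} + \beta

The standalone C++ sketch below illustrates this forward computation over the innermost axis only. It is not nntrainer's implementation (forwarding() above operates on Tensor objects through RunLayerContext); the function name layer_norm and its parameters are purely illustrative.

// Minimal, self-contained sketch of layer normalization over the last axis.
// Illustrates the math only; nntrainer's layer works on Tensor objects instead.
#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Normalize each row of a (rows x cols) row-major matrix `x`, then scale by
// `gamma` and shift by `beta` (both of length cols).
std::vector<float> layer_norm(const std::vector<float> &x, std::size_t rows,
                              std::size_t cols, const std::vector<float> &gamma,
                              const std::vector<float> &beta,
                              float epsilon = 1e-5f) {
  std::vector<float> y(x.size());
  for (std::size_t r = 0; r < rows; ++r) {
    const float *row = x.data() + r * cols;

    // mean over the normalized axis
    float mean = 0.0f;
    for (std::size_t c = 0; c < cols; ++c)
      mean += row[c];
    mean /= static_cast<float>(cols);

    // variance over the normalized axis
    float var = 0.0f;
    for (std::size_t c = 0; c < cols; ++c) {
      float d = row[c] - mean;
      var += d * d;
    }
    var /= static_cast<float>(cols);

    // normalize, then apply gamma (scale) and beta (shift)
    float inv_std = 1.0f / std::sqrt(var + epsilon);
    for (std::size_t c = 0; c < cols; ++c)
      y[r * cols + c] = gamma[c] * (row[c] - mean) * inv_std + beta[c];
  }
  return y;
}

int main() {
  // one row of four values; gamma = 1 and beta = 0 leave the normalized row unchanged
  std::vector<float> x = {1.0f, 2.0f, 3.0f, 4.0f};
  std::vector<float> gamma(4, 1.0f), beta(4, 0.0f);

  for (float v : layer_norm(x, 1, 4, gamma, beta))
    std::cout << v << ' ';
  std::cout << '\n'; // prints a zero-mean, unit-variance row
  return 0;
}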