LCOV - code coverage report
Current view: top level - nntrainer/layers - bn_layer.h (source / functions)
Test:         coverage_filtered.info
Test Date:    2025-12-14 20:38:17

                 Coverage      Hit    Total
Lines:            100.0 %        7        7
Functions:         80.0 %        4        5

            Line data    Source code
       1              : /**
       2              :  * Copyright (C) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
       3              :  *
       4              :  * Licensed under the Apache License, Version 2.0 (the "License");
       5              :  * you may not use this file except in compliance with the License.
       6              :  * You may obtain a copy of the License at
       7              :  *   http://www.apache.org/licenses/LICENSE-2.0
       8              :  * Unless required by applicable law or agreed to in writing, software
       9              :  * distributed under the License is distributed on an "AS IS" BASIS,
      10              :  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      11              :  * See the License for the specific language governing permissions and
      12              :  * limitations under the License.
      13              :  *
      14              :  * @file        bn_layer.h
      15              :  * @date        14 May 2020
      16              :  * @brief       This is Batch Normalization Layer Class of Neural Network
      17              :  * @see         https://github.com/nnstreamer/nntrainer
      18              :  * @author      Jijoong Moon <jijoong.moon@samsung.com>
      19              :  * @bug         No known bugs except for NYI items
      20              :  *
      21              :  */
      22              : 
      23              : #ifndef __BN_LAYER_H__
      24              : #define __BN_LAYER_H__
      25              : #ifdef __cplusplus
      26              : 
      27              : #include <array>
      28              : #include <functional>
      29              : #include <vector>
      30              : 
      31              : #include <common_properties.h>
      32              : #include <layer_devel.h>
      33              : 
      34              : namespace nntrainer {
      35              : 
      36              : /**
      37              :  * @class   BatchNormalizationLayer
       38              :  * @brief   Batch Normalization Layer
      39              :  */
      40              : class BatchNormalizationLayer : public Layer {
      41              : public:
      42              :   /**
      43              :    * @brief     Constructor of Batch Normalization Layer
      44              :    */
      45              :   BatchNormalizationLayer();
      46              : 
      47              :   /**
      48              :    * @brief     Destructor of BatchNormalizationLayer
      49              :    */
      50           56 :   ~BatchNormalizationLayer() {}
      51              : 
      52              :   /**
       53              :  *  @brief  Move constructor of Batch Normalization Layer.
       54              :  *  @param[in] rhs BatchNormalizationLayer to be moved.
      55              :    */
      56              :   BatchNormalizationLayer(BatchNormalizationLayer &&rhs) noexcept = default;
      57              : 
      58              :   /**
      59              :    * @brief  Move assignment operator.
       60              :  * @param[in] rhs BatchNormalizationLayer to be moved.
      61              :    */
      62              :   BatchNormalizationLayer &operator=(BatchNormalizationLayer &&rhs) = default;
      63              : 
      64              :   /**
      65              :    * @copydoc Layer::finalize(InitLayerContext &context)
      66              :    */
      67              :   void finalize(InitLayerContext &context) override;
      68              : 
      69              :   /**
      70              :    * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
      71              :    */
      72              :   void forwarding(RunLayerContext &context, bool training) override;
      73              : 
      74              :   /**
      75              :    * @copydoc Layer::calcDerivative(RunLayerContext &context)
      76              :    */
      77              :   void calcDerivative(RunLayerContext &context) override;
      78              : 
      79              :   /**
      80              :    * @copydoc Layer::calcGradient(RunLayerContext &context)
      81              :    */
      82              :   void calcGradient(RunLayerContext &context) override;
      83              : 
      84              :   /**
      85              :    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
      86              :    * method)
      87              :    */
      88              :   void exportTo(Exporter &exporter,
      89              :                 const ml::train::ExportMethods &method) const override;
      90              : 
      91              :   /**
      92              :    * @copydoc Layer::getType()
      93              :    */
      94          862 :   const std::string getType() const override {
      95          862 :     return BatchNormalizationLayer::type;
      96              :   };
      97              : 
      98              :   /**
      99              :    * @copydoc Layer::supportBackwarding()
     100              :    */
     101           39 :   bool supportBackwarding() const override { return true; }
     102              : 
     103              :   /**
     104              :    * @brief Initialize the in-place settings of the layer
     105              :    * @return InPlaceType
     106              :    */
     107           18 :   InPlaceType initializeInPlace() final {
     108           18 :     is_inplace = true;
     109           18 :     return InPlaceType::NON_RESTRICTING;
     110              :   }
     111              : 
     112              :   using Layer::setProperty;
     113              : 
     114              :   /**
     115              :    * @copydoc Layer::setProperty(const PropertyType type, const std::string
     116              :    * &value)
     117              :    */
     118              :   void setProperty(const std::vector<std::string> &values) override;
     119              : 
     120              :   /**
     121              :    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
     122              :    */
     123              :   void setBatch(RunLayerContext &context, unsigned int batch) override;
     124              : 
     125              :   static constexpr const char *type = "batch_normalization";
     126              : 
     127              :   /**
     128              :    * @copydoc Layer::save(std::ofstream &file,
     129              :    *      RunLayerContext &run_context,
     130              :    *      bool opt_var,
     131              :    *      ml::train::ExecutionMode mode,
     132              :    *      bool trainable,
     133              :    *      TensorDim::DataType definedWeightDataType)
     134              :    */
     135              :   void save(std::ofstream &file, RunLayerContext &run_context, bool opt_var,
     136              :             ml::train::ExecutionMode mode, bool trainable,
     137              :             TensorDim::DataType definedWeightDataType) const override;
     138              : 
     139              :   /**
     140              :    * @copydoc Layer::read(std::ifstream &file, RunLayerContext &context, bool
     141              :    * opt_var, ml::train::ExecutionMode mode, bool trainable, TensorDim::DataType
     142              :    * definedWeightDataType, bool fsu)
     143              :    */
     144              :   void read(std::ifstream &file, RunLayerContext &context, bool opt_var,
     145              :             ml::train::ExecutionMode mode, bool trainable,
     146              :             TensorDim::DataType definedWeightDataType, bool fsu = false,
     147              :             size_t start_offset = 0, bool read_from_offset = false,
     148              :             int file_fd = -1) override;
     149              : 
     150              :   /**
     151              :    * @copydoc Layer::read(ReadSource src, RunLayerContext &context, bool
     152              :    * opt_var, ml::train::ExecutionMode mode, bool trainable, TensorDim::DataType
     153              :    * definedWeightDataType, bool fsu)
     154              :    */
     155              :   void read(ReadSource src, RunLayerContext &context, bool opt_var,
     156              :             ml::train::ExecutionMode mode, bool trainable,
     157              :             TensorDim::DataType definedWeightDataType, bool fsu = false,
     158              :             size_t start_offset = 0, bool read_from_offset = false) override;
     159              : 
     160              : private:
      161              :   float divider; /**< size of the reduced axes */
     162              : 
     163              :   std::vector<unsigned int> axes_to_reduce; /**< target axes to reduce */
     164              :   std::array<unsigned int, 11>
     165              :     wt_idx; /**< indices of the weights and tensors */
     166              :   std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
     167              :              props::BetaInitializer, props::GammaInitializer, props::Momentum,
     168              :              props::Axis, props::WeightDecay, props::BiasDecay>
     169              :     bn_props;
     170              : };
     171              : 
     172              : } // namespace nntrainer
     173              : 
     174              : #endif /* __cplusplus */
     175              : #endif /* __BN_LAYER_H__ */
        

Generated by: LCOV version 2.0-1
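
For readers unfamiliar with what this layer computes, below is a minimal C++ sketch of the standard batch-normalization transform, y = gamma * (x - mean) / sqrt(var + epsilon) + beta. It is illustrative only: the layer's actual forwarding is implemented in bn_layer.cpp and operates on nntrainer Tensors using the properties declared above (Epsilon, Momentum, Axis, initializers); the helper name, its scalar signature, and the epsilon value here are assumptions made for the sketch.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical helper for illustration: normalizes a 1-D batch of values,
// then scales by gamma and shifts by beta, as batch normalization does
// per channel during training-time forwarding.
std::vector<float> batch_norm(const std::vector<float> &x, float gamma,
                              float beta, float epsilon = 1.0e-3f) {
  // Mean over the batch.
  float mean = 0.0f;
  for (float v : x)
    mean += v;
  mean /= static_cast<float>(x.size());

  // Biased variance over the batch.
  float var = 0.0f;
  for (float v : x)
    var += (v - mean) * (v - mean);
  var /= static_cast<float>(x.size());

  // Normalize, then apply the learned scale (gamma) and shift (beta).
  std::vector<float> y(x.size());
  for (std::size_t i = 0; i < x.size(); ++i)
    y[i] = gamma * (x[i] - mean) / std::sqrt(var + epsilon) + beta;
  return y;
}

int main() {
  std::vector<float> x = {1.0f, 2.0f, 3.0f, 4.0f};
  for (float v : batch_norm(x, /*gamma=*/1.0f, /*beta=*/0.0f))
    std::cout << v << ' ';
  std::cout << '\n';
  return 0;
}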