LCOV - code coverage report
Current view: top level - nntrainer/layers - reshape_layer.cpp (source / functions)
Test:         coverage_filtered.info
Test Date:    2025-12-14 20:38:17

              Coverage    Total    Hit
Lines:          96.9 %       32     31
Functions:     100.0 %        5      5

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
       4              :  *
       5              :  * @file   reshape_layer.cpp
       6              :  * @date   16 June 2020
       7              :  * @see    https://github.com/nnstreamer/nntrainer
       8              :  * @author Jijoong Moon <jijoong.moon@samsung.com>
       9              :  * @bug    No known bugs except for NYI items
      10              :  * @brief  This is Reshape Layer Class for Neural Network
      11              :  *
      12              :  * @todo Update flatten to work in-place properly.
      13              :  */
      14              : 
      15              : #include <layer_context.h>
      16              : #include <nntrainer_error.h>
      17              : #include <nntrainer_log.h>
      18              : #include <node_exporter.h>
      19              : #include <reshape_layer.h>
      20              : namespace nntrainer {
      21              : 
      22              : static constexpr size_t SINGLE_INOUT_IDX = 0;
      23              : 
      24          108 : void ReshapeLayer::finalize(InitLayerContext &context) {
      25          108 :   NNTR_THROW_IF(context.getNumInputs() != 1, std::invalid_argument)
      26              :     << "Reshape only supports 1 input for now";
      27              : 
      28              :   const TensorDim &in_dim = context.getInputDimensions()[0];
      29              : 
      30              :   auto &target_shape = std::get<props::TargetShape>(reshape_props);
      31          108 :   NNTR_THROW_IF(target_shape.empty(), std::invalid_argument)
      32              :     << "Reshape layer must be provided with target shape";
      33          108 :   TensorDim out_dim = target_shape.get();
      34              : 
      35          108 :   if ((int)out_dim.getDataLen() == -1) {
      36            4 :     out_dim.height(1);
      37            4 :     out_dim.channel(1);
      38            4 :     out_dim.width(in_dim.getFeatureLen());
      39          104 :   } else if (out_dim.getFeatureLen() != in_dim.getFeatureLen()) {
      40              :     throw std::invalid_argument(
      41            0 :       "Target and input size mismatch for reshape layer");
      42              :   }
      43              : 
      44          108 :   out_dim.batch(in_dim.batch());
      45              :   out_dim.setDataType(context.getActivationDataType());
      46          108 :   context.setOutputDimensions({out_dim});
      47          108 : }
      48              : 
      49          539 : void ReshapeLayer::forwarding(RunLayerContext &context, bool training) {
      50          539 :   if (!context.getInPlace()) {
      51          306 :     context.getOutput(SINGLE_INOUT_IDX)
      52          306 :       .copyData(context.getInput(SINGLE_INOUT_IDX));
      53              :   }
      54          539 : }
      55              : 
      56          248 : void ReshapeLayer::calcDerivative(RunLayerContext &context) {
      57          248 :   if (!context.getInPlace()) {
      58          132 :     context.getOutgoingDerivative(SINGLE_INOUT_IDX)
      59          132 :       .copyData(context.getIncomingDerivative(SINGLE_INOUT_IDX));
      60              :   }
      61          248 : }
      62              : 
      63          161 : void ReshapeLayer::setProperty(const std::vector<std::string> &values) {
      64          161 :   auto remain_props = loadProperties(values, reshape_props);
      65          160 :   if (!remain_props.empty()) {
      66              :     std::string msg = "[ReshapeLayer] Unknown Layer Properties count " +
      67            2 :                       std::to_string(remain_props.size());
      68            4 :     throw exception::not_supported(msg);
      69              :   }
      70          160 : }
      71              : 
      72            3 : void ReshapeLayer::exportTo(Exporter &exporter,
      73              :                             const ml::train::ExportMethods &method) const {
      74            3 :   exporter.saveResult(reshape_props, method, this);
      75            3 : }
      76              : 
      77              : } /* namespace nntrainer */
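
The one line the test suite never reaches is source line 41, the "Target and input size mismatch" message (hit count 0); every other instrumented line and all five functions are exercised. To make the three possible outcomes of the shape inference in ReshapeLayer::finalize() (source lines 24-47) easy to trace, here is a stand-alone sketch; the Dim struct is a simplified stand-in for nntrainer::TensorDim, not the library type, and it uses signed fields so a "-1" target can be written directly (the real code casts out_dim.getDataLen() to int for the same comparison).

#include <iostream>
#include <stdexcept>

// Simplified stand-in for nntrainer::TensorDim (illustration only).
struct Dim {
  long long batch, channel, height, width;
  long long featureLen() const { return channel * height * width; }
  long long dataLen() const { return batch * featureLen(); }
};

// Mirrors the shape inference in ReshapeLayer::finalize().
Dim inferOutput(const Dim &in, Dim target) {
  if (target.dataLen() == -1) {
    // target_shape resolves to -1: flatten everything but the batch axis.
    target.channel = 1;
    target.height = 1;
    target.width = in.featureLen();
  } else if (target.featureLen() != in.featureLen()) {
    // This is the path behind the report's only unexercised line.
    throw std::invalid_argument(
      "Target and input size mismatch for reshape layer");
  }
  target.batch = in.batch; // batch size always follows the input
  return target;
}

int main() {
  Dim in{4, 2, 3, 5}; // 4 x 2 x 3 x 5 input, feature length 30

  Dim a = inferOutput(in, {1, 1, 6, 5});  // 1:6:5 also has feature length 30
  std::cout << a.batch << ':' << a.channel << ':' << a.height << ':'
            << a.width << '\n';           // prints 4:1:6:5

  Dim b = inferOutput(in, {1, 1, 1, -1}); // flatten request
  std::cout << b.batch << ':' << b.channel << ':' << b.height << ':'
            << b.width << '\n';           // prints 4:1:1:30

  try {
    inferOutput(in, {1, 2, 3, 4});        // feature length 24 != 30
  } catch (const std::invalid_argument &e) {
    std::cout << "rejected: " << e.what() << '\n';
  }
  return 0;
}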
        
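forwarding() and calcDerivative() (source lines 49-61) copy data only when the layer is not run in place: in this run, 306 of 539 forward calls and 132 of 248 backward calls reach copyData(). For contiguous storage a reshape is purely a metadata change, which the short sketch below illustrates with a plain std::vector standing in for nntrainer::Tensor (illustration only, not the library API).

#include <cassert>
#include <cstddef>
#include <vector>

struct FlatTensor {
  std::vector<float> data; // contiguous storage
  std::size_t c, h, w;     // current shape (batch axis omitted)
};

// In-place reshape: reuse the same storage and only swap the shape fields.
// This corresponds to the in-place path where forwarding() does nothing.
void reshapeInPlace(FlatTensor &t, std::size_t c, std::size_t h,
                    std::size_t w) {
  assert(c * h * w == t.data.size()); // same size check finalize() enforces
  t.c = c;
  t.h = h;
  t.w = w;
}

// Out-of-place reshape: the output has its own buffer, so the elements must
// be copied -- the copyData() branch of forwarding()/calcDerivative().
FlatTensor reshapeCopy(const FlatTensor &in, std::size_t c, std::size_t h,
                       std::size_t w) {
  assert(c * h * w == in.data.size());
  return FlatTensor{in.data, c, h, w};
}

int main() {
  FlatTensor t{std::vector<float>(24, 1.0f), 2, 3, 4};
  reshapeInPlace(t, 1, 1, 24);            // no element is moved
  FlatTensor u = reshapeCopy(t, 4, 3, 2); // one full copy of 24 floats
  assert(u.data.size() == t.data.size());
  return 0;
}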

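On the application side, the target_shape value parsed by setProperty() (source lines 63-70) normally arrives as a key=value string when the layer is created. A minimal sketch, assuming the nntrainer ccapi factory ml::train::createLayer and the registered type name "reshape" (neither appears in this file, so treat both, and the header path, as assumptions):

#include <layer.h> // nntrainer ccapi layer factory; include path assumed
#include <memory>

int main() {
  // "target_shape=1:2:3" is parsed by ReshapeLayer::setProperty(); finalize()
  // later checks that 1*2*3 matches the incoming feature length, and a target
  // whose parsed data length reads as -1 selects the flatten branch instead.
  auto reshape = ml::train::createLayer("reshape", {"target_shape=1:2:3"});

  // An unrecognized key would be left over by loadProperties() and raise the
  // not_supported error at source line 68.
  return 0;
}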
Generated by: LCOV version 2.0-1