Line data Source code
1 : // SPDX-License-Identifier: Apache-2.0
2 : /**
3 : * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
4 : *
5 : * @file preprocess_l2norm_layer.cpp
6 : * @date 09 Jan 2021
7 : * @brief This file contains the simple l2norm layer which normalizes
8 : * the given feature
9 : * @see https://github.com/nnstreamer/nntrainer
10 : * @author Jihoon Lee <jhoon.it.lee@samsung.com>
11 : * @bug No known bugs except for NYI items
12 : *
13 : */
14 :
15 : #include <iostream>
16 : #include <regex>
17 : #include <sstream>
18 :
19 : #include <layer_context.h>
20 : #include <nntrainer_error.h>
21 : #include <nntrainer_log.h>
22 : #include <node_exporter.h>
23 : #include <preprocess_l2norm_layer.h>
24 :
25 : namespace nntrainer {
/// Index of the single input/output tensor — this layer only ever uses slot 0.
static constexpr size_t SINGLE_INOUT_IDX = 0;
27 :
28 3 : void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
29 : const auto &input_dim = context.getInputDimensions()[0];
30 3 : NNTR_THROW_IF(context.getNumInputs() != 1, std::invalid_argument)
31 : << "l2norm layer is designed for a single input only";
32 3 : NNTR_THROW_IF(input_dim.channel() != 1 || input_dim.height() != 1,
33 : std::invalid_argument)
34 : << "l2norm layer is designed for channel and height is 1 for now, please "
35 : "check";
36 :
37 3 : context.setOutputDimensions(context.getInputDimensions());
38 3 : }
39 :
40 175 : void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
41 : bool training) {
42 175 : const float epsilon = std::get<props::Epsilon>(l2norm_props);
43 175 : auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
44 :
45 175 : auto &input_ = context.getInput(SINGLE_INOUT_IDX);
46 :
47 350 : for (unsigned int b = 0; b < input_.batch(); ++b) {
48 175 : auto input_slice = input_.getBatchSlice(b, 1);
49 175 : auto hidden_slice = hidden_.getBatchSlice(b, 1);
50 175 : input_slice.multiply(1 / (input_slice.l2norm() + epsilon), hidden_slice);
51 175 : }
52 175 : }
53 :
54 0 : void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
55 : throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
56 0 : "does not support backward propagation");
57 : }
58 :
59 13 : void PreprocessL2NormLayer::setProperty(
60 : const std::vector<std::string> &values) {
61 13 : auto remain_props = loadProperties(values, l2norm_props);
62 13 : NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
63 1 : << "[PreprocessL2Norm Layer] Unknown Layer Properties count " +
64 2 : std::to_string(remain_props.size());
65 13 : }
66 :
67 : } // namespace nntrainer
|