// SPDX-License-Identifier: Apache-2.0
/**
 * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
 *
 * @file flatten_layer.cpp
 * @date 16 June 2020
 * @see https://github.com/nnstreamer/nntrainer
 * @author Jijoong Moon <jijoong.moon@samsung.com>
 * @author hyeonseok Lee <hs89.lee@samsung.com>
 * @bug No known bugs except for NYI items
 * @brief This is the Flatten Layer class for neural networks
 *
 * @todo Update flatten to work in-place properly.
 */

#include <flatten_layer.h>
#include <layer_context.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <node_exporter.h>

namespace nntrainer {

static constexpr size_t SINGLE_INOUT_IDX = 0;

void FlattenLayer::finalize(InitLayerContext &context) {
  const TensorDim &in_dim = context.getInputDimensions()[0];

  std::string target_shape;

  const unsigned int start_dimension =
    std::get<props::StartDimension>(flatten_props).get();
  const unsigned int end_dimension =
    std::get<props::EndDimension>(flatten_props).get();

  if (in_dim.getFormat() == ml::train::TensorDim::Format::NHWC) {

    NNTR_THROW_IF((start_dimension != 1) &&
                    (end_dimension != ml::train::TensorDim::MAXDIM - 1),
                  std::invalid_argument)
      << "NHWC format does not support the start_dimension and end_dimension "
         "properties of the flatten layer";

    target_shape =
      "target_shape=" + std::to_string(in_dim.getFeatureLen()) + ":1:1";
  } else {

    NNTR_THROW_IF(start_dimension > end_dimension, std::invalid_argument)
      << "start_dimension must not be greater than end_dimension";

    TensorDim target_dim = in_dim;

    unsigned int flattened_size = 1;
    for (unsigned int i = start_dimension; i <= end_dimension; ++i) {
      flattened_size *= in_dim[i];
      target_dim[i] = 1;
    }
    target_dim[end_dimension] = flattened_size;

    target_shape = "target_shape=" + std::to_string(target_dim[1]) + ":" +
                   std::to_string(target_dim[2]) + ":" +
                   std::to_string(target_dim[3]);
  }

  ReshapeLayer::setProperty({target_shape});

  /** @note the output dimension is in an invalid state until the finalize of
   * reshape_layer is finished */
  ReshapeLayer::finalize(context);

  if (in_dim.channel() == 1 && in_dim.height() == 1) {
    ml_logw("Warning: the flatten layer is redundant");
  }
}
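
/*
 * Worked example (illustrative only, assuming the default properties
 * start_dimension=1 and end_dimension=3): for an NCHW input of dimension
 * 2:3:4:5 (batch:channel:height:width), the loop above computes
 * flattened_size = 3 * 4 * 5 = 60 and rewrites the target dimension to
 * 2:1:1:60, so the reshape property becomes "target_shape=1:1:60".
 * With start_dimension=2 and end_dimension=3 the channel axis is preserved,
 * giving 2:3:1:20 and "target_shape=3:1:20". In the NHWC branch the whole
 * feature length is always flattened into "target_shape=<feature_len>:1:1".
 */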

void FlattenLayer::setProperty(const std::vector<std::string> &values) {
  auto remain_props = loadProperties(values, flatten_props);
  remain_props = loadProperties(remain_props, reshape_props);
  if (!remain_props.empty()) {
    std::string msg = "[FlattenLayer] Unknown layer properties, count: " +
                      std::to_string(remain_props.size());
    throw exception::not_supported(msg);
  }
}
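
/*
 * Usage sketch (the exact property keys are an assumption drawn from
 * flatten_props and may differ by version): a caller would typically pass
 * something like setProperty({"start_dimension=2", "end_dimension=3"}).
 * Keys consumed by neither flatten_props nor reshape_props remain in
 * remain_props and trigger the not_supported exception above.
 */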

void FlattenLayer::exportTo(Exporter &exporter,
                            const ml::train::ExportMethods &method) const {
  exporter.saveResult(reshape_props, method, this);
}

} /* namespace nntrainer */
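
/*
 * End-to-end usage sketch (an assumption for illustration; the factory call
 * and the "flatten" type key come from the public ml::train API and may vary
 * by nntrainer version). Applications normally do not call finalize() or
 * exportTo() directly; they create the layer and let the network graph drive
 * those paths during model compilation, e.g.:
 *
 *   model->addLayer(ml::train::createLayer("flatten", {"start_dimension=2"}));
 */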