// SPDX-License-Identifier: Apache-2.0
/**
 * Copyright (C) 2025 Jijoong Moon <jijoong.moon@samsung.com>
 *
 * @file tensor_layer.cpp
 * @date 17 Jan 2025
 * @brief This is the QNN Tensor Layer class of the neural network
 * @see https://github.com/nnstreamer/nntrainer
 * @author Jijoong Moon <jijoong.moon@samsung.com>
 * @bug No known bugs except for NYI items
 *
 */

#include <iostream>
#include <layer_context.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <node_exporter.h>
#include <string>
#include <tensor_layer.h>
#include <tensor_wrap_specs.h>
#include <util_func.h>
#include <vector>

namespace nntrainer {

static constexpr size_t SINGLE_INOUT_IDX = 0;

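// tensor_props bundles the tensor dimension, data type, name and lifespan
// property lists; they start empty and are filled in by setProperty().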
TensorLayer::TensorLayer() : Layer(), tensor_props({}, {}, {}, {}) {}

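// Parse the user-supplied property strings; any property this layer does not
// recognize is rejected as an invalid argument.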
void TensorLayer::setProperty(const std::vector<std::string> &values) {
  auto remain_props = loadProperties(values, tensor_props);
  NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
    << "[TensorLayer] Unknown Layer Properties count " +
         std::to_string(remain_props.size());
}

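// finalize() validates the requested tensor dimensions, fills in default data
// types and lifespans when they were not given, requests one tensor per
// dimension from the context, and publishes the dimensions as the layer's
// output dimensions.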
void TensorLayer::finalize(InitLayerContext &context) {
  auto &dims = std::get<std::vector<props::TensorDimension>>(tensor_props);
  std::vector<TensorDim> t_dims(dims.begin(), dims.end());

  auto &t_dtype = std::get<std::vector<props::TensorDataType>>(tensor_props);
  auto &t_name = std::get<std::vector<props::TensorName>>(tensor_props);
  auto &t_life = std::get<std::vector<props::TensorLife>>(tensor_props);

  NNTR_THROW_IF(t_dims.empty(), std::invalid_argument)
    << "Tensor dimension is not provided";
  n_tensor = t_dims.size();

  if (t_dtype.empty()) {
    ml_logi("Tensor data type not given, defaulting to activation data type");
    t_dtype.reserve(t_dims.size());
    for (auto t : t_dims)
      t_dtype.push_back(context.getActivationDataType());
  }

  if (t_life.empty()) {
    ml_logi("Tensor lifespan not given, defaulting to MAX_LIFESPAN");
    t_life.reserve(t_dims.size());
    for (auto t : t_dims)
      t_life.push_back(nntrainer::TensorLifespan::MAX_LIFESPAN);
  }

  auto engine = context.getComputeEngineType();

  // Tensor names are optional; when given, their count must match the number
  // of dimensions, just like the data types and lifespans.
  NNTR_THROW_IF((t_dims.size() != t_dtype.size() ||
                 t_dims.size() != t_life.size() ||
                 (!t_name.empty() && t_dims.size() != t_name.size())),
                std::invalid_argument)
    << "Sizes of dimensions, data types, lifespans (and names, if given) "
       "must match!";

  tensor_idx.reserve(t_dims.size());

  for (unsigned int i = 0; i < t_dims.size(); ++i) {
    t_dims[i].setFormat(context.getFormat());
    t_dims[i].setDataType(t_dtype[i]);
    std::string name = context.getName() + "_t" + std::to_string(i);
    if (!t_name.empty())
      name = t_name[i];

    tensor_idx.push_back(context.requestTensor(
      t_dims[i], name, Initializer::NONE, true, t_life[i], true, engine));
  }

  context.setOutputDimensions(t_dims);
}

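// When the layer does not run in-place, copy each input tensor into the
// matching output tensor; in-place execution needs no copy.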
void TensorLayer::forwarding(RunLayerContext &context, bool training) {
  if (!context.getInPlace()) {
    for (unsigned int i = 0; i < n_tensor; ++i) {
      Tensor &input_ = context.getInput(i);
      Tensor &hidden_ = context.getOutput(i);
      hidden_.copyData(input_);
    }
  }
}

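// There is nothing to differentiate through this layer, so requesting the
// derivative is reported as unsupported.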
void TensorLayer::calcDerivative(RunLayerContext &context) {
  throw exception::not_supported(
    "calcDerivative for tensor layer is not supported");
}

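// Serialize the tensor properties (dimensions, data types, names, lifespans)
// through the exporter using the requested export method.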
void TensorLayer::exportTo(Exporter &exporter,
                           const ml::train::ExportMethods &method) const {
  exporter.saveResult(tensor_props, method, this);
}

} /* namespace nntrainer */