// SPDX-License-Identifier: Apache-2.0
/**
 * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
 *
 * @file   fc_layer.h
 * @date   14 May 2020
 * @brief  This is Fully Connected Layer Class of Neural Network
 * @see    https://github.com/nnstreamer/nntrainer
 * @author Jijoong Moon <jijoong.moon@samsung.com>
 * @bug    No known bugs except for NYI items
 *
 */

#ifndef __FC_LAYER_H__
#define __FC_LAYER_H__
#ifdef __cplusplus

#include <common_properties.h>
#include <layer_impl.h>

namespace nntrainer {

/**
 * @class FullyConnectedLayer
 * @brief fully connected layer
 */
class FullyConnectedLayer : public LayerImpl {
public:
  /**
   * @brief Constructor of Fully Connected Layer
   */
  FullyConnectedLayer();

  /**
   * @brief Destructor of Fully Connected Layer
   */
  ~FullyConnectedLayer() = default;

  /**
   * @brief Move constructor.
   * @param[in] rhs FullyConnectedLayer to be moved.
   */
  FullyConnectedLayer(FullyConnectedLayer &&rhs) noexcept = default;

  /**
   * @brief Move assignment operator.
   * @param[in] rhs FullyConnectedLayer to be moved.
   */
  FullyConnectedLayer &operator=(FullyConnectedLayer &&rhs) = default;

  /**
   * @copydoc Layer::finalize(InitLayerContext &context)
   */
  void finalize(InitLayerContext &context) override;

  /**
   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
   */
  void forwarding(RunLayerContext &context, bool training) override;

  /**
   * @copydoc Layer::incremental_forwarding(RunLayerContext &context, unsigned
   * int from, unsigned int to, bool training)
   */
  void incremental_forwarding(RunLayerContext &context, unsigned int from,
                              unsigned int to, bool training) override;
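
  /* Note: this header does not spell out the range semantics; presumably
   * `from` and `to` bound the steps of the input actually forwarded
   * (i.e. [from, to)), which is what makes incremental, step-by-step
   * decoding cheap. Treat this as an assumption, not a guarantee. */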

  /**
   * @copydoc Layer::calcDerivative(RunLayerContext &context)
   */
  void calcDerivative(RunLayerContext &context) override;

  /**
   * @copydoc Layer::calcGradient(RunLayerContext &context)
   * @note
   * [note for LoRA] calcDerivative is implicitly applied; the weight
   * has already been updated with the LoRA weights (W = W + W_lora).
   */
  void calcGradient(RunLayerContext &context) override;
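
  /* A sketch of the LoRA composition referenced in the calcGradient note
   * above, assuming the standard low-rank formulation; the adapter names
   * A and B and their shapes are illustrative, not taken from this header.
   * With input width `in`, unit count `out`, and rank `r`:
   *
   *   W_lora = lora_scaling * (A x B)   // A: in x r, B: r x out
   *   W      = W + W_lora               // the composed weight noted above
   *
   * where lora_scaling = lora_alpha / lora_rank (see fc_props below). */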

  /**
   * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
   * method)
   */
  void exportTo(Exporter &exporter,
                const ml::train::ExportMethods &method) const override;

  /**
   * @copydoc Layer::getType()
   */
  const std::string getType() const override {
    return FullyConnectedLayer::type;
  };

  /**
   * @copydoc Layer::supportBackwarding()
   */
  bool supportBackwarding() const override { return true; }

  /**
   * @copydoc Layer::setProperty(const PropertyType type, const std::string
   * &value)
   */
  void setProperty(const std::vector<std::string> &values) override;

  /**
   * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
   */
  void setBatch(nntrainer::RunLayerContext &context,
                unsigned int batch) override;

  static constexpr const char *type = "fully_connected";

private:
  float lora_scaling;
  std::tuple<props::Unit, props::LoraRank, props::LoraAlpha>
    fc_props; /**< fc layer properties :
                   unit - number of output neurons,
                   lora_rank - rank of LoRA (optional),
                   lora_alpha - alpha used to scale the LoRA update, i.e.,
                   lora_scaling = lora_alpha / lora_rank */
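  /* Illustrative numbers for the fc_props scaling above: lora_alpha = 8
   * with lora_rank = 4 gives lora_scaling = 8.0f / 4 = 2.0f, i.e. the
   * LoRA update is applied at twice its raw magnitude. */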
  std::array<unsigned int, 2> weight_idx; /**< indices of the weights */
  std::array<unsigned int, 4> lora_idx;   /**< indices of the lora weights */
  std::unique_ptr<nntrainer::Quantizer> quantizer;
};
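
/**
 * A minimal usage sketch, not part of this header: creating this layer
 * through the c++ API, assuming the string-based ml::train::createLayer
 * factory. The property keys mirror the fc_props tuple above and the
 * values are illustrative.
 *
 * @code
 * auto fc = ml::train::createLayer(
 *   "fully_connected", {"unit=10", "lora_rank=4", "lora_alpha=8"});
 * @endcode
 */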
} // namespace nntrainer

#endif /* __cplusplus */
#endif /* __FC_LAYER_H__ */