// SPDX-License-Identifier: Apache-2.0
/**
 * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
 *
 * @file optimizer_wrapped.cpp
 * @date 10 December 2021
 * @brief This is the Optimizer Wrapped interface class
 * @see https://github.com/nnstreamer/nntrainer
 * @author Parichay Kapoor <pk.kapoor@samsung.com>
 * @bug No known bugs except for NYI items
 *
 * @details Wraps the optimizer and the learning rate scheduler together
 */

#include <common_properties.h>
#include <engine.h>
#include <lr_scheduler_constant.h>
#include <lr_scheduler_exponential.h>
#include <nntrainer_log.h>
#include <node_exporter.h>
#include <optimizer_wrapped.h>

namespace nntrainer {

/**
 * @brief Optimizer wrapped creator taking the optimizer type enum
 */
std::unique_ptr<OptimizerWrapped>
createOptimizerWrapped(const ml::train::OptimizerType &type,
                       const std::vector<std::string> &properties) {
  auto &eg = nntrainer::Engine::Global();
  return createOptimizerWrapped(eg.createOptimizerObject(type), properties);
}

/**
 * @brief Optimizer wrapped creator taking the optimizer type name string
 */
std::unique_ptr<OptimizerWrapped>
createOptimizerWrapped(const std::string &type,
                       const std::vector<std::string> &properties) {
  auto &eg = nntrainer::Engine::Global();
  return createOptimizerWrapped(eg.createOptimizerObject(type), properties);
}

/**
 * @brief Optimizer wrapped creator taking a pre-created optimizer object
 */
std::unique_ptr<OptimizerWrapped>
createOptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt,
                       const std::vector<std::string> &properties) {
  auto opt_wrapped = std::make_unique<OptimizerWrapped>(std::move(opt));

  opt_wrapped->setProperty(properties);
  return opt_wrapped;
}
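
/**
 * Illustrative usage (a sketch, not a call site from this file): the "adam"
 * type name is assumed to be registered with the Engine, and learning_rate
 * is the property key consumed below in finalize().
 * @code
 * auto opt = createOptimizerWrapped("adam", {"learning_rate=0.001"});
 * opt->finalize(); // attaches a ConstantLearningRateScheduler if none is set
 * @endcode
 */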

OptimizerWrapped::OptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt) :
  optimizer(std::move(opt)),
  lr_sched(),
  props(props::LearningRate(), props::DecayRate(), props::DecaySteps()) {
  /// seed the learning rate property with the wrapped optimizer's default
  std::get<props::LearningRate>(props).set(optimizer->getDefaultLearningRate());
}

const std::string OptimizerWrapped::getType() const {
  return optimizer->getType();
}

void OptimizerWrapped::setProperty(const std::vector<std::string> &values) {
  /// properties not consumed by the wrapper are forwarded to the optimizer
  auto remain_props = loadProperties(values, props);
  optimizer->setProperty(remain_props);
}

double OptimizerWrapped::getLearningRate(size_t iteration) {
  /// expects lr_sched to be set, i.e., finalize() must have run first
  return lr_sched->getLearningRate(iteration);
}

void OptimizerWrapped::applyGradient(RunOptimizerContext &context) {
  optimizer->applyGradient(context);
}

void OptimizerWrapped::exportTo(Exporter &exporter,
                                const ml::train::ExportMethods &method) const {
  optimizer->exportTo(exporter, method);
  lr_sched->exportTo(exporter, method);
}

void OptimizerWrapped::finalize() {
  auto const &props_lr = std::get<props::LearningRate>(props);
  auto const &props_dr = std::get<props::DecayRate>(props);
  auto const &props_ds = std::get<props::DecaySteps>(props);

  /** if lr_sched is already set and the decay properties are not empty, error */
  bool props_empty = props_dr.empty() && props_ds.empty();

  NNTR_THROW_IF(!props_empty && lr_sched, std::invalid_argument)
    << "Multiple learning rate schedulers set for the optimizer " << getType();

  /** if lr_sched is not set, make lr_sched from the properties */
  if (!lr_sched) {
    if (!props_empty) {
      ml_logw("The decay_rate and/or decay_steps properties are set on the "
              "optimizer. Please set these properties on the learning rate "
              "scheduler instead.");
      lr_sched = std::make_unique<ExponentialLearningRateScheduler>();
      if (!props_dr.empty())
        lr_sched->setProperty({"decay_rate=" + std::to_string(props_dr.get())});
      if (!props_ds.empty())
        lr_sched->setProperty(
          {"decay_steps=" + std::to_string(props_ds.get())});
    } else {
      lr_sched = std::make_unique<ConstantLearningRateScheduler>();
    }
    lr_sched->setProperty({"learning_rate=" + std::to_string(props_lr.get())});
  } else if (lr_sched && !props_lr.empty()) {
    ml_logw("The learning rate property is set on both the optimizer and the "
            "learning rate scheduler. The value set on the optimizer will be "
            "ignored.");
  }

  lr_sched->finalize();
  optimizer->finalize();
}
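
/**
 * Sketch of how finalize() routes the properties above (illustrative only;
 * the keys come from this file, the numeric values are made up):
 * @code
 * // decay properties set on the optimizer fall back to an exponential
 * // scheduler, with a warning:
 * opt->setProperty({"learning_rate=0.1", "decay_rate=0.96", "decay_steps=1000"});
 * opt->finalize(); // builds an ExponentialLearningRateScheduler
 *
 * // without decay properties, a constant scheduler is created instead:
 * opt->setProperty({"learning_rate=0.1"});
 * opt->finalize(); // builds a ConstantLearningRateScheduler
 * @endcode
 */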

void OptimizerWrapped::read(std::ifstream &file) { optimizer->read(file); }

void OptimizerWrapped::save(std::ofstream &file) { optimizer->save(file); }
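
/**
 * @brief Returns the dimensions of the wrapped optimizer's internal
 * variables for a weight of dimension @a dim, one entry per variable (for
 * example, an adam-style optimizer keeps moment tensors per weight); the
 * exact set is defined by the wrapped optimizer.
 */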
std::vector<TensorDim>
OptimizerWrapped::getOptimizerVariableDim(const TensorDim &dim) {
  return optimizer->getOptimizerVariableDim(dim);
}

int OptimizerWrapped::setLearningRateScheduler(
  std::shared_ptr<ml::train::LearningRateScheduler> lrs) {
  lr_sched = std::static_pointer_cast<nntrainer::LearningRateScheduler>(lrs);

  return ML_ERROR_NONE;
}
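
/**
 * Illustrative sketch of attaching a user-provided scheduler, built from the
 * classes this file already includes (an assumption about typical usage, not
 * a call site from this file):
 * @code
 * auto lrs = std::make_shared<nntrainer::ExponentialLearningRateScheduler>();
 * lrs->setProperty({"learning_rate=0.1", "decay_rate=0.96", "decay_steps=1000"});
 * opt->setLearningRateScheduler(lrs);
 * @endcode
 */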

nntrainer::LearningRateScheduler *OptimizerWrapped::getLearningRateScheduler() {
  return lr_sched.get();
}

} // namespace nntrainer