LCOV - code coverage report
Current view: top level - nntrainer/optimizers - optimizer_wrapped.h (source / functions)
Test:       coverage_filtered.info
Test Date:  2025-12-14 20:38:17

              Coverage    Total    Hit
Lines:        100.0 %     1        1
Functions:    100.0 %     2        2

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
       4              :  *
       5              :  * @file   optimizer_wrapped.h
       6              :  * @date   10 December 2021
       7              :  * @brief  This is the Optimizer Wrapped interface class
       8              :  * @see    https://github.com/nnstreamer/nntrainer
       9              :  * @author Parichay Kapoor <pk.kapoor@samsung.com>
      10              :  * @bug    No known bugs except for NYI items
      11              :  *
      12              :  * @details wraps the optimizer and learning rate scheduler together
      13              :  */
      14              : 
      15              : #ifndef __OPTIMIZER_WRAPPER_H__
      16              : #define __OPTIMIZER_WRAPPER_H__
      17              : 
      18              : #if __cplusplus
      19              : 
      20              : #include <string>
      21              : #include <vector>
      22              : 
      23              : #include <common_properties.h>
      24              : #include <lr_scheduler.h>
      25              : #include <optimizer.h>
      26              : #include <optimizer_devel.h>
      27              : 
      28              : namespace nntrainer {
      29              : 
      30              : using OptimizerCore = nntrainer::Optimizer;
      31              : 
      32              : /**
      33              :  * @class   OptimizerWrapped
      34              :  * @brief   Wrapper class coupling an optimizer with a learning rate scheduler
      35              :  */
      36              : class OptimizerWrapped : public ml::train::Optimizer {
      37              : public:
      38              :   /**
      39              :    * @brief Constructor of OptimizerWrapped class
      40              :    * @param opt optimizer to wrap
      41              :    *
      42              :    */
      43              :   OptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt);
      44              : 
      45              :   /**
      46              :    * @brief     Destructor of OptimizerWrapped class
      47              :    */
      48         1526 :   ~OptimizerWrapped() = default;
      49              : 
      50              :   /**
      51              :    * Support all the interface requirements by ml::train::Optimizer
      52              :    */
      53              : 
      54              :   /**
      55              :    * @brief     get Optimizer Type
      56              :    * @retval    Optimizer type
      57              :    */
      58              :   const std::string getType() const override;
      59              : 
      60              :   /**
      61              :    * @brief     Default allowed properties
      62              :    * Available for all optimizers
      63              :    * - learning_rate : float
      64              :    *
      65              :    * Available for SGD and Adam optimizers
      66              :    * - decay_rate : float,
      67              :    * - decay_steps : float,
      68              :    *
      69              :    * Available for Adam optimizer
      70              :    * - beta1 : float,
      71              :    * - beta2 : float,
      72              :    * - epsilon : float,
      73              :    */
      74              : 
      75              :   /**
      76              :    * @brief     set Optimizer Parameters
      77              :    * @param[in] values Optimizer Parameter list
      78              :    * @details   This function accepts a vector of properties, each given as a
      79              :    *  "key=value" string
      80              :    */
      81              :   void setProperty(const std::vector<std::string> &values) override;
      82              : 
      83              :   /**
      84              :    * @brief Set the Learning Rate Scheduler object
      85              :    *
      86              :    * @param lrs the learning rate scheduler object
      87              :    */
      88              :   int setLearningRateScheduler(
      89              :     std::shared_ptr<ml::train::LearningRateScheduler> lrs) override;
      90              : 
      91              :   /**
      92              :    * Support all the interface requirements by nntrainer::Optimizer
      93              :    */
      94              : 
      95              :   /**
      96              :    * @brief     get Learning Rate for the given iteration
      97              :    * @param[in] iteration Iteration for the learning rate
      98              :    * @retval    Learning rate in double
      99              :    * @details   the return value of this function and getLearningRate() must
     100              :    * match for iteration == 0.
     101              :    */
     102              :   double getLearningRate(size_t iteration);
     103              : 
     104              :   /**
     105              :    * @brief     apply gradient to weight
     106              :    * @param[in] context Optimizer context
     107              :    */
     108              :   void applyGradient(RunOptimizerContext &context);
     109              : 
     110              :   /**
     111              :    * @brief this function helps export the optimizer in a predefined format
     112              :    * while working around the issue caused by templated function type erasure
     113              :    *
     114              :    * @param     exporter exporter that contains exporting logic
     115              :    * @param     method enum value to identify how it should be exported to
     116              :    */
     117              :   void exportTo(Exporter &exporter,
     118              :                 const ml::train::ExportMethods &method) const;
     119              : 
     120              :   /**
     121              :    * @brief     finalize optimizer.
     122              :    */
     123              :   void finalize();
     124              : 
     125              :   /**
     126              :    * @brief     Read Training optimizer parameters from file
     127              :    * @param[in] file input stream file
     128              :    */
     129              :   void read(std::ifstream &file);
     130              : 
     131              :   /**
     132              :    * @brief     Save Training optimizer parameters to file
     133              :    * @param[in] file output stream file
     134              :    */
     135              :   void save(std::ofstream &file);
     136              : 
     137              :   /**
     138              :    * @brief     Get dimension of extra variables if the optimizer needs any.
     139              :    * @param dim Dimension of tensor to be added as an optimizer variable
     140              :    * @return    Vector of dimensions
     141              :    */
     142              :   std::vector<TensorDim> getOptimizerVariableDim(const TensorDim &dim);
     143              : 
     144              :   /**
     145              :    * @brief Get the Learning Rate Scheduler object
     146              :    *
     147              :    * @return the learning rate scheduler object
     148              :    */
     149              :   nntrainer::LearningRateScheduler *getLearningRateScheduler();
     150              : 
     151              : private:
     152              :   std::unique_ptr<OptimizerCore> optimizer; /**< the underlying optimizer */
     153              :   std::shared_ptr<nntrainer::LearningRateScheduler>
     154              :     lr_sched; /**< the underlying learning rate scheduler */
     155              : 
     156              :   /** @todo remove DecayRate, DecaySteps*/
     157              :   std::tuple<props::LearningRate, props::DecayRate, props::DecaySteps>
     158              :     props; /**< lr scheduler props for backward compatibility */
     159              : };
     160              : 
     161              : /**
     162              :  * @brief Optimizer wrapped creator with constructor for optimizer
     163              :  *
     164              :  * @param[in] type Type of the optimizer to be constructed
     165              :  * @param[in] properties Properties of the optimizer
     166              :  */
     167              : std::unique_ptr<OptimizerWrapped>
     168              : createOptimizerWrapped(const ml::train::OptimizerType &type,
     169              :                        const std::vector<std::string> &properties = {});
     170              : 
     171              : /**
     172              :  * @brief Optimizer wrapped creator with constructor for optimizer
     173              :  *
     174              :  * @param[in] type Type of the optimizer to be constructed
     175              :  * @param[in] properties Properties of the optimizer
     176              :  */
     177              : std::unique_ptr<OptimizerWrapped>
     178              : createOptimizerWrapped(const std::string &type,
     179              :                        const std::vector<std::string> &properties = {});
     180              : 
     181              : /**
     182              :  * @brief Optimizer wrapped creator taking an already constructed optimizer
     183              :  *
     184              :  * @param[in] opt Optimizer to be wrapped
     185              :  * @param[in] properties Properties of the optimizer
     186              :  */
     187              : std::unique_ptr<OptimizerWrapped>
     188              : createOptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt,
     189              :                        const std::vector<std::string> &properties = {});
     190              : 
     191              : } // namespace nntrainer
     192              : 
     193              : #endif // __cplusplus
     194              : #endif // __OPTIMIZER_WRAPPER_H__
        

Generated by: LCOV version 2.0-1
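
For orientation, a minimal usage sketch of the API declared above. The "adam" type string and the property keys used here are illustrative assumptions; properties are passed as "key=value" strings, consistent with the setProperty documentation in the header.

#include <memory>

#include <optimizer_wrapped.h>

void example() {
  using namespace nntrainer;

  /* Create a wrapped optimizer by type name; the type string and property
   * keys ("adam", "learning_rate", "beta1", "beta2") are assumptions for
   * illustration. */
  std::unique_ptr<OptimizerWrapped> opt = createOptimizerWrapped(
    "adam", {"learning_rate=0.001", "beta1=0.9", "beta2=0.999"});

  /* At iteration 0 this resolves the configured learning rate through the
   * wrapped learning rate scheduler. */
  double lr0 = opt->getLearningRate(0);
  (void)lr0;
}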