LCOV - code coverage report
Current view: top level - nntrainer - app_context.cpp (source / functions)
Test:      coverage_filtered.info
Test Date: 2026-01-12 20:43:37

              Coverage    Total    Hit
Lines:        71.6 %      215      154
Functions:    70.6 %      17       12

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
       4              :  *
       5              :  * @file   app_context.cpp
       6              :  * @date   10 November 2020
       7              :  * @brief  This file contains app context related functions and classes that
       8              :  * manage the global configuration of the current environment
       9              :  * @see    https://github.com/nntrainer/nntrainer
      10              :  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
      11              :  * @bug    No known bugs except for NYI items
      12              :  *
      13              :  */
      14              : 
      15              : #include <filesystem>
      16              : #include <iostream>
      17              : #include <sstream>
      18              : #include <string>
      19              : #include <vector>
      20              : 
      21              : #include <iniparser.h>
      22              : 
      23              : #include <app_context.h>
      24              : #include <layer.h>
      25              : #include <nntrainer_error.h>
      26              : #include <nntrainer_log.h>
      27              : #include <optimizer.h>
      28              : #include <util_func.h>
      29              : 
      30              : #include <adam.h>
      31              : #include <adamw.h>
      32              : #include <lion.h>
      33              : #include <sgd.h>
      34              : 
      35              : #include <activation_layer.h>
      36              : #include <add_layer.h>
      37              : #include <addition_layer.h>
      38              : #include <attention_layer.h>
      39              : #include <bn_layer.h>
      40              : #include <cast_layer.h>
      41              : #include <centroid_knn.h>
      42              : #include <channel_shuffle.h>
      43              : #include <concat_layer.h>
      44              : #include <constant_derivative_loss_layer.h>
      45              : #include <conv1d_layer.h>
      46              : #include <conv2d_layer.h>
      47              : #include <conv2d_transpose_layer.h>
      48              : #include <cosine_layer.h>
      49              : #include <cross_entropy_sigmoid_loss_layer.h>
      50              : #include <cross_entropy_softmax_loss_layer.h>
      51              : #include <divide_layer.h>
      52              : #include <dropout.h>
      53              : #include <dynamic_library_loader.h>
      54              : #include <embedding.h>
      55              : #include <fc_layer.h>
      56              : #include <flatten_layer.h>
      57              : #include <gather_layer.h>
      58              : #include <gru.h>
      59              : #include <grucell.h>
      60              : #include <identity_layer.h>
      61              : #include <input_layer.h>
      62              : #include <layer_normalization_layer.h>
      63              : #include <lr_scheduler_constant.h>
      64              : #include <lr_scheduler_cosine.h>
      65              : #include <lr_scheduler_exponential.h>
      66              : #include <lr_scheduler_linear.h>
      67              : #include <lr_scheduler_step.h>
      68              : #include <lstm.h>
      69              : #include <lstmcell.h>
      70              : #include <matmul_layer.h>
      71              : #include <mol_attention_layer.h>
      72              : #include <mse_loss_layer.h>
      73              : #include <multi_head_attention_layer.h>
      74              : #include <multiout_layer.h>
      75              : #include <multiply_layer.h>
      76              : #include <negative_layer.h>
      77              : #include <nntrainer_error.h>
      78              : #include <permute_layer.h>
      79              : #include <plugged_layer.h>
      80              : #include <plugged_optimizer.h>
      81              : #include <pooling2d_layer.h>
      82              : #include <positional_encoding_layer.h>
      83              : #include <pow_layer.h>
      84              : #include <preprocess_flip_layer.h>
      85              : #include <preprocess_l2norm_layer.h>
      86              : #include <preprocess_translate_layer.h>
      87              : #include <reduce_mean_layer.h>
      88              : #include <reduce_sum_layer.h>
      89              : #include <rnn.h>
      90              : #include <rnncell.h>
      91              : #include <sine_layer.h>
      92              : #include <slice_layer.h>
      93              : #include <split_layer.h>
      94              : #include <sqrt_layer.h>
      95              : #include <subtract_layer.h>
      96              : #include <tangent_layer.h>
      97              : #include <tensor_layer.h>
      98              : #include <time_dist.h>
      99              : #include <upsample2d_layer.h>
     100              : #include <weight_layer.h>
     101              : #include <zoneout_lstmcell.h>
     102              : 
     103              : #ifdef ENABLE_TFLITE_BACKBONE
     104              : #include <tflite_layer.h>
     105              : #endif
     106              : 
     107              : #ifdef ENABLE_NNSTREAMER_BACKBONE
     108              : #include <nnstreamer_layer.h>
     109              : #endif
     110              : 
      111              : /// @todo add a platform-specific #ifdef (the shared library suffix differs per platform)
     112              : static std::string solib_suffix = ".so";
     113              : static std::string layerlib_suffix = "layer.so";
     114              : static std::string optimizerlib_suffix = "optimizer.so";
     115              : static const std::string func_tag = "[AppContext] ";
     116              : 
     117              : #ifdef NNTRAINER_CONF_PATH
     118              : constexpr const char *DEFAULT_CONF_PATH = NNTRAINER_CONF_PATH;
     119              : #else
     120              : constexpr const char *DEFAULT_CONF_PATH = "/etc/nntrainer.ini";
     121              : #endif
     122              : 
     123              : constexpr const char *getConfPath() { return DEFAULT_CONF_PATH; }
     124              : 
     125              : namespace nntrainer {
     126              : 
     127              : namespace {
     128              : 
     129              : /**
      130              :  * @brief Get the value of @a key from the conf ini
      131              :  *
      132              :  * @return std::string value for the key, or an empty string if not found
     133              :  */
     134           27 : std::string getConfig(const std::string &key) {
     135           27 :   std::string conf_path{getConfPath()};
     136              : 
     137           54 :   ml_logd("%s conf path: %s", func_tag.c_str(), conf_path.c_str());
     138           54 :   if (!isFileExist(conf_path)) {
     139           54 :     ml_logw(
     140              :       "%s conf path does not exist, skip getting plugin path from the conf",
     141              :       func_tag.c_str());
     142              :     return std::string();
     143              :   }
     144              : 
     145            0 :   dictionary *ini = iniparser_load(conf_path.c_str());
     146            0 :   NNTR_THROW_IF(ini == nullptr, std::runtime_error)
     147              :     << func_tag << "loading ini failed";
     148              : 
     149              :   std::string value;
     150            0 :   int nsec = iniparser_getnsec(ini);
     151            0 :   for (int i = 0; i < nsec; i++) {
     152            0 :     std::string query(iniparser_getsecname(ini, i));
     153              :     query += ":";
     154              :     query += key;
     155              : 
     156            0 :     value = std::string(iniparser_getstring(ini, query.c_str(), ""));
     157            0 :     if (!value.empty())
     158              :       break;
     159              :   }
     160              : 
     161            0 :   if (value.empty())
     162            0 :     ml_logd("key %s is not found in config(%s)", key.c_str(),
     163              :             conf_path.c_str());
     164              : 
     165            0 :   iniparser_freedict(ini);
     166              : 
     167            0 :   return value;
     168              : }
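
For illustration, a minimal sketch of the lookup getConfig() performs, assuming a conf file at /etc/nntrainer.ini containing a hypothetical [plugins] section; getConfig() itself scans every section of the conf for a "<section>:<key>" entry and returns the first non-empty value.

    // Minimal sketch of the "<section>:<key>" lookup used by getConfig() above.
    // The conf path and the [plugins] section name are illustrative assumptions;
    // the real code takes the path from getConfPath() and iterates all sections.
    //
    //   [plugins]
    //   layer = /usr/lib/nntrainer/plugins
    //
    #include <iniparser.h>
    #include <iostream>

    int main() {
      dictionary *ini = iniparser_load("/etc/nntrainer.ini");
      if (ini == nullptr)
        return 1; // conf file missing or unreadable
      // iniparser addresses entries as "<section name>:<key>"
      const char *layer_path = iniparser_getstring(ini, "plugins:layer", "");
      std::cout << "plugin path: " << layer_path << '\n';
      iniparser_freedict(ini);
      return 0;
    }
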
     169              : 
     170              : /**
     171              :  * @brief Get the plugin paths
     172              :  *
     173              :  * @return std::vector<std::string> list of paths to search for
     174              :  */
     175           27 : std::vector<std::string> getPluginPaths() {
     176              :   std::vector<std::string> ret;
     177              : 
      178              :   /*** @note NNTRAINER_PATH is an environment variable pointing to a @a directory
      179              :    * in which to look for plugin layers, while NNTRAINER_CONF_PATH is a
      180              :    * (build-time hardcoded @a file path) used to locate the configuration *.ini file
     181              :    */
      182              :   /*** @note for now, NNTRAINER_PATH is a SINGLE PATH rather than a series of
      183              :    * paths like the PATH environment variable. This could be improved, but it is
      184              :    * enough for now
     185              :    */
     186           27 :   const char *env_path = std::getenv("NNTRAINER_PATH");
     187           27 :   if (env_path != nullptr) {
     188            4 :     if (isFileExist(env_path)) {
     189            2 :       ml_logd("NNTRAINER_PATH is defined and valid. path: %s", env_path);
     190            2 :       ret.emplace_back(env_path);
     191              :     } else {
     192            0 :       ml_logw("NNTRAINER_PATH is given but it is not valid. path: %s",
     193              :               env_path);
     194              :     }
     195              :   }
     196              : 
     197           54 :   std::string plugin_path = getConfig("layer");
     198           27 :   if (!plugin_path.empty()) {
     199            0 :     ret.emplace_back(plugin_path);
     200            0 :     ml_logd("DEFAULT CONF PATH, path: %s", plugin_path.c_str());
     201              :   }
     202              : 
     203           27 :   return ret;
     204            0 : }
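
In practice the environment variable takes priority in ordering: running an application with, say, NNTRAINER_PATH=/opt/nntrainer/plugins (a hypothetical directory) places that directory at the front of the returned list, and a "layer" entry from the conf file, if present, is appended after it; an NNTRAINER_PATH that does not exist is only warned about and otherwise ignored.
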
     205              : 
     206              : /**
     207              :  * @brief Get the Full Path from given string
     208              :  * @details path is resolved in the following order
     209              :  * 1) if @a path is absolute, return path
     210              :  * ----------------------------------------
     211              :  * 2) if @a base == "" && @a path == "", return "."
     212              :  * 3) if @a base == "" && @a path != "", return @a path
     213              :  * 4) if @a base != "" && @a path == "", return @a base
     214              :  * 5) if @a base != "" && @a path != "", return @a base + "/" + path
     215              :  *
     216              :  * @param path path to calculate from base
     217              :  * @param base base path
     218              :  * @return const std::string
     219              :  */
     220           22 : const std::string getFullPath(const std::string &path,
     221              :                               const std::string &base) {
     222              :   /// if path is absolute, return path
     223           44 :   if (std::filesystem::path(path).is_absolute()) {
     224              :     return path;
     225              :   }
     226              : 
     227            8 :   if (base == std::string()) {
     228            8 :     return path == std::string() ? "." : path;
     229              :   }
     230              : 
     231            4 :   return path == std::string()
     232            4 :            ? base
     233            8 :            : std::filesystem::path(base).append(path).string();
     234              : }
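
As a worked illustration of the resolution rules documented above, the sketch below re-implements the same logic (getFullPath() itself lives in an anonymous namespace and is not exported) and checks each rule; the paths are hypothetical and POSIX separators are assumed.

    // Self-contained re-implementation of the five resolution rules, for
    // illustration only; compile with C++17 for <filesystem>.
    #include <cassert>
    #include <filesystem>
    #include <string>

    static std::string resolve(const std::string &path, const std::string &base) {
      if (std::filesystem::path(path).is_absolute())
        return path;                                                  // rule 1
      if (base.empty())
        return path.empty() ? "." : path;                             // rules 2, 3
      return path.empty()
               ? base                                                 // rule 4
               : std::filesystem::path(base).append(path).string();   // rule 5
    }

    int main() {
      assert(resolve("/abs/model.bin", "/base") == "/abs/model.bin"); // rule 1
      assert(resolve("", "") == ".");                                 // rule 2
      assert(resolve("model.bin", "") == "model.bin");                // rule 3
      assert(resolve("", "/base") == "/base");                        // rule 4
      assert(resolve("model.bin", "/base") == "/base/model.bin");     // rule 5
      return 0;
    }
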
     235              : 
     236              : } // namespace
     237              : 
     238              : std::mutex factory_mutex;
     239              : 
     240              : std::once_flag global_app_context_init_flag;
     241              : 
     242           27 : void AppContext::initialize() noexcept {
     243              :   try {
     244           27 :     setMemAllocator(std::make_shared<MemAllocator>());
     245              : 
     246           27 :     add_default_object();
     247           27 :     add_extension_object();
     248            0 :   } catch (std::exception &e) {
     249            0 :     ml_loge("registering layers failed!!, reason: %s", e.what());
     250            0 :   } catch (...) {
     251            0 :     ml_loge("registering layer failed due to unknown reason");
     252            0 :   }
     253           27 : };
     254              : 
     255           27 : void AppContext::add_default_object() {
      256              :   /// @note all layers and optimizers should be registered with the app_context
      257              :   /// so that createLayer/createOptimizer can construct them by type
     258              :   using OptType = ml::train::OptimizerType;
     259           27 :   registerFactory(nntrainer::createOptimizer<SGD>, SGD::type, OptType::SGD);
     260           27 :   registerFactory(nntrainer::createOptimizer<Adam>, Adam::type, OptType::ADAM);
     261           27 :   registerFactory(nntrainer::createOptimizer<AdamW>, AdamW::type,
     262              :                   OptType::ADAMW);
     263           27 :   registerFactory(nntrainer::createOptimizer<Lion>, Lion::type, OptType::LION);
     264           27 :   registerFactory(AppContext::unknownFactory<nntrainer::Optimizer>, "unknown",
     265              :                   OptType::UNKNOWN);
     266              : 
     267              :   using LRType = LearningRateSchedulerType;
     268           27 :   registerFactory(
     269              :     ml::train::createLearningRateScheduler<ConstantLearningRateScheduler>,
     270              :     ConstantLearningRateScheduler::type, LRType::CONSTANT);
     271           27 :   registerFactory(
     272              :     ml::train::createLearningRateScheduler<ExponentialLearningRateScheduler>,
     273              :     ExponentialLearningRateScheduler::type, LRType::EXPONENTIAL);
     274           27 :   registerFactory(
     275              :     ml::train::createLearningRateScheduler<StepLearningRateScheduler>,
     276              :     StepLearningRateScheduler::type, LRType::STEP);
     277           27 :   registerFactory(ml::train::createLearningRateScheduler<
     278              :                     CosineAnnealingLearningRateScheduler>,
     279              :                   CosineAnnealingLearningRateScheduler::type, LRType::COSINE);
     280           27 :   registerFactory(
     281              :     ml::train::createLearningRateScheduler<LinearLearningRateScheduler>,
     282              :     LinearLearningRateScheduler::type, LRType::LINEAR);
     283              : 
     284              :   using LayerType = ml::train::LayerType;
     285           27 :   registerFactory(nntrainer::createLayer<InputLayer>, InputLayer::type,
     286              :                   LayerType::LAYER_IN);
     287           27 :   registerFactory(nntrainer::createLayer<WeightLayer>, WeightLayer::type,
     288              :                   LayerType::LAYER_WEIGHT);
     289           27 :   registerFactory(nntrainer::createLayer<AddLayer>, AddLayer::type,
     290              :                   LayerType::LAYER_ADD);
     291           27 :   registerFactory(nntrainer::createLayer<SubtractLayer>, SubtractLayer::type,
     292              :                   LayerType::LAYER_SUBTRACT);
     293           27 :   registerFactory(nntrainer::createLayer<MultiplyLayer>, MultiplyLayer::type,
     294              :                   LayerType::LAYER_MULTIPLY);
     295           27 :   registerFactory(nntrainer::createLayer<DivideLayer>, DivideLayer::type,
     296              :                   LayerType::LAYER_DIVIDE);
     297           27 :   registerFactory(nntrainer::createLayer<PowLayer>, PowLayer::type,
     298              :                   LayerType::LAYER_POW);
     299           27 :   registerFactory(nntrainer::createLayer<SQRTLayer>, SQRTLayer::type,
     300              :                   LayerType::LAYER_SQRT);
     301           27 :   registerFactory(nntrainer::createLayer<SineLayer>, SineLayer::type,
     302              :                   LayerType::LAYER_SINE);
     303           27 :   registerFactory(nntrainer::createLayer<CosineLayer>, CosineLayer::type,
     304              :                   LayerType::LAYER_COSINE);
     305           27 :   registerFactory(nntrainer::createLayer<TangentLayer>, TangentLayer::type,
     306              :                   LayerType::LAYER_TANGENT);
     307           27 :   registerFactory(nntrainer::createLayer<MatMulLayer>, MatMulLayer::type,
     308              :                   LayerType::LAYER_MATMUL);
     309           27 :   registerFactory(nntrainer::createLayer<CastLayer>, CastLayer::type,
     310              :                   LayerType::LAYER_CAST);
     311           27 :   registerFactory(nntrainer::createLayer<GatherLayer>, GatherLayer::type,
     312              :                   LayerType::LAYER_GATHER);
     313           27 :   registerFactory(nntrainer::createLayer<SliceLayer>, SliceLayer::type,
     314              :                   LayerType::LAYER_SLICE);
     315           27 :   registerFactory(nntrainer::createLayer<NegativeLayer>, NegativeLayer::type,
     316              :                   LayerType::LAYER_NEG);
     317           27 :   registerFactory(nntrainer::createLayer<FullyConnectedLayer>,
     318              :                   FullyConnectedLayer::type, LayerType::LAYER_FC);
     319           27 :   registerFactory(nntrainer::createLayer<BatchNormalizationLayer>,
     320              :                   BatchNormalizationLayer::type, LayerType::LAYER_BN);
     321           27 :   registerFactory(nntrainer::createLayer<LayerNormalizationLayer>,
     322              :                   LayerNormalizationLayer::type,
     323              :                   LayerType::LAYER_LAYER_NORMALIZATION);
     324           27 :   registerFactory(nntrainer::createLayer<Conv2DLayer>, Conv2DLayer::type,
     325              :                   LayerType::LAYER_CONV2D);
     326           27 :   registerFactory(nntrainer::createLayer<Conv2DTransposeLayer>,
     327              :                   Conv2DTransposeLayer::type,
     328              :                   LayerType::LAYER_CONV2D_TRANSPOSE);
     329           27 :   registerFactory(nntrainer::createLayer<Conv1DLayer>, Conv1DLayer::type,
     330              :                   LayerType::LAYER_CONV1D);
     331           27 :   registerFactory(nntrainer::createLayer<Pooling2DLayer>, Pooling2DLayer::type,
     332              :                   LayerType::LAYER_POOLING2D);
     333           27 :   registerFactory(nntrainer::createLayer<FlattenLayer>, FlattenLayer::type,
     334              :                   LayerType::LAYER_FLATTEN);
     335           27 :   registerFactory(nntrainer::createLayer<ReshapeLayer>, ReshapeLayer::type,
     336              :                   LayerType::LAYER_RESHAPE);
     337           27 :   registerFactory(nntrainer::createLayer<ActivationLayer>,
     338              :                   ActivationLayer::type, LayerType::LAYER_ACTIVATION);
     339           27 :   registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
     340              :                   LayerType::LAYER_ADDITION);
     341           27 :   registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
     342              :                   LayerType::LAYER_CONCAT);
     343           27 :   registerFactory(nntrainer::createLayer<MultiOutLayer>, MultiOutLayer::type,
     344              :                   LayerType::LAYER_MULTIOUT);
     345           27 :   registerFactory(nntrainer::createLayer<EmbeddingLayer>, EmbeddingLayer::type,
     346              :                   LayerType::LAYER_EMBEDDING);
     347           27 :   registerFactory(nntrainer::createLayer<RNNLayer>, RNNLayer::type,
     348              :                   LayerType::LAYER_RNN);
     349           27 :   registerFactory(nntrainer::createLayer<RNNCellLayer>, RNNCellLayer::type,
     350              :                   LayerType::LAYER_RNNCELL);
     351           27 :   registerFactory(nntrainer::createLayer<LSTMLayer>, LSTMLayer::type,
     352              :                   LayerType::LAYER_LSTM);
     353           27 :   registerFactory(nntrainer::createLayer<LSTMCellLayer>, LSTMCellLayer::type,
     354              :                   LayerType::LAYER_LSTMCELL);
     355           27 :   registerFactory(nntrainer::createLayer<ZoneoutLSTMCellLayer>,
     356              :                   ZoneoutLSTMCellLayer::type,
     357              :                   LayerType::LAYER_ZONEOUT_LSTMCELL);
     358           27 :   registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
     359              :                   LayerType::LAYER_SPLIT);
     360           27 :   registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
     361              :                   LayerType::LAYER_GRU);
     362           27 :   registerFactory(nntrainer::createLayer<GRUCellLayer>, GRUCellLayer::type,
     363              :                   LayerType::LAYER_GRUCELL);
     364           27 :   registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
     365              :                   LayerType::LAYER_PERMUTE);
     366           27 :   registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
     367              :                   LayerType::LAYER_DROPOUT);
     368           27 :   registerFactory(nntrainer::createLayer<AttentionLayer>, AttentionLayer::type,
     369              :                   LayerType::LAYER_ATTENTION);
     370           27 :   registerFactory(nntrainer::createLayer<MoLAttentionLayer>,
     371              :                   MoLAttentionLayer::type, LayerType::LAYER_MOL_ATTENTION);
     372           27 :   registerFactory(nntrainer::createLayer<MultiHeadAttentionLayer>,
     373              :                   MultiHeadAttentionLayer::type,
     374              :                   LayerType::LAYER_MULTI_HEAD_ATTENTION);
     375           27 :   registerFactory(nntrainer::createLayer<ReduceMeanLayer>,
     376              :                   ReduceMeanLayer::type, LayerType::LAYER_REDUCE_MEAN);
     377           27 :   registerFactory(nntrainer::createLayer<ReduceSumLayer>, ReduceSumLayer::type,
     378              :                   LayerType::LAYER_REDUCE_SUM);
     379           27 :   registerFactory(nntrainer::createLayer<PositionalEncodingLayer>,
     380              :                   PositionalEncodingLayer::type,
     381              :                   LayerType::LAYER_POSITIONAL_ENCODING);
     382           27 :   registerFactory(nntrainer::createLayer<IdentityLayer>, IdentityLayer::type,
     383              :                   LayerType::LAYER_IDENTITY);
     384           27 :   registerFactory(nntrainer::createLayer<Upsample2dLayer>,
     385              :                   Upsample2dLayer::type, LayerType::LAYER_UPSAMPLE2D);
     386              : 
     387           27 :   registerFactory(nntrainer::createLayer<ChannelShuffle>, ChannelShuffle::type,
     388              :                   LayerType::LAYER_CHANNEL_SHUFFLE);
     389              : 
     390              : #ifdef ENABLE_NNSTREAMER_BACKBONE
     391              :   registerFactory(nntrainer::createLayer<NNStreamerLayer>,
     392              :                   NNStreamerLayer::type, LayerType::LAYER_BACKBONE_NNSTREAMER);
     393              : #endif
     394              : #ifdef ENABLE_TFLITE_BACKBONE
     395           27 :   registerFactory(nntrainer::createLayer<TfLiteLayer>, TfLiteLayer::type,
     396              :                   LayerType::LAYER_BACKBONE_TFLITE);
     397              : #endif
     398           27 :   registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
     399              :                   LayerType::LAYER_CENTROID_KNN);
     400              : 
     401              :   /** preprocess layers */
     402           27 :   registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
     403              :                   PreprocessFlipLayer::type, LayerType::LAYER_PREPROCESS_FLIP);
     404           27 :   registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
     405              :                   PreprocessTranslateLayer::type,
     406              :                   LayerType::LAYER_PREPROCESS_TRANSLATE);
     407           27 :   registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
     408              :                   PreprocessL2NormLayer::type,
     409              :                   LayerType::LAYER_PREPROCESS_L2NORM);
     410              : 
     411              :   /** register losses */
     412           27 :   registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
     413              :                   LayerType::LAYER_LOSS_MSE);
     414           27 :   registerFactory(nntrainer::createLayer<CrossEntropySigmoidLossLayer>,
     415              :                   CrossEntropySigmoidLossLayer::type,
     416              :                   LayerType::LAYER_LOSS_CROSS_ENTROPY_SIGMOID);
     417           27 :   registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
     418              :                   CrossEntropySoftmaxLossLayer::type,
     419              :                   LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
     420           27 :   registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
     421              :                   ConstantDerivativeLossLayer::type,
     422              :                   LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);
     423              : 
     424           27 :   registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
     425              :                   LayerType::LAYER_TIME_DIST);
     426              : 
     427           27 :   registerFactory(AppContext::unknownFactory<nntrainer::Layer>, "unknown",
     428              :                   LayerType::LAYER_UNKNOWN);
     429           27 : }
     430              : 
     431           27 : void AppContext::add_extension_object() {
     432           27 :   auto dir_list = getPluginPaths();
     433              : 
     434           29 :   for (auto &path : dir_list) {
     435              :     try {
     436            2 :       registerPluggableFromDirectory(path);
     437            0 :     } catch (std::exception &e) {
     438            0 :       ml_logw("tried to register extension from %s but failed, reason: %s",
     439              :               path.c_str(), e.what());
     440            0 :     }
     441              :   }
     442           27 : }
     443              : 
     444            0 : void AppContext::setWorkingDirectory(const std::string &base) {
     445            0 :   if (!std::filesystem::is_directory(base)) {
     446            0 :     std::stringstream ss;
     447              :     ss << func_tag << "path is not directory or has no permission: " << base;
     448            0 :     throw std::invalid_argument(ss.str().c_str());
     449            0 :   }
     450              : 
     451            0 :   char *ret = getRealpath(base.c_str(), nullptr);
     452              : 
     453            0 :   if (ret == nullptr) {
     454            0 :     std::stringstream ss;
      455            0 :     ss << func_tag << "failed to get canonical path for the path: " << base;
     456            0 :     throw std::invalid_argument(ss.str().c_str());
     457            0 :   }
     458              : 
     459            0 :   working_path_base = std::string(ret);
     460            0 :   ml_logd("working path base has set: %s", working_path_base.c_str());
     461            0 :   free(ret);
     462            0 : }
     463              : 
     464            0 : const std::string AppContext::getWorkingPath(const std::string &path) {
     465            0 :   return getFullPath(path, working_path_base);
     466              : }
     467              : 
     468              : /**
     469              :  * @brief base case of iterate_prop, iterate_prop iterates the given tuple
      470              :  * @brief base case of parse_properties, which iterates the given tuple
      471              :  *
      472              :  * @tparam I current tuple index (automatically deduced)
      473              :  * @tparam V container type of properties
      474              :  * @tparam Ts types from tuple
      475              :  * @param props property container to be added to
     476              :  * @return void
     477              :  */
     478              : template <size_t I = 0, typename V, typename... Ts>
     479              : typename std::enable_if<I == sizeof...(Ts), void>::type inline parse_properties(
     480              :   V &props, std::tuple<Ts...> &tup) {
     481              :   // end of recursion.
     482              : }
     483              : 
     484              : /**
     485              :  * @brief base case of iterate_prop, iterate_prop iterates the given tuple
     486              :  *
     487              :  * @tparam I size of tuple(automated)
     488              :  * @tparam V container type of properties
     489              :  * @tparam Ts types from tuple
     490              :  * @param prop property container to be added to
     491              :  * @param tup tuple to be iterated
     492              :  * @return void
     493              :  */
     494              : template <size_t I = 0, typename V, typename... Ts>
     495              :   typename std::enable_if <
     496            0 :   I<sizeof...(Ts), void>::type inline parse_properties(V &props,
     497              :                                                        std::tuple<Ts...> &tup) {
     498            0 :   std::string name = std::get<I>(tup);
     499            0 :   std::string prop = getConfig(name);
     500            0 :   if (!prop.empty())
     501            0 :     props.push_back(name + "=" + prop);
     502              : 
     503            0 :   parse_properties<I + 1>(props, tup);
     504            0 : }
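
The two overloads above unroll the tuple at compile time with std::enable_if: each instantiation handles element I and recurses with I + 1 until I == sizeof...(Ts), where the empty base case stops. A simplified standalone analogue of the pattern (collecting names instead of querying the config) might look like:

    // Compile-time tuple iteration via mutually exclusive enable_if overloads,
    // mirroring the parse_properties pattern above in a simplified form.
    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <tuple>
    #include <type_traits>
    #include <vector>

    template <std::size_t I = 0, typename... Ts>
    typename std::enable_if<I == sizeof...(Ts), void>::type
    collect(std::vector<std::string> &, std::tuple<Ts...> &) {} // base case

    template <std::size_t I = 0, typename... Ts>
    typename std::enable_if<(I < sizeof...(Ts)), void>::type
    collect(std::vector<std::string> &out, std::tuple<Ts...> &tup) {
      out.push_back(std::get<I>(tup)); // visit element I
      collect<I + 1>(out, tup);        // recurse on the next element
    }

    int main() {
      auto props = std::tuple<std::string, std::string>("fsu", "fsu_path");
      std::vector<std::string> names;
      collect(names, props);
      for (const auto &n : names)
        std::cout << n << '\n'; // prints "fsu" then "fsu_path"
      return 0;
    }
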
     505              : 
     506            0 : const std::vector<std::string> AppContext::getProperties(void) {
     507              :   std::vector<std::string> properties;
     508              : 
     509            0 :   auto props = std::tuple("fsu", "fsu_path");
     510            0 :   parse_properties(properties, props);
     511              : 
     512            0 :   return properties;
     513            0 : }
     514              : 
     515           18 : int AppContext::registerLayer(const std::string &library_path,
     516              :                               const std::string &base_path) {
     517           18 :   const std::string full_path = getFullPath(library_path, base_path);
     518              : 
     519              :   void *handle = DynamicLibraryLoader::loadLibrary(full_path.c_str(),
     520              :                                                    RTLD_LAZY | RTLD_LOCAL);
     521              :   const char *error_msg = DynamicLibraryLoader::getLastError();
     522              : 
     523           21 :   NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
     524              :     << func_tag << "open plugin failed, reason: " << error_msg;
     525              : 
     526              :   nntrainer::LayerPluggable *pluggable =
     527              :     reinterpret_cast<nntrainer::LayerPluggable *>(
     528              :       DynamicLibraryLoader::loadSymbol(handle, "ml_train_layer_pluggable"));
     529              : 
     530              :   error_msg = DynamicLibraryLoader::getLastError();
     531            0 :   auto close_dl = [handle] { DynamicLibraryLoader::freeLibrary(handle); };
     532           15 :   NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
     533              :                         std::invalid_argument, close_dl)
     534              :     << func_tag << "loading symbol failed, reason: " << error_msg;
     535              : 
     536           15 :   auto layer = pluggable->createfunc();
     537           18 :   NNTR_THROW_IF_CLEANUP(layer == nullptr, std::invalid_argument, close_dl)
     538              :     << func_tag << "created pluggable layer is null";
     539           15 :   auto type = layer->getType();
     540           15 :   NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
     541              :     << func_tag << "custom layer must specify type name, but it is empty";
     542           15 :   pluggable->destroyfunc(layer);
     543              : 
     544              :   FactoryType<nntrainer::Layer> factory_func =
     545              :     [pluggable](const PropsType &prop) {
     546              :       std::unique_ptr<nntrainer::Layer> layer =
     547            6 :         std::make_unique<internal::PluggedLayer>(pluggable);
     548              : 
     549              :       return layer;
     550              :     };
     551              : 
     552           45 :   return registerFactory<nntrainer::Layer>(std::move(factory_func), type);
     553              : }
     554              : 
     555            4 : int AppContext::registerOptimizer(const std::string &library_path,
     556              :                                   const std::string &base_path) {
     557            4 :   const std::string full_path = getFullPath(library_path, base_path);
     558              : 
     559              :   void *handle = DynamicLibraryLoader::loadLibrary(full_path.c_str(),
     560              :                                                    RTLD_LAZY | RTLD_LOCAL);
     561              :   const char *error_msg = DynamicLibraryLoader::getLastError();
     562              : 
     563            5 :   NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
     564              :     << func_tag << "open plugin failed, reason: " << error_msg;
     565              : 
     566              :   nntrainer::OptimizerPluggable *pluggable =
     567              :     reinterpret_cast<nntrainer::OptimizerPluggable *>(
     568              :       DynamicLibraryLoader::loadSymbol(handle, "ml_train_optimizer_pluggable"));
     569              : 
     570              :   error_msg = DynamicLibraryLoader::getLastError();
     571            0 :   auto close_dl = [handle] { DynamicLibraryLoader::freeLibrary(handle); };
     572            3 :   NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
     573              :                         std::invalid_argument, close_dl)
     574              :     << func_tag << "loading symbol failed, reason: " << error_msg;
     575              : 
     576            3 :   auto optimizer = pluggable->createfunc();
     577            4 :   NNTR_THROW_IF_CLEANUP(optimizer == nullptr, std::invalid_argument, close_dl)
     578              :     << func_tag << "created pluggable optimizer is null";
     579            3 :   auto type = optimizer->getType();
     580            3 :   NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
     581              :     << func_tag << "custom optimizer must specify type name, but it is empty";
     582            3 :   pluggable->destroyfunc(optimizer);
     583              : 
     584              :   FactoryType<nntrainer::Optimizer> factory_func =
     585              :     [pluggable](const PropsType &prop) {
     586              :       std::unique_ptr<nntrainer::Optimizer> optimizer =
     587            2 :         std::make_unique<internal::PluggedOptimizer>(pluggable);
     588              : 
     589              :       return optimizer;
     590              :     };
     591              : 
     592            9 :   return registerFactory<nntrainer::Optimizer>(std::move(factory_func), type);
     593              : }
     594              : 
     595              : std::vector<int>
     596           10 : AppContext::registerPluggableFromDirectory(const std::string &base_path) {
     597           20 :   const auto directory_exist = std::filesystem::is_directory(base_path);
     598              : 
     599           14 :   NNTR_THROW_IF(!directory_exist, std::invalid_argument)
     600              :     << func_tag << "failed to open the directory: " << base_path;
     601              : 
     602            6 :   std::vector<int> keys = {};
     603              : 
     604           98 :   for (const auto &entry : std::filesystem::directory_iterator(base_path)) {
     605              :     const auto &entry_name = entry.path().string();
     606              : 
     607           40 :     if (endswith(entry_name, solib_suffix)) {
     608           14 :       if (endswith(entry_name, layerlib_suffix)) {
     609              :         try {
     610           12 :           int key = registerLayer(entry_name, base_path);
     611           12 :           keys.emplace_back(key);
     612            0 :         } catch (std::exception &e) {
     613            0 :           throw;
     614            0 :         }
     615            2 :       } else if (endswith(entry_name, optimizerlib_suffix)) {
     616              :         try {
     617            2 :           int key = registerOptimizer(entry_name, base_path);
     618            2 :           keys.emplace_back(key);
     619            0 :         } catch (std::exception &e) {
     620            0 :           throw;
     621            0 :         }
     622              :       }
     623              :     }
     624              :   }
     625              : 
     626            6 :   return keys;
     627            0 : }
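
The scan above selects plugins purely by filename suffix: a shared object ending in layer.so (e.g. a hypothetical my_custom_layer.so under NNTRAINER_PATH) is routed to registerLayer(), one ending in optimizer.so to registerOptimizer(), and anything else is skipped. From the loader code, a layer plugin must export a symbol named ml_train_layer_pluggable (ml_train_optimizer_pluggable for optimizers) whose createfunc/destroyfunc build and release the custom object, and the created object must report a non-empty getType(); otherwise registration throws std::invalid_argument.
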
     628              : 
     629              : template <typename T>
     630         1929 : const int AppContext::registerFactory(const FactoryType<T> factory,
     631              :                                       const std::string &key,
     632              :                                       const int int_key) {
     633              :   static_assert(isSupported<T>::value,
     634              :                 "given type is not supported for current app context");
     635              : 
     636              :   auto &index = std::get<IndexType<T>>(factory_map);
     637              :   auto &str_map = std::get<StrIndexType<T>>(index);
     638              :   auto &int_map = std::get<IntIndexType>(index);
     639              : 
     640         3871 :   std::string assigned_key = key == "" ? factory({})->getType() : key;
     641              : 
     642              :   std::transform(assigned_key.begin(), assigned_key.end(), assigned_key.begin(),
     643        16803 :                  [](unsigned char c) { return std::tolower(c); });
     644              : 
     645              :   const std::lock_guard<std::mutex> lock(factory_mutex);
     646         1929 :   if (str_map.find(assigned_key) != str_map.end()) {
     647            2 :     std::stringstream ss;
     648              :     ss << "cannot register factory with already taken key: " << key;
     649            6 :     throw std::invalid_argument(ss.str().c_str());
     650            2 :   }
     651              : 
     652         3822 :   if (int_key != -1 && int_map.find(int_key) != int_map.end()) {
     653            1 :     std::stringstream ss;
     654            1 :     ss << "cannot register factory with already taken int key: " << int_key;
     655            3 :     throw std::invalid_argument(ss.str().c_str());
     656            1 :   }
     657              : 
     658         1926 :   int assigned_int_key = int_key == -1 ? str_map.size() + 1 : int_key;
     659              : 
     660         1926 :   str_map[assigned_key] = factory;
     661              :   int_map[assigned_int_key] = assigned_key;
     662              : 
     663         1929 :   ml_logd("factory has registered with key: %s, int_key: %d",
     664              :           assigned_key.c_str(), assigned_int_key);
     665              : 
     666         3852 :   return assigned_int_key;
     667              : }
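
Note a few properties of the registration path above: keys are lowercased before being stored; re-registering an already-taken string key or int key throws std::invalid_argument; and when int_key is -1, an integer key equal to str_map.size() + 1 is assigned automatically.
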
     668              : 
     669              : /**
     670              :  * @copydoc const int AppContext::registerFactory
     671              :  */
     672              : template const int AppContext::registerFactory<nntrainer::Optimizer>(
     673              :   const FactoryType<nntrainer::Optimizer> factory, const std::string &key,
     674              :   const int int_key);
     675              : 
     676              : /**
     677              :  * @copydoc const int AppContext::registerFactory
     678              :  */
     679              : template const int AppContext::registerFactory<nntrainer::Layer>(
     680              :   const FactoryType<nntrainer::Layer> factory, const std::string &key,
     681              :   const int int_key);
     682              : 
     683              : /**
     684              :  * @copydoc const int AppContext::registerFactory
     685              :  */
     686              : template const int
     687              : AppContext::registerFactory<ml::train::LearningRateScheduler>(
     688              :   const FactoryType<ml::train::LearningRateScheduler> factory,
     689              :   const std::string &key, const int int_key);
     690              : 
     691              : } // namespace nntrainer
        

Generated by: LCOV version 2.0-1