LCOV - code coverage report
Current view: top level - nntrainer - app_context.cpp (source / functions)
Test:      coverage_filtered.info
Test Date: 2025-12-14 20:38:17

             Coverage    Total    Hit
Lines:       71.5 %      214      153
Functions:   70.6 %       17       12

            Line data    Source code
       1              : // SPDX-License-Identifier: Apache-2.0
       2              : /**
       3              :  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
       4              :  *
       5              :  * @file   app_context.cpp
       6              :  * @date   10 November 2020
       7              :  * @brief  This file contains app context related functions and classes that
       8              :  * manage the global configuration of the current environment
       9              :  * @see    https://github.com/nnstreamer/nntrainer
      10              :  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
      11              :  * @bug    No known bugs except for NYI items
      12              :  *
      13              :  */
      14              : 
      15              : #include <filesystem>
      16              : #include <iostream>
      17              : #include <sstream>
      18              : #include <string>
      19              : #include <vector>
      20              : 
      21              : #include <iniparser.h>
      22              : 
      23              : #include <app_context.h>
      24              : #include <layer.h>
      25              : #include <nntrainer_error.h>
      26              : #include <nntrainer_log.h>
      27              : #include <optimizer.h>
      28              : #include <util_func.h>
      29              : 
      30              : #include <adam.h>
      31              : #include <adamw.h>
      32              : #include <sgd.h>
      33              : 
      34              : #include <activation_layer.h>
      35              : #include <add_layer.h>
      36              : #include <addition_layer.h>
      37              : #include <attention_layer.h>
      38              : #include <bn_layer.h>
      39              : #include <cast_layer.h>
      40              : #include <centroid_knn.h>
      41              : #include <channel_shuffle.h>
      42              : #include <concat_layer.h>
      43              : #include <constant_derivative_loss_layer.h>
      44              : #include <conv1d_layer.h>
      45              : #include <conv2d_layer.h>
      46              : #include <conv2d_transpose_layer.h>
      47              : #include <cosine_layer.h>
      48              : #include <cross_entropy_sigmoid_loss_layer.h>
      49              : #include <cross_entropy_softmax_loss_layer.h>
      50              : #include <divide_layer.h>
      51              : #include <dropout.h>
      52              : #include <dynamic_library_loader.h>
      53              : #include <embedding.h>
      54              : #include <fc_layer.h>
      55              : #include <flatten_layer.h>
      56              : #include <gather_layer.h>
      57              : #include <gru.h>
      58              : #include <grucell.h>
      59              : #include <identity_layer.h>
      60              : #include <input_layer.h>
      61              : #include <layer_normalization_layer.h>
      62              : #include <lr_scheduler_constant.h>
      63              : #include <lr_scheduler_cosine.h>
      64              : #include <lr_scheduler_exponential.h>
      65              : #include <lr_scheduler_linear.h>
      66              : #include <lr_scheduler_step.h>
      67              : #include <lstm.h>
      68              : #include <lstmcell.h>
      69              : #include <matmul_layer.h>
      70              : #include <mol_attention_layer.h>
      71              : #include <mse_loss_layer.h>
      72              : #include <multi_head_attention_layer.h>
      73              : #include <multiout_layer.h>
      74              : #include <multiply_layer.h>
      75              : #include <negative_layer.h>
      76              : #include <nntrainer_error.h>
      77              : #include <permute_layer.h>
      78              : #include <plugged_layer.h>
      79              : #include <plugged_optimizer.h>
      80              : #include <pooling2d_layer.h>
      81              : #include <positional_encoding_layer.h>
      82              : #include <pow_layer.h>
      83              : #include <preprocess_flip_layer.h>
      84              : #include <preprocess_l2norm_layer.h>
      85              : #include <preprocess_translate_layer.h>
      86              : #include <reduce_mean_layer.h>
      87              : #include <reduce_sum_layer.h>
      88              : #include <rnn.h>
      89              : #include <rnncell.h>
      90              : #include <sine_layer.h>
      91              : #include <slice_layer.h>
      92              : #include <split_layer.h>
      93              : #include <sqrt_layer.h>
      94              : #include <subtract_layer.h>
      95              : #include <tangent_layer.h>
      96              : #include <tensor_layer.h>
      97              : #include <time_dist.h>
      98              : #include <upsample2d_layer.h>
      99              : #include <weight_layer.h>
     100              : #include <zoneout_lstmcell.h>
     101              : 
     102              : #ifdef ENABLE_TFLITE_BACKBONE
     103              : #include <tflite_layer.h>
     104              : #endif
     105              : 
     106              : #ifdef ENABLE_NNSTREAMER_BACKBONE
     107              : #include <nnstreamer_layer.h>
     108              : #endif
     109              : 
      110              : /// TODO: add platform-specific #ifdef for these shared library suffixes
     111              : static std::string solib_suffix = ".so";
     112              : static std::string layerlib_suffix = "layer.so";
     113              : static std::string optimizerlib_suffix = "optimizer.so";
     114              : static const std::string func_tag = "[AppContext] ";
     115              : 
     116              : #ifdef NNTRAINER_CONF_PATH
     117              : constexpr const char *DEFAULT_CONF_PATH = NNTRAINER_CONF_PATH;
     118              : #else
     119              : constexpr const char *DEFAULT_CONF_PATH = "/etc/nntrainer.ini";
     120              : #endif
     121              : 
     122              : constexpr const char *getConfPath() { return DEFAULT_CONF_PATH; }
     123              : 
     124              : namespace nntrainer {
     125              : 
     126              : namespace {
     127              : 
      128              : /**
      129              :  * @brief Get the value for the given key from the conf ini
      130              :  * @param key key to look up in every section of the conf ini
      131              :  * @return std::string value for the key, or an empty string if not found
      132              :  */
     133           27 : std::string getConfig(const std::string &key) {
     134           27 :   std::string conf_path{getConfPath()};
     135              : 
     136           54 :   ml_logd("%s conf path: %s", func_tag.c_str(), conf_path.c_str());
     137           54 :   if (!isFileExist(conf_path)) {
     138           54 :     ml_logw(
     139              :       "%s conf path does not exist, skip getting plugin path from the conf",
     140              :       func_tag.c_str());
     141              :     return std::string();
     142              :   }
     143              : 
     144            0 :   dictionary *ini = iniparser_load(conf_path.c_str());
     145            0 :   NNTR_THROW_IF(ini == nullptr, std::runtime_error)
     146              :     << func_tag << "loading ini failed";
     147              : 
     148              :   std::string value;
     149            0 :   int nsec = iniparser_getnsec(ini);
     150            0 :   for (int i = 0; i < nsec; i++) {
     151            0 :     std::string query(iniparser_getsecname(ini, i));
     152              :     query += ":";
     153              :     query += key;
     154              : 
     155            0 :     value = std::string(iniparser_getstring(ini, query.c_str(), ""));
     156            0 :     if (!value.empty())
     157              :       break;
     158              :   }
     159              : 
     160            0 :   if (value.empty())
     161            0 :     ml_logd("key %s is not found in config(%s)", key.c_str(),
     162              :             conf_path.c_str());
     163              : 
     164            0 :   iniparser_freedict(ini);
     165              : 
     166            0 :   return value;
     167              : }
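// Example (illustrative only): a conf ini that getConfig() above could read.
// iniparser keys are looked up as "<section>:<key>", so getConfig("layer")
// scans every section for a "layer" entry and returns the first non-empty
// value. The section name and paths below are hypothetical.
//
//   [plugins]
//   layer = /opt/nntrainer/plugins/layers
//   optimizer = /opt/nntrainer/plugins/optimizers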
     168              : 
     169              : /**
     170              :  * @brief Get the plugin paths
     171              :  *
      172              :  * @return std::vector<std::string> list of paths to search for plugins
     173              :  */
     174           27 : std::vector<std::string> getPluginPaths() {
     175              :   std::vector<std::string> ret;
     176              : 
      177              :   /*** @note NNTRAINER_PATH is an environment variable naming a @a directory
      178              :    * in which to look for layers, while NNTRAINER_CONF_PATH is a build-time
      179              :    * hardcoded @a file path used to locate the configuration (*.ini) file
      180              :    */
      181              :   /*** @note for now, NNTRAINER_PATH is a SINGLE path rather than a series of
      182              :    * paths like the PATH environment variable. This could be improved, but it
      183              :    * is sufficient for now
      184              :    */
     185           27 :   const char *env_path = std::getenv("NNTRAINER_PATH");
     186           27 :   if (env_path != nullptr) {
     187            4 :     if (isFileExist(env_path)) {
     188            2 :       ml_logd("NNTRAINER_PATH is defined and valid. path: %s", env_path);
     189            2 :       ret.emplace_back(env_path);
     190              :     } else {
     191            0 :       ml_logw("NNTRAINER_PATH is given but it is not valid. path: %s",
     192              :               env_path);
     193              :     }
     194              :   }
     195              : 
     196           54 :   std::string plugin_path = getConfig("layer");
     197           27 :   if (!plugin_path.empty()) {
     198            0 :     ret.emplace_back(plugin_path);
     199            0 :     ml_logd("DEFAULT CONF PATH, path: %s", plugin_path.c_str());
     200              :   }
     201              : 
     202           27 :   return ret;
     203            0 : }
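// Example (illustrative): getPluginPaths() above honors the NNTRAINER_PATH
// environment variable when it points at an existing directory, so an extra
// plugin directory can be supplied at run time like this (path hypothetical):
//
//   NNTRAINER_PATH=/opt/nntrainer/plugins ./nntrainer_application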
     204              : 
     205              : /**
     206              :  * @brief Get the Full Path from given string
     207              :  * @details path is resolved in the following order
     208              :  * 1) if @a path is absolute, return path
     209              :  * ----------------------------------------
     210              :  * 2) if @a base == "" && @a path == "", return "."
     211              :  * 3) if @a base == "" && @a path != "", return @a path
     212              :  * 4) if @a base != "" && @a path == "", return @a base
     213              :  * 5) if @a base != "" && @a path != "", return @a base + "/" + path
     214              :  *
     215              :  * @param path path to calculate from base
     216              :  * @param base base path
     217              :  * @return const std::string
     218              :  */
     219           22 : const std::string getFullPath(const std::string &path,
     220              :                               const std::string &base) {
     221              :   /// if path is absolute, return path
     222           44 :   if (std::filesystem::path(path).is_absolute()) {
     223              :     return path;
     224              :   }
     225              : 
     226            8 :   if (base == std::string()) {
     227            8 :     return path == std::string() ? "." : path;
     228              :   }
     229              : 
     230            4 :   return path == std::string()
     231            4 :            ? base
     232            8 :            : std::filesystem::path(base).append(path).string();
     233              : }
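// Worked examples of the resolution rules documented above getFullPath()
// (all paths hypothetical):
//
//   getFullPath("/abs/model.bin", "/base")  -> "/abs/model.bin"      // rule 1
//   getFullPath("",               "")       -> "."                   // rule 2
//   getFullPath("model.bin",      "")       -> "model.bin"           // rule 3
//   getFullPath("",               "/base")  -> "/base"               // rule 4
//   getFullPath("model.bin",      "/base")  -> "/base/model.bin"     // rule 5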
     234              : 
     235              : } // namespace
     236              : 
     237              : std::mutex factory_mutex;
     238              : 
     239              : std::once_flag global_app_context_init_flag;
     240              : 
     241           27 : void AppContext::initialize() noexcept {
     242              :   try {
     243           27 :     setMemAllocator(std::make_shared<MemAllocator>());
     244              : 
     245           27 :     add_default_object();
     246           27 :     add_extension_object();
     247            0 :   } catch (std::exception &e) {
      248            0 :     ml_loge("registering layers failed, reason: %s", e.what());
      249            0 :   } catch (...) {
      250            0 :     ml_loge("registering layers failed due to an unknown reason");
     251            0 :   }
     252           27 : };
     253              : 
     254           27 : void AppContext::add_default_object() {
      255              :   /// @note all layers should be added to the app_context to guarantee that
      256              :   /// createLayer/createOptimizer can create the corresponding object
     257              :   using OptType = ml::train::OptimizerType;
     258           27 :   registerFactory(nntrainer::createOptimizer<SGD>, SGD::type, OptType::SGD);
     259           27 :   registerFactory(nntrainer::createOptimizer<Adam>, Adam::type, OptType::ADAM);
     260           27 :   registerFactory(nntrainer::createOptimizer<AdamW>, AdamW::type,
     261              :                   OptType::ADAMW);
     262           27 :   registerFactory(AppContext::unknownFactory<nntrainer::Optimizer>, "unknown",
     263              :                   OptType::UNKNOWN);
     264              : 
     265              :   using LRType = LearningRateSchedulerType;
     266           27 :   registerFactory(
     267              :     ml::train::createLearningRateScheduler<ConstantLearningRateScheduler>,
     268              :     ConstantLearningRateScheduler::type, LRType::CONSTANT);
     269           27 :   registerFactory(
     270              :     ml::train::createLearningRateScheduler<ExponentialLearningRateScheduler>,
     271              :     ExponentialLearningRateScheduler::type, LRType::EXPONENTIAL);
     272           27 :   registerFactory(
     273              :     ml::train::createLearningRateScheduler<StepLearningRateScheduler>,
     274              :     StepLearningRateScheduler::type, LRType::STEP);
     275           27 :   registerFactory(ml::train::createLearningRateScheduler<
     276              :                     CosineAnnealingLearningRateScheduler>,
     277              :                   CosineAnnealingLearningRateScheduler::type, LRType::COSINE);
     278           27 :   registerFactory(
     279              :     ml::train::createLearningRateScheduler<LinearLearningRateScheduler>,
     280              :     LinearLearningRateScheduler::type, LRType::LINEAR);
     281              : 
     282              :   using LayerType = ml::train::LayerType;
     283           27 :   registerFactory(nntrainer::createLayer<InputLayer>, InputLayer::type,
     284              :                   LayerType::LAYER_IN);
     285           27 :   registerFactory(nntrainer::createLayer<WeightLayer>, WeightLayer::type,
     286              :                   LayerType::LAYER_WEIGHT);
     287           27 :   registerFactory(nntrainer::createLayer<AddLayer>, AddLayer::type,
     288              :                   LayerType::LAYER_ADD);
     289           27 :   registerFactory(nntrainer::createLayer<SubtractLayer>, SubtractLayer::type,
     290              :                   LayerType::LAYER_SUBTRACT);
     291           27 :   registerFactory(nntrainer::createLayer<MultiplyLayer>, MultiplyLayer::type,
     292              :                   LayerType::LAYER_MULTIPLY);
     293           27 :   registerFactory(nntrainer::createLayer<DivideLayer>, DivideLayer::type,
     294              :                   LayerType::LAYER_DIVIDE);
     295           27 :   registerFactory(nntrainer::createLayer<PowLayer>, PowLayer::type,
     296              :                   LayerType::LAYER_POW);
     297           27 :   registerFactory(nntrainer::createLayer<SQRTLayer>, SQRTLayer::type,
     298              :                   LayerType::LAYER_SQRT);
     299           27 :   registerFactory(nntrainer::createLayer<SineLayer>, SineLayer::type,
     300              :                   LayerType::LAYER_SINE);
     301           27 :   registerFactory(nntrainer::createLayer<CosineLayer>, CosineLayer::type,
     302              :                   LayerType::LAYER_COSINE);
     303           27 :   registerFactory(nntrainer::createLayer<TangentLayer>, TangentLayer::type,
     304              :                   LayerType::LAYER_TANGENT);
     305           27 :   registerFactory(nntrainer::createLayer<MatMulLayer>, MatMulLayer::type,
     306              :                   LayerType::LAYER_MATMUL);
     307           27 :   registerFactory(nntrainer::createLayer<CastLayer>, CastLayer::type,
     308              :                   LayerType::LAYER_CAST);
     309           27 :   registerFactory(nntrainer::createLayer<GatherLayer>, GatherLayer::type,
     310              :                   LayerType::LAYER_GATHER);
     311           27 :   registerFactory(nntrainer::createLayer<SliceLayer>, SliceLayer::type,
     312              :                   LayerType::LAYER_SLICE);
     313           27 :   registerFactory(nntrainer::createLayer<NegativeLayer>, NegativeLayer::type,
     314              :                   LayerType::LAYER_NEG);
     315           27 :   registerFactory(nntrainer::createLayer<FullyConnectedLayer>,
     316              :                   FullyConnectedLayer::type, LayerType::LAYER_FC);
     317           27 :   registerFactory(nntrainer::createLayer<BatchNormalizationLayer>,
     318              :                   BatchNormalizationLayer::type, LayerType::LAYER_BN);
     319           27 :   registerFactory(nntrainer::createLayer<LayerNormalizationLayer>,
     320              :                   LayerNormalizationLayer::type,
     321              :                   LayerType::LAYER_LAYER_NORMALIZATION);
     322           27 :   registerFactory(nntrainer::createLayer<Conv2DLayer>, Conv2DLayer::type,
     323              :                   LayerType::LAYER_CONV2D);
     324           27 :   registerFactory(nntrainer::createLayer<Conv2DTransposeLayer>,
     325              :                   Conv2DTransposeLayer::type,
     326              :                   LayerType::LAYER_CONV2D_TRANSPOSE);
     327           27 :   registerFactory(nntrainer::createLayer<Conv1DLayer>, Conv1DLayer::type,
     328              :                   LayerType::LAYER_CONV1D);
     329           27 :   registerFactory(nntrainer::createLayer<Pooling2DLayer>, Pooling2DLayer::type,
     330              :                   LayerType::LAYER_POOLING2D);
     331           27 :   registerFactory(nntrainer::createLayer<FlattenLayer>, FlattenLayer::type,
     332              :                   LayerType::LAYER_FLATTEN);
     333           27 :   registerFactory(nntrainer::createLayer<ReshapeLayer>, ReshapeLayer::type,
     334              :                   LayerType::LAYER_RESHAPE);
     335           27 :   registerFactory(nntrainer::createLayer<ActivationLayer>,
     336              :                   ActivationLayer::type, LayerType::LAYER_ACTIVATION);
     337           27 :   registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
     338              :                   LayerType::LAYER_ADDITION);
     339           27 :   registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
     340              :                   LayerType::LAYER_CONCAT);
     341           27 :   registerFactory(nntrainer::createLayer<MultiOutLayer>, MultiOutLayer::type,
     342              :                   LayerType::LAYER_MULTIOUT);
     343           27 :   registerFactory(nntrainer::createLayer<EmbeddingLayer>, EmbeddingLayer::type,
     344              :                   LayerType::LAYER_EMBEDDING);
     345           27 :   registerFactory(nntrainer::createLayer<RNNLayer>, RNNLayer::type,
     346              :                   LayerType::LAYER_RNN);
     347           27 :   registerFactory(nntrainer::createLayer<RNNCellLayer>, RNNCellLayer::type,
     348              :                   LayerType::LAYER_RNNCELL);
     349           27 :   registerFactory(nntrainer::createLayer<LSTMLayer>, LSTMLayer::type,
     350              :                   LayerType::LAYER_LSTM);
     351           27 :   registerFactory(nntrainer::createLayer<LSTMCellLayer>, LSTMCellLayer::type,
     352              :                   LayerType::LAYER_LSTMCELL);
     353           27 :   registerFactory(nntrainer::createLayer<ZoneoutLSTMCellLayer>,
     354              :                   ZoneoutLSTMCellLayer::type,
     355              :                   LayerType::LAYER_ZONEOUT_LSTMCELL);
     356           27 :   registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
     357              :                   LayerType::LAYER_SPLIT);
     358           27 :   registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
     359              :                   LayerType::LAYER_GRU);
     360           27 :   registerFactory(nntrainer::createLayer<GRUCellLayer>, GRUCellLayer::type,
     361              :                   LayerType::LAYER_GRUCELL);
     362           27 :   registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
     363              :                   LayerType::LAYER_PERMUTE);
     364           27 :   registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
     365              :                   LayerType::LAYER_DROPOUT);
     366           27 :   registerFactory(nntrainer::createLayer<AttentionLayer>, AttentionLayer::type,
     367              :                   LayerType::LAYER_ATTENTION);
     368           27 :   registerFactory(nntrainer::createLayer<MoLAttentionLayer>,
     369              :                   MoLAttentionLayer::type, LayerType::LAYER_MOL_ATTENTION);
     370           27 :   registerFactory(nntrainer::createLayer<MultiHeadAttentionLayer>,
     371              :                   MultiHeadAttentionLayer::type,
     372              :                   LayerType::LAYER_MULTI_HEAD_ATTENTION);
     373           27 :   registerFactory(nntrainer::createLayer<ReduceMeanLayer>,
     374              :                   ReduceMeanLayer::type, LayerType::LAYER_REDUCE_MEAN);
     375           27 :   registerFactory(nntrainer::createLayer<ReduceSumLayer>, ReduceSumLayer::type,
     376              :                   LayerType::LAYER_REDUCE_SUM);
     377           27 :   registerFactory(nntrainer::createLayer<PositionalEncodingLayer>,
     378              :                   PositionalEncodingLayer::type,
     379              :                   LayerType::LAYER_POSITIONAL_ENCODING);
     380           27 :   registerFactory(nntrainer::createLayer<IdentityLayer>, IdentityLayer::type,
     381              :                   LayerType::LAYER_IDENTITY);
     382           27 :   registerFactory(nntrainer::createLayer<Upsample2dLayer>,
     383              :                   Upsample2dLayer::type, LayerType::LAYER_UPSAMPLE2D);
     384              : 
     385           27 :   registerFactory(nntrainer::createLayer<ChannelShuffle>, ChannelShuffle::type,
     386              :                   LayerType::LAYER_CHANNEL_SHUFFLE);
     387              : 
     388              : #ifdef ENABLE_NNSTREAMER_BACKBONE
     389              :   registerFactory(nntrainer::createLayer<NNStreamerLayer>,
     390              :                   NNStreamerLayer::type, LayerType::LAYER_BACKBONE_NNSTREAMER);
     391              : #endif
     392              : #ifdef ENABLE_TFLITE_BACKBONE
     393           27 :   registerFactory(nntrainer::createLayer<TfLiteLayer>, TfLiteLayer::type,
     394              :                   LayerType::LAYER_BACKBONE_TFLITE);
     395              : #endif
     396           27 :   registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
     397              :                   LayerType::LAYER_CENTROID_KNN);
     398              : 
     399              :   /** preprocess layers */
     400           27 :   registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
     401              :                   PreprocessFlipLayer::type, LayerType::LAYER_PREPROCESS_FLIP);
     402           27 :   registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
     403              :                   PreprocessTranslateLayer::type,
     404              :                   LayerType::LAYER_PREPROCESS_TRANSLATE);
     405           27 :   registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
     406              :                   PreprocessL2NormLayer::type,
     407              :                   LayerType::LAYER_PREPROCESS_L2NORM);
     408              : 
     409              :   /** register losses */
     410           27 :   registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
     411              :                   LayerType::LAYER_LOSS_MSE);
     412           27 :   registerFactory(nntrainer::createLayer<CrossEntropySigmoidLossLayer>,
     413              :                   CrossEntropySigmoidLossLayer::type,
     414              :                   LayerType::LAYER_LOSS_CROSS_ENTROPY_SIGMOID);
     415           27 :   registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
     416              :                   CrossEntropySoftmaxLossLayer::type,
     417              :                   LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
     418           27 :   registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
     419              :                   ConstantDerivativeLossLayer::type,
     420              :                   LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);
     421              : 
     422           27 :   registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
     423              :                   LayerType::LAYER_TIME_DIST);
     424              : 
     425           27 :   registerFactory(AppContext::unknownFactory<nntrainer::Layer>, "unknown",
     426              :                   LayerType::LAYER_UNKNOWN);
     427           27 : }
     428              : 
     429           27 : void AppContext::add_extension_object() {
     430           27 :   auto dir_list = getPluginPaths();
     431              : 
     432           29 :   for (auto &path : dir_list) {
     433              :     try {
     434            2 :       registerPluggableFromDirectory(path);
     435            0 :     } catch (std::exception &e) {
     436            0 :       ml_logw("tried to register extension from %s but failed, reason: %s",
     437              :               path.c_str(), e.what());
     438            0 :     }
     439              :   }
     440           27 : }
     441              : 
     442            0 : void AppContext::setWorkingDirectory(const std::string &base) {
     443            0 :   if (!std::filesystem::is_directory(base)) {
     444            0 :     std::stringstream ss;
      445              :     ss << func_tag << "path is not a directory or has no permission: " << base;
     446            0 :     throw std::invalid_argument(ss.str().c_str());
     447            0 :   }
     448              : 
     449            0 :   char *ret = getRealpath(base.c_str(), nullptr);
     450              : 
     451            0 :   if (ret == nullptr) {
     452            0 :     std::stringstream ss;
      453            0 :     ss << func_tag << "failed to get canonical path for the path: " << base;
     454            0 :     throw std::invalid_argument(ss.str().c_str());
     455            0 :   }
     456              : 
     457            0 :   working_path_base = std::string(ret);
      458            0 :   ml_logd("working path base has been set: %s", working_path_base.c_str());
     459            0 :   free(ret);
     460            0 : }
     461              : 
     462            0 : const std::string AppContext::getWorkingPath(const std::string &path) {
     463            0 :   return getFullPath(path, working_path_base);
     464              : }
     465              : 
     466              : /**
      467              :  * @brief base case of parse_properties, which iterates the given tuple
     468              :  *
     469              :  * @tparam I size of tuple(automated)
     470              :  * @tparam V container type of properties
     471              :  * @tparam Ts types from tuple
      472              :  * @param props property container to be added to
     473              :  * @param tup tuple to be iterated
     474              :  * @return void
     475              :  */
     476              : template <size_t I = 0, typename V, typename... Ts>
     477              : typename std::enable_if<I == sizeof...(Ts), void>::type inline parse_properties(
     478              :   V &props, std::tuple<Ts...> &tup) {
     479              :   // end of recursion.
     480              : }
     481              : 
     482              : /**
      483              :  * @brief recursive case of parse_properties, which iterates the given tuple
     484              :  *
     485              :  * @tparam I size of tuple(automated)
     486              :  * @tparam V container type of properties
     487              :  * @tparam Ts types from tuple
      488              :  * @param props property container to be added to
     489              :  * @param tup tuple to be iterated
     490              :  * @return void
     491              :  */
     492              : template <size_t I = 0, typename V, typename... Ts>
     493              :   typename std::enable_if <
     494            0 :   I<sizeof...(Ts), void>::type inline parse_properties(V &props,
     495              :                                                        std::tuple<Ts...> &tup) {
     496            0 :   std::string name = std::get<I>(tup);
     497            0 :   std::string prop = getConfig(name);
     498            0 :   if (!prop.empty())
     499            0 :     props.push_back(name + "=" + prop);
     500              : 
     501            0 :   parse_properties<I + 1>(props, tup);
     502            0 : }
     503              : 
     504            0 : const std::vector<std::string> AppContext::getProperties(void) {
     505              :   std::vector<std::string> properties;
     506              : 
     507            0 :   auto props = std::tuple("fsu", "fsu_path");
     508            0 :   parse_properties(properties, props);
     509              : 
     510            0 :   return properties;
     511            0 : }
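// Example (illustrative): with the tuple ("fsu", "fsu_path") above,
// parse_properties() queries getConfig() for each key and keeps only the keys
// that are present, so getProperties() could return
//   {"fsu=true", "fsu_path=/tmp/fsu"}
// when the conf ini defines both keys (the values here are hypothetical).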
     512              : 
     513           18 : int AppContext::registerLayer(const std::string &library_path,
     514              :                               const std::string &base_path) {
     515           18 :   const std::string full_path = getFullPath(library_path, base_path);
     516              : 
     517              :   void *handle = DynamicLibraryLoader::loadLibrary(full_path.c_str(),
     518              :                                                    RTLD_LAZY | RTLD_LOCAL);
     519              :   const char *error_msg = DynamicLibraryLoader::getLastError();
     520              : 
     521           21 :   NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
     522              :     << func_tag << "open plugin failed, reason: " << error_msg;
     523              : 
     524              :   nntrainer::LayerPluggable *pluggable =
     525              :     reinterpret_cast<nntrainer::LayerPluggable *>(
     526              :       DynamicLibraryLoader::loadSymbol(handle, "ml_train_layer_pluggable"));
     527              : 
     528              :   error_msg = DynamicLibraryLoader::getLastError();
     529            0 :   auto close_dl = [handle] { DynamicLibraryLoader::freeLibrary(handle); };
     530           15 :   NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
     531              :                         std::invalid_argument, close_dl)
     532              :     << func_tag << "loading symbol failed, reason: " << error_msg;
     533              : 
     534           15 :   auto layer = pluggable->createfunc();
     535           18 :   NNTR_THROW_IF_CLEANUP(layer == nullptr, std::invalid_argument, close_dl)
     536              :     << func_tag << "created pluggable layer is null";
     537           15 :   auto type = layer->getType();
     538           15 :   NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
     539              :     << func_tag << "custom layer must specify type name, but it is empty";
     540           15 :   pluggable->destroyfunc(layer);
     541              : 
     542              :   FactoryType<nntrainer::Layer> factory_func =
     543              :     [pluggable](const PropsType &prop) {
     544              :       std::unique_ptr<nntrainer::Layer> layer =
     545            6 :         std::make_unique<internal::PluggedLayer>(pluggable);
     546              : 
     547              :       return layer;
     548              :     };
     549              : 
     550           45 :   return registerFactory<nntrainer::Layer>(std::move(factory_func), type);
     551              : }
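// Sketch of the contract registerLayer() above checks for (descriptive only;
// the exact LayerPluggable definition lives in the nntrainer pluggable
// headers): a shared object whose name ends in "layer.so" must export a
// symbol called "ml_train_layer_pluggable" of type nntrainer::LayerPluggable,
// whose createfunc() yields an object reporting a non-empty getType() and
// whose destroyfunc() releases it; the reported type becomes the registration
// key for the plugged layer factory.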
     552              : 
     553            4 : int AppContext::registerOptimizer(const std::string &library_path,
     554              :                                   const std::string &base_path) {
     555            4 :   const std::string full_path = getFullPath(library_path, base_path);
     556              : 
     557              :   void *handle = DynamicLibraryLoader::loadLibrary(full_path.c_str(),
     558              :                                                    RTLD_LAZY | RTLD_LOCAL);
     559              :   const char *error_msg = DynamicLibraryLoader::getLastError();
     560              : 
     561            5 :   NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
     562              :     << func_tag << "open plugin failed, reason: " << error_msg;
     563              : 
     564              :   nntrainer::OptimizerPluggable *pluggable =
     565              :     reinterpret_cast<nntrainer::OptimizerPluggable *>(
     566              :       DynamicLibraryLoader::loadSymbol(handle, "ml_train_optimizer_pluggable"));
     567              : 
     568              :   error_msg = DynamicLibraryLoader::getLastError();
     569            0 :   auto close_dl = [handle] { DynamicLibraryLoader::freeLibrary(handle); };
     570            3 :   NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
     571              :                         std::invalid_argument, close_dl)
     572              :     << func_tag << "loading symbol failed, reason: " << error_msg;
     573              : 
     574            3 :   auto optimizer = pluggable->createfunc();
     575            4 :   NNTR_THROW_IF_CLEANUP(optimizer == nullptr, std::invalid_argument, close_dl)
     576              :     << func_tag << "created pluggable optimizer is null";
     577            3 :   auto type = optimizer->getType();
     578            3 :   NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
     579              :     << func_tag << "custom optimizer must specify type name, but it is empty";
     580            3 :   pluggable->destroyfunc(optimizer);
     581              : 
     582              :   FactoryType<nntrainer::Optimizer> factory_func =
     583              :     [pluggable](const PropsType &prop) {
     584              :       std::unique_ptr<nntrainer::Optimizer> optimizer =
     585            2 :         std::make_unique<internal::PluggedOptimizer>(pluggable);
     586              : 
     587              :       return optimizer;
     588              :     };
     589              : 
     590            9 :   return registerFactory<nntrainer::Optimizer>(std::move(factory_func), type);
     591              : }
     592              : 
     593              : std::vector<int>
     594           10 : AppContext::registerPluggableFromDirectory(const std::string &base_path) {
     595           20 :   const auto directory_exist = std::filesystem::is_directory(base_path);
     596              : 
     597           14 :   NNTR_THROW_IF(!directory_exist, std::invalid_argument)
     598              :     << func_tag << "failed to open the directory: " << base_path;
     599              : 
     600            6 :   std::vector<int> keys = {};
     601              : 
     602           98 :   for (const auto &entry : std::filesystem::directory_iterator(base_path)) {
     603              :     const auto &entry_name = entry.path().string();
     604              : 
     605           40 :     if (endswith(entry_name, solib_suffix)) {
     606           14 :       if (endswith(entry_name, layerlib_suffix)) {
     607              :         try {
     608           12 :           int key = registerLayer(entry_name, base_path);
     609           12 :           keys.emplace_back(key);
     610            0 :         } catch (std::exception &e) {
     611            0 :           throw;
     612            0 :         }
     613            2 :       } else if (endswith(entry_name, optimizerlib_suffix)) {
     614              :         try {
     615            2 :           int key = registerOptimizer(entry_name, base_path);
     616            2 :           keys.emplace_back(key);
     617            0 :         } catch (std::exception &e) {
     618            0 :           throw;
     619            0 :         }
     620              :       }
     621              :     }
     622              :   }
     623              : 
     624            6 :   return keys;
     625            0 : }
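// Note on the naming convention used above: only entries ending in ".so" are
// inspected; names ending in "layer.so" are registered via registerLayer()
// and names ending in "optimizer.so" via registerOptimizer() (see the
// *_suffix strings at the top of this file). Any other shared object in the
// directory is skipped.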
     626              : 
     627              : template <typename T>
     628         1902 : const int AppContext::registerFactory(const FactoryType<T> factory,
     629              :                                       const std::string &key,
     630              :                                       const int int_key) {
     631              :   static_assert(isSupported<T>::value,
     632              :                 "given type is not supported for current app context");
     633              : 
     634              :   auto &index = std::get<IndexType<T>>(factory_map);
     635              :   auto &str_map = std::get<StrIndexType<T>>(index);
     636              :   auto &int_map = std::get<IntIndexType>(index);
     637              : 
     638         3817 :   std::string assigned_key = key == "" ? factory({})->getType() : key;
     639              : 
     640              :   std::transform(assigned_key.begin(), assigned_key.end(), assigned_key.begin(),
     641        16695 :                  [](unsigned char c) { return std::tolower(c); });
     642              : 
     643              :   const std::lock_guard<std::mutex> lock(factory_mutex);
     644         1902 :   if (str_map.find(assigned_key) != str_map.end()) {
     645            2 :     std::stringstream ss;
     646              :     ss << "cannot register factory with already taken key: " << key;
     647            6 :     throw std::invalid_argument(ss.str().c_str());
     648            2 :   }
     649              : 
     650         3768 :   if (int_key != -1 && int_map.find(int_key) != int_map.end()) {
     651            1 :     std::stringstream ss;
     652            1 :     ss << "cannot register factory with already taken int key: " << int_key;
     653            3 :     throw std::invalid_argument(ss.str().c_str());
     654            1 :   }
     655              : 
     656         1899 :   int assigned_int_key = int_key == -1 ? str_map.size() + 1 : int_key;
     657              : 
     658         1899 :   str_map[assigned_key] = factory;
     659              :   int_map[assigned_int_key] = assigned_key;
     660              : 
     661         1902 :   ml_logd("factory has registered with key: %s, int_key: %d",
     662              :           assigned_key.c_str(), assigned_int_key);
     663              : 
     664         3798 :   return assigned_int_key;
     665              : }
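// Example (illustrative) of the key assignment above: passing key == ""
// derives the key from factory({})->getType() and lower-cases it, while
// int_key == -1 auto-assigns str_map.size() + 1; registering a string or int
// key that is already taken throws std::invalid_argument.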
     666              : 
     667              : /**
     668              :  * @copydoc const int AppContext::registerFactory
     669              :  */
     670              : template const int AppContext::registerFactory<nntrainer::Optimizer>(
     671              :   const FactoryType<nntrainer::Optimizer> factory, const std::string &key,
     672              :   const int int_key);
     673              : 
     674              : /**
     675              :  * @copydoc const int AppContext::registerFactory
     676              :  */
     677              : template const int AppContext::registerFactory<nntrainer::Layer>(
     678              :   const FactoryType<nntrainer::Layer> factory, const std::string &key,
     679              :   const int int_key);
     680              : 
     681              : /**
     682              :  * @copydoc const int AppContext::registerFactory
     683              :  */
     684              : template const int
     685              : AppContext::registerFactory<ml::train::LearningRateScheduler>(
     686              :   const FactoryType<ml::train::LearningRateScheduler> factory,
     687              :   const std::string &key, const int int_key);
     688              : 
     689              : } // namespace nntrainer
        

Generated by: LCOV version 2.0-1