ONE - On-device Neural Engine
onert::api::Session Struct Reference

#include <Session.h>

Public Member Functions

 ~Session ()
 
NNFW_STATUS load_model_from_path (const char *path)
 
NNFW_STATUS prepare ()
 
NNFW_STATUS run ()
 
NNFW_STATUS run_async ()
 
NNFW_STATUS await ()
 
NNFW_STATUS set_input (uint32_t index, NNFW_TYPE type, const void *buffer, size_t length)
 
NNFW_STATUS set_output (uint32_t index, NNFW_TYPE type, void *buffer, size_t length)
 
NNFW_STATUS input_size (uint32_t *number)
 
NNFW_STATUS output_size (uint32_t *number)
 
NNFW_STATUS set_input_layout (uint32_t index, NNFW_LAYOUT layout)
 
NNFW_STATUS set_output_layout (uint32_t index, NNFW_LAYOUT layout)
 
NNFW_STATUS set_input_type (uint32_t index, NNFW_TYPE type)
 
NNFW_STATUS set_output_type (uint32_t index, NNFW_TYPE type)
 
NNFW_STATUS set_input_tensorinfo (uint32_t index, const nnfw_tensorinfo *ti)
 
NNFW_STATUS input_tensorinfo (uint32_t index, nnfw_tensorinfo *ti)
 
NNFW_STATUS output_tensorinfo (uint32_t index, nnfw_tensorinfo *ti)
 
NNFW_STATUS set_available_backends (const char *backends)
 
NNFW_STATUS set_workspace (const char *dir)
 
NNFW_STATUS configure_signature (const char *signature)
 
NNFW_STATUS set_signature_run (const char *signature)
 
NNFW_STATUS set_config (const char *key, const char *value)
 
NNFW_STATUS get_config (const char *key, char *value, size_t value_size)
 
NNFW_STATUS load_circle_from_buffer (uint8_t *buffer, size_t size)
 
NNFW_STATUS get_output (uint32_t index, nnfw_tensorinfo *out_info, const void **out_buffer)
 
NNFW_STATUS register_custom_operation (const std::string &id, nnfw_custom_eval eval_func)
 
NNFW_STATUS input_tensorindex (const char *tensorname, uint32_t *index)
 
NNFW_STATUS output_tensorindex (const char *tensorname, uint32_t *index)
 
NNFW_STATUS run_with_auto_compilation (const char *target, NNFW_CODEGEN_PREF pref)
 
NNFW_STATUS set_odc_param_minmax_records_count (int minmax_records_count)
 
NNFW_STATUS delete_odc_minmax_file ()
 
NNFW_STATUS set_backends_per_operation (const char *backend_settings)
 Set backends with a string-encoded mapping from operation index to backend type (cpu, acl_cl).
 
NNFW_STATUS train_get_traininfo (nnfw_train_info *info)
 
NNFW_STATUS train_set_traininfo (const nnfw_train_info *info)
 
NNFW_STATUS train_prepare ()
 
NNFW_STATUS train_input_tensorinfo (uint32_t index, nnfw_tensorinfo *ti)
 
NNFW_STATUS train_expected_tensorinfo (uint32_t index, nnfw_tensorinfo *ti)
 
NNFW_STATUS train_set_input (uint32_t index, const void *input, const nnfw_tensorinfo *input_tensorinfo)
 
NNFW_STATUS train_set_expected (uint32_t index, const void *expected, const nnfw_tensorinfo *expected_tensorinfo)
 
NNFW_STATUS train_set_output (uint32_t index, NNFW_TYPE type, void *buffer, size_t length)
 
NNFW_STATUS train_run (bool update_weights)
 
NNFW_STATUS train_get_loss (uint32_t index, float *loss)
 
NNFW_STATUS train_export_circle (const char *path)
 
NNFW_STATUS train_export_circleplus (const char *path)
 
NNFW_STATUS train_import_checkpoint (const char *path)
 
NNFW_STATUS train_export_checkpoint (const char *path)
 
NNFW_STATUS set_quantization_type (NNFW_QUANTIZE_TYPE qtype)
 
NNFW_STATUS set_quantized_model_path (const char *path)
 
NNFW_STATUS quantize ()
 
NNFW_STATUS set_codegen_model_path (const char *path)
 
NNFW_STATUS codegen (const char *target, NNFW_CODEGEN_PREF pref)
 
NNFW_STATUS set_prepare_config (const NNFW_PREPARE_CONFIG key, const char *value)
 
NNFW_STATUS reset_prepare_config ()
 
NNFW_STATUS set_execute_config (const NNFW_RUN_CONFIG key, const char *value)
 
NNFW_STATUS reset_execute_config ()
 

Static Public Member Functions

static NNFW_STATUS create (Session **session)
 Factory method. It creates and initializes a Session.
 
static NNFW_STATUS deprecated (const char *msg)
 

Detailed Description

Definition at line 42 of file Session.h.

Constructor & Destructor Documentation

◆ ~Session()

onert::api::Session::~Session ( )
default

Member Function Documentation

◆ await()

NNFW_STATUS onert::api::Session::await ( )

Definition at line 547 of file Session.cc.

548{
549 if (!isStateRunning())
550 {
551 std::cerr << "Error during Session::run_await : "
552 << "run_await should be run after run_async" << std::endl;
553 return NNFW_STATUS_ERROR;
554 }
555
556 _execution->waitFinish();
557
558 _state = State::FINISHED_RUN;
559 return NNFW_STATUS_NO_ERROR;
560}

References NNFW_STATUS_ERROR, and NNFW_STATUS_NO_ERROR.

◆ codegen()

NNFW_STATUS onert::api::Session::codegen ( const char *  target,
NNFW_CODEGEN_PREF  pref 
)

Definition at line 2122 of file Session.cc.

2123{
2124 try
2125 {
2126 if (isStateInitialized() || isStateRunning())
2127 {
2128 std::cerr << "Error during Session::codegen : Invalid state" << std::endl;
2129 return NNFW_STATUS_INVALID_STATE;
2130 }
2131
2132 std::string target_str{target};
2133 if (target_str.empty() || target_str.size() < 5 ||
2134 target_str.substr(target_str.size() - 4) != "-gen")
2135 {
2136 std::cerr << "Error during Session::codegen : Invalid target" << std::endl;
2137 return NNFW_STATUS_ERROR;
2138 }
2139
2140 onert::odc::CodegenPreference codegen_pref;
2141 switch (pref)
2142 {
2143 case NNFW_CODEGEN_PREF_DEFAULT:
2144 codegen_pref = onert::odc::CodegenPreference::CODEGEN_PREF_DEFAULT;
2145 break;
2146 case NNFW_CODEGEN_PREF_PERFORMANCE_FIRST:
2147 codegen_pref = onert::odc::CodegenPreference::CODEGEN_PREF_PERFORMANCE_FIRST;
2148 break;
2149 case NNFW_CODEGEN_PREF_MEMORY_FIRST:
2150 codegen_pref = onert::odc::CodegenPreference::CODEGEN_PREF_MEMORY_FIRST;
2151 break;
2152 case NNFW_CODEGEN_PREF_COMPILE_TIME_FIRST:
2153 codegen_pref = onert::odc::CodegenPreference::CODEGEN_PREF_COMPILE_TIME_FIRST;
2154 break;
2155 default:
2156 std::cerr << "Error during Session::codegen : Invalid preference" << std::endl;
2157 return NNFW_STATUS_ERROR;
2158 }
2159
2160 assert(_codegen_manager != nullptr);
2161 auto export_model_path = std::filesystem::path(_codegen_manager->exportModelPath());
2162 const auto model_type = target_str.substr(0, target_str.size() - 4);
2163 // If the export_model_path is not set, it generates a compiled model path
2164 // automatically.
2165 if (export_model_path.empty())
2166 {
2167 // The compiled model path is the same directory of the original model/package with
2168 // target backend extension.
2169 export_model_path = _model_path.replace_extension(model_type);
2170 _codegen_manager->exportModelPath(export_model_path.string());
2171 }
2172
2173 _codegen_manager->codegen(_model_path, target, codegen_pref);
2174
2175 // Replace model
2176 // TODO Support buffer replace, not file reload
2177 return loadModelFile(export_model_path, model_type);
2178 }
2179 catch (const std::exception &e)
2180 {
2181 std::cerr << "Error during Session::compile : " << e.what() << std::endl;
2182 return NNFW_STATUS_ERROR;
2183 }
2184}

References onert::odc::CODEGEN_PREF_COMPILE_TIME_FIRST, onert::odc::CODEGEN_PREF_DEFAULT, onert::odc::CODEGEN_PREF_MEMORY_FIRST, onert::odc::CODEGEN_PREF_PERFORMANCE_FIRST, NNFW_CODEGEN_PREF_COMPILE_TIME_FIRST, NNFW_CODEGEN_PREF_DEFAULT, NNFW_CODEGEN_PREF_MEMORY_FIRST, NNFW_CODEGEN_PREF_PERFORMANCE_FIRST, NNFW_STATUS_ERROR, and NNFW_STATUS_INVALID_STATE.
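
A minimal usage sketch for the code-generation path, assuming a session that already has a model loaded (state is neither INITIALIZED nor RUNNING). The target name "dummy-gen" and the output path are placeholders; any valid target must end in "-gen":

#include <Session.h>  // onert::api::Session and the NNFW_* enums it uses

// Sketch only: compile the loaded model for a hypothetical "dummy-gen" backend.
NNFW_STATUS compile_for_target(onert::api::Session *session)
{
  // Optional: pick where the generated model is written; otherwise codegen()
  // derives the path from the original model path as shown above.
  if (session->set_codegen_model_path("/tmp/model.dummy") != NNFW_STATUS_NO_ERROR)
    return NNFW_STATUS_ERROR;
  // Generates the backend model and reloads it into this session.
  return session->codegen("dummy-gen", NNFW_CODEGEN_PREF_DEFAULT);
}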

◆ configure_signature()

NNFW_STATUS onert::api::Session::configure_signature ( const char *  signature)

Definition at line 1074 of file Session.cc.

1075{
1076 if (!signature)
1077 return NNFW_STATUS_UNEXPECTED_NULL;
1078
1079 if (!isStateModelLoaded())
1080 {
1081 std::cerr << "Error during Session::configure_signature : invalid state" << std::endl;
1082 return NNFW_STATUS_INVALID_STATE;
1083 }
1084
1085 for (const auto &[subg_idx, sig_str] : _signature_map)
1086 {
1087 if (sig_str == std::string(signature))
1088 {
1089 _selected_signature = subg_idx;
1090
1091 return NNFW_STATUS_NO_ERROR;
1092 }
1093 }
1094
1095 std::cerr << "Error during Session::configure_signature : cannot find signature \"" << signature
1096 << "\"" << std::endl;
1097 return NNFW_STATUS_ERROR;
1098}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.

◆ create()

NNFW_STATUS onert::api::Session::create ( Session **  session)
static

Factory method. It creates and initializes a Session.

Note
Use factory instead of constructor to get status

Definition at line 267 of file Session.cc.

268{
269 if (session == nullptr)
270 return NNFW_STATUS_UNEXPECTED_NULL;
271 try
272 {
273 auto new_session = std::unique_ptr<Session>(new Session());
274 new_session->_kernel_registry = std::make_shared<onert::api::CustomKernelRegistry>();
275 *session = new_session.release();
276 }
277 catch (const std::bad_alloc &e)
278 {
279 std::cerr << "Error during session creation" << std::endl;
280 *session = nullptr; // Set nullptr on error to keep the old behavior
281 return NNFW_STATUS_OUT_OF_MEMORY;
282 }
283 catch (const std::exception &e)
284 {
285 std::cerr << "Error during session initialization : " << e.what() << std::endl;
286 *session = nullptr; // Set nullptr on error to keep the old behavior
287 return NNFW_STATUS_ERROR;
288 }
289 return NNFW_STATUS_NO_ERROR;
290}

References NNFW_STATUS_ERROR, NNFW_STATUS_NO_ERROR, NNFW_STATUS_OUT_OF_MEMORY, and NNFW_STATUS_UNEXPECTED_NULL.

Referenced by nnfw_create_session().
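
A minimal end-to-end sketch built only from members documented on this page. The model path, shapes, and buffer sizes are placeholders; a real caller would query input_tensorinfo()/output_tensorinfo() to size the buffers. Ownership of the created session is assumed to rest with the caller, mirroring nnfw_create_session()/nnfw_close_session():

#include <Session.h>  // onert::api::Session (brings in the NNFW_* C types)
#include <cstdio>
#include <vector>

int run_model_once()
{
  onert::api::Session *session = nullptr;
  if (onert::api::Session::create(&session) != NNFW_STATUS_NO_ERROR)
    return 1;

  if (session->load_model_from_path("model.circle") != NNFW_STATUS_NO_ERROR || // path assumed
      session->prepare() != NNFW_STATUS_NO_ERROR)
  {
    delete session;
    return 1;
  }

  std::vector<float> input(1 * 224 * 224 * 3, 0.0f); // input shape assumed
  std::vector<float> output(1000, 0.0f);             // output shape assumed
  session->set_input(0, NNFW_TYPE_TENSOR_FLOAT32, input.data(), input.size() * sizeof(float));
  session->set_output(0, NNFW_TYPE_TENSOR_FLOAT32, output.data(), output.size() * sizeof(float));

  const NNFW_STATUS status = session->run();
  if (status == NNFW_STATUS_NO_ERROR)
    std::printf("output[0] = %f\n", output[0]);

  delete session;
  return status == NNFW_STATUS_NO_ERROR ? 0 : 1;
}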

◆ delete_odc_minmax_file()

NNFW_STATUS onert::api::Session::delete_odc_minmax_file ( )

Definition at line 2281 of file Session.cc.

2282{
2283 if (isStateRunning())
2284 {
2285 std::cerr << "invalid state" << std::endl;
2286 return NNFW_STATUS_INVALID_STATE;
2287 }
2288
2289 if (_quant_manager->deleteMinMaxFile())
2290 return NNFW_STATUS_NO_ERROR;
2291 else
2292 return NNFW_STATUS_ERROR;
2293}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ deprecated()

NNFW_STATUS onert::api::Session::deprecated ( const char *  msg)
static

Definition at line 1125 of file Session.cc.

1126{
1127 std::cerr << msg << std::endl;
1128 return NNFW_STATUS_DEPRECATED_API;
1129}

References NNFW_STATUS_DEPRECATED_API.

Referenced by nnfw_apply_tensorinfo(), nnfw_pop_pipeline_output(), nnfw_prepare_pipeline(), nnfw_push_pipeline_input(), and nnfw_set_op_backend().

◆ get_config()

NNFW_STATUS onert::api::Session::get_config ( const char *  key,
char *  value,
size_t  value_size 
)

Definition at line 1235 of file Session.cc.

1236{
1237 if (!isStateModelLoaded())
1238 return NNFW_STATUS_INVALID_STATE;
1239
1240 if (!key || !value)
1241 return NNFW_STATUS_UNEXPECTED_NULL;
1242
1243 auto check_boundary = [](size_t dest_size, std::string &src) {
1244 if (dest_size < src.length() + 1 /* for '\0' */)
1245 {
1246 std::cerr << "buffer is small to copy config value." << std::endl;
1247 return false;
1248 }
1249 return true;
1250 };
1251
1252 const std::string skey = key;
1253
1254 if (skey == onert::util::config::BACKENDS)
1255 {
1256 if (_coptions->backend_list.size() == 0)
1257 return NNFW_STATUS_NO_ERROR; // no setting backend is not an error of get_config_str()
1258
1259 auto str =
1260 nnfw::misc::join(_coptions->backend_list.begin(), _coptions->backend_list.end(), ";");
1261
1262 if (!check_boundary(value_size, str))
1263 return NNFW_STATUS_ERROR;
1264
1265 strncpy(value, str.c_str(), value_size);
1266 }
1267 else if (skey == onert::util::config::EXECUTOR)
1268 {
1269 if (!check_boundary(value_size, _coptions->executor))
1270 return NNFW_STATUS_ERROR;
1271
1272 strncpy(value, _coptions->executor.c_str(), _coptions->executor.length());
1273 }
1274 else
1275 {
1276 return NNFW_STATUS_ERROR;
1277 }
1278
1279 return NNFW_STATUS_NO_ERROR;
1280}

References nnfw::misc::join(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
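
A short sketch of reading a value back, where session is assumed to be an onert::api::Session* with a model loaded. The literal key "BACKENDS" is assumed to match onert::util::config::BACKENDS, and the 64-byte buffer is arbitrary; the returned value is the ';'-joined backend list shown above:

// Sketch: query the configured backend list from a loaded session.
char backends[64] = {0};
if (session->get_config("BACKENDS", backends, sizeof(backends)) == NNFW_STATUS_NO_ERROR)
  std::printf("backends: %s\n", backends);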

◆ get_output()

NNFW_STATUS onert::api::Session::get_output ( uint32_t  index,
nnfw_tensorinfo out_info,
const void **  out_buffer 
)

Definition at line 979 of file Session.cc.

980{
981 if (ti == nullptr)
982 {
983 std::cerr << "Error during Session::get_output : tensor info is null" << std::endl;
984 return NNFW_STATUS_UNEXPECTED_NULL;
985 }
986
987 if (out_buffer == nullptr)
988 {
989 std::cerr << "Error during Session::get_output : output buffer is null" << std::endl;
990 return NNFW_STATUS_UNEXPECTED_NULL;
991 }
992
993 if (!isStateFinishedRun())
994 {
995 std::cerr << "Error during Session::get_output : invalid state" << std::endl;
996 return NNFW_STATUS_INVALID_STATE;
997 }
998
999 try
1000 {
1001 if (index >= getOutputSize())
1002 {
1003 std::cerr << "Error during Session::get_output, index " << index
1004 << " is out of range. (output count: " << getOutputSize() << ")" << std::endl;
1005 return NNFW_STATUS_ERROR;
1006 }
1007
1008 if (!_coptions->internal_output_alloc)
1009 {
1010 std::cerr << "Error during Session::get_output: "
1011 << "internal output allocation is not enabled. "
1012 << "Call nnfw_set_prepare_config(session, "
1013 "NNFW_PREPARE_CONFIG_ENABLE_INTERNAL_OUTPUT_ALLOC, \"true\") "
1014 << "before nnfw_prepare()." << std::endl;
1015 return NNFW_STATUS_ERROR;
1016 }
1017
1018 auto io_index = onert::ir::IOIndex{index};
1019 const auto &info = _execution->outputInfo(io_index);
1020 const auto &shape = info.shape();
1021 const auto &dtype = info.typeInfo().type();
1022 fillTensorInfo(ti, shape, dtype);
1023
1024 *out_buffer = _execution->outputBuffer(io_index);
1025 }
1026 catch (const std::exception &e)
1027 {
1028 std::cerr << "Error during Session::get_output : " << e.what() << std::endl;
1029 return NNFW_STATUS_ERROR;
1030 }
1031
1032 return NNFW_STATUS_NO_ERROR;
1033}

References info, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
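
A sketch of the internally-allocated output path, assuming session is an onert::api::Session* with a model loaded. Internal output allocation must be enabled before prepare(), and get_output() is only valid once a run has finished; the enumerator name follows the References of set_prepare_config() below:

// Sketch: let the runtime own the output buffer and read it back after run().
session->set_prepare_config(NNFW_ENABLE_INTERNAL_OUTPUT_ALLOC, "true"); // before prepare()
session->prepare();
// ... bind inputs with set_input(), then:
session->run();

nnfw_tensorinfo out_info;
const void *out_buffer = nullptr;
if (session->get_output(0, &out_info, &out_buffer) == NNFW_STATUS_NO_ERROR)
{
  // Assuming a float output; out_info carries the actual shape and dtype.
  const float *data = static_cast<const float *>(out_buffer);
  std::printf("output rank %d, first element %f\n", out_info.rank, data[0]);
}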

◆ input_size()

NNFW_STATUS onert::api::Session::input_size ( uint32_t *  number)

Definition at line 616 of file Session.cc.

617{
618 if (isStateInitialized()) // Model is not loaded
619 return NNFW_STATUS_INVALID_STATE;
620
621 try
622 {
623 if (number == nullptr)
624 {
625 std::cerr << "Error during Session::input_size, number is null pointer." << std::endl;
626 return NNFW_STATUS_UNEXPECTED_NULL;
627 }
628 *number = getInputSize();
629 }
630 catch (const std::exception &e)
631 {
632 std::cerr << "Error during Session::input_size : " << e.what() << std::endl;
633 return NNFW_STATUS_ERROR;
634 }
635 return NNFW_STATUS_NO_ERROR;
636}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.

◆ input_tensorindex()

NNFW_STATUS onert::api::Session::input_tensorindex ( const char *  tensorname,
uint32_t *  index 
)

Definition at line 1351 of file Session.cc.

1352{
1353 return getTensorIndexImpl(*primary_subgraph(), tensorname, index, true);
1354}

◆ input_tensorinfo()

NNFW_STATUS onert::api::Session::input_tensorinfo ( uint32_t  index,
nnfw_tensorinfo ti 
)

Definition at line 889 of file Session.cc.

890{
891 if (isStateInitialized())
892 return NNFW_STATUS_INVALID_STATE;
893
894 try
895 {
896 if (ti == nullptr)
897 {
898 std::cerr << "Error during Session::input_tensorinfo, tensorinfo is null pointer."
899 << std::endl;
900 return NNFW_STATUS_UNEXPECTED_NULL;
901 }
902
903 if (index >= getInputSize())
904 {
905 std::cerr << "Error during Session::input_tensorinfo, index is out of range." << std::endl;
906 return NNFW_STATUS_ERROR;
907 }
908
909 const auto input_index = onert::ir::IOIndex{index};
910 if (isStateModelLoaded())
911 {
912 const auto &info = _selected_signature.valid() ? _nnpkg->inputInfo(_selected_signature, index)
913 : _nnpkg->inputInfo(input_index);
914 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
915 }
916 else
917 {
918 const auto &info = _execution->inputInfo(input_index);
919 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
920 }
921 }
922 catch (const std::exception &e)
923 {
924 std::cerr << "Error during Session::input_tensorinfo : " << e.what() << std::endl;
925 return NNFW_STATUS_ERROR;
926 }
927 return NNFW_STATUS_NO_ERROR;
928}

References info, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.

◆ load_circle_from_buffer()

NNFW_STATUS onert::api::Session::load_circle_from_buffer ( uint8_t *  buffer,
size_t  size 
)

Definition at line 294 of file Session.cc.

295{
296 if (!isStateInitialized())
297 return NNFW_STATUS_INVALID_STATE;
298
299 if (!buffer)
300 return NNFW_STATUS_UNEXPECTED_NULL;
301
302 if (size == 0)
303 return NNFW_STATUS_ERROR;
304
305 try
306 {
307 auto model = onert::loader::loadCircleModel(buffer, size);
308 // TODO: Update _model_path if necessary
309 _nnpkg = std::make_unique<onert::ir::NNPkg>(std::move(model));
310 _train_info = loadTrainingInfo(_nnpkg->primary_model());
311 _state = State::MODEL_LOADED;
312 }
313 catch (const std::exception &e)
314 {
315 std::cerr << "Error during model loading : " << e.what() << std::endl;
316 return NNFW_STATUS_ERROR;
317 }
318 return NNFW_STATUS_NO_ERROR;
319}

References onert::loader::loadCircleModel(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and size.

◆ load_model_from_path()

NNFW_STATUS onert::api::Session::load_model_from_path ( const char *  path)

Definition at line 321 of file Session.cc.

322{
323 if (!isStateInitialized())
324 return NNFW_STATUS_INVALID_STATE;
325
326 if (!path)
327 {
328 std::cerr << "Path is null." << std::endl;
329 return NNFW_STATUS_UNEXPECTED_NULL;
330 }
331
332 if (!null_terminating(path, MAX_PATH_LENGTH))
333 {
334 std::cerr << "Path is too long" << std::endl;
335 return NNFW_STATUS_ERROR;
336 }
337
338 try
339 {
340 std::filesystem::path filename{path};
341 if (!std::filesystem::is_directory(filename))
342 {
343 std::string model_type = inferModelType(filename);
344 if (model_type.empty())
345 {
346 std::cerr << "Error: Cannot determine model type for '" << filename << "'."
347 << "Please use a file with valid extension." << std::endl;
348 return NNFW_STATUS_ERROR;
349 }
350 else
351 return loadModelFile(filename, model_type);
352 }
353
354 const auto &package_dir = filename;
355
356 // TODO : add support for zipped package file load
357 if (!std::filesystem::is_directory(package_dir))
358 {
359 std::cerr << "invalid path: " << package_dir << std::endl;
360 return NNFW_STATUS_ERROR;
361 }
362
363 const auto manifest_file_name = package_dir / "metadata/MANIFEST";
364 std::ifstream mfs(manifest_file_name);
365
366 // extract the filename of the first(index 0) model
367 // e.g. In MANIFEST file, { "models" : [ "firstmodel.tflite", "2nd.tflite" ] }
368 Json::Value root;
369 mfs >> root;
370 const Json::Value &models = root["models"];
371 const Json::Value &model_types = root["model-types"];
372 const Json::Value &configs = root["configs"];
373
374 if (!configs.empty() && !configs[0].empty())
375 {
376 const auto filepath = package_dir / "metadata" / configs[0].asString();
377
378 onert::util::CfgKeyValues keyValues;
379 if (loadConfigure(filepath.string(), keyValues))
380 {
381 onert::util::setConfigKeyValues(keyValues);
382 }
383 }
384 _nnpkg = std::make_unique<onert::ir::NNPkg>();
385 auto num_models = models.size();
386 if (num_models == 0 || (num_models - 1) > onert::ir::ModelIndex::max())
387 {
388 std::cerr << "Invalid model size - " << std::to_string(num_models) << std::endl;
389 return NNFW_STATUS_ERROR;
390 }
391
392 // Not support backend mapping to operator index for multiple models yet
393 // TODO Support this
394 if (num_models > 1 && _coptions->manual_scheduler_options.index_to_backend.size() != 0)
395 {
396 std::cerr << "Cannot set backend to operator index for multiple models" << std::endl;
397 return NNFW_STATUS_ERROR;
398 }
399
400 for (uint16_t i = 0; i < num_models; ++i)
401 {
402 const auto model_file_name = std::filesystem::path(models[i].asString());
403 const auto model_file_path = package_dir / model_file_name;
404 std::string model_type;
405
406 // Use model-types if available and not empty, otherwise infer from file extension
407 if (!model_types.empty() && i < model_types.size())
408 model_type = model_types[i].asString();
409 else
410 model_type = inferModelType(model_file_name);
411 if (model_type.empty())
412 {
413 std::cerr << "Error: Cannot determine model type for '" << models[i].asString() << "'."
414 << "Please specify model-types in MANIFEST or use a file with valid extension."
415 << std::endl;
416 return NNFW_STATUS_ERROR;
417 }
418
419 auto model = loadModel(model_file_path.string(), model_type);
420 if (model == nullptr)
421 return NNFW_STATUS_ERROR;
422 _model_path = model_file_path; // TODO Support multiple models
423 model->bindKernelBuilder(_kernel_registry->getBuilder());
424 _nnpkg->push(onert::ir::ModelIndex{i}, std::move(model));
425 }
426
427 _train_info = loadTrainingInfo(_nnpkg->primary_model());
428
429 auto toIODesc = [](std::string str) {
430 auto indices = nnfw::misc::split(str, ':');
431 if (indices.size() != 3)
432 {
433 std::cerr << "IODesc should be 3-tuple." << std::endl;
434 return onert::ir::IODesc{};
435 }
436 auto model_idx = static_cast<uint32_t>(std::stoi(indices.at(0)));
437 auto subgraph_idx = static_cast<uint32_t>(std::stoi(indices.at(1)));
438 auto operand_idx = static_cast<uint32_t>(std::stoi(indices.at(2)));
439 return onert::ir::IODesc{model_idx, subgraph_idx, operand_idx};
440 };
441 // read pkg-inputs and pkg-outputs
442 const Json::Value &pkg_inputs = root["pkg-inputs"];
443 for (uint32_t i = 0; i < pkg_inputs.size(); ++i)
444 _nnpkg->addInput(toIODesc(pkg_inputs[i].asString()));
445 const Json::Value &pkg_outputs = root["pkg-outputs"];
446 for (uint32_t i = 0; i < pkg_outputs.size(); ++i)
447 _nnpkg->addOutput(toIODesc(pkg_outputs[i].asString()));
448 // read model-connect
449 const Json::Value &fromtos = root["model-connect"];
450 for (uint32_t i = 0; i < fromtos.size(); ++i)
451 {
452 const Json::Value &tos = fromtos[i]["to"];
453 for (uint32_t j = 0; j < tos.size(); ++j)
454 _nnpkg->addEdge(toIODesc(fromtos[i]["from"].asString()), toIODesc(tos[j].asString()));
455 }
456
457 _nnpkg->verify();
458 _state = State::MODEL_LOADED;
459 }
460 catch (const std::exception &e)
461 {
462 std::cerr << "Error during model loading : " << e.what() << std::endl;
463 return NNFW_STATUS_ERROR;
464 }
465 return NNFW_STATUS_NO_ERROR;
466}

References onert::util::Index< uint16_t, ModelIndexTag >::max(), MAX_PATH_LENGTH, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, onert::util::setConfigKeyValues(), and nnfw::misc::split().
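
A short sketch of the two accepted inputs, assuming session is a freshly created onert::api::Session*; both paths below are placeholders:

// Sketch: either a single model file (type inferred from its extension) ...
session->load_model_from_path("/models/mobilenet.circle");

// ... or an nnpackage directory; the loader then reads
// <dir>/metadata/MANIFEST to find the model files, configs, and package I/O.
session->load_model_from_path("/models/mobilenet_pkg");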

◆ output_size()

NNFW_STATUS onert::api::Session::output_size ( uint32_t *  number)

Definition at line 638 of file Session.cc.

639{
640 if (isStateInitialized()) // Model is not loaded
641 return NNFW_STATUS_INVALID_STATE;
642
643 try
644 {
645 if (number == nullptr)
646 {
647 std::cerr << "Error during Session::output_size, number is null pointer." << std::endl;
648 return NNFW_STATUS_UNEXPECTED_NULL;
649 }
650 *number = getOutputSize();
651 }
652 catch (const std::exception &e)
653 {
654 std::cerr << "Error during Session::output_size" << e.what() << std::endl;
655 return NNFW_STATUS_ERROR;
656 }
657 return NNFW_STATUS_NO_ERROR;
658}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.

◆ output_tensorindex()

NNFW_STATUS onert::api::Session::output_tensorindex ( const char *  tensorname,
uint32_t *  index 
)

Definition at line 1356 of file Session.cc.

1357{
1358 return getTensorIndexImpl(*primary_subgraph(), tensorname, index, false);
1359}

◆ output_tensorinfo()

NNFW_STATUS onert::api::Session::output_tensorinfo ( uint32_t  index,
nnfw_tensorinfo ti 
)

Definition at line 930 of file Session.cc.

931{
932 if (isStateInitialized())
933 return NNFW_STATUS_INVALID_STATE;
934
935 if (ti == nullptr)
936 {
937 std::cerr << "Error during Session::output_tensorinfo, tensorinfo is null pointer."
938 << std::endl;
939 return NNFW_STATUS_UNEXPECTED_NULL;
940 }
941
942 try
943 {
944 if (index >= getOutputSize())
945 {
946 std::cerr << "Error during Session::output_tensorinfo, index is out of range." << std::endl;
947 return NNFW_STATUS_ERROR;
948 }
949
950 const auto output_index = onert::ir::IOIndex{index};
951 if (isStateModelLoaded())
952 {
953 const auto &info = _selected_signature.valid()
954 ? _nnpkg->outputInfo(_selected_signature, index)
955 : _nnpkg->outputInfo(output_index);
956 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
957 }
958 else
959 {
960 auto info = _execution->outputInfo(output_index);
961 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
962 }
963 }
964 catch (const std::exception &e)
965 {
966 std::cerr << "Error during Session::output_tensorinfo : " << e.what() << std::endl;
967 return NNFW_STATUS_ERROR;
968 }
969
970 return NNFW_STATUS_NO_ERROR;
971}

References info, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.

◆ prepare()

NNFW_STATUS onert::api::Session::prepare ( )

Definition at line 468 of file Session.cc.

469{
470 // NOTE. If users want to run prepare() more than one time, this could be removed.
471 if (!isStateModelLoaded())
472 {
473 std::cerr << "Error during model prepare : ";
474 if (isStateInitialized())
475 {
476 std::cerr << "prepare should be run once";
477 }
478 else
479 {
480 std::cerr << "invalid state";
481 }
482 std::cerr << std::endl;
483 return NNFW_STATUS_INVALID_STATE;
484 }
485
486 try
487 {
488 auto compiler =
489 onert::compiler::CompilerFactory::get().create(std::move(_nnpkg), _coptions.get());
490 _compiler_artifact = compiler->compile();
491 _execution = std::make_unique<onert::exec::Execution>(_compiler_artifact->_executors);
492 }
493 catch (const std::exception &e)
494 {
495 std::cerr << "Error during model prepare : " << e.what() << std::endl;
496 return NNFW_STATUS_ERROR;
497 }
498
499 _state = State::PREPARED;
500 return NNFW_STATUS_NO_ERROR;
501}

References onert::compiler::CompilerFactory::create(), onert::compiler::CompilerFactory::get(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ quantize()

NNFW_STATUS onert::api::Session::quantize ( )

Definition at line 2075 of file Session.cc.

2076{
2077 try
2078 {
2079 if (isStateInitialized() || isStateRunning())
2080 {
2081 std::cerr << "invalid state" << std::endl;
2082 return NNFW_STATUS_INVALID_STATE;
2083 }
2084
2085 auto result = _quant_manager->quantize(_model_path.string());
2086 if (!result)
2087 return NNFW_STATUS_ERROR;
2088
2089 // Replace model
2090 // TODO Support buffer replace, not file reload
2091 return loadModelFile(_quant_manager->exportModelPath(), "circle");
2092 }
2093 catch (const std::exception &e)
2094 {
2095 std::cerr << "Error during Session::quantize : " << e.what() << std::endl;
2096 return NNFW_STATUS_ERROR;
2097 }
2098}

References NNFW_STATUS_ERROR, and NNFW_STATUS_INVALID_STATE.
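
A sketch of the on-device quantization flow, assuming session is an onert::api::Session* that already has a float model loaded. The target type and export path are placeholders:

// Sketch: quantize the loaded float model to uint8 and reload the result.
session->set_quantization_type(NNFW_QUANTIZE_TYPE_U8_ASYM);
session->set_quantized_model_path("/tmp/model.q8.circle");
if (session->quantize() == NNFW_STATUS_NO_ERROR)
{
  // quantize() reloads the exported model, so the session can be prepared
  // and run again as usual.
  session->prepare();
}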

◆ register_custom_operation()

NNFW_STATUS onert::api::Session::register_custom_operation ( const std::string &  id,
nnfw_custom_eval  eval_func 
)

Definition at line 973 of file Session.cc.

974{
975 _kernel_registry->registerKernel(id, eval_func);
976 return NNFW_STATUS_NO_ERROR;
977}

References NNFW_STATUS_NO_ERROR.

◆ reset_execute_config()

NNFW_STATUS onert::api::Session::reset_execute_config ( )

Definition at line 2252 of file Session.cc.

2253{
2254 if (!isStatePreparedOrFinishedRun())
2255 {
2256 std::cerr << "Error during Session::set_execution_config : Invalid state" << std::endl;
2257 return NNFW_STATUS_INVALID_STATE;
2258 }
2259
2260 _execution->executionOptions().dump_minmax = false;
2261 _execution->executionOptions().trace = false;
2262 _execution->executionOptions().profile = false;
2263
2264 return NNFW_STATUS_NO_ERROR;
2265}

References NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ reset_prepare_config()

NNFW_STATUS onert::api::Session::reset_prepare_config ( )

Definition at line 2209 of file Session.cc.

2210{
2211 if (!isStateModelLoaded())
2212 {
2213 std::cerr << "Error during Session::reset_prepare_config : Invalid state" << std::endl;
2214 return NNFW_STATUS_INVALID_STATE;
2215 }
2216
2217 _coptions->he_profiling_mode = false;
2218
2219 return NNFW_STATUS_NO_ERROR;
2220}

References NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ run()

NNFW_STATUS onert::api::Session::run ( )

Definition at line 503 of file Session.cc.

504{
505 if (!isStatePreparedOrFinishedRun())
506 {
507 std::cerr << "Error during Session::run : "
508 << "run should be run after prepare" << std::endl;
509 return NNFW_STATUS_INVALID_STATE;
510 }
511
512 try
513 {
514 _execution->execute();
515 }
517 {
518 // Currently insufficient buffer always means output buffer.
519 std::cerr << "Error during Session::run : " << e.what() << std::endl;
521 }
522 catch (const std::exception &e)
523 {
524 std::cerr << "Error during Session::run : " << e.what() << std::endl;
525 return NNFW_STATUS_ERROR;
526 }
527
528 _state = State::FINISHED_RUN;
529 return NNFW_STATUS_NO_ERROR;
530}

References NNFW_STATUS_ERROR, NNFW_STATUS_INSUFFICIENT_OUTPUT_SIZE, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and onert::Exception::what().

◆ run_async()

NNFW_STATUS onert::api::Session::run_async ( )

Definition at line 532 of file Session.cc.

533{
534 if (!isStatePreparedOrFinishedRun())
535 {
536 std::cerr << "Error during Session::run_async : "
537 << "run_async should be run after prepare" << std::endl;
538 return NNFW_STATUS_INVALID_STATE;
539 }
540
541 _execution->startExecute();
542
543 _state = State::RUNNING;
544 return NNFW_STATUS_NO_ERROR;
545}

References NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.
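
A sketch of the asynchronous execution pattern, assuming session is a prepared onert::api::Session* with inputs and outputs already bound as for run(). run_async() starts the execution and returns immediately; await() blocks until it finishes:

// Sketch: overlap host-side work with inference.
if (session->run_async() == NNFW_STATUS_NO_ERROR)
{
  do_other_work();  // placeholder for work done while inference runs
  session->await(); // blocks until the execution finishes
  // buffers bound with set_output() are now valid
}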

◆ run_with_auto_compilation()

NNFW_STATUS onert::api::Session::run_with_auto_compilation ( const char *  target,
NNFW_CODEGEN_PREF  pref 
)

Definition at line 2296 of file Session.cc.

2297{
2298
2299 if (!isStatePreparedOrFinishedRun())
2300 {
2301 std::cerr << "Error during Session::run_with_auto_compilation : "
2302 << "run should be after preparation" << std::endl;
2303 return NNFW_STATUS_INVALID_STATE;
2304 }
2305
2306 // Check quantization and code-generation parameters
2307 std::string target_str{target};
2308 if (_quant_manager->exportModelPath().empty() || _codegen_manager->exportModelPath().empty() ||
2309 target_str.empty() || target_str.substr(target_str.size() - 4) != "-gen")
2310 {
2311 std::cerr << "Error during Session::run_with_auto_compilation : "
2312 << "quantization and code generation parameters should be set" << std::endl;
2313 return NNFW_STATUS_ERROR;
2314 }
2315
2316 // Odc: auto compilation with hidden switching mechanizm
2317 // Check is model already quantized or compiled
2318 std::ifstream file_quantized_model(_quant_manager->exportModelPath());
2319 std::ifstream file_compiled_model(_codegen_manager->exportModelPath());
2320
2321 if (!file_quantized_model.good() && !file_compiled_model.good())
2322 {
2323 // Run float model and try to quantize it
2324 {
2325 // Save execution options
2326 auto saved_options = _execution->executionOptions();
2327 // turn on minmax recording
2328 _execution->executionOptions().dump_minmax = true;
2329
2330 try
2331 {
2332 _execution->execute();
2333 }
2335 {
2336 // Currently insufficient buffer always means output buffer.
2337 std::cerr << "Error during Session::run_with_auto_compilation : " << e.what() << std::endl;
2339 }
2340 catch (const std::exception &e)
2341 {
2342 std::cerr << "Error during Session::run_with_auto_compilation : " << e.what() << std::endl;
2343 return NNFW_STATUS_ERROR;
2344 }
2345
2346 _state = State::FINISHED_RUN;
2347
2348 // restore min_max option to user defined state
2349 _execution->executionOptions().dump_minmax = saved_options.dump_minmax;
2350
2351 // if enough statistics are collected, then run the quantization
2352 if (_quant_manager->readyForQuantize())
2353 {
2354 try
2355 {
2356 if (isStateInitialized() || isStateRunning())
2357 {
2358 std::cerr << "invalid state" << std::endl;
2359 return NNFW_STATUS_INVALID_STATE;
2360 }
2361
2362 auto result = _quant_manager->quantize(_model_path);
2363 if (!result)
2364 return NNFW_STATUS_ERROR;
2365
2366 // remove minmax file
2367 result = _quant_manager->deleteMinMaxFile();
2368 if (!result)
2369 return NNFW_STATUS_ERROR;
2370 }
2371 catch (const std::exception &e)
2372 {
2373 std::cerr << "Error during Session::run_with_auto_compilation in quantize operation: "
2374 << e.what() << std::endl;
2375 return NNFW_STATUS_ERROR;
2376 }
2377 }
2378 }
2379 }
2380 else
2381 {
2382 // run compiled or quantized model
2383 NNFW_STATUS status;
2384
2385 // turn off minmax recording
2386 _execution->executionOptions().dump_minmax = false;
2387
2388 // save initial buffers if quantized model or compiled model is not loaded
2389 if (_autoCompilationState == Session::AutoCompilationState::INITIAL_STATE)
2390 {
2391 auto dotidx = _codegen_manager->exportModelPath().rfind('.');
2392 if (dotidx == std::string::npos)
2393 {
2394 std::cerr << "Error during Session::run_with_auto_compilation : Invalid compiled "
2395 "model path. Please use a "
2396 "path that includes the extension."
2397 << std::endl;
2398 return NNFW_STATUS_ERROR;
2399 }
2400
2401 std::string compiled_model_type =
2402 _codegen_manager->exportModelPath().substr(dotidx + 1); // + 1 to exclude dot
2403
2404 dotidx = _quant_manager->exportModelPath().rfind('.');
2405 if (dotidx == std::string::npos)
2406 {
2407 std::cerr << "Error during Session::run_with_auto_compilation : Invalid quantized "
2408 "model path. Please use a "
2409 "path that includes the extension."
2410 << std::endl;
2411 return NNFW_STATUS_ERROR;
2412 }
2413 std::string quantized_model_type =
2414 _quant_manager->exportModelPath().substr(dotidx + 1); // + 1 to exclude dot
2415
2416 // Save initial (float) input and output buffers
2417 auto input_size = _execution->inputSize();
2418 auto output_size = _execution->outputSize();
2419
2420 std::vector<const void *> _input_buffers;
2421 std::vector<void *> _output_buffers;
2422
2423 using namespace onert::ir;
2424 // Copy execution context for backup: I/O buffer, shape, and execution options
2425 const onert::exec::ExecutionContext ctx_backup = _execution->context();
2426
2427 // Set compile option to use float type
2428 for (auto input_index = IOIndex{0}; input_index < IOIndex{input_size}; input_index++)
2429 _coptions->input_type.insert_or_assign(IODesc{ModelIndex{0}, SubgraphIndex{0}, input_index},
2430 TypeInfo(DataType::FLOAT32));
2431
2432 // Save Outputs buffers
2433 for (auto output_index = IOIndex{0}; output_index < IOIndex{output_size}; output_index++)
2434 _coptions->output_type.insert_or_assign(
2435 IODesc{ModelIndex{0}, SubgraphIndex{0}, output_index}, TypeInfo(DataType::FLOAT32));
2436
2437 // if there is compiled model - try to load it
2438 if (file_compiled_model.good())
2439 {
2440 // load compiled model
2441 status = loadModelFile(_codegen_manager->exportModelPath(), compiled_model_type);
2442 if (status == NNFW_STATUS_NO_ERROR)
2443 {
2444 _autoCompilationState = Session::AutoCompilationState::COMPILED_MODEL_LOADED;
2445 }
2446 }
2447 else // there is no compiled model - try to compile and load it
2448 {
2449
2450 // avoiding code duplication use existing "codegen" function. Set up _model_path for the
2451 // codegen function.
2452 // TODO: change it if codegen function will be generalized
2453 _model_path = _quant_manager->exportModelPath();
2454
2455 // try to compile and load compiled model
2456 status = codegen(target, pref);
2457 if (status == NNFW_STATUS_NO_ERROR)
2458 {
2459 _autoCompilationState = Session::AutoCompilationState::COMPILED_MODEL_LOADED;
2460 // TODO delete quantized model
2461 }
2462 }
2463
2464 // loading compiled model is fail - try to load quantized model
2465 if (_autoCompilationState != Session::AutoCompilationState::COMPILED_MODEL_LOADED)
2466 {
2467 // load quantized model
2468 status = loadModelFile(_quant_manager->exportModelPath(), quantized_model_type);
2469 if (status != NNFW_STATUS_NO_ERROR)
2470 return status;
2471 else
2472 _autoCompilationState = Session::AutoCompilationState::QUANTIZED_MODEL_LOADED;
2473 }
2474
2475 status = prepare();
2476 if (status != NNFW_STATUS_NO_ERROR)
2477 return status;
2478
2479 // Restore execution context: I/O buffer, shape, and execution options
2480 _execution->restoreContext(ctx_backup);
2481 }
2482
2483 // Run quantized model
2484 if (!isStatePreparedOrFinishedRun())
2485 {
2486 std::cerr << "Error during Session::run_with_auto_compilation : "
2487 << "run should be run after prepare" << std::endl;
2488 return NNFW_STATUS_INVALID_STATE;
2489 }
2490
2491 try
2492 {
2493 _execution->execute();
2494 }
2496 {
2497 // Currently insufficient buffer always means output buffer.
2498 std::cerr << "Error during Session::run_with_auto_compilation : " << e.what() << std::endl;
2500 }
2501 catch (const std::exception &e)
2502 {
2503 std::cerr << "Error during Session::run_with_auto_compilation : " << e.what() << std::endl;
2504 return NNFW_STATUS_ERROR;
2505 }
2506
2507 _state = State::FINISHED_RUN;
2508 }
2509
2510 return NNFW_STATUS_NO_ERROR;
2511}

References NNFW_STATUS_ERROR, NNFW_STATUS_INSUFFICIENT_OUTPUT_SIZE, NNFW_STATUS_INVALID_STATE, and onert::Exception::what().
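
A sketch of the setup this member expects, combining the quantization and codegen knobs documented on this page. All paths, the record threshold, and the "dummy-gen" target are placeholders; note that set_workspace() must be called before the model is loaded:

// Sketch: run a float model while collecting min/max statistics; once enough
// records are gathered the runtime quantizes, compiles, and switches over.
onert::api::Session *session = nullptr;
onert::api::Session::create(&session);
session->set_workspace("/tmp/onert_ws");                    // before load
session->load_model_from_path("model.circle");              // path assumed
session->set_quantization_type(NNFW_QUANTIZE_TYPE_U8_ASYM);
session->set_quantized_model_path("/tmp/model.q8.circle");
session->set_codegen_model_path("/tmp/model.dummy");
session->set_odc_param_minmax_records_count(10);            // threshold assumed
session->prepare();
// ... bind buffers with set_input()/set_output(), then per inference:
session->run_with_auto_compilation("dummy-gen", NNFW_CODEGEN_PREF_DEFAULT);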

◆ set_available_backends()

NNFW_STATUS onert::api::Session::set_available_backends ( const char *  backends)

Definition at line 1035 of file Session.cc.

1036{
1037 if (!isStateModelLoaded())
1038 return NNFW_STATUS_INVALID_STATE;
1039
1040 try
1041 {
1042 if (!backends)
1043 return NNFW_STATUS_UNEXPECTED_NULL;
1044 if (null_terminating(backends, MAX_BACKEND_NAME_LENGTH) == false)
1045 return NNFW_STATUS_ERROR;
1046
1047 using namespace onert::util;
1048
1049 _coptions->backend_list = nnfw::misc::split(std::string{backends}, ';');
1050 }
1051 catch (const std::exception &e)
1052 {
1053 std::cerr << "Error during Session::set_available_backends : " << e.what() << std::endl;
1054 return NNFW_STATUS_ERROR;
1055 }
1056 return NNFW_STATUS_NO_ERROR;
1057}

References MAX_BACKEND_NAME_LENGTH, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and nnfw::misc::split().
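
A short sketch, assuming session is an onert::api::Session* with a model loaded: the argument is a semicolon-separated, priority-ordered backend list, matching the split on ';' shown above. The backend names "acl_cl" and "cpu" are examples:

// Sketch: prefer acl_cl, fall back to cpu. Call after model load and before prepare().
session->set_available_backends("acl_cl;cpu");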

◆ set_backends_per_operation()

NNFW_STATUS onert::api::Session::set_backends_per_operation ( const char *  backend_settings)

Set backends with a string-encoded mapping from operation index to backend type (cpu, acl_cl).

Definition at line 1361 of file Session.cc.

1362{
1363 if (backend_settings == NULL)
1364 return NNFW_STATUS_ERROR;
1365
1366 if (!isStateModelLoaded())
1367 return NNFW_STATUS_INVALID_STATE;
1368
1369 // Not supported multiple model
1370 // TODO Support this
1371 if (_nnpkg->model_count() > 1)
1372 {
1373 std::cerr << "Not supported multiple model" << std::endl;
1374 return NNFW_STATUS_ERROR;
1375 }
1376
1377 try
1378 {
1379 // Backend for all
1380 auto &ms_options = _coptions->manual_scheduler_options;
1381 ms_options.setBackendMap(std::string{backend_settings});
1382 }
1383 catch (const std::exception &e)
1384 {
1385 std::cerr << "Error during Session::set_backends_per_operation" << e.what() << std::endl;
1386 return NNFW_STATUS_ERROR;
1387 }
1388
1389 return NNFW_STATUS_NO_ERROR;
1390}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.
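
A short sketch, assuming session is an onert::api::Session* with a single model loaded. The exact encoding is handled by ManualSchedulerOptions::setBackendMap(); the "index=backend" pairs separated by ';' shown here are an assumption about that format:

// Sketch (format assumed): run operation 0 on acl_cl and operation 3 on cpu,
// leaving the remaining operations to the default backend selection.
session->set_backends_per_operation("0=acl_cl;3=cpu");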

◆ set_codegen_model_path()

NNFW_STATUS onert::api::Session::set_codegen_model_path ( const char *  path)

Definition at line 2100 of file Session.cc.

2101{
2102 try
2103 {
2104 if (isStateInitialized() || isStateRunning())
2105 {
2106 std::cerr << "invalid state" << std::endl;
2107 return NNFW_STATUS_INVALID_STATE;
2108 }
2109
2110 assert(_codegen_manager != nullptr);
2111 _codegen_manager->exportModelPath(std::string(path));
2112 }
2113 catch (const std::exception &e)
2114 {
2115 std::cerr << "Error during Session::set_codegen_model_path : " << e.what() << std::endl;
2116 return NNFW_STATUS_ERROR;
2117 }
2118
2119 return NNFW_STATUS_NO_ERROR;
2120}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_config()

NNFW_STATUS onert::api::Session::set_config ( const char *  key,
const char *  value 
)

Definition at line 1131 of file Session.cc.

1132{
1133 if (!isStateModelLoaded())
1134 return NNFW_STATUS_INVALID_STATE;
1135
1136 if (!key || !value)
1137 return NNFW_STATUS_UNEXPECTED_NULL;
1138
1139 using namespace onert::util;
1140
1141 const std::string skey = key;
1142
1143 if (skey == config::GRAPH_DOT_DUMP)
1144 {
1145 _coptions->graph_dump_level = toInt(value);
1146 }
1147 else if (skey == config::EXECUTOR)
1148 {
1149 _coptions->executor = value;
1150 }
1151 else if (skey == config::USE_SCHEDULER)
1152 {
1153 _coptions->he_scheduler = toBool(value);
1154 }
1155 else if (skey == config::PROFILING_MODE)
1156 {
1157 _coptions->he_profiling_mode = toBool(value);
1158 }
1159 else if (skey == config::ENABLE_LOG || skey == config::NUM_THREADS)
1160 {
1161 onert::util::CfgKeyValues keyValues;
1162 keyValues[skey] = std::string(value);
1163 onert::util::setConfigKeyValues(keyValues);
1164
1165 if (skey == config::ENABLE_LOG)
1166 {
1167 UPDATE_VERBOSE_CONFIG();
1168 }
1169 }
1170 else
1171 {
1172 return NNFW_STATUS_ERROR;
1173 }
1174 return NNFW_STATUS_NO_ERROR;
1175}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, onert::util::setConfigKeyValues(), and UPDATE_VERBOSE_CONFIG.
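
A short sketch of the string-keyed configuration interface, assuming session is an onert::api::Session* with a model loaded but not yet prepared. The key strings are assumed to match the onert::util::config constants compared above, and the executor name is an example; values are parsed with toBool()/toInt() as shown:

// Sketch: pick the parallel executor and cap the CPU thread pool.
session->set_config("EXECUTOR", "Parallel");  // executor name assumed
session->set_config("NUM_THREADS", "4");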

◆ set_execute_config()

NNFW_STATUS onert::api::Session::set_execute_config ( const NNFW_RUN_CONFIG  key,
const char *  value 
)

Definition at line 2222 of file Session.cc.

2223{
2224 if (!isStatePreparedOrFinishedRun())
2225 {
2226 std::cerr << "Error during Session::set_execution_config : Invalid state" << std::endl;
2227 return NNFW_STATUS_INVALID_STATE;
2228 }
2229
2230 switch (key)
2231 {
2232 case NNFW_RUN_CONFIG_DUMP_MINMAX:
2233 if (_coptions->workspace_dir.empty())
2234 return NNFW_STATUS_ERROR;
2235 _execution->executionOptions().dump_minmax = true;
2236 break;
2237 case NNFW_RUN_CONFIG_TRACE:
2238 if (_coptions->workspace_dir.empty())
2239 return NNFW_STATUS_ERROR;
2240 _execution->executionOptions().trace = true;
2241 break;
2242 case NNFW_RUN_CONFIG_PROFILE:
2243 _execution->executionOptions().profile = true;
2244 break;
2245 default:
2246 return NNFW_STATUS_ERROR;
2247 }
2248
2249 return NNFW_STATUS_NO_ERROR;
2250}

References NNFW_RUN_CONFIG_DUMP_MINMAX, NNFW_RUN_CONFIG_PROFILE, NNFW_RUN_CONFIG_TRACE, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_input()

NNFW_STATUS onert::api::Session::set_input ( uint32_t  index,
NNFW_TYPE  type,
const void *  buffer,
size_t  length 
)

Definition at line 562 of file Session.cc.

563{
564 if (!isStatePreparedOrFinishedRun())
565 {
566 std::cerr << "Error during Session::set_input : invalid state" << std::endl;
567 return NNFW_STATUS_INVALID_STATE;
568 }
569
570 if (!buffer && length != 0)
571 {
572 std::cerr << "Error during Session::set_input : given buffer is NULL but the length is not 0"
573 << std::endl;
574 return NNFW_STATUS_ERROR;
575 }
576
577 try
578 {
579 _execution->setInput(onert::ir::IOIndex(index), buffer, length);
580 }
581 catch (const std::exception &e)
582 {
583 std::cerr << "Error during Session::set_input : " << e.what() << std::endl;
584 return NNFW_STATUS_ERROR;
585 }
586 return NNFW_STATUS_NO_ERROR;
587}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_input_layout()

NNFW_STATUS onert::api::Session::set_input_layout ( uint32_t  index,
NNFW_LAYOUT  layout 
)

Definition at line 660 of file Session.cc.

661{
662 if (!isStateModelLoaded())
663 {
664 std::cerr << "Error during Session::set_input_layout : "
665 << "run should be run before prepare" << std::endl;
666 return NNFW_STATUS_INVALID_STATE;
667 }
668
669 try
670 {
671 if (layout != NNFW_LAYOUT_NONE && layout != NNFW_LAYOUT_CHANNELS_FIRST &&
672 layout != NNFW_LAYOUT_CHANNELS_LAST)
673 {
674 std::cerr << "Error during Session::set_input_layout, not supported layout" << std::endl;
675 return NNFW_STATUS_ERROR;
676 }
677
678 if (_selected_signature.valid())
679 {
680 // TODO Support this
681 std::cerr << "Error during Session::set_input_layout : "
682 << "set_input_layout after signature selection is not supported yet" << std::endl;
683 return NNFW_STATUS_ERROR;
684 }
685
686 const auto io_index = onert::ir::IOIndex{index};
687 // Signature is supported on single model only
688 assert(!_selected_signature.valid() || _nnpkg->model_count() != 1);
689 const auto io_desc =
690 _selected_signature.valid()
691 ? onert::ir::IODesc{onert::ir::ModelIndex{0}, _selected_signature, io_index}
692 : _nnpkg->input(io_index);
693 // Insert if not exists, otherwise update the value
694 _coptions->input_layout[io_desc] = convertLayout(layout);
695 }
696 catch (const std::exception &e)
697 {
698 std::cerr << "Error during Session::set_input_layout : " << e.what() << std::endl;
699 return NNFW_STATUS_ERROR;
700 }
701 return NNFW_STATUS_NO_ERROR;
702}

References NNFW_LAYOUT_CHANNELS_FIRST, NNFW_LAYOUT_CHANNELS_LAST, NNFW_LAYOUT_NONE, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_input_tensorinfo()

NNFW_STATUS onert::api::Session::set_input_tensorinfo ( uint32_t  index,
const nnfw_tensorinfo ti 
)

Definition at line 839 of file Session.cc.

840{
841 // sanity check
842 {
843 if (isStateInitialized())
844 {
845 std::cerr << "Error during set_input_tensorinfo : should be run after load_model"
846 << std::endl;
847 return NNFW_STATUS_INVALID_STATE;
848 }
849
850 if (ti == nullptr)
851 {
852 std::cerr << "Error during Session::set_input_tensorinfo : tensorinfo is null" << std::endl;
853 return NNFW_STATUS_UNEXPECTED_NULL;
854 }
855
856 if (ti->rank < 0 || ti->rank > NNFW_MAX_RANK)
857 {
858 std::cerr << "unsupported rank: " << ti->rank << std::endl;
859 return NNFW_STATUS_ERROR;
860 }
861
862 for (int32_t i = 0; i < ti->rank; ++i)
863 {
864 if (ti->dims[i] <= 0)
865 {
866 std::cerr << "dim must be positive integer but was " << ti->dims[i] << std::endl;
867 return NNFW_STATUS_ERROR;
868 }
869 }
870 }
871
872 onert::ir::Shape new_shape(ti->rank);
873 for (int32_t i = 0; i < ti->rank; i++)
874 new_shape.dim(i) = ti->dims[i];
875
876 const auto input_index = onert::ir::IOIndex(index);
877 if (!isStatePreparedOrFinishedRun())
878 {
879 // In this case, if we apply input shape, it will propagate after compilation and excution
880 _selected_signature.valid() ? _nnpkg->changeInputShape(_selected_signature, index, new_shape)
881 : _nnpkg->changeInputShape(input_index, new_shape);
882 }
883 else // when called after Session::prepare()
884 _execution->changeInputShape(input_index, new_shape);
885
886 return NNFW_STATUS_NO_ERROR;
887}

References nnfw_tensorinfo::dims, NNFW_MAX_RANK, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and nnfw_tensorinfo::rank.
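
A sketch of resizing an input, assuming session is an onert::api::Session* with a model loaded and that input 0 has a resizable batch dimension; the new size is a placeholder. Called before prepare() the shape is recorded in the package, called afterwards it is applied to the live execution, as shown above:

// Sketch: set the batch dimension of input 0 to 4.
nnfw_tensorinfo ti;
session->input_tensorinfo(0, &ti); // start from the current dtype/rank/dims
ti.dims[0] = 4;                    // new batch size (assumed to be dim 0)
session->set_input_tensorinfo(0, &ti);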

◆ set_input_type()

NNFW_STATUS onert::api::Session::set_input_type ( uint32_t  index,
NNFW_TYPE  type 
)

Definition at line 749 of file Session.cc.

750{
751 if (!isStateModelLoaded())
752 {
753 std::cerr << "Error during Session::set_input_type : "
754 << "set_input_type should be called before prepare" << std::endl;
755 return NNFW_STATUS_INVALID_STATE;
756 }
757
758 try
759 {
760 if (type != NNFW_TYPE_TENSOR_FLOAT32)
761 {
762 std::cerr << "Error during Session::set_input_type, not supported type" << std::endl;
763 return NNFW_STATUS_ERROR;
764 }
765
766 if (_selected_signature.valid())
767 {
768 // TODO Support this
769 std::cerr << "Error during Session::set_input_type : "
770 << "set_input_type after signature selection is not supported yet" << std::endl;
771 return NNFW_STATUS_ERROR;
772 }
773
774 const auto io_index = onert::ir::IOIndex{index};
775 // Signature is supported on single model only
776 assert(!_selected_signature.valid() || _nnpkg->model_count() != 1);
777 const auto io_desc =
778 _selected_signature.valid()
779 ? onert::ir::IODesc{onert::ir::ModelIndex{0}, _selected_signature, io_index}
780 : _nnpkg->input(io_index);
781 // Insert if not exists, otherwise update the value
782 _coptions->input_type.insert_or_assign(io_desc,
783 onert::ir::TypeInfo(onert::ir::DataType::FLOAT32));
784 }
785 catch (const std::exception &e)
786 {
787 std::cerr << "Error during Session::set_input_type : " << e.what() << std::endl;
788 return NNFW_STATUS_ERROR;
789 }
790
791 return NNFW_STATUS_NO_ERROR;
792}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_TYPE_TENSOR_FLOAT32, and type.

◆ set_odc_param_minmax_records_count()

NNFW_STATUS onert::api::Session::set_odc_param_minmax_records_count ( int  minmax_records_count)

Definition at line 2267 of file Session.cc.

2268{
2269 if (isStateInitialized() || isStateRunning())
2270 {
2271 std::cerr << "invalid state" << std::endl;
2272 return NNFW_STATUS_INVALID_STATE;
2273 }
2274
2275 if (_quant_manager->setMinMaxRecordsThreshold(minmax_records_count))
2276 return NNFW_STATUS_NO_ERROR;
2277 else
2278 return NNFW_STATUS_ERROR;
2279}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_output()

NNFW_STATUS onert::api::Session::set_output ( uint32_t  index,
NNFW_TYPE  type,
void *  buffer,
size_t  length 
)

Definition at line 589 of file Session.cc.

590{
591 if (!isStatePreparedOrFinishedRun())
592 {
593 std::cerr << "Error during Session::set_output : invalid state" << std::endl;
594 return NNFW_STATUS_INVALID_STATE;
595 }
596
597 if (!buffer && length != 0)
598 {
599 std::cerr << "Error during Session::set_output : given buffer is NULL but the length is not 0"
600 << std::endl;
601 return NNFW_STATUS_ERROR;
602 }
603
604 try
605 {
606 _execution->setOutput(onert::ir::IOIndex(index), buffer, length);
607 }
608 catch (const std::exception &e)
609 {
610 std::cerr << "Error during Session::set_output : " << e.what() << std::endl;
611 return NNFW_STATUS_ERROR;
612 }
613 return NNFW_STATUS_NO_ERROR;
614}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_output_layout()

NNFW_STATUS onert::api::Session::set_output_layout ( uint32_t  index,
NNFW_LAYOUT  layout 
)

Definition at line 704 of file Session.cc.

705{
706 if (!isStateModelLoaded())
707 {
708 std::cerr << "Error during Session::set_output_layout : "
709 << "run should be run before prepare" << std::endl;
710 return NNFW_STATUS_INVALID_STATE;
711 }
712
713 try
714 {
715 if (layout != NNFW_LAYOUT_NONE && layout != NNFW_LAYOUT_CHANNELS_FIRST &&
716 layout != NNFW_LAYOUT_CHANNELS_LAST)
717 {
718 std::cerr << "Error during Session::set_output_layout, not supported layout" << std::endl;
719 return NNFW_STATUS_ERROR;
720 }
721
722 if (_selected_signature.valid())
723 {
724 // TODO Support this
725 std::cerr << "Error during Session::set_output_layout : "
726 << "set_output_layout after signature selection is not supported yet" << std::endl;
727 return NNFW_STATUS_ERROR;
728 }
729
730 const auto io_index = onert::ir::IOIndex{index};
731 // Signature is supported on single model only
732 assert(!_selected_signature.valid() || _nnpkg->model_count() != 1);
733 const auto io_desc =
734 _selected_signature.valid()
735 ? onert::ir::IODesc{onert::ir::ModelIndex{0}, _selected_signature, io_index}
736 : _nnpkg->output(io_index);
737
738 // Insert if not exists, otherwise update the value
739 _coptions->output_layout[io_desc] = convertLayout(layout);
740 }
741 catch (const std::exception &e)
742 {
743 std::cerr << "Error during Session::set_output_layout : " << e.what() << std::endl;
744 return NNFW_STATUS_ERROR;
745 }
746 return NNFW_STATUS_NO_ERROR;
747}

References NNFW_LAYOUT_CHANNELS_FIRST, NNFW_LAYOUT_CHANNELS_LAST, NNFW_LAYOUT_NONE, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_output_type()

NNFW_STATUS onert::api::Session::set_output_type ( uint32_t  index,
NNFW_TYPE  type 
)

Definition at line 794 of file Session.cc.

795{
796 if (!isStateModelLoaded())
797 {
798 std::cerr << "Error during Session::set_output_type : "
799 << "set_output_type should be called before prepare" << std::endl;
800 return NNFW_STATUS_INVALID_STATE;
801 }
802
803 try
804 {
805 if (type != NNFW_TYPE_TENSOR_FLOAT32)
806 {
807 std::cerr << "Error during Session::set_output_type, not supported type" << std::endl;
808 return NNFW_STATUS_ERROR;
809 }
810
811 if (_selected_signature.valid())
812 {
813 // TODO Support this
814 std::cerr << "Error during Session::set_output_type : "
815 << "set_output_type after signature selection is not supported yet" << std::endl;
816 return NNFW_STATUS_ERROR;
817 }
818
819 const auto io_index = onert::ir::IOIndex{index};
820 // Signature is supported on single model only
821 assert(!_selected_signature.valid() || _nnpkg->model_count() != 1);
822 const auto io_desc =
823 _selected_signature.valid()
824 ? onert::ir::IODesc{onert::ir::ModelIndex{0}, _selected_signature, io_index}
825 : _nnpkg->output(io_index);
826 // Insert if not exists, otherwise update the value
827 _coptions->output_type.insert_or_assign(io_desc,
828 onert::ir::TypeInfo(onert::ir::DataType::FLOAT32));
829 }
830 catch (const std::exception &e)
831 {
832 std::cerr << "Error during Session::set_output_type : " << e.what() << std::endl;
833 return NNFW_STATUS_ERROR;
834 }
835
836 return NNFW_STATUS_NO_ERROR;
837}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_TYPE_TENSOR_FLOAT32, and type.

◆ set_prepare_config()

NNFW_STATUS onert::api::Session::set_prepare_config ( const NNFW_PREPARE_CONFIG  key,
const char *  value 
)

Definition at line 2186 of file Session.cc.

2187{
2188 if (!isStateModelLoaded())
2189 {
2190 std::cerr << "Error during Session::set_prepare_config : Invalid state" << std::endl;
2191 return NNFW_STATUS_INVALID_STATE;
2192 }
2193
2194 switch (key)
2195 {
2196 case NNFW_PREPARE_CONFIG_PROFILE:
2197 _coptions->he_profiling_mode = true;
2198 break;
2199 case NNFW_ENABLE_INTERNAL_OUTPUT_ALLOC:
2200 _coptions->internal_output_alloc = true;
2201 break;
2202 default:
2203 return NNFW_STATUS_ERROR;
2204 }
2205
2206 return NNFW_STATUS_NO_ERROR;
2207}

References NNFW_ENABLE_INTERNAL_OUTPUT_ALLOC, NNFW_PREPARE_CONFIG_PROFILE, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.
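
A short sketch, assuming session is an onert::api::Session* with a model loaded: prepare-time options are set between model load and prepare(), and the enumerator names follow the References above:

// Sketch: enable scheduler profiling and runtime-owned output buffers,
// then compile with those options in effect.
session->set_prepare_config(NNFW_PREPARE_CONFIG_PROFILE, "true");
session->set_prepare_config(NNFW_ENABLE_INTERNAL_OUTPUT_ALLOC, "true");
session->prepare();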

◆ set_quantization_type()

NNFW_STATUS onert::api::Session::set_quantization_type ( NNFW_QUANTIZE_TYPE  qtype)

Definition at line 2014 of file Session.cc.

2015{
2016 using onert::odc::QuantizeType;
2017 try
2018 {
2019 if (isStateInitialized() || isStateRunning())
2020 {
2021 std::cerr << "invalid state" << std::endl;
2022 return NNFW_STATUS_INVALID_STATE;
2023 }
2024
2025 QuantizeType odc_qtype = onert::odc::ODC_QTYPE_NOT_SET;
2026 switch (qtype)
2027 {
2028 case NNFW_QUANTIZE_TYPE_U8_ASYM:
2029 odc_qtype = onert::odc::ODC_QTYPE_U8_ASYM;
2030 break;
2031 case NNFW_QUANTIZE_TYPE_I16_SYM:
2032 odc_qtype = onert::odc::ODC_QTYPE_I16_SYM;
2033 break;
2034 case NNFW_QUANTIZE_TYPE_WO_I8_SYM:
2035 odc_qtype = onert::odc::ODC_QTYPE_WO_I8_SYM;
2036 break;
2037 case NNFW_QUANTIZE_TYPE_WO_I16_SYM:
2038 odc_qtype = onert::odc::ODC_QTYPE_WO_I16_SYM;
2039 break;
2040 default:
2041 return NNFW_STATUS_INVALID_STATE;
2042 }
2043 _quant_manager->quantizeType(odc_qtype);
2044 }
2045 catch (const std::exception &e)
2046 {
2047 std::cerr << "Error during Session::set_quantization_type : " << e.what() << std::endl;
2048 return NNFW_STATUS_ERROR;
2049 }
2050
2051 return NNFW_STATUS_NO_ERROR;
2052}

References NNFW_QUANTIZE_TYPE_I16_SYM, NNFW_QUANTIZE_TYPE_U8_ASYM, NNFW_QUANTIZE_TYPE_WO_I16_SYM, NNFW_QUANTIZE_TYPE_WO_I8_SYM, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, onert::odc::ODC_QTYPE_I16_SYM, onert::odc::ODC_QTYPE_NOT_SET, onert::odc::ODC_QTYPE_U8_ASYM, onert::odc::ODC_QTYPE_WO_I16_SYM, and onert::odc::ODC_QTYPE_WO_I8_SYM.

◆ set_quantized_model_path()

NNFW_STATUS onert::api::Session::set_quantized_model_path ( const char *  path)

Definition at line 2054 of file Session.cc.

2055{
2056 try
2057 {
2058 if (isStateInitialized() || isStateRunning())
2059 {
2060 std::cerr << "invalid state" << std::endl;
2061 return NNFW_STATUS_INVALID_STATE;
2062 }
2063
2064 _quant_manager->exportModelPath(std::string(path));
2065 }
2066 catch (const std::exception &e)
2067 {
2068 std::cerr << "Error during Session::set_quantized_model_path : " << e.what() << std::endl;
2069 return NNFW_STATUS_ERROR;
2070 }
2071
2072 return NNFW_STATUS_NO_ERROR;
2073}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.

◆ set_signature_run()

NNFW_STATUS onert::api::Session::set_signature_run ( const char *  signature)

Definition at line 1100 of file Session.cc.

1101{
1102 if (!signature)
1103 return NNFW_STATUS_UNEXPECTED_NULL;
1104
1105 if (!isStatePreparedOrFinishedRun())
1106 {
1107 std::cerr << "Error during Session::set_signature_run : invalid state" << std::endl;
1108 return NNFW_STATUS_INVALID_STATE;
1109 }
1110
1111 for (const auto &[subg_idx, sig_str] : _signature_map)
1112 {
1113 if (sig_str == std::string(signature))
1114 {
1115 _execution =
1116 std::make_unique<onert::exec::Execution>(_compiler_artifact->_executors, subg_idx);
1117 return NNFW_STATUS_NO_ERROR;
1118 }
1119 }
1120
1121 std::cerr << "Error during Session::set_signature_run : cannot find signature" << std::endl;
1122 return NNFW_STATUS_ERROR;
1123}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
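
A short sketch of running a specific signature from a multi-signature model, assuming session is a freshly created onert::api::Session*. The model path and the signature name "serving_default" are placeholders; configure_signature() selects a signature before prepare(), while set_signature_run() switches between signatures on a prepared session:

// Sketch: select a signature before compilation ...
session->load_model_from_path("model_with_signatures.circle"); // path assumed
session->configure_signature("serving_default");
session->prepare();
// ... or, once prepared, pick which signature the next run() uses.
session->set_signature_run("serving_default");
session->run();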

◆ set_workspace()

NNFW_STATUS onert::api::Session::set_workspace ( const char *  dir)

Definition at line 1059 of file Session.cc.

{
  // TODO Check dir read & write permission

  if (!dir)
    return NNFW_STATUS_UNEXPECTED_NULL;

  if (!isStateInitialized())
    return NNFW_STATUS_INVALID_STATE;

  _coptions->workspace_dir = std::string(dir);

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
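Because the call is rejected once a model has been loaded, the workspace must be configured on a freshly created session; a sketch with an illustrative directory and model path:

  session->set_workspace("/tmp/onert_ws");        // must precede load_model_from_path()
  session->load_model_from_path("model.circle");  // illustrative model path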

◆ train_expected_tensorinfo()

NNFW_STATUS onert::api::Session::train_expected_tensorinfo ( uint32_t  index,
nnfw_tensorinfo ti 
)

Definition at line 1668 of file Session.cc.

{
  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_expected_tensorinfo : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  // Check index is valid: [0, getExpectedSize())

  // NYI
  (void)index;
  (void)ti;
  return NNFW_STATUS_ERROR;
}

References NNFW_STATUS_ERROR, and NNFW_STATUS_INVALID_STATE.

◆ train_export_checkpoint()

NNFW_STATUS onert::api::Session::train_export_checkpoint ( const char *  path)

Definition at line 1957 of file Session.cc.

{
  if (path == nullptr)
  {
    std::cerr << "Error during Session::train_export_checkpoint : path is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  // Check training mode is enabled
  if (!isStateFinishedTraining())
  {
    std::cerr << "Error during Session::train_export_checkpoint : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  try
  {
    onert::exporter::train::exportCheckpoint(path, _train_info, _execution);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_export_checkpoint : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References onert::exporter::train::exportCheckpoint(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
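A checkpoint can only be written after at least one training step has finished; a sketch with an illustrative path:

  // After one or more successful train_run() calls:
  if (session->train_export_checkpoint("/tmp/model.ckpt") != NNFW_STATUS_NO_ERROR)
    std::cerr << "checkpoint export failed" << std::endl;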

◆ train_export_circle()

NNFW_STATUS onert::api::Session::train_export_circle ( const char *  path)

Definition at line 1872 of file Session.cc.

{
  if (path == nullptr)
  {
    std::cerr << "Error during Session::train_export_circle : path is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  // Check training mode is enabled
  if (!isStateFinishedTraining())
  {
    std::cerr << "Error during Session::train_export_circle : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  try
  {
    onert::exporter::CircleExporter exporter(_model_path.string(), std::string{path});
    exporter.updateWeight(_execution);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_export_circle : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and onert::exporter::CircleExporter::updateWeight().
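Once training has finished, the updated weights can be written back into a plain circle file; a sketch with an illustrative path:

  if (session->train_export_circle("/tmp/model.trained.circle") != NNFW_STATUS_NO_ERROR)
    std::cerr << "circle export failed" << std::endl;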

◆ train_export_circleplus()

NNFW_STATUS onert::api::Session::train_export_circleplus ( const char *  path)

Definition at line 1901 of file Session.cc.

{
  if (path == nullptr)
  {
    std::cerr << "Error during Session::train_export_circleplus : path is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_export_circleplus : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  try
  {
    onert::exporter::CircleExporter exporter(_model_path.string(), std::string{path});
    exporter.updateWeight(_execution);
    exporter.updateMetadata(_train_info);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_export_circleplus : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, onert::exporter::CircleExporter::updateMetadata(), and onert::exporter::CircleExporter::updateWeight().

◆ train_get_loss()

NNFW_STATUS onert::api::Session::train_get_loss ( uint32_t  index,
float *  loss 
)

Definition at line 1838 of file Session.cc.

{
  if (loss == nullptr)
  {
    std::cerr << "Error during Session::train_get_loss : loss is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  if (!isStateFinishedTraining())
  {
    std::cerr << "Error during Session::train_get_loss : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  if (index >= getOutputSize())
  {
    std::cerr << "Error during Session::train_get_loss : index is out of range" << std::endl;
    return NNFW_STATUS_ERROR;
  }

  try
  {
    auto ind = onert::ir::IOIndex(index);
    *loss = _execution->getLoss(ind);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_get_loss : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
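Losses are reported per model output and are only meaningful after train_run(); a sketch that averages them over all outputs (session is assumed to have finished a training step):

  uint32_t num_outputs = 0;
  session->output_size(&num_outputs);
  float sum = 0.0f;
  for (uint32_t i = 0; i < num_outputs; ++i)
  {
    float loss = 0.0f;
    if (session->train_get_loss(i, &loss) == NNFW_STATUS_NO_ERROR)
      sum += loss;
  }
  std::cout << "mean loss: " << (num_outputs ? sum / num_outputs : 0.0f) << std::endl;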

◆ train_get_traininfo()

NNFW_STATUS onert::api::Session::train_get_traininfo ( nnfw_train_info info)

Definition at line 1392 of file Session.cc.

{
  if (isStateInitialized())
  {
    // There is no _train_info in INITIALIZED, since _train_info is set when a model loaded
    std::cerr << "Error during Session::train_get_traininfo : invalid state";
    return NNFW_STATUS_INVALID_STATE;
  }

  if (info == nullptr)
  {
    std::cerr << "Error during Session::train_get_traininfo : info is nullptr" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  // after model loaded, it ensures that _train_info is not nullptr
  assert(_train_info != nullptr);

  auto convertLossCode = [](const onert::ir::train::LossCode &code) -> NNFW_TRAIN_LOSS {
    switch (code)
    {
      case onert::ir::train::LossCode::Undefined:
        return NNFW_TRAIN_LOSS_UNDEFINED;
      case onert::ir::train::LossCode::MeanSquaredError:
        return NNFW_TRAIN_LOSS_MEAN_SQUARED_ERROR;
      case onert::ir::train::LossCode::CategoricalCrossentropy:
        return NNFW_TRAIN_LOSS_CATEGORICAL_CROSSENTROPY;
      default:
        throw std::runtime_error{"fail to convert ir::train::LossCode"};
    }
  };

  auto convertLossReduction =
    [](const onert::ir::train::LossReductionType &type) -> NNFW_TRAIN_LOSS_REDUCTION {
    switch (type)
    {
      case onert::ir::train::LossReductionType::Undefined:
        return NNFW_TRAIN_LOSS_REDUCTION_UNDEFINED;
      case onert::ir::train::LossReductionType::SumOverBatchSize:
        return NNFW_TRAIN_LOSS_REDUCTION_SUM_OVER_BATCH_SIZE;
      case onert::ir::train::LossReductionType::Sum:
        return NNFW_TRAIN_LOSS_REDUCTION_SUM;
      default:
        throw std::runtime_error{"fail to convert from ir::train::LossReductionType"};
        break;
    }
  };

  auto convertOptimizerCode =
    [](const onert::ir::train::OptimizerCode &code) -> NNFW_TRAIN_OPTIMIZER {
    switch (code)
    {
      case onert::ir::train::OptimizerCode::Undefined:
        return NNFW_TRAIN_OPTIMIZER_UNDEFINED;
      case onert::ir::train::OptimizerCode::SGD:
        return NNFW_TRAIN_OPTIMIZER_SGD;
      case onert::ir::train::OptimizerCode::Adam:
        return NNFW_TRAIN_OPTIMIZER_ADAM;
      default:
        throw std::runtime_error{"fail to convert from ir::train::OptimizerCode"};
    }
  };

  const auto &loss = _train_info->lossInfo();
  const auto &optim = _train_info->optimizerInfo();

  try
  {
    info->learning_rate = optim.learning_rate;
    info->batch_size = _train_info->batchSize();
    info->loss_info.loss = convertLossCode(loss.loss_code);
    info->loss_info.reduction_type = convertLossReduction(loss.reduction_type);
    info->opt = convertOptimizerCode(optim.optim_code);

    if (_train_info->getTrainableOps().size() > 0)
    {
      const uint32_t first_trainable_idx = _train_info->getTrainableOps().cbegin()->value();
      const uint32_t last_trainable_idx = _train_info->getTrainableOps().crbegin()->value();
      const uint32_t ops_size = primary_subgraph()->operations().size();
      const uint32_t trainable_indexes_range = last_trainable_idx - first_trainable_idx + 1;

      // check if trainable ops set contains continuous indexes on the back of the set
      if (last_trainable_idx == ops_size - 1 &&
          trainable_indexes_range == _train_info->getTrainableOps().size())
      {
        // check if all ops are trainable
        if (0 == first_trainable_idx)
        {
          info->num_of_trainable_ops = NNFW_TRAIN_TRAINABLE_ALL;
        }
        else
        {
          info->num_of_trainable_ops = trainable_indexes_range;
        }
      }
      else
      {
        info->num_of_trainable_ops = NNFW_TRAIN_TRAINABLE_INCORRECT_STATE;
        std::cerr << "conversion from set of trainable ops to num_of_trainable_ops is impossible"
                  << std::endl;
        return NNFW_STATUS_INVALID_STATE;
      }
    }
    else
    {
      // no layer will be trained
      info->num_of_trainable_ops = NNFW_TRAIN_TRAINABLE_NONE;
    }
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_get_traininfo" << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References onert::ir::train::Adam, onert::ir::train::CategoricalCrossentropy, info, onert::ir::train::MeanSquaredError, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, NNFW_TRAIN_LOSS_CATEGORICAL_CROSSENTROPY, NNFW_TRAIN_LOSS_MEAN_SQUARED_ERROR, NNFW_TRAIN_LOSS_REDUCTION_SUM, NNFW_TRAIN_LOSS_REDUCTION_SUM_OVER_BATCH_SIZE, NNFW_TRAIN_LOSS_REDUCTION_UNDEFINED, NNFW_TRAIN_LOSS_UNDEFINED, NNFW_TRAIN_OPTIMIZER_ADAM, NNFW_TRAIN_OPTIMIZER_SGD, NNFW_TRAIN_OPTIMIZER_UNDEFINED, NNFW_TRAIN_TRAINABLE_ALL, NNFW_TRAIN_TRAINABLE_INCORRECT_STATE, NNFW_TRAIN_TRAINABLE_NONE, onert::ir::train::SGD, onert::ir::train::Sum, onert::ir::train::SumOverBatchSize, type, and onert::ir::train::Undefined.
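A sketch of reading back the effective training configuration after a model has been loaded (fields follow the nnfw_train_info layout used above):

  nnfw_train_info tinfo;
  if (session->train_get_traininfo(&tinfo) == NNFW_STATUS_NO_ERROR)
  {
    std::cout << "learning rate: " << tinfo.learning_rate
              << ", batch size: " << tinfo.batch_size
              << ", trainable ops: " << tinfo.num_of_trainable_ops << std::endl;
  }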

◆ train_import_checkpoint()

NNFW_STATUS onert::api::Session::train_import_checkpoint ( const char *  path)

Definition at line 1930 of file Session.cc.

{
  if (path == nullptr)
  {
    std::cerr << "Error during Session::train_import_checkpoint : path is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_import_checkpoint : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  try
  {
    onert::loader::train::loadCheckpoint(path, _train_info, _execution);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_import_checkpoint : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References onert::loader::train::loadCheckpoint(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and NNFW_STATUS_UNEXPECTED_NULL.
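A sketch of resuming training from a previously exported checkpoint; the path is illustrative and the session must already be prepared for training:

  // After train_prepare():
  if (session->train_import_checkpoint("/tmp/model.ckpt") != NNFW_STATUS_NO_ERROR)
    std::cerr << "failed to restore checkpoint" << std::endl;
  // ...then continue calling train_run() as usual.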

◆ train_input_tensorinfo()

NNFW_STATUS onert::api::Session::train_input_tensorinfo ( uint32_t  index,
nnfw_tensorinfo ti 
)

Definition at line 1652 of file Session.cc.

{
  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_input_tensorinfo : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  // Check index is valid: [0, getInputSize())

  // NYI
  (void)index;
  (void)ti;
  return NNFW_STATUS_ERROR;
}

References NNFW_STATUS_ERROR, and NNFW_STATUS_INVALID_STATE.

◆ train_prepare()

NNFW_STATUS onert::api::Session::train_prepare ( )

Definition at line 1612 of file Session.cc.

{
  // We may need different state to represent training model is loaded
  if (!isStateModelLoaded())
  {
    std::cerr << "Error during model prepare training: ";
    if (_state == State::PREPARED_TRAINING)
      std::cerr << "prepare should be run once";
    else
      std::cerr << "invalid state";
    std::cerr << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  // after model loaded, it ensures that _train_info is not nullptr
  assert(_train_info != nullptr);

  try
  {
    if (not _train_info->isValid())
      throw std::runtime_error{"training info is not valid"};

    // initialize trainingStep count
    _train_info->trainingStep() = 0;

    auto compiler = onert::compiler::CompilerFactory::get().create(
      std::move(_nnpkg), _coptions.get(), _train_info.get());
    _compiler_artifact = compiler->compile();
    _execution = std::make_unique<onert::exec::Execution>(_compiler_artifact->_executors);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_prepare : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  _state = State::PREPARED_TRAINING;
  return NNFW_STATUS_NO_ERROR;
}

References onert::compiler::CompilerFactory::create(), onert::compiler::CompilerFactory::get(), NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.
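A sketch of the minimal sequence that reaches the PREPARED_TRAINING state; the model path is illustrative, and whether train_set_traininfo() is required first depends on whether the loaded model already embeds valid training info:

  session->load_model_from_path("model.circle");   // illustrative path
  // Set or adjust hyperparameters here via train_set_traininfo() if needed.
  if (session->train_prepare() != NNFW_STATUS_NO_ERROR)
    std::cerr << "train_prepare failed" << std::endl;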

◆ train_run()

NNFW_STATUS onert::api::Session::train_run ( bool  update_weights)

Definition at line 1804 of file Session.cc.

{
  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_run : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  try
  {
    if (update_weights)
    {
      auto &training_step = _train_info->trainingStep();
      _execution->train(training_step++);
    }
    else
      _execution->execute();
  }
  catch (const onert::InsufficientBufferSizeException &e)
  {
    // Currently insufficient buffer always means output buffer.
    std::cerr << "Error during Session::train_run : " << e.what() << std::endl;
    return NNFW_STATUS_INSUFFICIENT_OUTPUT_SIZE;
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_run : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  _state = State::FINISHED_TRAINING;
  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INSUFFICIENT_OUTPUT_SIZE, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, and onert::Exception::what().
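A sketch of a simple epoch loop: update_weights=true performs a weight-update step, while update_weights=false runs the forward pass only (useful for validation). Input and expected buffers are assumed to have been bound already:

  const int epochs = 5;                         // illustrative
  for (int e = 0; e < epochs; ++e)
  {
    if (session->train_run(true) != NNFW_STATUS_NO_ERROR)
      break;
    float loss = 0.0f;
    session->train_get_loss(0, &loss);
    std::cout << "epoch " << e << " loss " << loss << std::endl;
  }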

◆ train_set_expected()

NNFW_STATUS onert::api::Session::train_set_expected ( uint32_t  index,
const void *  expected,
const nnfw_tensorinfo expected_tensorinfo 
)

Definition at line 1727 of file Session.cc.

{
  if (expected == nullptr)
  {
    std::cerr << "Error during Session::train_set_expected : expected buffer is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_set_expected : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  if (index >= getOutputSize())
  {
    std::cerr << "Error during Session::train_set_expected : index is out of range" << std::endl;
    return NNFW_STATUS_ERROR;
  }

  try
  {
    const auto ind = onert::ir::IOIndex{index};
    auto size = _execution->outputInfo(ind).total_size();
    if (expected_tensorinfo && getBufSize(expected_tensorinfo) != size)
    {
      std::cerr << "Error during Session::train_set_expected : invalid tensorinfo" << std::endl;
      return NNFW_STATUS_ERROR;
    }

    // NOTE Find the loss input index
    // Input is added as many as the number of outputs.
    // The loss index is calculated from the value obtained by subtracting the
    // total output(added loss input) from the total input size.
    auto input_index = getInputSize() - getOutputSize() + index;
    auto input_ind = onert::ir::IOIndex(input_index);
    _execution->setInput(input_ind, expected, size);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_set_expected : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and size.

◆ train_set_input()

NNFW_STATUS onert::api::Session::train_set_input ( uint32_t  index,
const void *  input,
const nnfw_tensorinfo input_tensorinfo 
)

Definition at line 1684 of file Session.cc.

{
  if (input == nullptr)
  {
    std::cerr << "Error during Session::train_set_input : input buffer is null" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_set_input : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  if (index >= getInputSize())
  {
    std::cerr << "Error during Session::train_set_input : index is out of range" << std::endl;
    return NNFW_STATUS_ERROR;
  }

  try
  {
    auto ind = onert::ir::IOIndex(index);
    auto size = _execution->inputInfo(ind).total_size();
    if (input_tensorinfo && getBufSize(input_tensorinfo) != size)
    {
      std::cerr << "Error during Session::train_set_input : not supporeted to change tensorinfo"
                << std::endl;
      return NNFW_STATUS_ERROR;
    }

    _execution->setInput(ind, input, size);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_set_input : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, and size.
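Input and expected (label) buffers are bound per batch before each train_run(); a sketch assuming a single float input and a single float output, with illustrative element counts that must match the prepared model's tensor sizes:

  const size_t in_elems = 32 * 784, out_elems = 32 * 10;   // illustrative
  std::vector<float> x(in_elems), y(out_elems);
  session->train_set_input(0, x.data(), nullptr);      // nullptr keeps the model's tensorinfo
  session->train_set_expected(0, y.data(), nullptr);
  session->train_run(true);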

◆ train_set_output()

NNFW_STATUS onert::api::Session::train_set_output ( uint32_t  index,
NNFW_TYPE  type,
void *  buffer,
size_t  length 
)

Definition at line 1775 of file Session.cc.

{
  if (!isStatePreparedOrFinishedTraining())
  {
    std::cerr << "Error during Session::train_set_output : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  if (!buffer && length != 0)
  {
    std::cerr << "Error during Session::train_set_output : given buffer is NULL but the "
                 "length is not 0"
              << std::endl;
    return NNFW_STATUS_ERROR;
  }

  try
  {
    _execution->setOutput(onert::ir::IOIndex(index), buffer, length);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_set_output : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }
  return NNFW_STATUS_NO_ERROR;
}

References NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, and NNFW_STATUS_NO_ERROR.
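Binding an output buffer is optional during training, but it lets the caller inspect the forward-pass predictions; a sketch with an illustrative element count:

  std::vector<float> pred(32 * 10);               // illustrative size, must match the output
  session->train_set_output(0, NNFW_TYPE_TENSOR_FLOAT32, pred.data(),
                            pred.size() * sizeof(float));
  session->train_run(false);                      // forward only; pred now holds the output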

◆ train_set_traininfo()

NNFW_STATUS onert::api::Session::train_set_traininfo ( const nnfw_train_info info)

Definition at line 1510 of file Session.cc.

{
  if (not isStateModelLoaded())
  {
    std::cerr << "Error during Session::train_set_traininfo : invalid state" << std::endl;
    return NNFW_STATUS_INVALID_STATE;
  }

  if (info == nullptr)
  {
    std::cerr << "Session::train_set_traininfo : info is nullptr" << std::endl;
    return NNFW_STATUS_UNEXPECTED_NULL;
  }

  // after model loaded, it ensures that _train_info is not nullptr
  assert(_train_info != nullptr);

  auto convertLossType = [](const int &type) {
    if (type == NNFW_TRAIN_LOSS_MEAN_SQUARED_ERROR)
      return onert::ir::train::LossCode::MeanSquaredError;
    else if (type == NNFW_TRAIN_LOSS_CATEGORICAL_CROSSENTROPY)
      return onert::ir::train::LossCode::CategoricalCrossentropy;
    else
      throw std::runtime_error("not supported loss type");
  };

  auto convertLossReductionType = [](const int &type) {
    if (type == NNFW_TRAIN_LOSS_REDUCTION_SUM_OVER_BATCH_SIZE)
      return onert::ir::train::LossReductionType::SumOverBatchSize;
    else if (type == NNFW_TRAIN_LOSS_REDUCTION_SUM)
      return onert::ir::train::LossReductionType::Sum;
    else
      throw std::runtime_error("not supported loss reduction type");
  };

  auto convertOptType = [](const int &type) {
    if (type == NNFW_TRAIN_OPTIMIZER_SGD)
      return onert::ir::train::OptimizerCode::SGD;
    else if (type == NNFW_TRAIN_OPTIMIZER_ADAM)
      return onert::ir::train::OptimizerCode::Adam;
    else
      throw std::runtime_error("not supported optimizer type");
  };

  try
  {
    onert::ir::train::LossInfo loss_info;
    loss_info.loss_code = convertLossType(info->loss_info.loss);
    loss_info.reduction_type = convertLossReductionType(info->loss_info.reduction_type);

    onert::ir::train::OptimizerInfo opt_info;
    opt_info.learning_rate = info->learning_rate;
    opt_info.optim_code = convertOptType(info->opt);

    _train_info->setBatchSize(info->batch_size);
    _train_info->setLossInfo(loss_info);
    _train_info->setOptimizerInfo(opt_info);

    if (info->num_of_trainable_ops < -1)
    {
      std::cerr << "Error during Session::train_set_traininfo: provided num_of_trainable_ops "
                   "has incorrect value: "
                << info->num_of_trainable_ops << std::endl;
      return NNFW_STATUS_ERROR;
    }

    const uint32_t ops_size = primary_subgraph()->operations().size();
    std::set<onert::ir::OperationIndex> trainable_ops;

    if (NNFW_TRAIN_TRAINABLE_ALL == info->num_of_trainable_ops)
    {
      for (uint32_t idx = 0; idx < ops_size; ++idx)
      {
        trainable_ops.emplace(idx);
      }
    }
    else
    {
      if (static_cast<uint32_t>(info->num_of_trainable_ops) > ops_size)
      {
        std::cerr << "Error during Session::train_set_traininfo: provided num_of_trainable_ops="
                  << info->num_of_trainable_ops << " is out of operators range equals: " << ops_size
                  << std::endl;
        return NNFW_STATUS_ERROR;
      }
      for (uint32_t i = 1; i <= static_cast<uint32_t>(info->num_of_trainable_ops); ++i)
      {
        trainable_ops.emplace(ops_size - i);
      }
    }
    // Note that possible setting an empty trainable_ops set (for NNFW_TRAIN_TRAINABLE_NONE value)
    _train_info->setTrainableOps(trainable_ops);
  }
  catch (const std::exception &e)
  {
    std::cerr << "Error during Session::train_set_traininfo : " << e.what() << std::endl;
    return NNFW_STATUS_ERROR;
  }

  return NNFW_STATUS_NO_ERROR;
}

References onert::ir::train::Adam, onert::ir::train::CategoricalCrossentropy, info, onert::ir::train::OptimizerInfo::learning_rate, onert::ir::train::LossInfo::loss_code, onert::ir::train::MeanSquaredError, NNFW_STATUS_ERROR, NNFW_STATUS_INVALID_STATE, NNFW_STATUS_NO_ERROR, NNFW_STATUS_UNEXPECTED_NULL, NNFW_TRAIN_LOSS_CATEGORICAL_CROSSENTROPY, NNFW_TRAIN_LOSS_MEAN_SQUARED_ERROR, NNFW_TRAIN_LOSS_REDUCTION_SUM, NNFW_TRAIN_LOSS_REDUCTION_SUM_OVER_BATCH_SIZE, NNFW_TRAIN_OPTIMIZER_ADAM, NNFW_TRAIN_OPTIMIZER_SGD, NNFW_TRAIN_TRAINABLE_ALL, onert::ir::train::OptimizerInfo::optim_code, onert::ir::train::LossInfo::reduction_type, onert::ir::train::SGD, onert::ir::train::Sum, onert::ir::train::SumOverBatchSize, and type.
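A sketch of overriding the training hyperparameters right after the model is loaded; the values are illustrative:

  nnfw_train_info tinfo;
  session->train_get_traininfo(&tinfo);           // start from the model's defaults
  tinfo.learning_rate = 0.001f;
  tinfo.batch_size = 32;
  tinfo.loss_info.loss = NNFW_TRAIN_LOSS_CATEGORICAL_CROSSENTROPY;
  tinfo.loss_info.reduction_type = NNFW_TRAIN_LOSS_REDUCTION_SUM_OVER_BATCH_SIZE;
  tinfo.opt = NNFW_TRAIN_OPTIMIZER_ADAM;
  tinfo.num_of_trainable_ops = NNFW_TRAIN_TRAINABLE_ALL;   // train every operation
  if (session->train_set_traininfo(&tinfo) != NNFW_STATUS_NO_ERROR)
    std::cerr << "train_set_traininfo failed" << std::endl;
  session->train_prepare();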


The documentation for this struct was generated from the following files: