ONE - On-device Neural Engine
onert::backend::trix Namespace Reference

Namespaces

namespace  ops
 

Data Structures

class  Backend
 
class  BackendContext
 
class  BatchThreadPool
 Class that has a threadpool for batch-by-batch multi-threading.
 
class  Config
 
class  DevContext
 NPU device context of trix backend.
 
class  KernelGenerator
 

Typedefs

using ModelID = uint32_t
 
using Tensor = basic::Tensor
 
using ExternalTensor = basic::ExternalTensor
 
using TensorBuilder = basic::TensorBuilder
 

Functions

data_type convertDataType (const ir::DataType type)
 Convert a data type from the onert type to the NPU type.
 
template<typename T , std::enable_if_t< std::is_base_of_v< IPortableTensor, T >, bool > = true>
void setDataInfo (const std::vector< T * > &tensors, tensors_data_info *info)
 Set the tensors_data_info object.
 
template<typename T , std::enable_if_t< std::is_base_of_v< IPortableTensor, T >, bool > = true>
void setBuffers (const std::vector< T * > &tensors, generic_buffers *buf)
 Set the generic_buffers object.
 

Typedef Documentation

◆ ExternalTensor

Definition at line 27 of file Tensor.h.

◆ ModelID

using onert::backend::trix::ModelID = typedef uint32_t

Definition at line 30 of file DevContext.h.

◆ Tensor

Definition at line 26 of file Tensor.h.

◆ TensorBuilder

Definition at line 25 of file TensorBuilder.h.

Function Documentation

◆ convertDataType()

data_type onert::backend::trix::convertDataType (const ir::DataType type)

Convert a data type from the onert type to the NPU type.

Parameters
    type    Data type in onert

Returns
    data_type    Data type in npu

Definition at line 22 of file Convert.cc.

{
  switch (type)
  {
    case ir::DataType::QUANT_UINT8_ASYMM:
      return DATA_TYPE_QASYMM8;
    case ir::DataType::QUANT_INT16_SYMM:
      return DATA_TYPE_QSYMM16;
    default:
      throw std::runtime_error("Unsupported data type");
  }
}

Referenced by setDataInfo().
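
A minimal usage sketch (illustrative only; the wrapper function is hypothetical, and Convert.h is assumed to bring in the required onert and npu declarations):

#include "Convert.h" // declares onert::backend::trix::convertDataType()

data_type exampleNpuType()
{
  // Maps onert's asymmetric uint8 quantized type to the npu enum;
  // the expected result is DATA_TYPE_QASYMM8.
  return onert::backend::trix::convertDataType(onert::ir::DataType::QUANT_UINT8_ASYMM);
}

As the listing above shows, any onert type other than QUANT_UINT8_ASYMM and QUANT_INT16_SYMM falls through to the default case and raises std::runtime_error.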

◆ setBuffers()

template<typename T , std::enable_if_t< std::is_base_of_v< IPortableTensor, T >, bool > = true>
void onert::backend::trix::setBuffers (const std::vector< T * > &tensors, generic_buffers *buf)

Set the generic_buffers object.

Template Parameters
    T    Type of tensor derived from IPortableTensor

Parameters
    tensors    Tensors that have buffer information
    buf    generic_buffers to be set

Definition at line 65 of file Convert.h.

{
  buf->num_buffers = static_cast<uint32_t>(tensors.size());

  for (uint32_t idx = 0; idx < buf->num_buffers; ++idx)
  {
    buf->bufs[idx].addr = tensors[idx]->buffer();
    buf->bufs[idx].size = static_cast<uint64_t>(tensors[idx]->total_size());
    buf->bufs[idx].type = BUFFER_MAPPED;
  }
}

Referenced by onert::backend::trix::ops::BulkLayer::run().
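
A hedged sketch of a typical call site; the wrapper function and parameter names are hypothetical (the only caller documented here is onert::backend::trix::ops::BulkLayer::run()), and Convert.h is again assumed to bring in the required declarations:

#include "Convert.h" // declares onert::backend::trix::setBuffers()

// `outputs` stands for a kernel's portable output tensors (hypothetical name).
void fillOutputBuffers(const std::vector<onert::backend::IPortableTensor *> &outputs,
                       generic_buffers *out_bufs)
{
  // Records one BUFFER_MAPPED entry (buffer address and total byte size)
  // per tensor, plus the buffer count.
  onert::backend::trix::setBuffers(outputs, out_bufs);
}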

◆ setDataInfo()

template<typename T , std::enable_if_t< std::is_base_of_v< IPortableTensor, T >, bool > = true>
void onert::backend::trix::setDataInfo (const std::vector< T * > &tensors, tensors_data_info *info)

Set the tensors_data_info object.

Template Parameters
    T    Type of tensor derived from IPortableTensor

Parameters
    tensors    Tensors that have data information
    info    tensors_data_info to be set

Definition at line 46 of file Convert.h.

{
  info->num_info = static_cast<uint32_t>(tensors.size());

  for (uint32_t idx = 0; idx < info->num_info; ++idx)
  {
    info->info[idx].layout = DATA_LAYOUT_NHWC;
    info->info[idx].type = convertDataType(tensors[idx]->data_type());
  }
}

References convertDataType().

Referenced by onert::backend::trix::ops::BulkLayer::run().
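
A hedged sketch of the matching setDataInfo() call; as in the previous sketch, the wrapper function and parameter names are hypothetical and BulkLayer::run() is the caller documented here:

#include "Convert.h" // declares onert::backend::trix::setDataInfo()

// `inputs` stands for a kernel's portable input tensors (hypothetical name).
void fillInputInfo(const std::vector<onert::backend::IPortableTensor *> &inputs,
                   tensors_data_info *in_info)
{
  // Records DATA_LAYOUT_NHWC and the npu data type (via convertDataType())
  // for each tensor, plus the tensor count.
  onert::backend::trix::setDataInfo(inputs, in_info);
}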