ONE - On-device Neural Engine
onert::backend::trix Namespace Reference

Namespaces

namespace  ops
 

Data Structures

class  Backend
 
class  BackendContext
 
class  BatchThreadPool
 Class that has a threadpool for batch-by-batch multi-threading. More...
 
class  Config
 
class  DevContext
 NPU device context of trix backend. More...
 
class  KernelGenerator
 

Typedefs

using ModelID = uint32_t
 
using Tensor = basic::Tensor
 
using ExternalTensor = basic::ExternalTensor
 
using TensorBuilder = basic::TensorBuilder
 

Functions

data_type convertDataType (const ir::DataType type)
 Convert a data type from the onert representation to the NPU representation.
 
template<typename T , std::enable_if_t< std::is_base_of< IPortableTensor, T >::value, bool > = true>
void setDataInfo (const std::vector< T * > &tensors, tensors_data_info *info)
 Set the tensors_data_info object.
 
template<typename T , std::enable_if_t< std::is_base_of< IPortableTensor, T >::value, bool > = true>
void setBuffers (const std::vector< T * > &tensors, generic_buffers *buf)
 Set the generic_buffers object.
 

Typedef Documentation

◆ ExternalTensor

using onert::backend::trix::ExternalTensor = basic::ExternalTensor

Definition at line 31 of file Tensor.h.

◆ ModelID

using onert::backend::trix::ModelID = uint32_t

Definition at line 34 of file DevContext.h.

◆ Tensor

using onert::backend::trix::Tensor = basic::Tensor

Definition at line 30 of file Tensor.h.

◆ TensorBuilder

using onert::backend::trix::TensorBuilder = basic::TensorBuilder

Definition at line 29 of file TensorBuilder.h.

Function Documentation

◆ convertDataType()

data_type onert::backend::trix::convertDataType(const ir::DataType type)

Convert a data type from the onert representation to the NPU representation.

Parameters
    type  Data type in onert
Returns
    data_type  Data type in NPU

Definition at line 26 of file Convert.cc.

{
  switch (type)
  {
    case ir::DataType::QUANT_UINT8_ASYMM:
      return DATA_TYPE_QASYMM8;
    case ir::DataType::QUANT_INT16_SYMM:
      return DATA_TYPE_QSYMM16;
    default:
      throw std::runtime_error("Unsupported data type");
  }
}

Referenced by setDataInfo().
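
As a rough usage sketch (not taken from the source; the include path and the helper name toNpuType are assumptions), a caller could translate an onert element type before filling an NPU request:

// Hedged sketch: the header path is assumed; convertDataType(), ir::DataType
// and the DATA_TYPE_* values are as documented above.
#include "Convert.h"

data_type toNpuType(const onert::ir::DataType t)
{
  // Only QUANT_UINT8_ASYMM and QUANT_INT16_SYMM are accepted; anything else
  // makes convertDataType() throw std::runtime_error("Unsupported data type").
  return onert::backend::trix::convertDataType(t);
}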

◆ setBuffers()

template<typename T , std::enable_if_t< std::is_base_of< IPortableTensor, T >::value, bool > = true>
void onert::backend::trix::setBuffers(const std::vector<T *> &tensors, generic_buffers *buf)

Set the generic_buffers object.

Template Parameters
    T  Type of tensor based on IPortableTensor
Parameters
    tensors  Tensors that have buffer information
    buf  generic_buffers to be set

Definition at line 69 of file Convert.h.

{
  buf->num_buffers = static_cast<uint32_t>(tensors.size());

  for (uint32_t idx = 0; idx < buf->num_buffers; ++idx)
  {
    buf->bufs[idx].addr = tensors[idx]->buffer();
    buf->bufs[idx].size = static_cast<uint64_t>(tensors[idx]->total_size());
    buf->bufs[idx].type = BUFFER_MAPPED;
  }
}

Referenced by onert::backend::trix::ops::BulkLayer::run().
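
As a hedged usage sketch (assumed, not from the source; fillBuffers is a hypothetical helper), a kernel such as BulkLayer::run() could populate the NPU buffer descriptors from its tensor list:

// Hedged sketch: generic_buffers comes from the NPU engine headers and
// IPortableTensor from onert; the include path for Convert.h is assumed.
#include "Convert.h"
#include <vector>

void fillBuffers(const std::vector<onert::backend::IPortableTensor *> &tensors,
                 generic_buffers &bufs)
{
  // setBuffers() stores each tensor's raw buffer pointer and byte size and
  // marks every entry as BUFFER_MAPPED.
  onert::backend::trix::setBuffers(tensors, &bufs);
}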

◆ setDataInfo()

template<typename T , std::enable_if_t< std::is_base_of< IPortableTensor, T >::value, bool > = true>
void onert::backend::trix::setDataInfo(const std::vector<T *> &tensors, tensors_data_info *info)

Set the tensors_data_info object.

Template Parameters
    T  Type of tensor based on IPortableTensor
Parameters
    tensors  Tensors that have data information
    info  tensors_data_info to be set

Definition at line 50 of file Convert.h.

{
  info->num_info = static_cast<uint32_t>(tensors.size());

  for (uint32_t idx = 0; idx < info->num_info; ++idx)
  {
    info->info[idx].layout = DATA_LAYOUT_NHWC;
    info->info[idx].type = convertDataType(tensors[idx]->data_type());
  }
}

References convertDataType().

Referenced by onert::backend::trix::ops::BulkLayer::run().
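
A similar hedged sketch for setDataInfo() (describeTensors is a hypothetical name; the same header assumptions apply):

// Hedged sketch: tensors_data_info comes from the NPU engine headers.
#include "Convert.h"
#include <vector>

void describeTensors(const std::vector<onert::backend::IPortableTensor *> &tensors,
                     tensors_data_info &info)
{
  // setDataInfo() sets num_info, fixes the layout to DATA_LAYOUT_NHWC and
  // translates each tensor's element type via convertDataType().
  onert::backend::trix::setDataInfo(tensors, &info);
}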