ONE - On-device Neural Engine
Loading...
Searching...
No Matches
MemoryManager.cc
Go to the documentation of this file.
1/*
2 * Copyright (c) 2024 Samsung Electronics Co., Ltd. All Rights Reserved
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include "MemoryManager.h"

#include "MemoryPlannerFactory.h"

#include <util/ConfigSource.h>

#include <cassert>
24
25namespace onert
26{
27namespace backend
28{
29namespace train
30{
31
33 : _optim_vars_count{optim_vars_count}
34{
35 // DO NOTHING
36}
37
39{
40 _mem_alloc = std::make_shared<basic::Allocator>(_mem_planner->capacity());
41 assert(_mem_alloc->base());
42
43 const auto vars_capacity = _mem_planner->capacity() * _optim_vars_count;
44 _var_mem_alloc = std::make_shared<basic::Allocator>(vars_capacity);
45}
46
48 uint32_t pos_var) const
49{
50 assert(_mem_planner->memory_plans().find(ind) != _mem_planner->memory_plans().end());
51 const auto var_offset = pos_var * _mem_planner->capacity();
52 const auto &mem_blk = _mem_planner->memory_plans().at(ind);
53 return _var_mem_alloc->base() + var_offset + mem_blk.offset;
54}
55
56DisposableMemoryManager::DisposableMemoryManager() : _mem_planner{createMemoryPlanner()}
57{
58 // DO NOTHING
59}
60
61basic::IMemoryPlanner<DisposableTensorIndex> *DisposableMemoryManager::createMemoryPlanner()
62{
63 auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
64 return MemoryPlannerFactory<DisposableTensorIndex>::get().create(planner_id);
65}
66
68DisposableMemoryManager::createMemoryPlanner(const std::string planner_id)
69{
70 return MemoryPlannerFactory<DisposableTensorIndex>::get().create(planner_id);
71}
72
74{
75 _mem_planner->claim(ind, size);
76}
77
79{
80 _mem_planner->release(ind);
81}
82
84{
85 _mem_alloc = std::make_shared<basic::Allocator>(_mem_planner->capacity());
86 assert(_mem_alloc->base());
87}
88
90{
91 assert(_mem_planner->memory_plans().find(ind) != _mem_planner->memory_plans().end());
92 const auto &mem_blk = _mem_planner->memory_plans().at(ind);
93 return _mem_alloc->base() + mem_blk.offset;
94}
95
96LayerScopeMemoryManager::LayerScopeMemoryManager() : _mem_planner{createMemoryPlanner()}
97{
98 // DO NOTHING
99}
100
101basic::IMemoryPlanner<LayerScopeTensorIndex> *LayerScopeMemoryManager::createMemoryPlanner()
102{
103 auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
104 return MemoryPlannerFactory<LayerScopeTensorIndex>::get().create(planner_id);
105}
106
108{
109 _mem_alloc = std::make_shared<basic::Allocator>(_mem_planner->capacity());
110 assert(_mem_alloc->base());
111}
112
114{
115 assert(_mem_planner->memory_plans().find(ind) != _mem_planner->memory_plans().end());
116 const auto &mem_blk = _mem_planner->memory_plans().at(ind);
117 return _mem_alloc->base() + mem_blk.offset;
118}
119
120void LayerScopeMemoryManager::deallocate(void) { _mem_alloc->release(); }
121
123{
124 _mem_planner->claim(ind, size);
125}
126
128{
129 _mem_planner->release(ind);
130}
131
132} // namespace train
133} // namespace backend
134} // namespace onert
std::shared_ptr< Allocator > _mem_alloc
std::shared_ptr< IMemoryPlanner< ir::OperandIndex > > _mem_planner
void releasePlan(const DisposableTensorIndex &ind)
uint8_t * getBuffer(const DisposableTensorIndex &ind) const
void claimPlan(const DisposableTensorIndex &ind, uint32_t size)
Class that is index of DisposableTensor.
void claimPlan(const LayerScopeTensorIndex &ind, uint32_t size)
uint8_t * getBuffer(const LayerScopeTensorIndex &ind) const
void releasePlan(const LayerScopeTensorIndex &ind)
static MemoryPlannerFactory< Index > & get()
TrainableMemoryManager(uint32_t optimizer_vars_count)
uint8_t * getOptVarBuffer(const ir::OperandIndex &ind, uint32_t pos_var) const
std::string getConfigString(const std::string &key)
int32_t size[5]
Definition Slice.cpp:35