ONE - On-device Neural Engine
MemoryManager.cc
/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "backend/basic/MemoryManager.h"

#include <cassert>

#include "MemoryPlannerFactory.h"
#include "util/ConfigSource.h"
#include "util/logging.h"

namespace onert
{
namespace backend
{
namespace basic
{

MemoryManager::MemoryManager() : _mem_planner{createMemoryPlanner()}
{
  // DO NOTHING
}

MemoryManager::MemoryManager(const std::string planner_id)
  : _mem_planner{createMemoryPlanner(planner_id)}
{
  // DO NOTHING
}

basic::IMemoryPlanner<ir::OperandIndex> *MemoryManager::createMemoryPlanner()
{
  auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
  return basic::MemoryPlannerFactory::get().create(planner_id);
}

basic::IMemoryPlanner<ir::OperandIndex> *
MemoryManager::createMemoryPlanner(const std::string planner_id)
{
  return basic::MemoryPlannerFactory::get().create(planner_id);
}

void MemoryManager::claimPlan(const ir::OperandIndex &ind, uint32_t size)
{
  _mem_planner->claim(ind, size);
}

void MemoryManager::releasePlan(const ir::OperandIndex &ind) { _mem_planner->release(ind); }

void MemoryManager::allocate(void)
{
  _mem_alloc = std::make_shared<basic::Allocator>(_mem_planner->capacity());
  assert(_mem_alloc->base());
}

uint8_t *MemoryManager::getBuffer(const ir::OperandIndex &ind) const
{
  assert(_mem_planner->memory_plans().find(ind) != _mem_planner->memory_plans().end());
  const auto &mem_blk = _mem_planner->memory_plans().at(ind);
  return _mem_alloc->base() + mem_blk.offset;
}

std::shared_ptr<basic::Allocator> DynamicMemoryManager::allocate(const ITensor *tensor,
                                                                 uint32_t capacity)
{
  auto find = _mem_alloc_map.find(tensor);
  if (find != _mem_alloc_map.end())
    throw std::runtime_error("Cannot allocate memory for a tensor. It was already allocated.");

  _mem_alloc_map[tensor] = std::make_shared<basic::Allocator>(capacity);
  return _mem_alloc_map[tensor];
}

void DynamicMemoryManager::deallocate(const ITensor *tensor)
{
  auto find = _mem_alloc_map.find(tensor);
  if (find == _mem_alloc_map.end())
    throw std::runtime_error("Cannot find Allocator for the requested index");

  find->second->release();    // explicitly erase memory
  _mem_alloc_map.erase(find); // remove tensor and alloc
}

void DynamicMemoryManager::deallocate(void)
{
  for (auto &&mem_alloc : _mem_alloc_map)
  {
    // Release memory buffer of mem_alloc
    mem_alloc.second->release();
  }

  _mem_alloc_map.clear();
}

} // namespace basic
} // namespace backend
} // namespace onert
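
Usage note (a minimal sketch, not part of MemoryManager.cc): MemoryManager does plan-then-allocate management. A caller first declares every operand's lifetime with claimPlan/releasePlan, then calls allocate() once to create a single Allocator arena of _mem_planner->capacity() bytes, and finally resolves each operand to base + offset via getBuffer(). The sketch below assumes the header path restored above and that ir::OperandIndex is constructible from an integer id; the operand ids and byte sizes are placeholders.

#include "backend/basic/MemoryManager.h" // assumed header path

#include <cstdint>

using namespace onert::backend;

void plan_static_buffers()
{
  basic::MemoryManager mgr; // planner picked from the CPU_MEMORY_PLANNER config

  onert::ir::OperandIndex op0{0}; // hypothetical operand ids
  onert::ir::OperandIndex op1{1};

  // Declare lifetimes in execution order: claim when a buffer becomes live,
  // release after its last use. Operands whose lifetimes do not overlap can
  // end up sharing the same region of the arena, depending on the planner.
  mgr.claimPlan(op0, 1024);
  mgr.claimPlan(op1, 2048);
  mgr.releasePlan(op0);
  mgr.releasePlan(op1);

  // One allocation for the whole plan, sized by the planner's capacity().
  mgr.allocate();

  // Each operand's buffer is an offset into the shared arena.
  uint8_t *p1 = mgr.getBuffer(op1);
  (void)p1;
}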
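
DynamicMemoryManager, by contrast, keeps one Allocator per tensor and suits buffers whose sizes are only known at run time. Another hedged sketch, assuming the same header path and an ITensor pointer supplied by the caller:

#include "backend/basic/MemoryManager.h" // assumed header path

#include <cstdint>
#include <memory>

using namespace onert::backend;

void use_dynamic_buffer(basic::DynamicMemoryManager &dyn_mgr, const ITensor *tensor)
{
  // Allocate a dedicated buffer for this tensor; allocating the same tensor
  // twice throws std::runtime_error (see DynamicMemoryManager::allocate above).
  std::shared_ptr<basic::Allocator> alloc = dyn_mgr.allocate(tensor, 4096);
  uint8_t *buf = alloc->base();
  (void)buf;

  // Free this tensor's buffer explicitly...
  dyn_mgr.deallocate(tensor);

  // ...or release everything the manager still tracks at once.
  dyn_mgr.deallocate();
}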