ONE - On-device Neural Engine
MemoryManager.h
Go to the documentation of this file.
/*
 * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __ONERT_BACKEND_TRAIN_MEMORY_MANAGER_H__
#define __ONERT_BACKEND_TRAIN_MEMORY_MANAGER_H__

#include <backend/basic/MemoryManager.h>

#include "DisposableTensorIndex.h"
#include "LayerScopeTensorIndex.h"

namespace onert
{
namespace backend
{
namespace train
{

using MemoryManager = backend::basic::MemoryManager;

class TrainableMemoryManager : public MemoryManager
{
public:
  TrainableMemoryManager(uint32_t optimizer_vars_count);
  virtual ~TrainableMemoryManager() = default;

  void allocate(void);
  uint8_t *getOptVarBuffer(const ir::OperandIndex &ind, uint32_t pos_var) const;

private:
  std::shared_ptr<basic::Allocator> _var_mem_alloc;
  uint32_t _optim_vars_count;
};

class DisposableMemoryManager
{
public:
  DisposableMemoryManager();

  void allocate(void);
  uint8_t *getBuffer(const DisposableTensorIndex &ind) const;
  void deallocate(void) { _mem_alloc->release(); }

  void claimPlan(const DisposableTensorIndex &ind, uint32_t size);
  void releasePlan(const DisposableTensorIndex &ind);

  std::shared_ptr<basic::Allocator> getMemAlloc() { return _mem_alloc; }

private:
  basic::IMemoryPlanner<DisposableTensorIndex> *createMemoryPlanner();
  basic::IMemoryPlanner<DisposableTensorIndex> *createMemoryPlanner(const std::string planner_id);

private:
  std::shared_ptr<basic::IMemoryPlanner<DisposableTensorIndex>> _mem_planner;
  std::shared_ptr<basic::Allocator> _mem_alloc;
};
70
72{
73public:
75
  void allocate(void);
  uint8_t *getBuffer(const LayerScopeTensorIndex &ind) const;
  void deallocate(void);

  void claimPlan(const LayerScopeTensorIndex &ind, uint32_t size);
  void releasePlan(const LayerScopeTensorIndex &ind);

private:
  basic::IMemoryPlanner<LayerScopeTensorIndex> *createMemoryPlanner();

private:
  std::shared_ptr<basic::IMemoryPlanner<LayerScopeTensorIndex>> _mem_planner;
  std::shared_ptr<basic::Allocator> _mem_alloc;
};

} // namespace train
} // namespace backend
} // namespace onert

#endif // __ONERT_BACKEND_TRAIN_MEMORY_MANAGER_H__
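The header above only declares the API, so a short usage sketch may help orient readers. The following is a minimal, hypothetical example of driving TrainableMemoryManager; it is not taken from the ONE sources. It assumes the include path shown, that the plan interface (claimPlan/releasePlan/getBuffer keyed by ir::OperandIndex) is inherited from basic::MemoryManager, and that two optimizer variables per trainable tensor are wanted (as an Adam-style optimizer would keep); the indices and byte sizes are purely illustrative.

// Hypothetical usage sketch (not from the ONE sources): plan storage for two
// trainable operands, allocate once, then fetch tensor and optimizer-variable
// buffers.
#include "MemoryManager.h" // assumed include path

#include <cstdint>

using namespace onert::backend::train;

void planTrainableTensors()
{
  // Assumption: 2 optimizer variables per trainable tensor (e.g. Adam's m and v).
  TrainableMemoryManager mgr{/*optimizer_vars_count=*/2};

  // Phase 1: plan lifetimes through the planner inherited from
  // basic::MemoryManager (claim when a tensor becomes live, release when it
  // is no longer needed).
  onert::ir::OperandIndex weight{0};
  onert::ir::OperandIndex bias{1};
  mgr.claimPlan(weight, 4096); // sizes in bytes, illustrative
  mgr.claimPlan(bias, 256);
  mgr.releasePlan(bias);
  mgr.releasePlan(weight);

  // Phase 2: materialize the plan; in this sketch this also reserves the
  // optimizer-variable area sized by optimizer_vars_count.
  mgr.allocate();

  // Buffer for the tensor itself ...
  uint8_t *weight_buf = mgr.getBuffer(weight);
  // ... and one buffer per optimizer variable of that tensor.
  uint8_t *m = mgr.getOptVarBuffer(weight, /*pos_var=*/0);
  uint8_t *v = mgr.getOptVarBuffer(weight, /*pos_var=*/1);
  (void)weight_buf;
  (void)m;
  (void)v;
}

The two-phase shape, claiming and releasing every plan before a single allocate(), lets the planner compute non-overlapping offsets inside one arena before any memory is actually touched.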
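DisposableMemoryManager follows the same claim/allocate/lookup pattern for temporary backpropagation tensors, and LayerScopeMemoryManager mirrors it with LayerScopeTensorIndex keys. Below is another hedged sketch, again not from the ONE sources: the DisposableTensorIndex constructor arguments are an assumption (its real declaration lives in DisposableTensorIndex.h, which is not part of this listing), as are the include paths, indices, and sizes.

// Hypothetical lifecycle sketch (not from the ONE sources) for
// DisposableMemoryManager: claim/release plans first, allocate once, then
// look buffers up while running the training step.
#include "MemoryManager.h"         // assumed include path
#include "DisposableTensorIndex.h" // assumed include path

#include <cstdint>

using namespace onert::backend::train;

void planDisposableTensors()
{
  DisposableMemoryManager mgr;

  // Assumption: DisposableTensorIndex is keyed by the operation producing the
  // temporary tensor and the operand it derives from.
  DisposableTensorIndex grad_tmp{onert::ir::OperationIndex{3}, onert::ir::OperandIndex{7}};

  // Phase 1: planning. Claim when the temporary becomes live, release after
  // its last consumer, so non-conflicting lifetimes can share space.
  mgr.claimPlan(grad_tmp, 1024); // size in bytes, illustrative
  mgr.releasePlan(grad_tmp);

  // Phase 2: allocation and use.
  mgr.allocate();
  uint8_t *buf = mgr.getBuffer(grad_tmp);
  (void)buf;

  // The whole arena is released at once when the step is done.
  mgr.deallocate();
}

getMemAlloc() hands out the underlying shared allocator, presumably so tensor objects can share ownership of the arena; LayerScopeMemoryManager is used the same way as the sketch above, only with LayerScopeTensorIndex keys and an out-of-line deallocate().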