ONE - On-device Neural Engine
All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Modules Pages
adam.py
Go to the documentation of this file.
1from .optimizer import Optimizer
2
3
5 """
6 Adam optimizer.
7 """
def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-7):
    """Set up the Adam optimizer's hyperparameters.

    Args:
        learning_rate (float): Step size applied during optimization.
        beta1 (float): Decay rate for the first-moment (mean) estimate.
        beta2 (float): Decay rate for the second-moment (variance) estimate.
        epsilon (float): Small offset guarding against division by zero.
    """
    # Base class keeps the learning rate; the moment-decay rates and
    # numerical-stability constant are stored locally.
    super().__init__(learning_rate)
    self.epsilon = epsilon
    self.beta2 = beta2
    self.beta1 = beta1
__init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-7)
Definition adam.py:8