-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathDropLR.py
More file actions
43 lines (37 loc) · 1.53 KB
/
DropLR.py
File metadata and controls
43 lines (37 loc) · 1.53 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
class DropLR:
    """
    Plateau detector for validation accuracy that signals learning-rate drops.

    Tracks the best validation accuracy observed so far. When no epoch has
    beaten that best by a small relative margin for more than `tolerance`
    consecutive epochs, the next call returns True to signal an LR drop,
    and the patience counter restarts.
    """

    def __init__(self, tolerance=7, delta_factor=1e-3):
        """
        Args:
            tolerance (int): Consecutive non-improving epochs allowed before
                the drop signal fires.
            delta_factor (float): Relative improvement required to count as
                progress (e.g. 1e-3 means the new accuracy must exceed the
                best seen by 0.1%).
        """
        self.tolerance = tolerance
        self.delta_factor = delta_factor
        # Consecutive epochs without a qualifying improvement.
        self.counter = 0
        # -inf guarantees the very first reported accuracy registers as an
        # improvement.
        self.max_validation_accuracy = -float('inf')
        self.drop_lr = False

    def __call__(self, validation_acc):
        """
        Record one epoch's validation accuracy and report whether to drop LR.

        Args:
            validation_acc (float): This epoch's validation accuracy.

        Returns:
            bool: True when the plateau has outlasted `tolerance` epochs
            and the learning rate should be dropped; False otherwise.
        """
        # Relative-improvement bar: best-so-far scaled up by delta_factor.
        # NOTE(review): a multiplicative bar assumes non-negative metrics —
        # with a negative best value it would get *easier* to beat; confirm
        # accuracies here are always >= 0.
        threshold = (1 + self.delta_factor) * self.max_validation_accuracy
        if validation_acc >= threshold:
            # Genuine improvement: record it and restart the patience window.
            self.max_validation_accuracy = validation_acc
            self.counter = 0
            self.drop_lr = False
        elif self.counter < self.tolerance:
            # Still inside the patience window — keep waiting.
            self.counter += 1
            self.drop_lr = False
        else:
            # Patience exhausted: signal the drop and reset the counter so
            # the next plateau is measured from scratch.
            self.counter = 0
            self.drop_lr = True
        return self.drop_lr