From 3f60b6f284d709ba689be0e37fee18a99270d375 Mon Sep 17 00:00:00 2001
From: Alan Saul
Date: Wed, 15 Nov 2017 18:14:45 +0000
Subject: [PATCH] Added ability to pass the gradient function to Adadelta

---
 paramz/optimization/optimization.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/paramz/optimization/optimization.py b/paramz/optimization/optimization.py
index 53d9c42..e21149c 100644
--- a/paramz/optimization/optimization.py
+++ b/paramz/optimization/optimization.py
@@ -251,6 +251,7 @@ def _check_for_climin():
 
 class Opt_Adadelta(Optimizer):
     def __init__(self, step_rate=0.1, decay=0.9, momentum=0, *args, **kwargs):
+        self.fp = kwargs.pop('fp', None)
         Optimizer.__init__(self, *args, **kwargs)
         self.opt_name = "Adadelta (climin)"
         self.step_rate=step_rate
@@ -261,7 +262,10 @@ def __init__(self, step_rate=0.1, decay=0.9, momentum=0, *args, **kwargs):
 
     def opt(self, x_init, f_fp=None, f=None, fp=None):
-        assert not fp is None
+        if self.fp is not None:
+            fp = self.fp
+        else:
+            assert not fp is None
 
         import climin
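
Usage sketch (not part of the patch): with this change, the gradient function can be bound at construction time via the new 'fp' keyword instead of being passed to opt() on every call. The 'fp' key is popped from kwargs before Optimizer.__init__ runs, so the base class never sees an unexpected keyword. The objective and gradient below are hypothetical stand-ins, and climin must be installed for Opt_Adadelta to run.

    # Minimal sketch, assuming the patched Opt_Adadelta above.
    import numpy as np
    from paramz.optimization.optimization import Opt_Adadelta

    def grad(x):
        # Gradient of the illustrative objective f(x) = ||x||^2.
        return 2.0 * x

    # Bind the gradient at construction time via the new 'fp' keyword;
    # it is popped from kwargs before Optimizer.__init__ is called.
    optimizer = Opt_Adadelta(step_rate=0.1, decay=0.9, momentum=0, fp=grad)

    # opt() can now be called without passing fp explicitly; the stored
    # self.fp is used instead of tripping the assertion.
    optimizer.opt(np.array([1.0, -2.0, 3.0]))
    print(optimizer.x_opt)  # final iterate, per paramz's Optimizer interface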