Commit f0aab83 by Julian Blank, committed Oct 21, 2019 (1 parent: 6d8cb13)
Showing 52 changed files with 650 additions and 187 deletions. Only the two new gradient-based algorithm files are reproduced below.
New file (+48 lines): the Adam optimizer, built on the gradient-based base class.
import numpy as np

from pymoo.algorithms.so_gradient_descent import GradientBasedAlgorithm


class Adam(GradientBasedAlgorithm):

    def __init__(self, X,
                 alpha=0.005,
                 beta_1=0.9,
                 beta_2=0.999,
                 epsilon=1e-8,
                 **kwargs) -> None:
        super().__init__(X, **kwargs)

        self.alpha = alpha
        self.beta_1 = beta_1
        self.beta_2 = beta_2
        self.epsilon = epsilon

        self.t = 0
        self.m_t = 0
        self.v_t = 0

    def apply(self):
        X, dX = self.X, self.dX

        self.t += 1
        beta_1, beta_2 = self.beta_1, self.beta_2

        # update the moving averages of the gradient and the squared gradient
        self.m_t = beta_1 * self.m_t + (1 - beta_1) * dX
        self.v_t = beta_2 * self.v_t + (1 - beta_2) * (dX * dX)

        # calculate the bias-corrected estimates
        m_cap = self.m_t / (1 - (beta_1 ** self.t))
        v_cap = self.v_t / (1 - (beta_2 ** self.t))

        # do the gradient update
        self.X = X - (self.alpha * m_cap) / (np.sqrt(v_cap) + self.epsilon)

    def restart(self):
        self.t = 0
        self.m_t = 0
        self.v_t = 0
        self.alpha /= 2
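For readers unfamiliar with Adam, the update above is easier to follow outside the framework. The following is a minimal, self-contained sketch of the same update rule; the objective f, its gradient df, the starting point, and the iteration count are made up purely for illustration, while the constants mirror the class defaults:

import numpy as np

def f(x):                       # hypothetical objective, for illustration only
    return np.sum(x ** 2)

def df(x):                      # its gradient
    return 2 * x

# the same defaults as the Adam class above
alpha, beta_1, beta_2, epsilon = 0.005, 0.9, 0.999, 1e-8

x = np.array([3.0, -2.0])       # made-up starting point, analogous to X
m_t, v_t, t = 0.0, 0.0, 0

for _ in range(2000):
    t += 1
    dx = df(x)

    # moving averages of the gradient and the squared gradient
    m_t = beta_1 * m_t + (1 - beta_1) * dx
    v_t = beta_2 * v_t + (1 - beta_2) * (dx * dx)

    # bias-corrected estimates
    m_cap = m_t / (1 - beta_1 ** t)
    v_cap = v_t / (1 - beta_2 ** t)

    # the same step as in Adam.apply()
    x = x - (alpha * m_cap) / (np.sqrt(v_cap) + epsilon)

print(x, f(x))                  # x approaches [0, 0]

The 1 - beta ** t terms correct the zero-initialization bias of the moving averages, which is why restart() in the class resets t, m_t, and v_t together.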
New file (+89 lines): the gradient-based base class and a plain gradient descent implementation.
import numpy as np

from pymoo.model.algorithm import Algorithm
from pymoo.model.population import Population
from pymoo.model.termination import SingleObjectiveToleranceBasedTermination
from pymoo.operators.repair.out_of_bounds_repair import repair_out_of_bounds
from pymoo.util.display import disp_single_objective


class GradientBasedAlgorithm(Algorithm):

    def __init__(self, X, dX=None, objective=0, **kwargs) -> None:
        super().__init__(**kwargs)
        self.func_display_attrs = disp_single_objective
        self.objective = objective
        self.n_restarts = 0
        self.default_termination = SingleObjectiveToleranceBasedTermination()

        self.X, self.dX = X, dX
        self.F, self.CV = None, None

        if self.X.ndim == 1:
            self.X = np.atleast_2d(X)

    def _initialize(self):
        self._next()

    def _next(self):

        # create a copy of the current values - in case a restart is necessary
        _X = np.copy(self.X)

        # if the gradient was not provided yet, evaluate it
        if self.F is None or self.dX is None:
            # evaluate the problem and get the gradient information as well
            F, dX, CV = self.problem.evaluate(self.X, return_values_of=["F", "dF", "CV"])

            # because we only consider one objective here
            F = F[:, [self.objective]]
            dX = dX[:, self.objective]

            # increase the evaluation count
            self.evaluator.n_eval += len(self.X)

        has_improved = self.F is None or np.any(F < self.F)
        is_gradient_valid = np.all(~np.isnan(dX))

        # if the gradient did lead to an improvement
        if has_improved:

            self.F, self.dX, self.CV = F, dX, CV

            # if the gradient is valid and has no nan values
            if is_gradient_valid:

                # make the step and repair X if it is out of bounds
                self.apply()
                self.X = repair_out_of_bounds(self.problem, self.X)

                # set the population object for the automatic printout
                self.pop = Population(len(self.X)).set("X", self.X, "F", self.F,
                                                       "CV", self.CV, "feasible", self.CV <= 0)

            # otherwise force the termination from now on
            else:
                print("WARNING: GRADIENT ERROR", self.dX)
                self.termination.force_termination = True

        # otherwise do a restart of the algorithm
        else:
            self.X = _X
            self.restart()
            self.n_restarts += 1

        # set the gradient to None to be ready for the next iteration
        self.dX = None


class GradientDescent(GradientBasedAlgorithm):

    def __init__(self, X, learning_rate=0.005, **kwargs) -> None:
        super().__init__(X, **kwargs)
        self.learning_rate = learning_rate

    def restart(self):
        self.learning_rate /= 2

    def apply(self):
        self.X = self.X - self.learning_rate * self.dX
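The interplay between _next() and restart() is the interesting part of the base class: a step that does not improve F is rolled back and the step size is halved. The following is a rough sketch of that control flow with a made-up objective and learning rate, and without any of pymoo's problem, evaluator, or termination machinery:

import numpy as np

def f(x):                            # made-up objective, stands in for problem.evaluate
    return np.sum(x ** 2)

def df(x):                           # its gradient, stands in for the "dF" values
    return 2 * x

x = np.array([3.0, -2.0])
learning_rate = 1.2                  # deliberately too large, to trigger a restart
best_f, n_restarts = f(x), 0

for _ in range(100):
    _x = np.copy(x)                  # copy of the current values, in case a restart is necessary
    x = x - learning_rate * df(x)    # the GradientDescent.apply() step

    if f(x) < best_f:                # has_improved
        best_f = f(x)
    else:                            # roll back and retry with a smaller step
        x = _x
        learning_rate /= 2           # GradientDescent.restart()
        n_restarts += 1

print(best_f, n_restarts)            # best_f approaches 0 after a single restart

Halving the step size on failure makes an overly aggressive initial learning rate self-correcting, at the cost of the extra evaluations spent on rejected steps.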