const std::string DerivativeSettings::feasibleInstantiationSearch = "gradient-descent";
const std::string DerivativeSettings::derivativeAtInstantiation = "compute-derivative";
const std::string DerivativeSettings::learningRate = "learning-rate";
const std::string DerivativeSettings::miniBatchSize = "batch-size";
const std::string DerivativeSettings::adamParams = "adam-params";
const std::string DerivativeSettings::averageDecay = "average-decay";
const std::string DerivativeSettings::squaredAverageDecay = "squared-average-decay";
const std::string DerivativeSettings::terminationEpsilon = "termination-epsilon";
const std::string DerivativeSettings::printJson = "print-json";
const std::string DerivativeSettings::gradientDescentMethod = "descent-method";
const std::string DerivativeSettings::omitInconsequentialParams = "omit-inconsequential-params";
const std::string DerivativeSettings::constraintMethod = "constraint-method";
33 "Search for a feasible instantiation (restart with new instantiation while not feasible)")
37 "Instantiation at which the derivative should be computed")
53 gradientDescentMethod,
54 "Gradient Descent method (adam, radam, rmsprop, plain, plain-sign, momentum, momentum-sign, nesterov, nesterov-sign)")
61 "Sets hyperparameters of the Gradient Descent algorithms, especially (R)ADAM's. If you're using RMSProp, averageDecay is RMSProp's decay.")
72 "The change in value that constitutes as a \"tiny change\", after a few of which the gradient descent will terminate")
77 "Parameters that are removed in minimization because they have no effect on the rational function are normally set to "
78 "0.5 in the final instantiation. If this flag is set, they will be omitted from the final instantiation entirely.")
    if (this->getOption(derivativeAtInstantiation).getHasOptionBeenSet()) {
    return methodFromString(this->getOption(gradientDescentMethod).getArgumentByName(gradientDescentMethod).getValueAsString());
    return constraintMethodFromString(this->getOption(constraintMethod).getArgumentByName(constraintMethod).getValueAsString());
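// The accessors all follow the same pattern: look up the option by its long name, read its single
// argument (registered under the same name), and convert the value. A hedged sketch of
// getDerivativeAtInstantiation() assembled from the calls shown above; the real body is not part
// of this excerpt:
boost::optional<std::string> DerivativeSettings::getDerivativeAtInstantiation() const {
    if (this->getOption(derivativeAtInstantiation).getHasOptionBeenSet()) {
        return this->getOption(derivativeAtInstantiation).getArgumentByName(derivativeAtInstantiation).getValueAsString();
    }
    return boost::none;
}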
boost::optional<derivative::GradientDescentMethod> DerivativeSettings::methodFromString(const std::string &str) const {
    // ...
    } else if (str == "radam") {
        // ...
    } else if (str == "rmsprop") {
        // ...
    } else if (str == "plain") {
        // ...
    } else if (str == "plain-sign") {
        // ...
    } else if (str == "momentum") {
        // ...
    } else if (str == "momentum-sign") {
        // ...
    } else if (str == "nesterov") {
        // ...
    } else if (str == "nesterov-sign") {
        // ...
boost::optional<derivative::GradientDescentConstraintMethod> DerivativeSettings::constraintMethodFromString(const std::string &str) const {
    if (str == "project-gradient") {
        // ...
    } else if (str == "project") {
        // ...
    } else if (str == "penalty-quadratic") {
        // ...
    } else if (str == "barrier-logarithmic") {
        // ...
    } else if (str == "barrier-infinity") {
        // ...
    } else if (str == "logistic-sigmoid") {
        // ...
Cross-referenced declarations (from the generated documentation):

virtual std::string getValueAsString() const = 0
    Retrieves the value of this argument as a string.
virtual int_fast64_t getValueAsInteger() const = 0
    Retrieves the value of this argument as an integer.
virtual double getValueAsDouble() const = 0
    Retrieves the value of this argument as a double.
static ArgumentBuilder createDoubleArgument(std::string const& name, std::string const& description)
    Creates a double argument with the given parameters.
static ArgumentBuilder createIntegerArgument(std::string const& name, std::string const& description)
    Creates an integer argument with the given parameters.
static ArgumentBuilder createStringArgument(std::string const& name, std::string const& description)
    Creates a string argument with the given parameters.
OptionBuilder
    This class provides the interface to create an option.
ArgumentBase const& getArgumentByName(std::string const& argumentName) const
    Returns a reference to the argument with the specified long name.
bool getHasOptionBeenSet() const
    Retrieves whether the option has been set.
boost::optional<std::string> getDerivativeAtInstantiation() const
    Retrieves the instantiation at which the derivative should be computed, if one was set.
double getSquaredAverageDecay() const
    Retrieves the decay of the squared decaying step average of the ADAM algorithm.
std::string getGradientDescentMethodAsString() const
    Retrieves the gradient descent method as a string.
static const std::string moduleName
uint_fast64_t getMiniBatchSize() const
    Retrieves the mini batch size of the gradient descent.
boost::optional<derivative::GradientDescentMethod> getGradientDescentMethod() const
    Retrieves the gradient descent method.
DerivativeSettings()
    Creates a new set of derivative settings.
double getAverageDecay() const
    Retrieves the decay of the decaying step average of the ADAM algorithm.
bool areInconsequentialParametersOmitted() const
    Retrieves whether inconsequential parameters are omitted.
boost::optional<derivative::GradientDescentConstraintMethod> getConstraintMethod() const
    Retrieves the gradient descent constraint method.
bool isFeasibleInstantiationSearchSet() const
    Retrieves whether a feasible instantiation should be searched for by Gradient Descent.
double getTerminationEpsilon() const
    Retrieves the termination epsilon.
double getLearningRate() const
    Retrieves the learning rate for the gradient descent.
bool isPrintJsonSet() const
    Retrieves whether the GradientDescentInstantiationSearcher should print the run as JSON after finishing.
std::string getConstraintMethodAsString() const
    Retrieves the gradient descent constraint method as a string.
ModuleSettings
    This is the base class of the settings for a particular module.
void addOption(std::shared_ptr<Option> const& option)
    Adds and registers the given option.
Option& getOption(std::string const& longName)
    Retrieves the option with the given long name.
GradientDescentMethod
    GradientDescentMethod is the method of Gradient Descent the GradientDescentInstantiationSearcher shall use.
GradientDescentConstraintMethod
    GradientDescentConstraintMethod is the method for mitigating constraints that the GradientDescentInstantiationSearcher shall use.
SettingsType const& getModule()
    Gets the module.