Documentation for NevergradOptimizer

Nevergrad Wrapper for PHOTONAI.

Nevergrad is a gradient-free optimization platform.

See the Nevergrad documentation for usage and implementation details.

Examples:

import nevergrad as ng
from photonai.base import Hyperpipe

# list all available nevergrad optimizers
print(list(ng.optimizers.registry.keys()))

my_pipe = Hyperpipe('nevergrad_example',
                    optimizer='nevergrad',
                    optimizer_params={'facade': 'NGO', 'n_configurations': 30},
                    ...
                    )
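
For orientation, here is one way the ellipsis above might be completed. This is a sketch only: the metrics, cross-validation setup, and the PipelineElement and FloatRange imports are assumptions about typical PHOTONAI usage, not part of this page.

from sklearn.model_selection import KFold
from photonai.base import Hyperpipe, PipelineElement
from photonai.optimization import FloatRange

# Assumed minimal setup around the nevergrad optimizer configuration shown above.
my_pipe = Hyperpipe('nevergrad_example',
                    optimizer='nevergrad',
                    optimizer_params={'facade': 'NGO', 'n_configurations': 30},
                    metrics=['accuracy'],
                    best_config_metric='accuracy',
                    inner_cv=KFold(n_splits=3))

# One hyperparameter to optimize; FloatRange is PHOTONAI's continuous range type.
my_pipe += PipelineElement('SVC', hyperparameters={'C': FloatRange(0.1, 10)})
# my_pipe.fit(X, y)  # X, y: your training data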

__init__(self, facade='NGO', n_configurations=100, rng=42)

Initialize the object.

Parameters:

Name              Type  Description                                                   Default
facade                  Choice of the Nevergrad backend strategy, e.g. [NGO, ...].   'NGO'
n_configurations  int   Number of configurations to evaluate (optimization budget).  100
rng               int   Random seed.                                                  42
Source code in photonai/optimization/nevergrad/nevergrad.py
def __init__(self, facade='NGO', n_configurations: int = 100, rng: int = 42):
    """
    Initialize the object.

    Parameters:
        facade:
            Choice of the Nevergrad backend strategy, e.g. [NGO, ...].

        n_configurations:
            Number of configurations to evaluate (optimization budget).

        rng:
            Random seed.

    """

    if not __found__:
        msg = "Module nevergrad not found or not installed as expected. " \
              "Please install the nevergrad/requirements.txt PHOTONAI provides."
        logger.error(msg)
        raise ModuleNotFoundError(msg)

    if facade in list(ng.optimizers.registry.values()):
        self.facade = facade
    elif facade in list(ng.optimizers.registry.keys()):
        self.facade = ng.optimizers.registry[facade]
    else:
        msg = "nevergrad.optimizer {} not known. Check out all available nevergrad optimizers " \
              "by nevergrad.optimizers.registry.keys()".format(str(facade))
        logger.error(msg.format(str(facade)))
        raise ValueError(msg.format(str(facade)))

    self.n_configurations = n_configurations
    self.space = None  # Hyperparameter space for nevergrad
    self.switch_optiones = {}
    self.hyperparameters = []
    self.rng = rng

    self.maximize_metric = False
    self.constant_dictionary = {}

    self.objective = None
    self.optimizer = None

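As the constructor shows, facade accepts either a registry key (a string) or the Nevergrad optimizer class itself. A short illustration; the import path is inferred from the module location given above:

import nevergrad as ng
from photonai.optimization.nevergrad.nevergrad import NevergradOptimizer

# Equivalent ways to choose the backend strategy:
opt_by_name = NevergradOptimizer(facade='NGO', n_configurations=50)
opt_by_class = NevergradOptimizer(facade=ng.optimizers.registry['NGO'],
                                  n_configurations=50)
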
optimize(self)

Start the optimization process based on the underlying objective function.

Source code in photonai/optimization/nevergrad/nevergrad.py
def optimize(self) -> None:
    self.optimizer.minimize(self.objective)

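optimize delegates entirely to Nevergrad's minimize loop. For context, the same pattern standalone on a toy objective; the names below are illustrative and not part of PHOTONAI:

import nevergrad as ng

def toy_objective(x: float, y: float) -> float:
    # Simple convex function with its minimum at (1, -2).
    return (x - 1.0) ** 2 + (y + 2.0) ** 2

space = ng.p.Instrumentation(x=ng.p.Scalar(), y=ng.p.Scalar())
optimizer = ng.optimizers.registry['NGO'](parametrization=space, budget=100)
recommendation = optimizer.minimize(toy_objective)
print(recommendation.kwargs)  # best parameters found, e.g. {'x': ..., 'y': ...}
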
prepare(self, pipeline_elements, maximize_metric, objective_function)

Prepare Nevergrad Optimizer.

Parameters:

Name                Type      Description                                                        Default
pipeline_elements   list      List of all PipelineElements to create the hyperparameter space.  required
maximize_metric     bool      Boolean to distinguish between score and error.                   required
objective_function  Callable  The cost or objective function.                                   required
Source code in photonai/optimization/nevergrad/nevergrad.py
def prepare(self, pipeline_elements: list, maximize_metric: bool, objective_function: Callable) -> None:
    """Prepare Nevergrad Optimizer.

    Parameters:
        pipeline_elements:
            List of all PipelineElements to create the hyperparameter space.

        maximize_metric:
            Boolean to distinguish between score and error.

        objective_function:
            The cost or objective function.

    """
    self.space = self._build_nevergrad_space(pipeline_elements)
    self.space.random_state.seed(self.rng)
    if self.constant_dictionary:
        msg = "PHOTONAI has detected some one-valued params in your hyperparameters. Pleas use the kwargs for " \
              "constant values. This run ignores following settings: " + str(self.constant_dictionary.keys())
        logger.warning(msg)
        warnings.warn(msg)
    self.maximize_metric = maximize_metric

    def nevergrad_objective_function(**current_config):
        return objective_function(current_config)
    self.objective = nevergrad_objective_function

    self.optimizer = self.facade(parametrization=self.space, budget=self.n_configurations)
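
The helper _build_nevergrad_space is not shown on this page; conceptually it translates the PipelineElements' hyperparameters into a Nevergrad parametrization that minimize can sample from. A hypothetical hand-built equivalent for a single element (the element and parameter names are invented for illustration):

import nevergrad as ng

# Hypothetical space for an SVC element, mirroring what
# _build_nevergrad_space derives from PipelineElements:
space = ng.p.Instrumentation(
    svc__C=ng.p.Log(lower=1e-3, upper=1e3),      # continuous, log-scaled
    svc__kernel=ng.p.Choice(['linear', 'rbf']),  # categorical
)
space.random_state.seed(42)  # the same seeding hook prepare uses via self.rng

def objective_function(current_config: dict) -> float:
    # In PHOTONAI this would score the pipeline; here a dummy error value.
    return abs(current_config['svc__C'] - 1.0)

# prepare wraps the objective so Nevergrad can call it with keyword arguments:
def nevergrad_objective_function(**current_config):
    return objective_function(current_config)

optimizer = ng.optimizers.registry['NGO'](parametrization=space, budget=10)
optimizer.minimize(nevergrad_objective_function)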