Skip to content

Commit

Permalink
add cli neps(init, run) + modify custom optimizer usage, providing just a class
Browse files Browse the repository at this point in the history
  • Loading branch information
danrgll committed Apr 8, 2024
1 parent 701ef7a commit dd79361
Show file tree
Hide file tree
Showing 6 changed files with 153 additions and 50 deletions.
42 changes: 23 additions & 19 deletions neps/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

from __future__ import annotations

import inspect
import logging
import warnings
from pathlib import Path
Expand Down Expand Up @@ -97,16 +98,16 @@ def write_loss_and_config(file_handle, loss_, config_id_, config_):


def run(
run_pipeline: Callable = None,
root_directory: str | Path = None,
run_pipeline: Callable | None = None,
root_directory: str | Path | None = None,
pipeline_space: (
dict[str, Parameter | CS.ConfigurationSpace]
| str
| Path
| CS.ConfigurationSpace
| None
) = None,
run_args: str | Path = None,
run_args: str | Path | None = None,
overwrite_working_directory: bool = False,
post_run_summary: bool = False,
development_stage_id=None,
Expand Down Expand Up @@ -237,28 +238,18 @@ def run(
"overwrite_working_directory", False
)
post_run_summary = optim_settings.get("post_run_summary", False)
development_stage_id = optim_settings.get(
"development_stage_id", None
)
development_stage_id = optim_settings.get("development_stage_id", None)
task_id = optim_settings.get("task_id", None)
max_evaluations_total = optim_settings.get(
"max_evaluations_total", None
)
max_evaluations_per_run = optim_settings.get(
"max_evaluations_per_run", None
)
max_evaluations_total = optim_settings.get("max_evaluations_total", None)
max_evaluations_per_run = optim_settings.get("max_evaluations_per_run", None)
continue_until_max_evaluation_completed = optim_settings.get(
"continue_until_max_evaluation_completed",
False,
)
max_cost_total = optim_settings.get("max_cost_total", None)
ignore_errors = optim_settings.get("ignore_errors", False)
loss_value_on_error = optim_settings.get(
"loss_value_on_error", None
)
cost_value_on_error = optim_settings.get(
"cost_value_on_error", None
)
loss_value_on_error = optim_settings.get("loss_value_on_error", None)
cost_value_on_error = optim_settings.get("cost_value_on_error", None)
pre_load_hooks = optim_settings.get("pre_load_hooks", None)
searcher = optim_settings.get("searcher", "default")
searcher_path = optim_settings.get("searcher_path", None)
Expand All @@ -272,7 +263,8 @@ def run(
pipeline_space,
max_cost_total,
max_evaluations_total,
searcher
searcher,
run_args,
)

if pre_load_hooks is None:
Expand All @@ -290,6 +282,18 @@ def run(
"searcher_args": {},
}

# special case if you load your own optimizer via run_args
if inspect.isclass(searcher):
if issubclass(searcher, BaseOptimizer):
search_space = pipeline_space_from_yaml(pipeline_space)
search_space = SearchSpace(**search_space)
searcher = searcher(search_space)
else:
# Raise an error if searcher is not a subclass of BaseOptimizer
raise TypeError(
"The provided searcher must be a class that inherits from BaseOptimizer."
)

if isinstance(searcher, BaseOptimizer):
searcher_instance = searcher
searcher_info["searcher_name"] = "baseoptimizer"
Expand Down
82 changes: 82 additions & 0 deletions neps/utils/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import argparse
import os

import neps


def init_config(args=None):
    """Generate starter ``config.yaml`` and ``search_space.yaml`` templates.

    Existing files are never overwritten; a notice is printed instead so the
    command is safe to re-run.

    Args:
        args: Parsed CLI namespace (unused). Accepted because ``main``
            dispatches every sub-command handler uniformly as
            ``args.func(args)``; the previous zero-argument signature made
            ``neps init`` fail with a ``TypeError``.
    """
    # Define the paths for the configuration files
    config_path = "config.yaml"
    search_space_path = "search_space.yaml"

    # Check if 'config.yaml' exists to avoid overwriting
    if not os.path.exists(config_path):
        with open(config_path, "w") as file:
            file.write("# Add your NEPS configuration settings here\n")
        print(f"Generated template: {config_path}")
    else:
        print(f"{config_path} already exists. Skipping to avoid overwriting.")

    # Check if 'search_space.yaml' exists to avoid overwriting
    if not os.path.exists(search_space_path):
        with open(search_space_path, "w") as file:
            file.write(
                """pipeline_space:
# Define your search space parameters here
# Example:
# learning_rate:
# type: float
# lower: 1e-4
# upper: 1e-1
# log: true
"""
            )
        print(f"Generated template: {search_space_path}")
    else:
        print(f"{search_space_path} already exists. Skipping to avoid overwriting.")


def run_optimization(args):
    """Launch a NePS optimization run driven by a YAML configuration file.

    Args:
        args: Parsed CLI namespace; ``args.config`` optionally names the
            configuration file (defaults to ``config.yaml``).
    """
    config_path = args.config or "config.yaml"

    # Only hand off to neps.run when the configuration file actually exists.
    if os.path.isfile(config_path):
        print(f"Running optimization using configuration from {config_path}")
        neps.run(run_args=config_path)
    else:
        print(f"No configuration file found at '{config_path}'.")
        print("Please create one using 'neps init' or specify the path using '--config'.")
        return


def main():
    """Entry point of the ``neps`` command line interface.

    Wires up the ``init`` and ``run`` sub-commands and dispatches to the
    handler stored via ``set_defaults(func=...)``; prints usage help when no
    sub-command is given.
    """
    parser = argparse.ArgumentParser(description="NePS Command Line Interface")
    subparsers = parser.add_subparsers(
        dest="command", help="Available commands: init, run"
    )

    # "init": write starter YAML templates into the current directory.
    init_parser = subparsers.add_parser(
        "init", help="Generate starter configuration YAML files"
    )
    init_parser.set_defaults(func=init_config)

    # "run": execute an optimization from a YAML configuration.
    run_parser = subparsers.add_parser(
        "run", help="Run optimization with specified configuration"
    )
    run_parser.add_argument(
        "--config", type=str, help="Path to the configuration YAML file."
    )
    run_parser.set_defaults(func=run_optimization)

    args = parser.parse_args()
    if hasattr(args, "func"):
        args.func(args)
    else:
        parser.print_help()


# Allow running this module directly (e.g. `python -m neps.utils.cli`);
# installed usage goes through the `neps` console script in pyproject.toml.
if __name__ == "__main__":
    main()
51 changes: 35 additions & 16 deletions neps/utils/run_args_from_yaml.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import importlib.util
import logging
import sys
import yaml
from collections.abc import Callable
from neps.optimizers.base_optimizer import BaseOptimizer
import yaml

logger = logging.getLogger("neps")

Expand Down Expand Up @@ -137,7 +137,7 @@ def extract_leaf_keys(d, special_keys=None):
RUN_PIPELINE: None,
PRE_LOAD_HOOKS: None,
SEARCHER: None,
PIPELINE_SPACE: None
PIPELINE_SPACE: None,
}

leaf_keys = {}
Expand Down Expand Up @@ -173,8 +173,9 @@ def handle_special_argument_cases(settings, special_configs):
"""
# Load the value of each key from a dictionary specifying "path" and "name".
process_config_key(settings, special_configs, [PIPELINE_SPACE, SEARCHER,
RUN_PIPELINE])
process_config_key(
settings, special_configs, [PIPELINE_SPACE, SEARCHER, RUN_PIPELINE]
)

if special_configs[SEARCHER_KWARGS] is not None:
configs = {}
Expand All @@ -188,9 +189,7 @@ def handle_special_argument_cases(settings, special_configs):

if special_configs[PRE_LOAD_HOOKS] is not None:
# Loads the pre_load_hooks functions and add them in a list to settings.
settings[PRE_LOAD_HOOKS] = load_hooks_from_config(
special_configs[PRE_LOAD_HOOKS]
)
settings[PRE_LOAD_HOOKS] = load_hooks_from_config(special_configs[PRE_LOAD_HOOKS])


def process_config_key(settings, special_configs, keys):
Expand Down Expand Up @@ -233,10 +232,13 @@ def process_config_key(settings, special_configs, keys):
if key == RUN_PIPELINE:
raise TypeError(
f"Value for {key} must be a dictionary, but got "
f"{type(value).__name__}.")
f"{type(value).__name__}."
)
else:
raise TypeError(f"Value for {key} must be a string or a dictionary, "
f"but got {type(value).__name__}.")
raise TypeError(
f"Value for {key} must be a string or a dictionary, "
f"but got {type(value).__name__}."
)


def load_and_return_object(module_path, object_name):
Expand Down Expand Up @@ -341,20 +343,28 @@ def check_run_args(settings):
LOSS_VALUE_ON_ERROR: float,
COST_VALUE_ON_ERROR: float,
IGNORE_ERROR: bool,
SEARCHER: (str, BaseOptimizer),
SEARCHER_PATH: str,
SEARCHER_KWARGS: dict,
}
for param, value in settings.items():
if param == DEVELOPMENT_STAGE_ID or param == TASK_ID:
# this argument can be Any
continue
if param == PRE_LOAD_HOOKS:
elif param == PRE_LOAD_HOOKS:
# check if all items in pre_load_hooks are callable objects
if not all(callable(item) for item in value):
raise TypeError("All items in 'pre_load_hooks' must be callable.")
elif param == SEARCHER:
if not (isinstance(param, str) or issubclass(param, BaseOptimizer)):
raise TypeError(
"Parameter 'searcher' must be a string or a class that is a subclass "
"of BaseOptimizer."
)
else:
expected_type = expected_types[param]
try:
expected_type = expected_types[param]
except KeyError as e:
raise KeyError(f"{param} is not a valid argument of neps") from e
if not isinstance(value, expected_type):
raise TypeError(
f"Parameter '{param}' expects a value of type {expected_type}, got "
Expand All @@ -363,8 +373,14 @@ def check_run_args(settings):


def check_essential_arguments(
run_pipeline, root_directory, pipeline_space, max_cost_total, max_evaluation_total,
searcher):
run_pipeline,
root_directory,
pipeline_space,
max_cost_total,
max_evaluation_total,
searcher,
run_args,
):
"""
Validates essential NEPS configuration arguments.
Expand All @@ -389,8 +405,11 @@ def check_essential_arguments(
if not root_directory:
raise ValueError("'root_directory' is required but was not provided.")
if not pipeline_space:
if not isinstance(searcher, BaseOptimizer):
# handling special case for searcher instance, in which user doesn't have to
# provide the search_space because it's the argument of the searcher.
if run_args or not isinstance(searcher, BaseOptimizer):
raise ValueError("'pipeline_space' is required but was not provided.")

if not max_evaluation_total and not max_cost_total:
raise ValueError(
"'max_evaluation_total' or 'max_cost_total' is required but "
Expand Down
3 changes: 3 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -176,3 +176,6 @@ enable = [
'invalid-getnewargs-ex-returned','super-with-arguments','deprecated-class','invalid-class-object',
'unused-private-member',
]

[tool.poetry.scripts]
neps = "neps.utils.cli:main"
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ run_args:
run_pipeline:
path: "tests/test_yaml_run_args/test_run_args_by_neps_run/neps_run.py"
name: "run_pipeline"
pipeline_space: "tests/test_yaml_run_args/test_run_args_by_neps_run/search_space.yaml"
root_directory: "tests/test_yaml_run_args/test_run_args_by_neps_run/results"

max_evaluations_total: 1
Expand All @@ -20,7 +21,8 @@ run_args:
search:
# Test Case
searcher:
path: "tests/test_yaml_run_args/test_run_args_by_neps_run/neps_run.py"
name: optimizer
path: "neps/optimizers/bayesian_optimization/optimizer.py"
name: BayesianOptimization


pre_load_hooks: None
19 changes: 6 additions & 13 deletions tests/test_yaml_run_args/test_run_args_by_neps_run/neps_run.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import neps
import numpy as np
from neps.optimizers.bayesian_optimization.optimizer import BayesianOptimization
from neps.search_spaces.search_space import SearchSpace
import argparse
import numpy as np
import neps


def run_pipeline(learning_rate, epochs, optimizer, batch_size):
Expand All @@ -20,18 +18,13 @@ def run_pipeline(learning_rate, epochs, optimizer, batch_size):
learning_rate=neps.FloatParameter(lower=1e-6, upper=1e-1, log=False),
epochs=neps.IntegerParameter(lower=1, upper=3, is_fidelity=False),
optimizer=neps.CategoricalParameter(choices=["a", "b", "c"]),
batch_size=neps.ConstantParameter(64)
batch_size=neps.ConstantParameter(64),
)

# Required for testing yaml loading, in the case 'searcher' is an instance of
# BaseOptimizer.
search_space = SearchSpace(**pipeline_space)
optimizer = BayesianOptimization(search_space)

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Run NEPS optimization with run_args.yml.")
parser.add_argument('run_args', type=str,
help='Path to the YAML configuration file.')
description="Run NEPS optimization with run_args.yml."
)
parser.add_argument("run_args", type=str, help="Path to the YAML configuration file.")
args = parser.parse_args()
neps.run(run_args=args.run_args)

0 comments on commit dd79361

Please sign in to comment.