Changing global file name variables to uppercase

TarekAbouChakra committed Jan 17, 2024
1 parent 07cfbcd commit 3517c6c
Showing 6 changed files with 54 additions and 51 deletions.
6 changes: 3 additions & 3 deletions neps/api.py
@@ -38,7 +38,7 @@ def _post_evaluation_hook(

     # 1. Write all configs and losses
     all_configs_losses = Path(
-        working_directory, Filenamings.root_file_all_losses_w_configs
+        working_directory, Filenamings.ROOT_FILE_ALL_LOSSES_W_CONFIG
     )
 
     def write_loss_and_config(file_handle, loss_, config_id_, config_):
@@ -62,10 +62,10 @@ def write_loss_and_config(file_handle, loss_, config_id_, config_):

     # 2. Write best losses/configs
     best_loss_trajectory_file = Path(
-        working_directory, Filenamings.root_file_best_loss_traj
+        working_directory, Filenamings.ROOT_FILE_BEST_LOSS_TRAJ
     )
     best_loss_config_trajectory_file = Path(
-        working_directory, Filenamings.root_file_config_w_best_loss_traj
+        working_directory, Filenamings.ROOT_FILE_CONFIG_W_BEST_LOSS_TRAJ
     )
 
     if not best_loss_trajectory_file.exists():
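For reference, the three root-level text files this hook touches resolve to the paths below. A minimal sketch, not part of the commit: the run directory name is hypothetical, while the constants and file names come verbatim from neps/metahyper/run_file_names.py further down.

from pathlib import Path

from neps.metahyper.run_file_names import Filenamings

working_directory = Path("my_run")  # hypothetical run directory

# my_run/all_losses_and_configs.txt
all_configs_losses = Path(working_directory, Filenamings.ROOT_FILE_ALL_LOSSES_W_CONFIG)

# my_run/best_loss_trajectory.txt
best_loss_trajectory_file = Path(working_directory, Filenamings.ROOT_FILE_BEST_LOSS_TRAJ)

# my_run/best_loss_with_config_trajectory.txt
best_loss_config_trajectory_file = Path(
    working_directory, Filenamings.ROOT_FILE_CONFIG_W_BEST_LOSS_TRAJ
)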
42 changes: 21 additions & 21 deletions neps/metahyper/api.py
@@ -169,16 +169,16 @@ def _process_sampler_info(

 def _load_sampled_paths(optimization_dir: Path | str, serializer, logger):
     optimization_dir = Path(optimization_dir)
-    base_result_directory = optimization_dir / Filenamings.root_directory_results
+    base_result_directory = optimization_dir / Filenamings.ROOT_DIRECTORY_RESULTS
     logger.debug(f"Loading results from {base_result_directory}")
 
     previous_paths, pending_paths = {}, {}
     for config_dir in base_result_directory.iterdir():
         if not config_dir.is_dir():
             continue
         config_id = config_dir.name[len("config_") :]
-        config_file = config_dir / f"{Filenamings.config_file_config_details}"
-        result_file = config_dir / f"{Filenamings.config_file_result_details}"
+        config_file = config_dir / f"{Filenamings.CONFIG_FILE_CONFIG_DETAILS}"
+        result_file = config_dir / f"{Filenamings.CONFIG_FILE_RESULT_DETAILS}"
 
         if non_empty_file(result_file):
             previous_paths[config_id] = (config_dir, config_file, result_file)
@@ -210,9 +210,9 @@ def _load_sampled_paths(optimization_dir: Path | str, serializer, logger):

 def _read_config_result(result_dir: Path | str, serializer: YamlSerializer):
     result_dir = Path(result_dir)
-    config = serializer.load_config(result_dir / Filenamings.config_file_config_details)
-    result = serializer.load(result_dir / Filenamings.config_file_result_details)
-    metadata = serializer.load(result_dir / Filenamings.config_file_metadata_details)
+    config = serializer.load_config(result_dir / Filenamings.CONFIG_FILE_CONFIG_DETAILS)
+    result = serializer.load(result_dir / Filenamings.CONFIG_FILE_RESULT_DETAILS)
+    metadata = serializer.load(result_dir / Filenamings.CONFIG_FILE_METADATA_DETAILS)
     return ConfigResult(config, result, metadata)


@@ -223,7 +223,7 @@ def read(optimization_dir: Path | str, serializer=None, logger=None, do_lock=Tru
         logger = logging.getLogger("metahyper")
 
     if do_lock:
-        decision_lock_file = optimization_dir / Filenamings.root_file_locker
+        decision_lock_file = optimization_dir / Filenamings.ROOT_FILE_LOCKER
         decision_lock_file.touch(exist_ok=True)
         decision_locker = Locker(decision_lock_file, logger.getChild("_locker"))
         while not decision_locker.acquire_lock():
@@ -243,7 +243,7 @@ def read(optimization_dir: Path | str, serializer=None, logger=None, do_lock=Tru
     for config_id, (config_dir, config_file) in pending_paths.items():
         pending_configs[config_id] = serializer.load_config(config_file)
 
-        config_lock_file = config_dir / Filenamings.config_file_locker
+        config_lock_file = config_dir / Filenamings.CONFIG_FILE_LOCKER
         config_locker = Locker(config_lock_file, logger.getChild("_locker"))
         if config_locker.acquire_lock():
             pending_configs_free[config_id] = pending_configs[config_id]
@@ -290,7 +290,7 @@ def _sample_config(optimization_dir, sampler, serializer, logger, pre_load_hooks
         optimization_dir, serializer, logger, do_lock=False
     )
 
-    base_result_directory = optimization_dir / Filenamings.root_directory_results
+    base_result_directory = optimization_dir / Filenamings.ROOT_DIRECTORY_RESULTS
 
     logger.debug("Sampling a new configuration")

@@ -317,25 +317,25 @@ def _sample_config(optimization_dir, sampler, serializer, logger, pre_load_hooks

     if previous_config_id is not None:
         previous_config_id_file = (
-            pipeline_directory / Filenamings.config_file_id_previous_config
+            pipeline_directory / Filenamings.CONFIG_FILE_ID_PREVIOUS_CONFIG
         )
         previous_config_id_file.write_text(previous_config_id)  # TODO: Get rid of this
         serializer.dump(
             {"time_sampled": time.time(), "previous_config_id": previous_config_id},
-            pipeline_directory / Filenamings.config_file_metadata_details,
+            pipeline_directory / Filenamings.CONFIG_FILE_METADATA_DETAILS,
         )
         previous_pipeline_directory = Path(
             base_result_directory, f"config_{previous_config_id}"
         )
     else:
         serializer.dump(
             {"time_sampled": time.time()},
-            pipeline_directory / Filenamings.config_file_metadata_details,
+            pipeline_directory / Filenamings.CONFIG_FILE_METADATA_DETAILS,
         )
         previous_pipeline_directory = None
 
     # We want this to be the last action in sampling to catch potential crashes
-    serializer.dump(config, pipeline_directory / Filenamings.config_file_config_details)
+    serializer.dump(config, pipeline_directory / Filenamings.CONFIG_FILE_CONFIG_DETAILS)
 
     logger.debug(f"Sampled config {config_id}")
     return (
@@ -448,12 +448,12 @@ def metahyper_run(
         logger.warning("Overwriting working_directory")
         shutil.rmtree(optimization_dir)
 
-    sampler_state_file = optimization_dir / Filenamings.root_file_optimizer_state
-    sampler_info_file = optimization_dir / Filenamings.root_file_optimizer_info
-    base_result_directory = optimization_dir / Filenamings.root_directory_results
+    sampler_state_file = optimization_dir / Filenamings.ROOT_FILE_OPTIMIZER_STATE
+    sampler_info_file = optimization_dir / Filenamings.ROOT_FILE_OPTIMIZER_INFO
+    base_result_directory = optimization_dir / Filenamings.ROOT_DIRECTORY_RESULTS
     base_result_directory.mkdir(parents=True, exist_ok=True)
 
-    decision_lock_file = optimization_dir / Filenamings.root_file_locker
+    decision_lock_file = optimization_dir / Filenamings.ROOT_FILE_LOCKER
     decision_lock_file.touch(exist_ok=True)
     decision_locker = Locker(decision_lock_file, logger.getChild("_locker"))

@@ -505,7 +505,7 @@ def metahyper_run(
                 optimization_dir,
             )
 
-            config_lock_file = pipeline_directory / Filenamings.config_file_locker
+            config_lock_file = pipeline_directory / Filenamings.CONFIG_FILE_LOCKER
             config_lock_file.touch(exist_ok=True)
             config_locker = Locker(config_lock_file, logger.getChild("_locker"))
             config_lock_acquired = config_locker.acquire_lock()
@@ -527,7 +527,7 @@ def metahyper_run(
             # 2. Then, we now dump all information to disk:
             serializer.dump(
                 result,
-                pipeline_directory / Filenamings.config_file_result_details,
+                pipeline_directory / Filenamings.CONFIG_FILE_RESULT_DETAILS,
             )
 
             if result != "error":
@@ -555,12 +555,12 @@ def metahyper_run(
             )
 
             config_metadata = serializer.load(
-                pipeline_directory / Filenamings.config_file_metadata_details
+                pipeline_directory / Filenamings.CONFIG_FILE_METADATA_DETAILS
             )
             config_metadata.update(metadata)
             serializer.dump(
                 config_metadata,
-                pipeline_directory / Filenamings.config_file_metadata_details,
+                pipeline_directory / Filenamings.CONFIG_FILE_METADATA_DETAILS,
             )
 
             # 3. Anything the user might want to do after the evaluation
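Several hunks in this file repeat one file-locking idiom: touch the lock file, wrap it in a Locker, then either spin or branch on acquire_lock(). A minimal sketch of that pattern, assuming Locker lives in neps.metahyper.utils (the diff shows only its constructor and acquire_lock(), not its module or release API):

import logging
from pathlib import Path

from neps.metahyper.run_file_names import Filenamings
from neps.metahyper.utils import Locker  # assumed import location

logger = logging.getLogger("metahyper")
optimization_dir = Path("my_run")  # hypothetical

# Root-level "decision" lock, as in read() and metahyper_run():
decision_lock_file = optimization_dir / Filenamings.ROOT_FILE_LOCKER
decision_lock_file.touch(exist_ok=True)  # create the lock file if absent
decision_locker = Locker(decision_lock_file, logger.getChild("_locker"))
while not decision_locker.acquire_lock():  # spin until the lock is ours
    pass  # the actual wait behavior is not shown in this diff

# Per-config lock, as in read()'s pending-config loop:
config_dir = optimization_dir / Filenamings.ROOT_DIRECTORY_RESULTS / "config_1"
config_lock_file = config_dir / Filenamings.CONFIG_FILE_LOCKER
config_locker = Locker(config_lock_file, logger.getChild("_locker"))
if config_locker.acquire_lock():
    pass  # this worker owns config_1; the release step is elided here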
36 changes: 18 additions & 18 deletions neps/metahyper/run_file_names.py
@@ -23,23 +23,23 @@ class Filenamings:
     the root directory.
     """
 
-    root_directory_results = "results"
-    root_directory_summary_csv = "summary_csv"
-    root_directory_summary_tblogger = "summary_tblogger"
-    root_file_locker = ".decision_lock"
-    root_file_all_losses_w_configs = "all_losses_and_configs.txt"
-    root_file_best_loss_traj = "best_loss_trajectory.txt"
-    root_file_config_w_best_loss_traj = "best_loss_with_config_trajectory.txt"
-    root_file_optimizer_state = ".optimizer_state.yaml"
-    root_file_optimizer_info = ".optimizer_info.yaml"
+    ROOT_DIRECTORY_RESULTS = "results"
+    ROOT_DIRECTORY_SUMMARY_CSV = "summary_csv"
+    ROOT_DIRECTORY_SUMMARY_TBLOGGER = "summary_tblogger"
+    ROOT_FILE_LOCKER = ".decision_lock"
+    ROOT_FILE_ALL_LOSSES_W_CONFIG = "all_losses_and_configs.txt"
+    ROOT_FILE_BEST_LOSS_TRAJ = "best_loss_trajectory.txt"
+    ROOT_FILE_CONFIG_W_BEST_LOSS_TRAJ = "best_loss_with_config_trajectory.txt"
+    ROOT_FILE_OPTIMIZER_STATE = ".optimizer_state.yaml"
+    ROOT_FILE_OPTIMIZER_INFO = ".optimizer_info.yaml"
 
-    summary_file_locker = ".summary_lock"
-    summary_file_config_result = "config_data.csv"
-    summary_file_run_status = "run_status.csv"
+    SUMMARY_FILE_LOCKER = ".summary_lock"
+    SUMMARY_FILE_CONFIG_RESULT = "config_data.csv"
+    SUMMARY_FILE_RUN_STATUS = "run_status.csv"
 
-    config_directory_tblogger = "tbevents"
-    config_file_locker = ".config_lock"
-    config_file_config_details = "config.yaml"
-    config_file_metadata_details = "metadata.yaml"
-    config_file_result_details = "result.yaml"
-    config_file_id_previous_config = "previous_config.id"
+    CONFIG_DIRECTORY_TBLOGGER = "tbevents"
+    CONFIG_FILE_LOCKER = ".config_lock"
+    CONFIG_FILE_CONFIG_DETAILS = "config.yaml"
+    CONFIG_FILE_METADATA_DETAILS = "metadata.yaml"
+    CONFIG_FILE_RESULT_DETAILS = "result.yaml"
+    CONFIG_FILE_ID_PREVIOUS_CONFIG = "previous_config.id"
8 changes: 4 additions & 4 deletions neps/plot/tensorboard_eval.py
@@ -104,7 +104,7 @@ def _initialize_writers() -> None:
"/", maxsplit=1
)[-1]
tblogger.config_writer = SummaryWriter_(
tblogger.config_working_directory / Filenamings.config_directory_tblogger
tblogger.config_working_directory / Filenamings.CONFIG_DIRECTORY_TBLOGGER
)
return
# Searching for the initial directory where tensorboard events are stored.
@@ -113,9 +113,9 @@ def _initialize_writers() -> None:
             pipeline_directory=tblogger.config_working_directory
         )
         tblogger.config_id = str(init_dir).rsplit("/", maxsplit=1)[-1]
-        if os.path.exists(init_dir / Filenamings.config_directory_tblogger):
+        if os.path.exists(init_dir / Filenamings.CONFIG_DIRECTORY_TBLOGGER):
             tblogger.config_writer = SummaryWriter_(
-                init_dir / Filenamings.config_directory_tblogger
+                init_dir / Filenamings.CONFIG_DIRECTORY_TBLOGGER
             )
             return
         else:
@@ -416,7 +416,7 @@ def _tracking_incumbent_api() -> None:

         if tblogger.summary_writer is None and tblogger.optim_path:
             tblogger.summary_writer = SummaryWriter_(
-                tblogger.optim_path / Filenamings.root_directory_summary_tblogger
+                tblogger.optim_path / Filenamings.ROOT_DIRECTORY_SUMMARY_TBLOGGER
             )
 
         tblogger.summary_writer.add_scalar(
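The two writers above end up pointing at the locations below; a short sketch assuming tblogger.config_working_directory and tblogger.optim_path are plain Path objects:

from pathlib import Path

from neps.metahyper.run_file_names import Filenamings

config_working_directory = Path("my_run/results/config_1")  # hypothetical
optim_path = Path("my_run")  # hypothetical

# Per-config TensorBoard events, written by config_writer:
per_config_events = config_working_directory / Filenamings.CONFIG_DIRECTORY_TBLOGGER
# -> my_run/results/config_1/tbevents

# Run-wide incumbent summary, written by summary_writer:
incumbent_summary = optim_path / Filenamings.ROOT_DIRECTORY_SUMMARY_TBLOGGER
# -> my_run/summary_tblogger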
8 changes: 4 additions & 4 deletions neps/status/status.py
@@ -145,13 +145,13 @@ def _initiate_summary_csv(
     multi-threaded or multi-process environment.
     """
     root_directory = Path(root_directory)
-    summary_csv_directory = Path(root_directory / Filenamings.root_directory_summary_csv)
+    summary_csv_directory = Path(root_directory / Filenamings.ROOT_DIRECTORY_SUMMARY_CSV)
     summary_csv_directory.mkdir(parents=True, exist_ok=True)
 
-    csv_config_data = summary_csv_directory / Filenamings.summary_file_config_result
-    csv_run_data = summary_csv_directory / Filenamings.summary_file_run_status
+    csv_config_data = summary_csv_directory / Filenamings.SUMMARY_FILE_CONFIG_RESULT
+    csv_run_data = summary_csv_directory / Filenamings.SUMMARY_FILE_RUN_STATUS
 
-    csv_lock_file = summary_csv_directory / Filenamings.summary_file_locker
+    csv_lock_file = summary_csv_directory / Filenamings.SUMMARY_FILE_LOCKER
     csv_lock_file.touch(exist_ok=True)
     csv_locker = Locker(csv_lock_file, logger.getChild("_locker"))

5 changes: 4 additions & 1 deletion neps/utils/common.py
@@ -11,6 +11,7 @@
 import yaml
 
 from ..metahyper.api import ConfigInRun
+from ..metahyper.run_file_names import Filenamings
 from ..optimizers.info import SearcherConfigs


@@ -173,7 +174,9 @@ def get_initial_directory(pipeline_directory: Path | str | None = None) -> Path:

     while True:
         # Get the id of the previous directory
-        previous_pipeline_directory_id = pipeline_directory / "previous_config.id"
+        previous_pipeline_directory_id = (
+            pipeline_directory / Filenamings.CONFIG_FILE_ID_PREVIOUS_CONFIG
+        )
 
         # Get the directory where all configs are saved
         optim_result_dir = pipeline_directory.parent
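The loop edited here follows the previous_config.id chain back to the first directory in a config's lineage. A hypothetical distillation of that traversal; the helper name and the exit condition are reconstructed from the visible context, not copied from the full function:

from pathlib import Path

from neps.metahyper.run_file_names import Filenamings

def walk_to_initial_directory(pipeline_directory: Path) -> Path:
    """Follow previous_config.id links until the initial config is reached."""
    while True:
        previous_id_file = (
            pipeline_directory / Filenamings.CONFIG_FILE_ID_PREVIOUS_CONFIG
        )
        if not previous_id_file.exists():
            # No link recorded: assume this is the initial configuration.
            return pipeline_directory
        previous_id = previous_id_file.read_text()
        # Sibling configs all live under the shared results directory.
        optim_result_dir = pipeline_directory.parent
        pipeline_directory = optim_result_dir / f"config_{previous_id}"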
