Showing 10 changed files with 232 additions and 57 deletions.
@@ -1,4 +1,5 @@
 from .builders import *
 from .encoders import *
+from .lr_schedulers import *
 from .optimizers import *
 from .q_functions import *
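The file above is presumably the package `__init__` for `d3rlpy.models` (the path is not visible in this diff). The training example later in the commit resolves the new factories through that namespace, so after this re-export they can be constructed as, for instance:

import d3rlpy

factory = d3rlpy.models.WarmupSchedulerFactory(warmup_steps=10000)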
@@ -0,0 +1,94 @@
import dataclasses

from torch.optim import Optimizer
from torch.optim.lr_scheduler import CosineAnnealingLR, LambdaLR, LRScheduler

from ..serializable_config import (
    DynamicConfig,
    generate_optional_config_generation,
)

__all__ = [
    "LRSchedulerFactory",
    "WarmupSchedulerFactory",
    "CosineAnnealingLRFactory",
    "make_lr_scheduler_field",
]

@dataclasses.dataclass()
class LRSchedulerFactory(DynamicConfig):
    """A factory class that creates a learning rate scheduler in a lazy way."""

    def create(self, optim: Optimizer) -> LRScheduler:
        """Returns a learning rate scheduler object.

        Args:
            optim: PyTorch optimizer.

        Returns:
            Learning rate scheduler.
        """
        raise NotImplementedError


@dataclasses.dataclass()
class WarmupSchedulerFactory(LRSchedulerFactory):
    r"""A warmup learning rate scheduler.

    .. math::

        lr = \min((t + 1) / warmup\_steps, 1)

    Args:
        warmup_steps: Warmup steps.
    """

    warmup_steps: int

    def create(self, optim: Optimizer) -> LRScheduler:
        return LambdaLR(
            optim,
            lambda steps: min((steps + 1) / self.warmup_steps, 1),
        )

    @staticmethod
    def get_type() -> str:
        return "warmup"

@dataclasses.dataclass()
class CosineAnnealingLRFactory(LRSchedulerFactory):
    """A cosine annealing learning rate scheduler.

    Args:
        T_max: Maximum time step.
        eta_min: Minimum learning rate.
        last_epoch: Last epoch.
    """

    T_max: int
    eta_min: float = 0.0
    last_epoch: int = -1

    def create(self, optim: Optimizer) -> LRScheduler:
        return CosineAnnealingLR(
            optim,
            T_max=self.T_max,
            eta_min=self.eta_min,
            last_epoch=self.last_epoch,
        )

    @staticmethod
    def get_type() -> str:
        return "cosine_annealing"


register_lr_scheduler_factory, make_lr_scheduler_field = (
    generate_optional_config_generation(
        LRSchedulerFactory,
    )
)

register_lr_scheduler_factory(WarmupSchedulerFactory)
register_lr_scheduler_factory(CosineAnnealingLRFactory)
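As a quick sanity check of the factories in isolation, a minimal sketch follows; the toy model and optimizer are placeholders, not part of this commit:

import torch
from torch.optim import Adam

# toy model/optimizer purely to exercise the factory
model = torch.nn.Linear(4, 2)
optim = Adam(model.parameters(), lr=3e-4)

# create() returns a LambdaLR that scales the base learning rate by
# min((step + 1) / warmup_steps, 1), i.e. a linear ramp up to lr=3e-4
scheduler = WarmupSchedulerFactory(warmup_steps=10000).create(optim)

for step in range(3):
    optim.step()
    scheduler.step()
    print(scheduler.get_last_lr())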
@@ -0,0 +1,59 @@
import argparse

import gymnasium

import d3rlpy


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("--env", type=str, default="Hopper-v2")
    parser.add_argument("--seed", type=int, default=1)
    parser.add_argument("--gpu", action="store_true")
    args = parser.parse_args()

    env = gymnasium.make(args.env)
    eval_env = gymnasium.make(args.env)

    # fix seed
    d3rlpy.seed(args.seed)
    d3rlpy.envs.seed_env(env, args.seed)
    d3rlpy.envs.seed_env(eval_env, args.seed)

    # setup algorithm
    sac = d3rlpy.algos.SACConfig(
        batch_size=256,
        actor_learning_rate=3e-4,
        critic_learning_rate=3e-4,
        actor_optim_factory=d3rlpy.models.AdamFactory(
            # setup learning rate scheduler
            lr_scheduler_factory=d3rlpy.models.WarmupSchedulerFactory(
                warmup_steps=10000
            ),
        ),
        critic_optim_factory=d3rlpy.models.AdamFactory(
            # setup learning rate scheduler
            lr_scheduler_factory=d3rlpy.models.WarmupSchedulerFactory(
                warmup_steps=10000
            ),
        ),
        temp_learning_rate=3e-4,
    ).create(device=args.gpu)

    # replay buffer for experience replay
    buffer = d3rlpy.dataset.create_fifo_replay_buffer(limit=1000000, env=env)

    # start training
    sac.fit_online(
        env,
        buffer,
        eval_env=eval_env,
        n_steps=1000000,
        n_steps_per_epoch=10000,
        update_interval=1,
        update_start_step=1000,
    )


if __name__ == "__main__":
    main()
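Note that warmup_steps=10000 in this example matches n_steps_per_epoch, so with update_interval=1 the actor and critic learning rates ramp up to 3e-4 over roughly the first epoch of online training.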