Commit 5ae496d: lint

PeterSH6 committed Jan 17, 2025
1 parent 1d937b7

Showing 2 changed files with 2 additions and 2 deletions.

verl/workers/actor/dp_actor.py (1 addition & 1 deletion)
@@ -142,7 +142,7 @@ def _make_minibatch_iterator(self, data: DataProto) -> Iterable[DataProto]:
         data = data.select(batch_keys=select_keys)
         return data.make_iterator(mini_batch_size=self.config.ppo_mini_batch_size,
                                   epochs=self.config.ppo_epochs,
-                                  dataloader_kwargs={'shuffle': False})  # TODO: hardcode to False
+                                  dataloader_kwargs={'shuffle': False})  # TODO: hardcode to False
 
     def _optimizer_step(self):
         assert self.config.grad_clip is not None
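For context, an iterator returned by `_make_minibatch_iterator` is typically consumed in a PPO update loop. The sketch below is an illustration only, not code from this commit: `update_policy`, `_compute_policy_loss`, and the `grad_norm` return value of `_optimizer_step` are hypothetical names.

# Hypothetical sketch, not verl's implementation: drive PPO minibatch updates
# with the iterator built above. Every helper name is assumed for illustration.
def update_policy(self, data: DataProto) -> dict:
    metrics = {}
    for mini_batch in self._make_minibatch_iterator(data):
        # Compute the PPO policy loss on one minibatch (assumed helper).
        loss = self._compute_policy_loss(mini_batch)
        loss.backward()
        # Clip gradients and apply the optimizer; returning a norm is an assumption.
        grad_norm = self._optimizer_step()
        metrics['actor/grad_norm'] = float(grad_norm)
    return metrics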

verl/workers/critic/dp_critic.py (1 addition & 1 deletion)
@@ -104,7 +104,7 @@ def _make_minibatch_iterator(self, data: DataProto) -> Iterable[DataProto]:
         data = data.select(batch_keys=select_keys)
         return data.make_iterator(mini_batch_size=self.config.ppo_mini_batch_size,
                                   epochs=self.config.ppo_epochs,
-                                  dataloader_kwargs={'shuffle': False})  # TODO: hardcode to False
+                                  dataloader_kwargs={'shuffle': False})  # TODO: hardcode to False
 
     def _optimizer_step(self):
         assert self.config.grad_clip is not None
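Both hunks end just above `_optimizer_step`, which asserts that `grad_clip` is configured. As a rough sketch of what a gradient-clipped step can look like with standard PyTorch; the `critic_module` and `critic_optimizer` attribute names are assumptions, not taken from this diff:

import torch

# Hypothetical sketch consistent with the assert shown above, not verl's code:
# clip gradients to config.grad_clip, then step and reset the optimizer.
def _optimizer_step(self):
    assert self.config.grad_clip is not None
    grad_norm = torch.nn.utils.clip_grad_norm_(
        self.critic_module.parameters(),       # attribute name assumed
        max_norm=self.config.grad_clip)
    self.critic_optimizer.step()               # attribute name assumed
    self.critic_optimizer.zero_grad()
    return grad_norm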
