-
Notifications
You must be signed in to change notification settings - Fork 8
/
logger.py
49 lines (40 loc) · 1.67 KB
/
logger.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
import logging
import os
import time
from datetime import datetime
from torch.utils.tensorboard import SummaryWriter
class Logger:
    """Run logger that mirrors training metrics to TensorBoard and stdlib logging.

    On construction it creates a per-run directory under ``logs/`` named with a
    minute-resolution timestamp plus *run_name*, opens a TensorBoard
    ``SummaryWriter`` there, and configures the root logger to write both to
    the console and to ``<log_name>.log``.
    """

    def __init__(self, run_name, args):
        """Create log/model directories, the TensorBoard writer, and logging.

        Args:
            run_name: Human-readable suffix for this run's log directory.
            args: Run configuration (any repr-able object); logged at startup.
        """
        # Minute-resolution timestamp; strftime replaces the original
        # string-slicing hack that dropped the seconds field.
        stamp = datetime.now().strftime('%Y-%m-%d_%H:%M')
        self.log_name = f'logs/{stamp}_{run_name}'
        self.start_time = time.time()
        self.n_eps = 0
        # Fix: create each directory independently. Previously 'models' was
        # only created when 'logs' was missing, so a pre-existing 'logs'
        # directory silently left 'models' uncreated.
        os.makedirs('logs', exist_ok=True)
        os.makedirs('models', exist_ok=True)
        self.writer = SummaryWriter(self.log_name)
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(message)s',
            handlers=[
                logging.StreamHandler(),
                logging.FileHandler(f'{self.log_name}.log'),
            ],
            datefmt='%Y/%m/%d %I:%M:%S %p')
        logging.info(args)

    def log_scalars(self, scalar_dict, step):
        """Write every ``tag -> value`` pair in *scalar_dict* to TensorBoard at *step*."""
        for tag, value in scalar_dict.items():
            self.writer.add_scalar(tag, value, step)

    def log_episode(self, info, step):
        """Record finished episodes from *info* (an iterable of per-env dicts).

        Each dict is expected to carry 'returns/episodic_reward' and
        'returns/episodic_length'; entries whose reward is None (or whose
        reward key is absent) correspond to unfinished episodes and are
        skipped. Finished episodes are counted, sent to TensorBoard, and
        summarized via logging.
        """
        for episode_dict in info:
            # .get() tolerates a missing key the same way as an explicit None.
            if episode_dict.get('returns/episodic_reward') is not None:
                self.n_eps += 1
                self.log_scalars(episode_dict, step)
                reward = episode_dict['returns/episodic_reward']
                length = episode_dict['returns/episodic_length']
                # Wall-clock hours since this Logger was constructed.
                time_expired = (time.time() - self.start_time) / 60 / 60
                logging.info(f"> ep = {self.n_eps} | total steps = {step}"
                             f" | reward = {reward} | length = {length}"
                             f" | hours = {time_expired:.3f}")