You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
I would like to ask how to access the hook data (self.layer_outputs) from the loss function, as I need that data to compute my loss. I have investigated and I haven't found a way. What is the best workaround?
Beforehand, much appreciated for your help!
Here is the code:
class OutputHook:
    """Capture the forward outputs of selected layers via forward hooks.

    Args:
        module (nn.Module): The whole module to get layers from.
        outputs (tuple[str] | list[str], optional): Dotted names of the
            layers whose outputs should be captured. Default: None.
        as_tensor (bool): If True, keep captured outputs as tensors;
            otherwise store detached numpy arrays. Default: False.
    """

    def __init__(self, module, outputs=None, as_tensor=False):
        self.outputs = outputs
        self.as_tensor = as_tensor
        # Maps layer name -> most recent forward output of that layer.
        self.layer_outputs = {}
        # Hook handles kept so they can be detached later in remove().
        self.handles = []
        self.register(module)

    def register(self, module):
        """Attach a forward hook to every layer named in ``self.outputs``."""

        def hook_wrapper(name):

            def hook(model, input, output):
                if not isinstance(output, torch.Tensor):
                    # Non-tensor outputs (e.g. tuples of tensors) are
                    # stored unchanged, with a warning.
                    warnings.warn(f'Directly return the output from {name}, '
                                  f'since it is not a tensor')
                    self.layer_outputs[name] = output
                elif self.as_tensor:
                    self.layer_outputs[name] = output
                else:
                    self.layer_outputs[name] = output.detach().cpu().numpy()

            return hook

        if isinstance(self.outputs, (list, tuple)):
            for name in self.outputs:
                # Keep the try body minimal: only the attribute lookup can
                # legitimately raise here. Chain the original error so the
                # exact failing sub-path stays visible in the traceback.
                try:
                    layer = rgetattr(module, name)
                except AttributeError as e:
                    raise AttributeError(f'Module {name} not found') from e
                self.handles.append(
                    layer.register_forward_hook(hook_wrapper(name)))

    def remove(self):
        """Detach all registered forward hooks."""
        for h in self.handles:
            h.remove()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always clean up hooks when used as a context manager.
        self.remove()
# Using wonder's beautiful simplification:
# https://stackoverflow.com/questions/31174295/getattr-and-setattr-on-nested-objects
def rgetattr(obj, attr, *args):
    """Resolve a dotted attribute path (e.g. ``'backbone.layer1'``) on *obj*.

    An optional extra positional argument acts as a default, exactly like
    the third argument of :func:`getattr`, applied at every step.
    """
    current = obj
    for part in attr.split('.'):
        current = getattr(current, part, *args)
    return current
@HOOKS.register_module()
class OutputOuternHook(Hook):
    """Runner hook that attaches an ``OutputHook`` around each train iter.

    Args:
        outputs (tuple[str] | list[str]): Dotted layer names whose outputs
            should be captured during the iteration.
        as_tensor (bool): Passed through to ``OutputHook``; controls
            whether captured outputs stay tensors or become numpy arrays.
    """

    def __init__(self, outputs, as_tensor):
        self.outputs = outputs
        self.as_tensor = as_tensor

    def before_train_iter(self, runner):
        # OutputHook is defined in pyskl/core/hooks.py by default
        self.output_hook = OutputHook(runner.model, self.outputs,
                                      self.as_tensor)

    def after_train_iter(self, runner):
        # Detach the forward hooks once the iteration has finished.
        self.output_hook.remove()
@LOSSES.register_module()
class CustomLoss(nn.Module):
    """Loss module that loads auxiliary data from a file at construction.

    Args:
        test_data (str): Path to a file loadable by ``mmcv.load``.
    """

    def __init__(self, test_data):
        super().__init__()
        self.test_data = mmcv.load(test_data)

    def forward(self, cls_score, label, **kwargs):
        """Compute the loss for ``cls_score`` against ``label``.

        Any extra keyword arguments are printed for inspection.
        """
        for key, value in kwargs.items():
            print(f"{key}: {value}")
        # How to access the values of the output hooks? Can I access the hooks or runner from here?
        # NOTE(review): `loss = ...` is a placeholder (Ellipsis) left by the
        # author; the actual loss computation is not implemented here.
        loss = ...
        return loss
reacted with thumbs up emoji reacted with thumbs down emoji reacted with laugh emoji reacted with hooray emoji reacted with confused emoji reacted with heart emoji reacted with rocket emoji reacted with eyes emoji
-
Hello!
I would like to ask how to access the hook data (self.layer_outputs) from the loss function, as I need that data to compute my loss. I have investigated and I haven't found a way. What is the best workaround?
Beforehand, much appreciated for your help!
Here is the code:
Beta Was this translation helpful? Give feedback.
All reactions