We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent f6c054f · commit 212f1b5 — Copy full SHA for 212f1b5
finetune.py
@@ -65,8 +65,8 @@ def forward(self, x):
65
for layer, (name, module) in enumerate(self.model.features._modules.items()):
66
x = module(x)
67
if isinstance(module, torch.nn.modules.conv.Conv2d):
68
- x.register_hook(self.compute_rank)
69
- self.activations.append(self.compute_rank(activation_index))
+ x.register_hook(self.compute_rank(activation_index))
+ self.activations.append(x)
70
self.activation_to_layer[activation_index] = layer
71
activation_index += 1
72
0 commit comments