From 34ca303b534694ca49bab6b9e1ec8a8e422a720f Mon Sep 17 00:00:00 2001
From: jdecid
Date: Fri, 10 Jan 2020 17:44:20 +0100
Subject: [PATCH] Migrate custom autograd Function to new static requirements
 to avoid future deprecation

---
 grad-cam.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/grad-cam.py b/grad-cam.py
index ec787ffc2..700437283 100644
--- a/grad-cam.py
+++ b/grad-cam.py
@@ -129,14 +129,16 @@ def __call__(self, input, index=None):
 
 
 class GuidedBackpropReLU(Function):
-    def forward(self, input):
+    @staticmethod
+    def forward(ctx, input):
         positive_mask = (input > 0).type_as(input)
         output = torch.addcmul(torch.zeros(input.size()).type_as(input), input, positive_mask)
-        self.save_for_backward(input, output)
+        ctx.save_for_backward(input, output)
         return output
 
-    def backward(self, grad_output):
-        input, output = self.saved_tensors
+    @staticmethod
+    def backward(ctx, grad_output):
+        input, output = ctx.saved_tensors
         grad_input = None
 
         positive_mask_1 = (input > 0).type_as(grad_output)
@@ -159,7 +161,7 @@ def __init__(self, model, use_cuda):
         # replace ReLU with GuidedBackpropReLU
         for idx, module in self.model.features._modules.items():
             if module.__class__.__name__ == 'ReLU':
-                self.model.features._modules[idx] = GuidedBackpropReLU()
+                self.model.features._modules[idx] = GuidedBackpropReLU.apply
 
     def forward(self, input):
         return self.model(input)
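
Note: for reference, below is a minimal sketch of what GuidedBackpropReLU looks like once this patch is applied, using the static-method Function API. The diff truncates the backward body, so the version here fills it in with the standard guided-backprop rule (gradients pass only where both the input and the incoming gradient are positive), written as plain elementwise products rather than the file's chained torch.addcmul calls; the usage check at the bottom is illustrative and not part of the patch.

    import torch
    from torch.autograd import Function


    class GuidedBackpropReLU(Function):
        @staticmethod
        def forward(ctx, input):
            # Forward pass is a plain ReLU: keep positive activations, zero the rest.
            positive_mask = (input > 0).type_as(input)
            output = input * positive_mask
            ctx.save_for_backward(input, output)
            return output

        @staticmethod
        def backward(ctx, grad_output):
            input, output = ctx.saved_tensors
            # Guided backprop: propagate a gradient only where the input was
            # positive AND the incoming gradient is positive.
            positive_mask_1 = (input > 0).type_as(grad_output)
            positive_mask_2 = (grad_output > 0).type_as(grad_output)
            grad_input = grad_output * positive_mask_1 * positive_mask_2
            return grad_input


    # Illustrative usage: static Functions are invoked via .apply and never
    # instantiated, which is why the second hunk assigns GuidedBackpropReLU.apply
    # in place of GuidedBackpropReLU().
    x = torch.randn(4, requires_grad=True)
    y = GuidedBackpropReLU.apply(x)
    y.sum().backward()
    print(x.grad)  # nonzero only where x > 0 (grad_output is all ones here)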