
Commit

Migrate custom autograd Function to new static requirements to avoid future deprecation
josepdecid committed Jan 10, 2020
1 parent f998b55 commit 34ca303
Showing 1 changed file with 7 additions and 5 deletions.
grad-cam.py
@@ -129,14 +129,16 @@ def __call__(self, input, index=None):
 
 class GuidedBackpropReLU(Function):
 
-    def forward(self, input):
+    @staticmethod
+    def forward(ctx, input):
         positive_mask = (input > 0).type_as(input)
         output = torch.addcmul(torch.zeros(input.size()).type_as(input), input, positive_mask)
-        self.save_for_backward(input, output)
+        ctx.save_for_backward(input, output)
         return output
 
-    def backward(self, grad_output):
-        input, output = self.saved_tensors
+    @staticmethod
+    def backward(ctx, grad_output):
+        input, output = ctx.saved_tensors
         grad_input = None
 
         positive_mask_1 = (input > 0).type_as(grad_output)
@@ -159,7 +161,7 @@ def __init__(self, model, use_cuda):
         # replace ReLU with GuidedBackpropReLU
         for idx, module in self.model.features._modules.items():
             if module.__class__.__name__ == 'ReLU':
-                self.model.features._modules[idx] = GuidedBackpropReLU()
+                self.model.features._modules[idx] = GuidedBackpropReLU.apply
 
     def forward(self, input):
         return self.model(input)
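For readers unfamiliar with the migration, the snippet below is a minimal, self-contained sketch of the new-style static autograd Function pattern this commit adopts: forward and backward become @staticmethod, tensors are stashed on the ctx object instead of self, and the Function is invoked through .apply rather than by instantiating it. The class name GuidedReLUSketch and the simplified backward rule are illustrative only, not the repository's exact code.

import torch
from torch.autograd import Function


class GuidedReLUSketch(Function):
    # Illustrative new-style Function; not the repository's exact GuidedBackpropReLU.

    @staticmethod
    def forward(ctx, input):
        # Zero out negative activations and stash tensors for the backward pass.
        positive_mask = (input > 0).type_as(input)
        output = input * positive_mask
        ctx.save_for_backward(input, output)
        return output

    @staticmethod
    def backward(ctx, grad_output):
        # Standard guided-backprop gating: pass gradient only where both the
        # forward input and the incoming gradient are positive.
        input, _ = ctx.saved_tensors
        positive_mask_1 = (input > 0).type_as(grad_output)
        positive_mask_2 = (grad_output > 0).type_as(grad_output)
        return grad_output * positive_mask_1 * positive_mask_2


# New-style Functions are not instantiated; they are called via .apply,
# which is why the diff replaces GuidedBackpropReLU() with GuidedBackpropReLU.apply.
x = torch.randn(4, requires_grad=True)
y = GuidedReLUSketch.apply(x)
y.sum().backward()
print(x.grad)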
