attributions: refacto deconvnet and guidedbackprop for pylint
Antonin POCHE committed Oct 19, 2023
commit 47397a2 · 1 parent 5fc13fd
Showing 8 changed files with 105 additions and 95 deletions.
6 changes: 6 additions & 0 deletions
xplique/attributions/gradient_override/__init__.py
@@ -0,0 +1,6 @@
"""
Attributions methods based on gradients override
"""

from .deconvnet import DeconvNet
from .guided_backpropagation import GuidedBackprop
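Both classes remain importable after the refactor. A minimal import sketch (not part of the diff; it assumes the top-level xplique.attributions package keeps re-exporting the two classes from this new subpackage, as it did before the split):

# through the new subpackage introduced by this commit
from xplique.attributions.gradient_override import DeconvNet, GuidedBackprop

# or, presumably unchanged for end users, through the top-level package
from xplique.attributions import DeconvNet, GuidedBackprop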
40 changes: 40 additions & 0 deletions
xplique/attributions/gradient_override/deconvnet.py
@@ -0,0 +1,40 @@
"""
Module related to DeconvNet method
"""

from ...commons import deconv_relu_policy
from ...types import Callable

from .gradient_override import GradientOverride


class DeconvNet(GradientOverride):
    """
    Used to compute the DeconvNet method, which modifies the classic Saliency procedure on
    ReLU's non linearities, allowing only the positive gradients (even from negative inputs) to
    pass through.

    Ref. Zeiler & al., Visualizing and Understanding Convolutional Networks (2013).
    https://arxiv.org/abs/1311.2901

    Parameters
    ----------
    model
        The model from which we want to obtain explanations
    output_layer
        Layer to target for the outputs (e.g logits or after softmax).
        If an `int` is provided it will be interpreted as a layer index.
        If a `string` is provided it will look for the layer name.
        Default to the last layer.
        It is recommended to use the layer before Softmax.
    batch_size
        Number of inputs to explain at once, if None compute all at once.
    operator
        Function g to explain, g take 3 parameters (f, x, y) and should return a scalar,
        with f the model, x the inputs and y the targets. If None, use the standard
        operator g(f, x, y) = f(x)[y].
    reducer
        String, name of the reducer to use. Either "min", "mean", "max" or "sum".
    """

    def _get_override_policy(self) -> Callable:
        return deconv_relu_policy
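For context, a minimal usage sketch of DeconvNet (not part of the diff). The toy Keras model, the random inputs and the one-hot targets are illustrative assumptions; the constructor arguments and the explain call follow the docstring above and the usual Xplique explainer interface:

import numpy as np
import tensorflow as tf
from xplique.attributions import DeconvNet

# toy ReLU classifier and random data, only to exercise the API
model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(8, 3, activation="relu", input_shape=(32, 32, 3)),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(10),
])
inputs = np.random.rand(16, 32, 32, 3).astype(np.float32)
targets = tf.one_hot(np.random.randint(10, size=16), depth=10)

# output_layer is left to its default (the last layer, i.e. the logits above)
explainer = DeconvNet(model, batch_size=8, reducer="mean")
explanations = explainer.explain(inputs, targets)  # one attribution map per input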
42 changes: 42 additions & 0 deletions
xplique/attributions/gradient_override/guided_backpropagation.py
@@ -0,0 +1,42 @@
"""
Module related to Guided Backpropagation method
"""

from ...commons import guided_relu_policy
from ...types import Callable

from .gradient_override import GradientOverride


class GuidedBackprop(GradientOverride):
    """
    Used to compute the Guided Backpropagation, which modifies the classic Saliency procedure on
    ReLU's non linearities, allowing only the positive gradients from positive activations to pass
    through.

    Ref. Tobias & al., Striving for Simplicity: The All Convolutional Net (2014).
    https://arxiv.org/abs/1412.6806

    Parameters
    ----------
    model
        The model from which we want to obtain explanations
    output_layer
        Layer to target for the outputs (e.g logits or after softmax).
        If an `int` is provided it will be interpreted as a layer index.
        If a `string` is provided it will look for the layer name.
        Default to the last layer.
        It is recommended to use the layer before Softmax.
    batch_size
        Number of inputs to explain at once, if None compute all at once.
    operator
        Function g to explain, g take 3 parameters (f, x, y) and should return a scalar,
        with f the model, x the inputs and y the targets. If None, use the standard
        operator g(f, x, y) = f(x)[y].
    reducer
        String, name of the reducer to use. Either "min", "mean", "max" or "sum".
    """

    def _get_override_policy(self) -> Callable:
        return guided_relu_policy
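GuidedBackprop exposes the same constructor as DeconvNet, so the sketch above applies unchanged; the only addition in the sketch below (also not part of the diff) is a custom operator, written to mirror the default g(f, x, y) = f(x)[y] from the docstring. The explicit one-hot formulation is an assumption made for illustration, reusing model, inputs and targets from the previous sketch:

import tensorflow as tf
from xplique.attributions import GuidedBackprop

def target_score(model, inputs, targets):
    # per-input score of the target class, assuming one-hot encoded targets
    return tf.reduce_sum(model(inputs) * targets, axis=-1)

explainer = GuidedBackprop(model, operator=target_score, reducer="max")
explanations = explainer(inputs, targets)  # explainers are also directly callable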
This file was deleted.