
Commit 24d9771

Update Flux dependency to v0.13 (#53)
1 parent 667e062 commit 24d9771

12 files changed: +7 −14 lines

Project.toml

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 [compat]
 ColorSchemes = "3"
 Distributions = "0.25"
-Flux = "0.12"
+Flux = "0.12, 0.13"
 ImageCore = "0.8, 0.9"
 PrettyTables = "1"
 Tullio = "0.3"

docs/literate/advanced_lrp.jl

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ heatmap(input, analyzer)
 # ### Registering activation functions
 # The mechanism for registering custom activation functions is analogous to that of custom layers:
 myrelu(x) = max.(0, x)
-model = Chain(flatten, Dense(784, 100, myrelu), Dense(100, 10))
+model = Chain(Flux.flatten, Dense(784, 100, myrelu), Dense(100, 10))

 # Once again, creating an LRP analyzer for this model will throw an `ArgumentError`
 # and display the following model check summary:
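
To round out the snippet above, a hypothetical sketch of the registration step, assuming an `LRP_CONFIG.supports_activation` hook analogous to the `LRP_CONFIG.supports_layer` hook shown in `src/lrp_checks.jl` below (the hook name is an assumption here, not confirmed by this diff):

```julia
using Flux
using ExplainableAI

myrelu(x) = max.(0, x)
model = Chain(Flux.flatten, Dense(784, 100, myrelu), Dense(100, 10))

# Assumed registration hook, analogous to LRP_CONFIG.supports_layer:
LRP_CONFIG.supports_activation(::typeof(myrelu)) = true

analyzer = LRP(model)  # the model check should now pass instead of throwing
```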

src/flux.jl

Lines changed: 2 additions & 2 deletions
@@ -61,11 +61,11 @@ function strip_softmax(model::Chain)
 end
 strip_softmax(l::Union{Dense,Conv}) = set_params(l, l.weight, l.bias, identity)

-# helper function to work around Flux.Zeros
+# helper function to work around `bias=false` (Flux v0.13) and `bias=Flux.Zeros` (v0.12)
 function get_params(layer)
     W = layer.weight
     b = layer.bias
-    if typeof(b) <: Flux.Zeros
+    if b == false || typeof(b) <: Flux.Zeros
         b = zeros(eltype(W), size(W, 1))
     end
     return W, b
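
To illustrate the case this helper now covers, a minimal sketch assuming Flux v0.13, where a disabled bias is stored as the literal `false`; `get_params` is the internal helper from the hunk above, not part of the public API:

```julia
using Flux
using ExplainableAI

layer = Dense(3 => 2; bias=false)
layer.bias === false                     # true on Flux v0.13 (v0.12 stored Flux.Zeros here)

W, b = ExplainableAI.get_params(layer)   # internal helper shown above
size(b) == (2,)                          # the missing bias is replaced by an explicit zero vector
```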

src/lrp_checks.jl

Lines changed: 0 additions & 8 deletions
@@ -78,14 +78,6 @@ function check_model(::Val{:LRP}, c::Chain; verbose=true)
 LRP_CONFIG.supports_layer(::typeof(mylayer)) = true # for functions
 ```
 The default fallback for this layer will use Automatic Differentiation according to "Layer-Wise Relevance Propagation: An Overview".
-You can also define a fully LRP-custom rule for your layer by using the interface
-```julia
-function (rule::AbstractLRPRule)(layer::MyLayer, aₖ, Rₖ₊₁)
-    # ...
-    return Rₖ
-end
-```
-This pattern can also be used to dispatch on specific rules.
 """,
 ),
)
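
For illustration of the registration pattern kept in the docstring above, a hypothetical sketch: the `MyDoublingLayer` type is made up here, and only the `LRP_CONFIG.supports_layer` call is taken from the docstring.

```julia
using Flux
using ExplainableAI

# A made-up custom layer that the LRP model check does not know about:
struct MyDoublingLayer end
(::MyDoublingLayer)(x) = 2 .* x

# Mark it as supported; the unknown layer then uses the AD-based fallback
# mentioned in the docstring.
LRP_CONFIG.supports_layer(::MyDoublingLayer) = true
# LRP_CONFIG.supports_layer(::typeof(mylayer)) = true  # variant for plain functions

model = Chain(Flux.flatten, Dense(784, 100, relu), MyDoublingLayer(), Dense(100, 10))
analyzer = LRP(model)
```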
4 binary files changed (contents not shown); two of them shrank by 1.06 KB each.

test/test_canonize.jl

Lines changed: 1 addition & 0 deletions
@@ -1,4 +1,5 @@
 using Flux
+using Flux: flatten
 using ExplainableAI
 using ExplainableAI: fuse_batchnorm
 using Random

test/test_rules.jl

Lines changed: 0 additions & 1 deletion
@@ -139,7 +139,6 @@ layers = Dict(
     "Conv" => Conv((3, 3), 2 => 4; init=pseudorandn),
     "MaxPool" => MaxPool((3, 3)),
     "MeanPool" => MaxPool((3, 3)),
-    "DepthwiseConv" => DepthwiseConv((3, 3), 2 => 4; init=pseudorandn),
     "ConvTranspose" => ConvTranspose((3, 3), 2 => 4; init=pseudorandn),
     "CrossCor" => CrossCor((3, 3), 2 => 4; init=pseudorandn),
     "flatten" => flatten,
