Commit

fixup
mcabbott committed Feb 20, 2022
1 parent e364bd0 commit 02f6594
Showing 2 changed files with 5 additions and 9 deletions.
8 changes: 2 additions & 6 deletions src/layers/basic.jl
@@ -301,7 +301,7 @@ end
     Bilinear((in1, in2) => out, σ=identity; bias=true, init=glorot_uniform)
     Bilinear(W::AbstractArray, [bias, σ])
-Creates a bilinear layer, which operates on two inputs at the same time.
+Creates a fully connected layer which operates on two inputs.
 Its output, given vectors `x` & `y`, is another vector `z` with,
 for all `i ∈ 1:out`:
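
For context, a minimal usage sketch of the constructor documented above (illustrative only; the sizes 5, 6 and 7 are made up, not part of this commit):

    using Flux

    b = Flux.Bilinear((5, 6) => 7)          # two inputs of lengths 5 and 6, one output of length 7
    x, y = rand(Float32, 5), rand(Float32, 6)
    z = b(x, y)                             # length-7 vector; roughly z[i] = σ(x' * b.weight[i, :, :] * y + bias)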
@@ -394,11 +394,7 @@ function Base.show(io::IO, l::Bilinear)
     print(io, "Bilinear((", size(l.weight, 2), ", ", size(l.weight, 3), ") => ", size(l.weight, 1))
   end
   l.σ == identity || print(io, ", ", l.σ)
-<<<<<<< HEAD
-  l.bias == Flux.Zeros() && print(io, "; bias=false")
-=======
-  l.bias === false && print(io, ", bias=false")
->>>>>>> 1ef2cd377 (rm Flux.Zeros, take N+1)
+  l.bias === false && print(io, "; bias=false")
   print(io, ")")
 end
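
This hunk resolves a leftover merge conflict: the `Flux.Zeros()` check is dropped and the surviving branch keeps the semicolon, so an absent bias prints the same way it is passed to the constructor. A rough sketch of the effect (the printed form in the comment is an expectation, not output captured from this commit):

    b = Flux.Bilinear((5, 6) => 7; bias=false)
    show(stdout, b)
    # expected to print something like: Bilinear((5, 6) => 7; bias=false)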

6 changes: 3 additions & 3 deletions test/utils.jl
@@ -264,7 +264,7 @@ end
 end

 @testset "zero bias" begin
-  m = Dense(3,2; bias=false)
+  m = Dense(3 => 2; bias=false)
   @test f64(m).bias === m.bias === false
   @test f32(m).bias === m.bias === false

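These test tweaks only swap the two-argument `Dense(3,2)` call for the `Dense(3 => 2)` Pair form; the behaviour under test is unchanged. A small sketch of what the assertions rely on (illustrative, not lines from the diff):

    m = Flux.Dense(3 => 2; bias=false)   # 3 inputs => 2 outputs, no trainable bias
    m.bias === false                     # with bias=false the field is literally `false`
    Flux.f64(m).bias === false           # precision conversion leaves the absent bias alone
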
@@ -376,8 +376,8 @@ end
 end

 @testset "loadparams! & absent bias" begin
-  m0 = Dense(2,3; bias=false, init = Flux.ones32)
-  m1 = Dense(2,3; bias = Flux.randn32(3))
+  m0 = Dense(2 => 3; bias=false, init = Flux.ones32)
+  m1 = Dense(2 => 3; bias = Flux.randn32(3))
   m2 = Dense(Float32[1 2; 3 4; 5 6], Float32[7, 8, 9])

   Flux.loadparams!(m1, Flux.params(m2))
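
For reference, a minimal sketch of what this testset exercises, reusing the layers named above (the follow-up checks are my assumption of the intent, not lines from the diff):

    m1 = Flux.Dense(2 => 3; bias = Flux.randn32(3))
    m2 = Flux.Dense(Float32[1 2; 3 4; 5 6], Float32[7, 8, 9])

    Flux.loadparams!(m1, Flux.params(m2))   # copy m2's weight and bias into m1
    m1.weight == m2.weight                  # expected to hold after loading
    m1.bias == m2.bias                      # expected to hold after loading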
