diff --git a/src/Benchmarks/SharpNeat.Benchmarks/NeuralNets/Double/ActivationFunctions/ActivationFunctionsBenchmarks.cs b/src/Benchmarks/SharpNeat.Benchmarks/NeuralNets/Double/ActivationFunctions/ActivationFunctionsBenchmarks.cs
index 2cd86dee..b558dba7 100644
--- a/src/Benchmarks/SharpNeat.Benchmarks/NeuralNets/Double/ActivationFunctions/ActivationFunctionsBenchmarks.cs
+++ b/src/Benchmarks/SharpNeat.Benchmarks/NeuralNets/Double/ActivationFunctions/ActivationFunctionsBenchmarks.cs
@@ -23,7 +23,6 @@ public class ActivationFunctionsBenchmarks
     static readonly IActivationFunction<double> __PolynomialApproximantSteep = new PolynomialApproximantSteep();
     static readonly IActivationFunction<double> __QuadraticSigmoid = new QuadraticSigmoid();
     static readonly IActivationFunction<double> __ReLU = new ReLU();
-    static readonly IActivationFunction<double> __BitwiseReLU = new BitwiseReLU();
     static readonly IActivationFunction<double> __ScaledELU = new ScaledELU();
     static readonly IActivationFunction<double> __SoftSignSteep = new SoftSignSteep();
     static readonly IActivationFunction<double> __SReLU = new SReLU();
@@ -126,12 +125,6 @@ public void ReLU()
         RunBenchmark(__ReLU);
     }
 
-    [Benchmark]
-    public void BitwiseReLU()
-    {
-        RunBenchmark(__BitwiseReLU);
-    }
-
     [Benchmark]
     public void ScaledELU()
     {
diff --git a/src/SharpNeat/NeuralNets/Double/ActivationFunctions/BitwiseReLU.cs b/src/SharpNeat/NeuralNets/Double/ActivationFunctions/BitwiseReLU.cs
deleted file mode 100644
index 8356706c..00000000
--- a/src/SharpNeat/NeuralNets/Double/ActivationFunctions/BitwiseReLU.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// This file is part of SharpNEAT; Copyright Colin D. Green.
-// See LICENSE.txt for details.
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-namespace SharpNeat.NeuralNets.Double.ActivationFunctions;
-
-/// <summary>
-/// Rectified linear activation unit (ReLU) using bitwise operators.
-/// </summary>
-public sealed class BitwiseReLU : IActivationFunction<double>
-{
-    /// <inheritdoc/>
-    [MethodImpl(MethodImplOptions.AggressiveInlining)]
-    public void Fn(ref double x)
-    {
-        long xlong = Unsafe.As<double, long>(ref x);
-        x = BitConverter.Int64BitsToDouble(xlong & ~(xlong >> 63));
-    }
-
-    /// <inheritdoc/>
-    [MethodImpl(MethodImplOptions.AggressiveInlining)]
-    public void Fn(ref double x, ref double y)
-    {
-        long xlong = Unsafe.As<double, long>(ref x);
-        y = BitConverter.Int64BitsToDouble(xlong & ~(xlong >> 63));
-    }
-
-    /// <inheritdoc/>
-    public void Fn(Span<double> v)
-    {
-        Fn(ref MemoryMarshal.GetReference(v), v.Length);
-    }
-
-    /// <inheritdoc/>
-    public void Fn(ReadOnlySpan<double> v, Span<double> w)
-    {
-        // Obtain refs to the spans, and call on to the unsafe ref based overload.
-        Fn(
-            ref MemoryMarshal.GetReference(v),
-            ref MemoryMarshal.GetReference(w),
-            v.Length);
-    }
-
-    /// <inheritdoc/>
-    public void Fn(ref double vref, int len)
-    {
-        // Calc span bounds.
-        ref double vrefBound = ref Unsafe.Add(ref vref, len);
-
-        // Loop over span elements, invoking the scalar activation fn for each.
-        for(; Unsafe.IsAddressLessThan(ref vref, ref vrefBound);
-            vref = ref Unsafe.Add(ref vref, 1))
-        {
-            Fn(ref vref);
-        }
-    }
-
-    /// <inheritdoc/>
-    public void Fn(ref double vref, ref double wref, int len)
-    {
-        // Calc span bounds.
-        ref double vrefBound = ref Unsafe.Add(ref vref, len);
-
-        // Loop over span elements, invoking the scalar activation fn for each.
-        for(; Unsafe.IsAddressLessThan(ref vref, ref vrefBound);
-            vref = ref Unsafe.Add(ref vref, 1),
-            wref = ref Unsafe.Add(ref wref, 1))
-        {
-            Fn(ref vref, ref wref);
-        }
-    }
-}
diff --git a/src/SharpNeat/NeuralNets/Double/ActivationFunctions/ReLU.cs b/src/SharpNeat/NeuralNets/Double/ActivationFunctions/ReLU.cs
index ac3661e4..6ceb8bda 100644
--- a/src/SharpNeat/NeuralNets/Double/ActivationFunctions/ReLU.cs
+++ b/src/SharpNeat/NeuralNets/Double/ActivationFunctions/ReLU.cs
@@ -13,17 +13,31 @@ public sealed class ReLU : IActivationFunction<double>
     /// <inheritdoc/>
     public void Fn(ref double x)
     {
-        if(x < 0.0)
-            x = 0.0;
+        // Calculate the equivalent of:
+        //
+        //    return x < 0.0 ? 0.0 : x;
+        //
+        // The approach used here uses bit manipulation of the double precision bits to achieve faster performance. The
+        // performance improvement is due to the avoidance of the conditional branch.
+
+        // Get the bits of the double as a signed long (noting that the high bit is the sign bit for both double and
+        // long).
+        long xlong = Unsafe.As<double, long>(ref x);
+
+        // Shift xlong right 63 bits. This shifts all of the value bits out of the value; these bits are replaced with
+        // the sign bit (which is how shift right works for signed types). Therefore, if xlong was negative then all
+        // the bits are set to 1 (including the sign bit), otherwise they are all set to zero.
+        // We then take the complement (flip all the bits), and bitwise AND the result with the original value of xlong.
+        // This means that we AND xlong with zeros when x is negative, and AND with all ones when x is positive,
+        // thus achieving the ReLU function without using a conditional branch.
+        x = BitConverter.Int64BitsToDouble(xlong & ~(xlong >> 63));
     }
 
     /// <inheritdoc/>
     public void Fn(ref double x, ref double y)
     {
-        y = x;
-
-        if(x < 0.0)
-            y = 0.0;
+        long xlong = Unsafe.As<double, long>(ref x);
+        y = BitConverter.Int64BitsToDouble(xlong & ~(xlong >> 63));
     }
 
     /// <inheritdoc/>
diff --git a/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ActivationFunctionMonotonicityTests.cs b/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ActivationFunctionMonotonicityTests.cs
index 2667937a..0ceea072 100644
--- a/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ActivationFunctionMonotonicityTests.cs
+++ b/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ActivationFunctionMonotonicityTests.cs
@@ -19,7 +19,6 @@ public void TestMonotonicity()
         AssertMonotonic(new PolynomialApproximantSteep(), true);
         AssertMonotonic(new QuadraticSigmoid(), false);
         AssertMonotonic(new ReLU(), false);
-        AssertMonotonic(new BitwiseReLU(), false);
         AssertMonotonic(new ScaledELU(), true);
         AssertMonotonic(new SoftSignSteep(), true);
         AssertMonotonic(new SReLU(), true);
diff --git a/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ReLUTests.cs b/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ReLUTests.cs
new file mode 100644
index 00000000..11994b6b
--- /dev/null
+++ b/src/Tests/SharpNeat.Tests/NeuralNets/Double/ActivationFunctions/ReLUTests.cs
@@ -0,0 +1,40 @@
+using FluentAssertions;
+using Xunit;
+
+namespace SharpNeat.NeuralNets.Double.ActivationFunctions;
+
+#pragma warning disable xUnit1025 // InlineData should be unique within the Theory it belongs to
+
+public class ReLUTests
+{
+    [Theory]
+    [InlineData(0.0)]
+    [InlineData(-0.0)]
+    [InlineData(-0.000001)]
+    [InlineData(+0.000001)]
+    [InlineData(-0.1)]
+    [InlineData(0.1)]
+    [InlineData(-1.1)]
+    [InlineData(1.1)]
+    [InlineData(-1_000_000.0)]
+    [InlineData(1_000_000.0)]
+    [InlineData(double.Epsilon)]
+    [InlineData(-double.Epsilon)]
+    [InlineData(double.MinValue)]
+    [InlineData(double.MaxValue)]
+    [InlineData(double.PositiveInfinity)]
+    [InlineData(double.NegativeInfinity)]
+    public void BitwiseReLUGivesCorrectResponses(double x)
+    {
+        // Arrange.
+        var relu = new ReLU();
+
+        // Act.
+        double actual = x;
+        relu.Fn(ref actual);
+
+        // Assert.
+        double expected = x < 0.0 ? 0.0 : x;
+        actual.Should().Be(expected);
+    }
+}
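
Note (not part of the diff above): the comment block added to `ReLU.Fn` explains the branchless bit trick. The standalone sketch below reproduces those same two lines outside the library and compares them against the plain conditional form for a few sample inputs. The `BranchlessReluDemo`/`BranchlessReLU` names are illustrative only, and the sketch assumes a modern .NET runtime where `Unsafe.As<double, long>` is available.

```csharp
// Standalone sketch (illustrative names, not part of SharpNEAT): reproduces the
// branchless ReLU bit trick from ReLU.Fn and compares it with the conditional form.
using System;
using System.Runtime.CompilerServices;

internal static class BranchlessReluDemo
{
    static double BranchlessReLU(double x)
    {
        // Reinterpret the double's bits as a signed 64-bit integer.
        long xlong = Unsafe.As<double, long>(ref x);

        // Arithmetic shift right by 63 smears the sign bit across all 64 bits:
        // all ones for negative x, all zeros otherwise. Complementing and ANDing
        // with the original bits therefore zeroes negative inputs and passes
        // everything else through unchanged.
        return BitConverter.Int64BitsToDouble(xlong & ~(xlong >> 63));
    }

    static void Main()
    {
        double[] samples = { -2.5, -0.0, 0.0, double.Epsilon, 1.5, double.NegativeInfinity, double.MaxValue };

        foreach(double x in samples)
        {
            double branching = x < 0.0 ? 0.0 : x;    // original conditional form
            double branchless = BranchlessReLU(x);   // bit-twiddling form
            Console.WriteLine($"x={x,23}  branching={branching,23}  branchless={branchless,23}");
        }
    }
}
```

One subtle difference worth noting: for an input of -0.0 the conditional form returns -0.0 while the bitwise form returns +0.0; the two values compare equal under ==, so the -0.0 case in the new ReLUTests is unaffected.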