From 55d75349deddace54c99f78c94285596289ce46b Mon Sep 17 00:00:00 2001 From: daquexian Date: Thu, 10 Oct 2019 16:22:33 +0800 Subject: [PATCH] Fix wrong relu implementation on non-ARM devices --- dabnn/layers/Relu.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dabnn/layers/Relu.cpp b/dabnn/layers/Relu.cpp index 4d6dbeb..4d82008 100644 --- a/dabnn/layers/Relu.cpp +++ b/dabnn/layers/Relu.cpp @@ -20,7 +20,10 @@ void Relu::forward_impl() const { } #else float *ptr = static_cast<float *>(*data_mat); - FORZ(i, data_mat->total()) { *ptr = std::max(*ptr, 0.f); } + FORZ(i, data_mat->total()) { + *ptr = std::max(*ptr, 0.f); + ptr++; + } #endif // __ARM_NEON } } // namespace bnn