diff --git a/.gitattributes b/.gitattributes
index e280dc95e78ac..7985aa57bc658 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -18,7 +18,7 @@
 /sklearn/utils/arrayfuncs.c -diff
 /sklearn/utils/graph_shortest_path.c -diff
 /sklearn/utils/lgamma.c -diff
-sklearn/utils/_logistic_sigmoid.c -diff
+/sklearn/utils/_logistic_sigmoid.c -diff
 /sklearn/utils/murmurhash.c -diff
 /sklearn/utils/seq_dataset.c -diff
 /sklearn/utils/sparsefuncs.c -diff
diff --git a/sklearn/neural_network/tests/test_rbm.py b/sklearn/neural_network/tests/test_rbm.py
index 9d6535b1dd186..791fdc4990d74 100644
--- a/sklearn/neural_network/tests/test_rbm.py
+++ b/sklearn/neural_network/tests/test_rbm.py
@@ -101,15 +101,12 @@ def test_gibbs_smoke():
 
 
 def test_pseudo_likelihood_no_clipping():
-    """
-    checks that the pseudo likelihood is computed without clipping,
-    which happened until commit 52d1b778ca7164ac04ea9f8ba39077054954b77a
-    the new implementation (as of commit
-    52d1b778ca7164ac04ea9f8ba39077054954b77a ) follows:
+    """Check that the pseudo likelihood is computed without clipping.
+
     http://fa.bianp.net/blog/2013/numerical-optimizers-for-logistic-regression/
     """
     rng = np.random.RandomState(42)
-    X = np.array([[0. for i in xrange(1000)], [1. for i in xrange(1000)]])
+    X = np.vstack([np.zeros(1000), np.ones(1000)])
     rbm1 = BernoulliRBM(n_components=10, batch_size=2, n_iter=10,
                         random_state=rng)
     rbm1.fit(X)