diff --git a/src/test/java/org/xerial/snappy/SnappyHadoopCompatibleOutputStreamTest.java b/src/test/java/org/xerial/snappy/SnappyHadoopCompatibleOutputStreamTest.java
index 72422bc3..e9c66bf9 100644
--- a/src/test/java/org/xerial/snappy/SnappyHadoopCompatibleOutputStreamTest.java
+++ b/src/test/java/org/xerial/snappy/SnappyHadoopCompatibleOutputStreamTest.java
@@ -2,7 +2,6 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.SystemUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.SnappyCodec;
 import org.junit.AfterClass;
@@ -25,13 +24,13 @@
     public static void loadHadoopNativeLibrary() throws Exception {
         final String libResourceFolder;
         Map libraryNames = new LinkedHashMap<>();
-        if (SystemUtils.IS_OS_LINUX) {
+        if ("Linux".equals(OSInfo.getOSName())) {
             libResourceFolder = "/lib/Linux";
             libraryNames.put("libhadoop.so", "libhadoop.so");
             // certain Linux systems need these shared library be copied before the JVM started, see build.sbt
             libraryNames.put("libsnappy.so", "libsnappy.so");
             libraryNames.put("libsnappy.so.1", "libsnappy.so");
-        } else if (SystemUtils.IS_OS_MAC_OSX) {
+        } else if ("Mac".equals(OSInfo.getOSName())) {
             libResourceFolder = "/lib/MacOSX";
             libraryNames.put("libhadoop.dylib", "libhadoop.dylib");
             libraryNames.put("libsnappy.dylib", "libsnappy.dylib");
diff --git a/src/test/java/org/xerial/snappy/SnappyOutputStreamTest.java b/src/test/java/org/xerial/snappy/SnappyOutputStreamTest.java
index d7831cbe..a79eb198 100755
--- a/src/test/java/org/xerial/snappy/SnappyOutputStreamTest.java
+++ b/src/test/java/org/xerial/snappy/SnappyOutputStreamTest.java
@@ -180,7 +180,7 @@
         if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)
             assertEquals(90992, expectedCompressedData.length);
         else
-            assertEquals(91080, expectedCompressedData.length);
+            assertEquals(91051, expectedCompressedData.length);
         // The chunk size should not affect the size of the compressed output:
         int[] chunkSizes = new int[] {1, 100, 1023, 1024, 10000};
         for (int chunkSize : chunkSizes) {