Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(android): enable armv8.2-a+fp16+dotprod #27

Merged
merged 2 commits into from
Oct 19, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 36 additions & 23 deletions android/src/main/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ project(llama.rn)
set(CMAKE_CXX_STANDARD 11)
set(RNLLAMA_LIB_DIR ${CMAKE_SOURCE_DIR}/../../../cpp)

include_directories(${RNLLAMA_LIB_DIR})

set(
SOURCE_FILES
${RNLLAMA_LIB_DIR}/ggml-alloc.c
Expand All @@ -19,36 +21,47 @@ set(
${CMAKE_SOURCE_DIR}/jni.cpp
)

# build_library(<target_name>)
#
# Defines a shared library target built from ${SOURCE_FILES} and applies the
# common compile/link options. The special target name "rnllama_v8fp16_va"
# additionally enables armv8.2-a fp16/dotprod code generation; the Java-side
# loader selects that variant at runtime when the CPU advertises fp16 support.
function(build_library target_name)
  add_library(
    ${target_name}
    SHARED
    ${SOURCE_FILES}
  )

  # Android log library; result is cached, so calling this per target is fine.
  find_library(LOG_LIB log)
  # PRIVATE: these are implementation-only link dependencies of the .so.
  target_link_libraries(${target_name} PRIVATE ${LOG_LIB} android)

  # Quoted STREQUAL so an unexpected value can never be re-dereferenced.
  if ("${target_name}" STREQUAL "rnllama_v8fp16_va")
    target_compile_options(${target_name} PRIVATE -march=armv8.2-a+fp16+dotprod)
  endif ()

  target_compile_options(${target_name} PRIVATE -DLM_GGML_USE_K_QUANTS -pthread)

  # Quoted: CMAKE_BUILD_TYPE may be empty (e.g. not set by the caller).
  if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
    target_compile_options(${target_name} PRIVATE -DRNLLAMA_ANDROID_ENABLE_LOGGING)
  endif ()

  # NOTE: If you want to debug the native code, you can uncomment if and endif
  # if (NOT ${CMAKE_BUILD_TYPE} STREQUAL "Debug")

  target_compile_options(${target_name} PRIVATE -Ofast -DNDEBUG)
  target_compile_options(${target_name} PRIVATE -fvisibility=hidden -fvisibility-inlines-hidden)
  target_compile_options(${target_name} PRIVATE -ffunction-sections -fdata-sections)

  target_link_options(${target_name} PRIVATE -Wl,--gc-sections)
  target_link_options(${target_name} PRIVATE -Wl,--exclude-libs,ALL)
  target_link_options(${target_name} PRIVATE -flto)

  # endif ()
endfunction()

# target_compile_features(${RNLLAMA_LIBRARY_NAME} PRIVATE exceptions)
build_library("rnllama") # Default target, built for every ABI.

# arm64-v8a devices may additionally support fp16/dotprod; build a second
# variant so the Java loader can pick it at runtime after probing the CPU.
# "${ANDROID_ABI}" is quoted: an empty or unset value would otherwise make
# the if() malformed or be re-dereferenced as a variable name.
if ("${ANDROID_ABI}" STREQUAL "arm64-v8a")
  build_library("rnllama_v8fp16_va")
endif ()
42 changes: 41 additions & 1 deletion android/src/main/java/com/rnllama/LlamaContext.java
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,30 @@ public void release() {
}

static {
if (LlamaContext.isArm64V8a() == true || LlamaContext.isX86_64() == true) {
Log.d(NAME, "Primary ABI: " + Build.SUPPORTED_ABIS[0]);
if (LlamaContext.isArm64V8a()) {
boolean loadV8fp16 = false;
if (LlamaContext.isArm64V8a()) {
// ARMv8.2a needs runtime detection support
String cpuInfo = LlamaContext.cpuInfo();
if (cpuInfo != null) {
Log.d(NAME, "CPU info: " + cpuInfo);
if (cpuInfo.contains("fphp")) {
Log.d(NAME, "CPU supports fp16 arithmetic");
loadV8fp16 = true;
}
}
}

if (loadV8fp16) {
Log.d(NAME, "Loading librnllama_v8fp16_va.so");
System.loadLibrary("rnllama_v8fp16_va");
} else {
Log.d(NAME, "Loading librnllama.so");
System.loadLibrary("rnllama");
}
} else if (LlamaContext.isX86_64()) {
Log.d(NAME, "Loading librnllama.so");
System.loadLibrary("rnllama");
}
}
Expand All @@ -224,6 +247,23 @@ private static boolean isX86_64() {
return Build.SUPPORTED_ABIS[0].equals("x86_64");
}

/**
 * Reads {@code /proc/cpuinfo} so callers can probe for optional CPU features
 * (e.g. the "fphp" half-precision flag checked by the static initializer).
 *
 * @return the file contents with lines separated by '\n', or {@code null}
 *         when the file cannot be read.
 */
private static String cpuInfo() {
  File file = new File("/proc/cpuinfo");
  StringBuilder stringBuilder = new StringBuilder();
  // try-with-resources: the reader is closed even when readLine() throws
  // (the previous explicit close() leaked the stream on the error path).
  try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
    String line;
    while ((line = bufferedReader.readLine()) != null) {
      // readLine() strips the terminator; re-add it so tokens from adjacent
      // lines cannot merge and produce false substring matches.
      stringBuilder.append(line).append('\n');
    }
    return stringBuilder.toString();
  } catch (IOException e) {
    Log.w(NAME, "Couldn't read /proc/cpuinfo", e);
    return null;
  }
}

protected static native long initContext(
String model,
boolean embedding,
Expand Down
Loading