From 3816e930727a7f5bffff910a1a15214dc0d56927 Mon Sep 17 00:00:00 2001
From: Wu Zhenyu
Date: Sun, 1 Oct 2023 16:52:55 +0800
Subject: [PATCH] Disable some flags on Apple x86_64

@abetlen reports that some flags do not work on Apple x86_64.
Disable them by default.

https://github.com/abetlen/llama-cpp-python/blob/b4939c2d99cb3b0e49bcaeee8731050deed9ccfe/CMakeLists.txt#L9-L16
---
 CMakeLists.txt | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index d5acf8540d1c8..c585b44d3f3e8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -58,12 +58,19 @@ option(LLAMA_SANITIZE_ADDRESS "llama: enable address sanitizer"
 option(LLAMA_SANITIZE_UNDEFINED "llama: enable undefined sanitizer" OFF)
 
 # instruction set specific
-option(LLAMA_AVX "llama: enable AVX" ON)
-option(LLAMA_AVX2 "llama: enable AVX2" ON)
+if (APPLE AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm64")
+    # Need to disable these llama.cpp flags on Apple x86_64,
+    # otherwise users may encounter invalid instruction errors
+    set(NOT_APPLE_X86 OFF)
+else()
+    set(NOT_APPLE_X86 ON)
+endif()
+option(LLAMA_AVX "llama: enable AVX" ${NOT_APPLE_X86})
+option(LLAMA_AVX2 "llama: enable AVX2" ${NOT_APPLE_X86})
 option(LLAMA_AVX512 "llama: enable AVX512" OFF)
 option(LLAMA_AVX512_VBMI "llama: enable AVX512-VBMI" OFF)
 option(LLAMA_AVX512_VNNI "llama: enable AVX512-VNNI" OFF)
-option(LLAMA_FMA "llama: enable FMA" ON)
+option(LLAMA_FMA "llama: enable FMA" ${NOT_APPLE_X86})
 # in MSVC F16C is implied with AVX2/AVX512
 if (NOT MSVC)
 option(LLAMA_F16C "llama: enable F16C" ON)