1 parent 4cb0e35 commit e65a823
CMakeLists.txt
@@ -8,10 +8,10 @@ option(LLAMA_BUILD "Build llama.cpp shared library and install alongside python
 if (LLAMA_BUILD)
     set(BUILD_SHARED_LIBS "On")
     if (APPLE)
-        set(LLAMA_AVX "Off")
-        set(LLAMA_AVX2 "Off")
-        set(LLAMA_FMA "Off")
-        set(LLAMA_F16C "Off")
+        set(LLAMA_AVX "Off" CACHE BOOL "llama: enable AVX" FORCE)
+        set(LLAMA_AVX2 "Off" CACHE BOOL "llama: enable AVX2" FORCE)
+        set(LLAMA_FMA "Off" CACHE BOOL "llama: enable FMA" FORCE)
+        set(LLAMA_F16C "Off" CACHE BOOL "llama: enable F16C" FORCE)
         set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=native -mtune=native")
         set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native -mtune=native")
     endif()
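The reason for switching from a plain set() to a cache entry with FORCE: when llama.cpp is pulled in as a subproject, its option(LLAMA_AVX ...) calls create cache variables with their own defaults unless a cache entry already exists, so a plain normal-variable set() in the parent project may not take effect. Below is a minimal sketch, assuming a hypothetical parent CMakeLists.txt and a hypothetical vendor/llama.cpp subdirectory path, illustrating the difference:

# Minimal sketch of a parent project that disables AVX in a llama.cpp subproject.
cmake_minimum_required(VERSION 3.12)
project(demo)

# Normal variable: option(LLAMA_AVX "llama: enable AVX" ON) inside the
# subproject may ignore or overwrite this, depending on policy CMP0077.
set(LLAMA_AVX "Off")

# Cache entry with FORCE: option() leaves an existing cache value alone,
# so the Off setting reliably reaches the subproject.
set(LLAMA_AVX "Off" CACHE BOOL "llama: enable AVX" FORCE)

add_subdirectory(vendor/llama.cpp)  # hypothetical path to the llama.cpp sources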