
Commit 6f0b97c

Committed Mar 18, 2023
initial release
1 parent 0470434 commit 6f0b97c


52 files changed (+15197 −876 lines)
 

CMakeLists.txt (+128 lines, new file)
@@ -0,0 +1,128 @@
cmake_minimum_required(VERSION 3.8)
project("alpaca.cpp")

set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED true)
set(CMAKE_C_STANDARD 11)

if (NOT XCODE AND NOT MSVC AND NOT CMAKE_BUILD_TYPE)
    set(CMAKE_BUILD_TYPE Release CACHE STRING "Build type" FORCE)
    set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo")
endif()

option(LLAMA_ALL_WARNINGS           "llama: enable all compiler warnings"                   ON)
option(LLAMA_ALL_WARNINGS_3RD_PARTY "llama: enable all compiler warnings in 3rd party libs" OFF)

option(LLAMA_SANITIZE_THREAD    "llama: enable thread sanitizer"    OFF)
option(LLAMA_SANITIZE_ADDRESS   "llama: enable address sanitizer"   OFF)
option(LLAMA_SANITIZE_UNDEFINED "llama: enable undefined sanitizer" OFF)

if (APPLE)
    option(LLAMA_NO_ACCELERATE "llama: disable Accelerate framework" OFF)
    option(LLAMA_NO_AVX        "llama: disable AVX"                  OFF)
    option(LLAMA_NO_AVX2       "llama: disable AVX2"                 OFF)
    option(LLAMA_NO_FMA        "llama: disable FMA"                  OFF)
endif()

if (NOT MSVC)
    if (LLAMA_SANITIZE_THREAD)
        set(CMAKE_C_FLAGS   "${CMAKE_C_FLAGS}   -fsanitize=thread")
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=thread")
    endif()

    if (LLAMA_SANITIZE_ADDRESS)
        set(CMAKE_C_FLAGS   "${CMAKE_C_FLAGS}   -fsanitize=address -fno-omit-frame-pointer")
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -fno-omit-frame-pointer")
    endif()

    if (LLAMA_SANITIZE_UNDEFINED)
        set(CMAKE_C_FLAGS   "${CMAKE_C_FLAGS}   -fsanitize=undefined")
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined")
    endif()
endif()

if (APPLE AND NOT LLAMA_NO_ACCELERATE)
    find_library(ACCELERATE_FRAMEWORK Accelerate)
    if (ACCELERATE_FRAMEWORK)
        message(STATUS "Accelerate framework found")

        set(LLAMA_EXTRA_LIBS  ${LLAMA_EXTRA_LIBS}  ${ACCELERATE_FRAMEWORK})
        set(LLAMA_EXTRA_FLAGS ${LLAMA_EXTRA_FLAGS} -DGGML_USE_ACCELERATE)
    else()
        message(WARNING "Accelerate framework not found")
    endif()
endif()

if (LLAMA_ALL_WARNINGS)
    if (NOT MSVC)
        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} \
            -Wall                           \
            -Wextra                         \
            -Wpedantic                      \
            -Wshadow                        \
            -Wcast-qual                     \
            -Wstrict-prototypes             \
            -Wpointer-arith                 \
            -Wno-unused-function            \
        ")
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} \
            -Wall                               \
            -Wextra                             \
            -Wpedantic                          \
            -Wcast-qual                         \
        ")
    else()
        # todo : msvc
    endif()
endif()

message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")

if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm" OR ${CMAKE_SYSTEM_PROCESSOR} MATCHES "aarch64")
    message(STATUS "ARM detected")
else()
    message(STATUS "x86 detected")
    if (MSVC)
        set(CMAKE_CXX_FLAGS         "${CMAKE_CXX_FLAGS}         /arch:AVX2")
        set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /arch:AVX2")
        set(CMAKE_C_FLAGS           "${CMAKE_C_FLAGS}           /arch:AVX2")
    else()
        if(NOT LLAMA_NO_AVX)
            set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx")
        endif()
        if(NOT LLAMA_NO_AVX2)
            set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx2")
        endif()
        if(NOT LLAMA_NO_FMA)
            set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mfma")
        endif()
        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mf16c")
    endif()
endif()

# if (LLAMA_PERF)
#     set(LLAMA_EXTRA_FLAGS ${LLAMA_EXTRA_FLAGS} -DGGML_PERF)
# endif()

add_executable(chat
    chat.cpp
    utils.cpp
    utils.h)

add_executable(quantize
    quantize.cpp
    utils.cpp
    utils.h)

add_library(ggml
    ggml.c
    ggml.h)

target_compile_definitions(ggml     PUBLIC ${LLAMA_EXTRA_FLAGS})
target_compile_definitions(chat     PUBLIC ${LLAMA_EXTRA_FLAGS})
target_compile_definitions(quantize PUBLIC ${LLAMA_EXTRA_FLAGS})

target_link_libraries(ggml PRIVATE ${LLAMA_EXTRA_LIBS})
target_include_directories(ggml PUBLIC .)
target_link_libraries(quantize PRIVATE ggml)
target_link_libraries(chat PRIVATE ggml)
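
For reference, a minimal configure-and-build sketch using the options this file declares (not part of the commit; it assumes a standard out-of-source CMake workflow with CMake 3.13 or newer for the -B syntax, and the build directory name and the sanitizer toggle are illustrative):

    # Configure a Release build; LLAMA_SANITIZE_ADDRESS=ON is just an example of flipping one of the options above
    cmake -B build -DCMAKE_BUILD_TYPE=Release -DLLAMA_SANITIZE_ADDRESS=ON
    # Compile the chat and quantize executables along with the ggml library
    cmake --build build --config Release

The resulting chat and quantize binaries are linked against ggml, matching the targets declared at the end of the file.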

Inference Test.ipynb (−161 lines)

This file was deleted.
