3 files changed, +33 -2 lines changed

First file (CMake configuration):

@@ -13,13 +13,22 @@ elseif (UNIX)
    set(CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE)
endif()

+# Stable llama.cpp commit for bindings
+set(LLAMACPP_COMMIT "84d547554123a62e9ac77107cb20e4f6cc503af4")
+
+set(LLAMACPP_REPO "https://github.com/ggerganov/llama.cpp.git" CACHE STRING "llama.cpp repository URL")
+message(STATUS "Using llama.cpp repo ${LLAMACPP_REPO}")
+
+set(LLAMACPP_TAG ${LLAMACPP_COMMIT} CACHE STRING "llama.cpp repository tag")
+message(STATUS "Using llama.cpp tag ${LLAMACPP_TAG}")
+
# Fetch llama.cpp latest
# FIXME: Maybe use a vendored llama.cpp build for stability
include(FetchContent)
FetchContent_Declare(
    llama
-   GIT_REPOSITORY https://github.com/ggerganov/llama.cpp.git
-   GIT_TAG 80a02aa8588ef167d616f76f1781b104c245ace0
+   GIT_REPOSITORY ${LLAMACPP_REPO}
+   GIT_TAG ${LLAMACPP_TAG}
)

# Required for proper builds
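
Because LLAMACPP_REPO and LLAMACPP_TAG are declared as CACHE variables, they can be overridden at configure time without editing the CMake file. A minimal sketch of such an override, assuming a build directory named build and a fork URL that is purely illustrative:

    # Hypothetical override: fetch llama.cpp from a fork, tracking its default branch
    cmake -B build \
        -DLLAMACPP_REPO=https://github.com/your-fork/llama.cpp.git \
        -DLLAMACPP_TAG=master

If neither cache entry is set, the defaults above apply: the upstream ggerganov/llama.cpp repository pinned to LLAMACPP_COMMIT.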

Second file (PowerShell build script):

@@ -13,6 +13,17 @@ $jobs = if ($env:MAX_JOBS) {

$extraCmakeArgs = @()

+# llama.cpp dev options
+if ($env:LLAMACPP_REPO) {
+    $extraCmakeArgs += "-DLLAMACPP_REPO=$env:LLAMACPP_REPO"
+    Write-Host "Using custom llama.cpp repo: $env:LLAMACPP_REPO"
+}
+
+if ($env:LLAMACPP_TAG) {
+    $extraCmakeArgs += "-DLLAMACPP_TAG=$env:LLAMACPP_TAG"
+    Write-Host "Using custom llama.cpp tag: $env:LLAMACPP_TAG"
+}
+
if ($env:GGML_CUDA -eq 1) {
    Write-Host "CUDA enabled, including in build"

Third file (bash build script):

# Initialize as empty array
EXTRA_CMAKE_ARGS=()

+# llama.cpp dev options
+if [ -n "$LLAMACPP_REPO" ]; then
+    EXTRA_CMAKE_ARGS+=("-DLLAMACPP_REPO=$LLAMACPP_REPO")
+    echo "Using custom llama.cpp repo: ${LLAMACPP_REPO}"
+fi
+
+if [ -n "$LLAMACPP_TAG" ]; then
+    EXTRA_CMAKE_ARGS+=("-DLLAMACPP_TAG=$LLAMACPP_TAG")
+    echo "Using custom llama.cpp tag: ${LLAMACPP_TAG}"
+fi
+
if [ "$GGML_CUDA" = "1" ]; then
    EXTRA_CMAKE_ARGS+=("-DGGML_CUDA=ON")
    echo "CUDA enabled, including in build"