From 92994abf68524171d9df4da702d23f6da8ad5a05 Mon Sep 17 00:00:00 2001
From: Pepijn de Vos
Date: Thu, 16 Jan 2025 18:16:22 +0100
Subject: [PATCH] Build with curl support

This allows the standalone container to download models

Signed-off-by: Pepijn de Vos
---
 container-images/scripts/build_llama_and_whisper.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/container-images/scripts/build_llama_and_whisper.sh b/container-images/scripts/build_llama_and_whisper.sh
index 5721a822..8000f718 100644
--- a/container-images/scripts/build_llama_and_whisper.sh
+++ b/container-images/scripts/build_llama_and_whisper.sh
@@ -3,7 +3,7 @@ dnf_install() {
   local rpm_list=("python3" "python3-pip" "python3-argcomplete" \
                   "python3-dnf-plugin-versionlock" "gcc-c++" "cmake" "vim" \
-                  "procps-ng" "git" "dnf-plugins-core")
+                  "procps-ng" "git" "dnf-plugins-core" "libcurl-devel")
   local vulkan_rpms=("vulkan-headers" "vulkan-loader-devel" "vulkan-tools" \
                      "spirv-tools" "glslc" "glslang")
@@ -55,7 +55,7 @@ set_install_prefix() {
 }

 configure_common_flags() {
-  common_flags=("-DGGML_NATIVE=OFF")
+  common_flags=("-DGGML_NATIVE=OFF" "-DLLAMA_CURL=ON")
   case "$containerfile" in
   rocm)
     common_flags+=("-DGGML_HIPBLAS=1")
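
A minimal usage sketch of what the curl-enabled build makes possible: with libcurl-devel installed and llama.cpp configured with -DLLAMA_CURL=ON, the bundled tools can fetch a GGUF model over HTTP at startup instead of requiring a file downloaded in advance. The image name and model URL below are placeholders, and the exact flags depend on the llama.cpp revision shipped in the container.

# Hypothetical invocation: the standalone container pulls the model itself.
# The image tag and URL are illustrative only, not taken from this patch.
podman run --rm -p 8080:8080 quay.io/ramalama/ramalama \
  llama-server --host 0.0.0.0 --port 8080 \
    --model-url https://example.com/models/example-q4_k_m.gguf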