Make CI build all images
To ensure they all continue to build and remain of reasonable size.

Signed-off-by: Eric Curtin <ecurtin@redhat.com>
ericcurtin committed Feb 22, 2025
1 parent c5054f1 commit bfe91e3
Showing 11 changed files with 85 additions and 72 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/ci-images.yml
@@ -7,11 +7,13 @@ on:
pull_request:
paths:
- "**/container-images/**"
- ".github/workflows/ci-images.yml"
push:
branches:
- main
paths:
- "**/container-images/**"
- ".github/workflows/ci-images.yml"

jobs:
build:
@@ -69,4 +71,4 @@ jobs:
- name: Build Images
run: |
make build-rm
./container_build.sh -r -c build
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -140,7 +140,7 @@ jobs:
- name: install golang
shell: bash
run: |
brew install go bats bash jq llama.cpp
brew install go bats bash jq llama.cpp shellcheck
make install-requirements
- name: Run a one-line script
shell: bash
1 change: 1 addition & 0 deletions Makefile
@@ -114,6 +114,7 @@ docs:
.PHONY: lint
lint:
flake8 *.py ramalama/*.py
shellcheck *.sh */*.sh container-images/*/*.sh

.PHONY: check-format
check-format:
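For context, this is the kind of warning the new shellcheck target in make lint catches; a minimal sketch mirroring the quoting fixes elsewhere in this commit, not repository code as-is:

# shellcheck flags the unquoted expansion with SC2086 ("Double quote to prevent globbing and word splitting"):
if [ -z ${HOME} ]; then
    export HOME=/home/llama-user
fi
# Quoting the expansion silences the warning and keeps the test well-formed for values containing spaces:
if [ -z "${HOME}" ]; then
    export HOME=/home/llama-user
fi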
17 changes: 9 additions & 8 deletions container-images/intel-gpu/entrypoint.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

if [ -z ${HOME} ]
if [ -z "${HOME}" ]
then
export HOME=/home/llama-user
fi
@@ -18,8 +18,8 @@ then
then
echo "${USER_NAME:-llama-user}:x:$(id -u):0:${USER_NAME:-llama-user} user:${HOME}:/bin/bash" >> /etc/passwd
echo "${USER_NAME:-llama-user}:x:$(id -u):" >> /etc/group
render_group="$(cat /etc/group | grep 'render:x')"
video_group="$(cat /etc/group | grep 'video:x')"
render_group="$(grep 'render:x' /etc/group)"
video_group="$(grep 'video:x' /etc/group)"
render_group_new="${render_group}${USER_NAME:-llama-user}"
video_group_new="${video_group}${USER_NAME:-llama-user}"
sed "s|${render_group}|${render_group_new}|g" /etc/group > /tmp/group
@@ -30,17 +30,18 @@ then
fi

# Configure Z shell
if [ ! -f ${HOME}/.zshrc ]
if [ ! -f "${HOME}/.zshrc" ]
then
(echo "source /opt/intel/oneapi/setvars.sh") > ${HOME}/.zshrc
(echo ". /opt/intel/oneapi/setvars.sh") > "${HOME}/.zshrc"
fi

# Configure Bash shell
if [ ! -f ${HOME}/.bashrc ]
if [ ! -f "${HOME}/.bashrc" ]
then
(echo "source /opt/intel/oneapi/setvars.sh") > ${HOME}/.bashrc
(echo ". /opt/intel/oneapi/setvars.sh") > "${HOME}/.bashrc"
fi

source /opt/intel/oneapi/setvars.sh
# shellcheck disable=SC1091
. /opt/intel/oneapi/setvars.sh

exec "$@"
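For illustration only, not part of the diff, this is the failure mode the added quotes in the -z and -f tests guard against:

HOME='/home/llama user'
if [ -z ${HOME} ]; then :; fi    # word-splits into [ -z /home/llama user ] -> "[: too many arguments"
if [ -z "${HOME}" ]; then :; fi  # stays [ -z "/home/llama user" ] and evaluates as intended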
2 changes: 1 addition & 1 deletion container-images/pragmatic/Containerfile
@@ -3,6 +3,6 @@ FROM $IMAGE

ARG GPU=cpu
ARG CONTENT=rag
COPY ./build_pragmatic.sh /tmp/
COPY ../pragmatic/build_pragmatic.sh /tmp/
RUN echo $CONTENT
RUN sh /tmp/build_pragmatic.sh ${GPU} ${CONTENT}
64 changes: 28 additions & 36 deletions container-images/pragmatic/build_pragmatic.sh
@@ -1,51 +1,43 @@
#!/bin/bash

set -exu -o pipefail

export PYTHON_VERSION="python3 -m"
if [ "$(python3 --version)" \< "Python 3.11" ]; then
dnf install -y python3.11 python3.11-pip git
export PYTHON_VERSION="/usr/bin/python3.11 -m"
else
dnf install -y python3-pip git
fi

cuda="cu124"

rocm="rocm6.2"

cpu="cpu"

vulkan=$cpu

asahi=$cpu

install_pytorch() {
version=${!1}
echo ${PYTHON_VERSION} pip install torch==${version} -f https://download.pytorch.org/whl/torch_stable.html
${PYTHON_VERSION} pip install torch==${version} -f https://download.pytorch.org/whl/torch_stable.html
}

clone_and_build_pragmatic() {
git clone https://github.com/redhat-et/PRAGmatic
cd PRAGmatic
git submodule update --init --recursive

local url="https://download.pytorch.org/whl"
if [ "$2" == "docling" ]; then
${PYTHON_VERSION} pip install docling --extra-index-url https://download.pytorch.org/whl/$1
${PYTHON_VERSION} pip install docling --extra-index-url "$url/$1"
${PYTHON_VERSION} pip install -r requirements.txt --prefix=/usr
else
${PYTHON_VERSION} pip install torch --extra-index-url https://download.pytorch.org/whl/$1
${PYTHON_VERSION} pip install torch --extra-index-url "$url/$1"
local tmpfile
tmpfile=$(mktemp)
grep -v docling requirements.txt > ${tmpfile}
${PYTHON_VERSION} pip install -r /tmp/nodocling.txt --prefix=/usr
rm ${tmpfile}
grep -v docling requirements.txt > "${tmpfile}"
${PYTHON_VERSION} pip install -r "${tmpfile}" --prefix=/usr
rm "${tmpfile}"
fi

${PYTHON_VERSION} pip install --prefix=/usr .
cd ..
}

clone_and_build_pragmatic ${!1} $2
rm -rf /var/cache/*dnf* /opt/rocm-*/lib/*/library/*gfx9* /root/.cache /root/buildinfo PRAGmatic
dnf -y clean all
ldconfig
main() {
set -exu -o pipefail

export PYTHON_VERSION="python3 -m"
if [ "$(python3 --version)" \< "Python 3.11" ]; then
dnf install -y python3.11 python3.11-pip git
export PYTHON_VERSION="/usr/bin/python3.11 -m"
else
dnf install -y python3-pip git
fi

clone_and_build_pragmatic "$1" "$2"
rm -rf /var/cache/*dnf* /opt/rocm-*/lib/*/library/*gfx9* /root/.cache \
/root/buildinfo PRAGmatic
dnf -y clean all
ldconfig
}

main "$@"

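A side note on the ${!1} expansion visible in install_pytorch above: that is bash indirect expansion, resolving the value of the variable whose name is held in $1. A tiny illustration; the variable "flavor" is made up:

cuda="cu124"
flavor="cuda"
echo "${!flavor}"   # prints "cu124" - ${!var} expands to the value of the variable named by $var
set -- cuda         # make $1 hold the string "cuda"
echo "${!1}"        # also prints "cu124", which is how install_pytorch picks its torch version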
2 changes: 1 addition & 1 deletion container-images/scripts/build_llama_and_whisper.sh
@@ -10,7 +10,7 @@ dnf_install_intel_gpu() {
"oneapi-level-zero" "oneapi-level-zero-devel" "intel-compute-runtime")
dnf install -y "${rpm_list[@]}" "${intel_rpms[@]}"

# shellcheck source=/dev/null
# shellcheck disable=SC1091
. /opt/intel/oneapi/setvars.sh
}

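Both shellcheck directives quiet the same complaint about a file that only exists inside the container at build/run time; a hedged sketch of the two spellings, using the path from this repository:

# Older style: point shellcheck at a stand-in so it stops trying to follow the file.
# shellcheck source=/dev/null
. /opt/intel/oneapi/setvars.sh

# Style standardized on in this commit: disable SC1091 ("Not following: ...") explicitly.
# shellcheck disable=SC1091
. /opt/intel/oneapi/setvars.sh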
12 changes: 6 additions & 6 deletions container-images/scripts/llama-server.sh
@@ -8,13 +8,13 @@ if [ -n "${MODEL_CHAT_FORMAT}" ]; then
CHAT_FORMAT="--chat_template ${MODEL_CHAT_FORMAT}"
fi

if [ -n ${MODEL_PATH} ]; then
if [ -n "${MODEL_PATH}" ]; then
llama-server \
--model ${MODEL_PATH} \
--host ${HOST:=0.0.0.0} \
--port ${PORT:=8001} \
--gpu_layers ${GPU_LAYERS:=0} \
${CHAT_FORMAT}
--model "${MODEL_PATH}" \
--host "${HOST:=0.0.0.0}" \
--port "${PORT:=8001}" \
--gpu_layers "${GPU_LAYERS:=0}" \
"${CHAT_FORMAT}"
exit 0
fi

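Two shell behaviours are at play in this hunk; an illustrative snippet, not part of the commit:

unset MODEL_PATH
[ -n ${MODEL_PATH} ] && echo runs     # expands to [ -n ]: a single non-empty argument, so always true
[ -n "${MODEL_PATH}" ] && echo runs   # expands to [ -n "" ]: correctly false when unset or empty

unset PORT
echo "${PORT:=8001}"   # prints 8001 and assigns PORT=8001, so later references reuse the default
echo "${PORT}"         # prints 8001

So the old unquoted -n test passed even with no model configured, while the := expansions both print and set their defaults.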
9 changes: 5 additions & 4 deletions container-images/scripts/whisper-run.sh
@@ -1,11 +1,12 @@
#!/bin/bash
if [ -n ${MODEL_PATH} ]; then

if [ -n "${MODEL_PATH}" ]; then
whisper-server \
-tr \
--model ${MODEL_PATH} \
--model "${MODEL_PATH}" \
--convert \
--host ${HOST:=0.0.0.0} \
--port ${PORT:=8001}
--host "${HOST:=0.0.0.0}" \
--port "${PORT:=8001}"
exit 0
fi

40 changes: 28 additions & 12 deletions container_build.sh
@@ -39,13 +39,13 @@ rm_container_image() {

add_entrypoint() {
containerfile=$(mktemp)
cat > ${containerfile} <<EOF
cat > "${containerfile}" <<EOF
FROM $2
ENTRYPOINT [ "/usr/bin/$3.sh" ]
EOF
echo $1 build --no-cache -t "$2-$3" -f ${containerfile} .
eval $1 build --no-cache -t "$2-$3" -f ${containerfile} .
rm ${containerfile}
echo "$1 build --no-cache -t $2-$3 -f ${containerfile} ."
eval "$1 build --no-cache -t $2-$3 -f ${containerfile} ."
rm "${containerfile}"
}

add_entrypoints() {
@@ -91,14 +91,16 @@ build() {
}

determine_platform() {
local platform
case $conman_bin in
podman)
local platform="$(podman info --format '{{ .Version.OsArch }}' 2>/dev/null)"
platform="$(podman info --format '{{ .Version.OsArch }}' 2>/dev/null)"
;;
docker)
local platform="$(docker info --format '{{ .ClientInfo.Os }}/{{ .ClientInfo.Arch }}' 2>/dev/null)"
platform="$(docker info --format '{{ .ClientInfo.Os }}/{{ .ClientInfo.Arch }}' 2>/dev/null)"
;;
esac

if [ "$(uname -m)" = "aarch64" ] || { [ "$(uname -s)" = "Darwin" ] && [ "$(uname -m)" = "arm64" ]; }; then
platform="linux/arm64"
fi
@@ -113,6 +115,10 @@ parse_arguments() {
print_usage
exit 0
;;
-c)
ci="true"
shift
;;
-d)
option="$1"
shift
@@ -136,18 +142,27 @@ parse_arguments() {
process_all_targets() {
local command="$1"
local option="$2"

# build ramalama container image first, as other images inherit from it
build "container-images/ramalama" "$command" "$option"
for i in container-images/*; do
if [ "$i" == "container-images/scripts" ]; then
# skip these directories
if [[ "$i" =~ ^container-images/(scripts|ramalama|pragmatic)$ ]]; then
continue
fi

# todo, trim and get building in CI again
if $ci && [[ "$i" =~ ^container-images/rocm$ ]]; then
continue
fi

# skip images that don't make sense for multi-arch builds
if [ "$command" = "multi-arch" ]; then
case "${i//container-images\//}" in
rocm|intel-gpu)
continue
;;
esac
if [[ "$i" =~ ^container-images/(rocm|intel-gpu)$ ]]; then
continue
fi
fi

build "$i" "$command" "$option"
done
}
@@ -183,6 +198,7 @@ main() {
local command=""
local option=""
local rm_after_build="false"
local ci="false"
parse_arguments "$@"
if [ -z "$command" ]; then
echo "Error: command is required (build or push)"
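Tying the new flag back to the workflow change at the top of this diff, CI now invokes the script as:

# -c takes the new ci="true" branch, which makes process_all_targets skip the rocm image;
# -r presumably corresponds to rm_after_build, cleaning images up as they are built.
./container_build.sh -r -c build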
4 changes: 2 additions & 2 deletions install.sh
@@ -124,8 +124,8 @@ setup_ramalama() {
download "$url" "$to_file"
local ramalama_bin="${1}/${binfile}"
local sharedirs=("/opt/homebrew/share" "/usr/local/share" "/usr/share")
local syspath=$(get_installation_dir)

local syspath
syspath=$(get_installation_dir)
$sudo install -m755 -d "$syspath"
syspath="$syspath/ramalama"
$sudo install -m755 -d "$syspath"
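The install.sh change is the usual fix for shellcheck SC2155 ("Declare and assign separately to avoid masking return values"): combining local with a command substitution reports the exit status of local itself, not of the substituted command. A minimal demonstration with made-up function names:

masked()  { local dir=$(false); echo "$?"; }       # prints 0 - the failure of false is hidden by local
visible() { local dir; dir=$(false); echo "$?"; }  # prints 1 - the assignment's status is preserved
masked
visible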
