Skip to content

Commit

Permalink
Check if terminal is compatible with emojis before using them
Browse files Browse the repository at this point in the history
Just in case the terminal doesn't support emojis.

Signed-off-by: Eric Curtin <ecurtin@redhat.com>
  • Loading branch information
ericcurtin committed Feb 24, 2025
1 parent 00839ee commit 9536b24
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions ramalama/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
get_gpu,
run_cmd,
)
from ramalama.console import EMOJI
from ramalama.gguf_parser import GGUFInfoParser
from ramalama.kube import Kube
from ramalama.model_inspect import GGUFModelInfo, ModelInfoBase
Expand Down Expand Up @@ -213,8 +214,7 @@ def setup_container(self, args):
conman_args.extend(container_labels)

# if args.subcommand is run add LLAMA_PROMPT_PREFIX to the container
if hasattr(args, "subcommand") and args.subcommand == "run":
# if podman
if EMOJI and hasattr(args, "subcommand") and args.subcommand == "run":
if os.path.basename(args.engine) == "podman":
conman_args += ["--env", "LLAMA_PROMPT_PREFIX=🦭 > "]
elif os.path.basename(args.engine) == "docker":
Expand Down Expand Up @@ -392,7 +392,7 @@ def build_exec_args_run(self, args, model_path, prompt):
exec_model_path = model_path if not args.container else MNT_FILE

# override prompt if not set to the local call
if "LLAMA_PROMPT_PREFIX" not in os.environ:
if EMOJI and "LLAMA_PROMPT_PREFIX" not in os.environ:
os.environ["LLAMA_PROMPT_PREFIX"] = "🦙 > "

exec_args = ["llama-run", "-c", f"{args.context}", "--temp", f"{args.temp}"]
Expand Down

0 comments on commit 9536b24

Please sign in to comment.