@@ -2165,7 +2165,7 @@ def create_completion(stop):
 
 
 class Llava15ChatHandler:
-    DEFAULT_SYSTEM_MESSAGE = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions."
+    DEFAULT_SYSTEM_MESSAGE: Optional[str] = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions."
 
     CHAT_FORMAT = (
         "{% for message in messages %}"
@@ -2288,7 +2288,7 @@ def __call__(
         assert self.clip_ctx is not None
 
         system_prompt = _get_system_message(messages)
-        if system_prompt == "":
+        if system_prompt == "" and self.DEFAULT_SYSTEM_MESSAGE is not None:
             messages = [llama_types.ChatCompletionRequestSystemMessage(role="system", content=self.DEFAULT_SYSTEM_MESSAGE)] + messages
 
         image_urls = self.get_image_urls(messages)
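
Taken together, the two hunks above make the default system prompt opt-out: `DEFAULT_SYSTEM_MESSAGE` is now declared `Optional[str]`, and `__call__` only prepends it when the caller supplied no system message *and* the handler actually defines one. A minimal standalone sketch of that behaviour (the `Handler` classes and the `get_system_message` helper are illustrative stand-ins, not the library's own names):

```python
from typing import List, Optional


def get_system_message(messages: List[dict]) -> str:
    """Simplified stand-in for the module's _get_system_message helper."""
    for message in messages:
        if message["role"] == "system":
            return message["content"] or ""
    return ""


class Handler:
    # Now typed Optional[str]; subclasses may set it to None to opt out.
    DEFAULT_SYSTEM_MESSAGE: Optional[str] = "You are a helpful assistant."

    def prepare(self, messages: List[dict]) -> List[dict]:
        system_prompt = get_system_message(messages)
        # Inject the default only when no system message was supplied
        # *and* this handler actually defines one.
        if system_prompt == "" and self.DEFAULT_SYSTEM_MESSAGE is not None:
            return [{"role": "system", "content": self.DEFAULT_SYSTEM_MESSAGE}] + messages
        return messages


class NoSystemPromptHandler(Handler):
    DEFAULT_SYSTEM_MESSAGE = None  # e.g. a prompt format that carries no system turn


assert Handler().prepare([{"role": "user", "content": "hi"}])[0]["role"] == "system"
assert NoSystemPromptHandler().prepare([{"role": "user", "content": "hi"}])[0]["role"] == "user"
```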
@@ -2771,6 +2771,66 @@ class NanoLlavaChatHandler(Llava15ChatHandler):
         "{% endif %}"
     )
 
+class Llama3VisionAlpha(Llava15ChatHandler):
+    # question = "<image>" + q
+
+    # prompt = f"<|start_header_id|>user<|end_header_id|>\n\n{question}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
+    DEFAULT_SYSTEM_MESSAGE = None
+
+    CHAT_FORMAT = (
+        "{% for message in messages %}"
+
+        "<|start_header_id|>"
+
+        "{% if message.role == 'user' %}"
+
+        "user<|end_header_id|>\n\n"
+
+        "{% if message.content is iterable %}"
+
+        # <image>
+        "{% for content in message.content %}"
+        "{% if content.type == 'image_url' %}"
+        "{% if content.image_url is string %}"
+        "{{ content.image_url }}"
+        "{% endif %}"
+        "{% if content.image_url is mapping %}"
+        "{{ content.image_url.url }}"
+        "{% endif %}"
+        "{% endif %}"
+        "{% endfor %}"
+
+        # Question:
+        "{% for content in message.content %}"
+        "{% if content.type == 'text' %}"
+        "{{ content.text }}"
+        "{% endif %}"
+        "{% endfor %}"
+
+        "{% endif %}"
+
+        # Question:
+        "{% if message.content is string %}"
+        "{{ message.content }}"
+        "{% endif %}"
+
+        "{% endif %}"
+
+        # Answer:
+        "{% if message.role == 'assistant' %}"
+        "assistant<|end_header_id|>\n\n"
+        "{{ message.content }}"
+        "{% endif %}"
+
+        "<|eot_id|>"
+
+        "{% endfor %}"
+
+        # Generation prompt
+        "{% if add_generation_prompt %}"
+        "<|start_header_id|>assistant<|end_header_id|>\n\n"
+        "{% endif %}"
+    )
 
 @register_chat_completion_handler("chatml-function-calling")
 def chatml_function_calling(
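
The new `Llama3VisionAlpha` handler reuses the LLaVA 1.5 image machinery but emits Llama 3 style `<|start_header_id|>`/`<|eot_id|>` turns, and `DEFAULT_SYSTEM_MESSAGE = None` means no system message is injected. A rough usage sketch, assuming the handler takes the same `clip_model_path` constructor argument as the other `Llava15ChatHandler` subclasses (file paths and the example image URL are placeholders):

```python
from llama_cpp import Llama
from llama_cpp.llama_chat_format import Llama3VisionAlpha

# Placeholder paths: a llama-3-vision-alpha text model plus its CLIP/mmproj projector.
chat_handler = Llama3VisionAlpha(clip_model_path="./llama-3-vision-alpha-mmproj-f16.gguf")
llm = Llama(
    model_path="./llama-3-vision-alpha-Q4_K_M.gguf",
    chat_handler=chat_handler,
    n_ctx=4096,  # leave room for the image embedding tokens
)

response = llm.create_chat_completion(
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": "https://example.com/photo.png"}},
                {"type": "text", "text": "What is shown in this image?"},
            ],
        }
    ],
)
print(response["choices"][0]["message"]["content"])
```

Rendered through the template above, that request becomes roughly `<|start_header_id|>user<|end_header_id|>\n\n<image url><question><|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n`, matching the prompt shape in the class's leading comment; the image URL in the rendered text marks where the shared `Llava15ChatHandler` logic splices in the image embedding.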