We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 1b376c6 · commit b84d76a · Copy full SHA for b84d76a
llama_cpp/llama_chat_format.py
@@ -565,7 +565,7 @@ def format_chatml(
565
_messages = _map_roles(messages, _roles)
566
_messages.append((_roles["assistant"], None))
567
_prompt = _format_chatml(system_message, _messages, _sep)
568
- return ChatFormatterResponse(prompt=_prompt)
+ return ChatFormatterResponse(prompt=_prompt, stop=_sep)
569
570
571
@register_chat_completion_handler("functionary")
0 commit comments