Commit 8c3aa78

Merge branch 'main' of github.com:abetlen/llama_cpp_python into main

2 parents: 19e02f1 + d68fc07
1 file changed: 32 additions, 0 deletions

llama_cpp/llama_chat_format.py (+32 −0)
@@ -471,6 +471,23 @@ def format_baichuan2(
     _prompt = _format_no_colon_single(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+
+@register_chat_format("baichuan")
+def format_baichuan(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _system_template = "{system_message}"
+    _roles = dict(user="<reserved_102>", assistant="<reserved_103>")
+    _sep = ""
+    system_message = _get_system_message(messages)
+    system_message = _system_template.format(system_message=system_message)
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_no_colon_single(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
+
 @register_chat_format("openbuddy")
 def format_openbuddy(
     messages: List[llama_types.ChatCompletionRequestMessage],
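
The hunk above registers a "baichuan" chat format that maps user and assistant turns onto Baichuan's <reserved_102>/<reserved_103> role tokens. A minimal sketch of exercising it through the llama-cpp-python high-level API; the model path here is a placeholder, and the snippet assumes a Baichuan chat GGUF file is available locally:

from llama_cpp import Llama

# Placeholder path: point this at an actual Baichuan chat model file.
llm = Llama(
    model_path="./models/baichuan2-7b-chat.Q4_K_M.gguf",
    chat_format="baichuan",  # dispatches to the format_baichuan formatter above
)

response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ],
)
print(response["choices"][0]["message"]["content"])
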
@@ -604,6 +621,21 @@ def format_mistrallite(
     _prompt = _format_no_colon_single(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+@register_chat_format("zephyr")
+def format_zephyr(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    system_template = """<|system|>
+{system_message}"""
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _roles = dict(user="<|user|>\n", assistant="<|assistant|>\n")
+    _sep = "</s>"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
 @register_chat_format("chatml")
 def format_chatml(
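
Unlike format_baichuan, the new format_zephyr also returns the separator as a stop token, so generation halts cleanly at </s>. The formatter can be called directly to inspect the prompt it renders, without loading a model; a short sketch, assuming ChatFormatterResponse exposes the prompt and stop fields shown in the diff:

from llama_cpp.llama_chat_format import format_zephyr

messages = [
    {"role": "system", "content": "You are a friendly chatbot."},
    {"role": "user", "content": "What is the capital of France?"},
]

result = format_zephyr(messages=messages)
print(result.prompt)  # rendered <|system|>/<|user|>/<|assistant|> prompt text
print(result.stop)    # "</s>", returned so generation stops at the separator
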
