From bc2c515025c5680d5a432468f4fa4c22514d9058 Mon Sep 17 00:00:00 2001
From: mrfakename
Date: Sat, 18 Nov 2023 18:39:49 -0800
Subject: [PATCH 1/3] Add MistralLite format

---
 llama_cpp/llama_chat_format.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index e0b98f7ec..fde58340f 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -551,6 +551,21 @@ def format_open_orca(
     return ChatFormatterResponse(prompt=_prompt, stop=stop_str)
 
 
+@register_chat_format("mistrallite")
+def format_mistrallite(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _roles = dict(user="<|prompter|>", assistant="\n<|assistant|>")
+    _sep = " "
+    system_template = """<|system|>{system_message}"""
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_no_colon_single(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
 @register_chat_format("chatml")
 def format_chatml(
     messages: List[llama_types.ChatCompletionRequestMessage],

From f696a3b4aa943cf26bd2915bbb81e5dd9cdb0efd Mon Sep 17 00:00:00 2001
From: mrfakename
Date: Sun, 19 Nov 2023 14:55:30 -0800
Subject: [PATCH 2/3] Update llama_chat_format.py

---
 llama_cpp/llama_chat_format.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index fde58340f..746f30c5b 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -582,6 +582,21 @@ def format_chatml(
     _prompt = _format_chatml(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
+@register_chat_format("openchat")
+def format_openchat(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    system_template = "{system_message}<|end_of_turn|>"
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _roles = dict(user="GPT4 Correct User: ", assistant="<|end_of_turn|>GPT4 Correct Assistant: ")
+    _sep = "<|end_of_turn|>"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=_sep)
+
 
 @register_chat_completion_handler("functionary")
 def functionary_chat_handler(

From 1cfed3b68f2ce2f212baf1c2b02ef57d14433f61 Mon Sep 17 00:00:00 2001
From: mrfakename
Date: Sun, 19 Nov 2023 16:01:29 -0800
Subject: [PATCH 3/3] Update llama_chat_format.py

---
 llama_cpp/llama_chat_format.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index 746f30c5b..7388b554a 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -523,6 +523,21 @@ def format_phind(
     _prompt = _format_add_colon_single(_system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+@register_chat_format("intel")
+def format_intel(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _roles = dict(user="### User:", assistant="### Assistant:")
+    _sep = "\n"
+    system_template = "### System:\n{system_message}"
+    system_message = _get_system_message(messages)
+    _system_message = system_template.format(system_message=system_message)
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_add_colon_single(_system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
 @register_chat_format("open-orca")
 def format_open_orca(