Commit 23177a3

Commit message: fix

1 parent 10bf749 · commit 23177a3

1 file changed: llama_cpp/llama_chat_format.py (29 additions & 32 deletions)
@@ -2073,12 +2073,12 @@ def generate_streaming(tools, functions, function_call, prompt):
                 ],
             )
             # Yield tool_call/function_call stop message
-            yield {
-                "id": "chat" + chunk["id"],
-                "object": "chat.completion.chunk",
-                "created": chunk["created"],
-                "model": chunk["model"],
-                "choices": [
+            yield llama_types.CreateChatCompletionStreamResponse(
+                id="chat" + chunk["id"],
+                object="chat.completion.chunk",
+                created=chunk["created"],
+                model=chunk["model"],
+                choices=[
                     {
                         "index": 0,
                         "finish_reason": "tool_calls" if tools is not None else "function_call",
@@ -2088,7 +2088,7 @@ def generate_streaming(tools, functions, function_call, prompt):
                         },
                     }
                 ],
-            }
+            )
         # If "auto" or no tool_choice/function_call
         elif isinstance(function_call, str) and function_call == "auto":
             tool_index = 0
@@ -2108,20 +2108,20 @@ def generate_streaming(tools, functions, function_call, prompt):
                 if function_name == "all":
                     prompt += "all\n<|content|>"
                     # Yield the first empty message for content
-                    yield {
-                        "id": "chat" + chunk_id,
-                        "model": chunk["model"],
-                        "created": chunk_created,
-                        "object": "chat.completion.chunk",
-                        "choices": [
+                    yield llama_types.CreateChatCompletionStreamResponse(
+                        id="chat" + chunk_id,
+                        model=chunk["model"],
+                        created=chunk_created,
+                        object="chat.completion.chunk",
+                        choices=[
                             {
                                 "index": 0,
                                 "delta": {"role": "assistant", "content": ""},
                                 "logprobs": None,
                                 "finish_reason": None,
                             }
                         ],
-                    }
+                    )
                 else:
                     prompt += f"{function_name}\n<|content|>"
                     grammar = get_grammar(function_name)
@@ -2221,20 +2221,20 @@ def generate_streaming(tools, functions, function_call, prompt):
                         prompt += f"{cleaned_completion_text}\n<|from|>assistant\n<|recipient|>"
                     else:
                         # Yield stop message
-                        yield {
-                            "id": "chat" + chunk_id,
-                            "model": chunk["model"],
-                            "created": chunk_created,
-                            "object": "chat.completion.chunk",
-                            "choices": [
+                        yield llama_types.CreateChatCompletionStreamResponse(
+                            id="chat" + chunk_id,
+                            model=chunk["model"],
+                            created=chunk_created,
+                            object="chat.completion.chunk",
+                            choices=[
                                 {
                                     "index": 0,
                                     "delta": {},
                                     "logprobs": None,
                                     "finish_reason": "stop",
                                 }
                             ],
-                        }
+                        )
                         break
                 else:
                     # Check whether the model wants to generate another turn
@@ -2284,25 +2284,22 @@ def generate_streaming(tools, functions, function_call, prompt):
                         tool_index += 1
                     else:
                         # Yield tool_call/function_call stop message
-                        yield {
-                            "id": "chat" + chunk_id,
-                            "object": "chat.completion.chunk",
-                            "created": chunk_created,
-                            "model": chunk["model"],
-                            "choices": [
+                        yield llama_types.CreateChatCompletionStreamResponse(
+                            id="chat" + chunk_id,
+                            object="chat.completion.chunk",
+                            created=chunk_created,
+                            model=chunk["model"],
+                            choices=[
                                 {
                                     "index": 0,
                                     "finish_reason": "tool_calls" if tools is not None else "function_call",
                                     "logprobs": None,
                                     "delta": {
-                                        "role": None,
-                                        "content": None,
-                                        "function_call": None,
-                                        "tool_calls": None,
+                                        "role": None, "content": None, "function_call": None, "tool_calls": None
                                     },
                                 }
                             ],
-                        }
+                        )
                         break
 
     if stream is not False:
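
For context, the old dict literals and the new keyword-argument calls should produce the same value at runtime: the CreateChatCompletion* response types in llama_cpp/llama_types.py are, to the best of my reading, TypedDicts, and calling a TypedDict with keyword arguments builds an ordinary dict while letting a static type checker validate the field names and value types of every yielded chunk. A minimal sketch of the idea, using a hypothetical simplified stand-in for the real type:

from typing import Any, Dict, List, TypedDict

# Hypothetical, simplified stand-in for llama_types.CreateChatCompletionStreamResponse;
# the real definition in llama_cpp/llama_types.py has richer nested types.
class CreateChatCompletionStreamResponse(TypedDict):
    id: str
    object: str
    created: int
    model: str
    choices: List[Dict[str, Any]]

# Old style: an untyped dict literal.
chunk_old = {
    "id": "chat-abc123",
    "object": "chat.completion.chunk",
    "created": 1700000000,
    "model": "functionary",
    "choices": [{"index": 0, "delta": {}, "logprobs": None, "finish_reason": "stop"}],
}

# New style (this commit): keyword-argument construction of the TypedDict.
# A TypedDict call builds an ordinary dict, so the yielded value is unchanged,
# but mypy/pyright can flag a misspelled or missing field.
chunk_new = CreateChatCompletionStreamResponse(
    id="chat-abc123",
    object="chat.completion.chunk",
    created=1700000000,
    model="functionary",
    choices=[{"index": 0, "delta": {}, "logprobs": None, "finish_reason": "stop"}],
)

assert chunk_new == chunk_old

Under that assumption, the commit is a behavior-preserving refactor; only the static checking of the streamed chunks improves.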

0 commit comments
