Commit 26cc4ee

Fix signature for stop parameter
1 parent 7dc0838 commit 26cc4ee

1 file changed: 6 additions & 4 deletions

llama_cpp/llama.py
@@ -323,7 +323,7 @@ def _create_completion(
         top_p: float = 0.95,
         logprobs: Optional[int] = None,
         echo: bool = False,
-        stop: List[str] = [],
+        stop: Optional[List[str]] = [],
         repeat_penalty: float = 1.1,
         top_k: int = 40,
         stream: bool = False,
@@ -336,6 +336,7 @@ def _create_completion(
         prompt_tokens = self.tokenize(b" " + prompt.encode("utf-8"))
         text = b""
         returned_characters = 0
+        stop = stop if stop is not None else []

         if self.verbose:
             llama_cpp.llama_reset_timings(self.ctx)
@@ -537,7 +538,7 @@ def create_completion(
         top_p: float = 0.95,
         logprobs: Optional[int] = None,
         echo: bool = False,
-        stop: List[str] = [],
+        stop: Optional[List[str]] = [],
         repeat_penalty: float = 1.1,
         top_k: int = 40,
         stream: bool = False,
@@ -592,7 +593,7 @@ def __call__(
         top_p: float = 0.95,
         logprobs: Optional[int] = None,
         echo: bool = False,
-        stop: List[str] = [],
+        stop: Optional[List[str]] = [],
         repeat_penalty: float = 1.1,
         top_k: int = 40,
         stream: bool = False,
@@ -698,7 +699,7 @@ def create_chat_completion(
         top_p: float = 0.95,
         top_k: int = 40,
         stream: bool = False,
-        stop: List[str] = [],
+        stop: Optional[List[str]] = [],
         max_tokens: int = 128,
         repeat_penalty: float = 1.1,
     ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
@@ -717,6 +718,7 @@ def create_chat_completion(
         Returns:
             Generated chat completion or a stream of chat completion chunks.
         """
+        stop = stop if stop is not None else []
         instructions = """Complete the following chat conversation between the user and the assistant. System messages should be strictly followed as additional instructions."""
         chat_history = "\n".join(
             f'{message["role"]} {message.get("user", "")}: {message["content"]}'
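
For context, a minimal usage sketch of what this signature change permits, assuming llama-cpp-python is installed; the model path and prompts below are illustrative, not taken from the commit. OpenAI-compatible callers often pass stop=None, which the old stop: List[str] annotation did not admit; widening it to Optional[List[str]] and normalizing None to [] makes both call styles safe.

from llama_cpp import Llama

llm = Llama(model_path="./models/7B/ggml-model.bin")  # illustrative path

# Explicit stop sequences, as before:
out = llm("Q: Name the planets in the solar system. A:", stop=["\n"], max_tokens=32)
print(out["choices"][0]["text"])

# stop=None is now a valid argument and is normalized to [] internally:
out = llm("Q: Name the planets in the solar system. A:", stop=None, max_tokens=32)
print(out["choices"][0]["text"])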

0 commit comments
