Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 3f76e1d

Browse files
committed
cjk pr minor cleanup
1 parent bae44ec commit 3f76e1d
Copy full SHA for 3f76e1d

File tree

Expand file tree / Collapse file tree

1 file changed

+5
-4
lines changed
Filter options
Expand file tree / Collapse file tree

1 file changed

+5
-4
lines changed

‎llama_cpp/llama.py

Copy file name to clipboard / Expand all lines: llama_cpp/llama.py
+5 −4 lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1066,14 +1066,15 @@ def _create_completion(
10661066
while len(remaining_tokens) > 0:
10671067
decode_success = False
10681068
for i in range(1, len(remaining_tokens) + 1):
1069-
tokens = remaining_tokens[:i]
10701069
try:
1071-
bs = self.detokenize(tokens)
1072-
text = bs.decode('utf-8')
1070+
bs = self.detokenize(remaining_tokens[:i])
1071+
ts = bs.decode('utf-8')
10731072
decode_success = True
10741073
break
10751074
except UnicodeError:
10761075
pass
1076+
else:
1077+
break
10771078
if not decode_success:
10781079
# all remaining tokens cannot be decoded to a UTF-8 character
10791080
break
@@ -1090,7 +1091,7 @@ def _create_completion(
10901091
"model": model_name,
10911092
"choices": [
10921093
{
1093-
"text": text,
1094+
"text": ts,
10941095
"index": 0,
10951096
"logprobs": None,
10961097
"finish_reason": None,

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.