Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 9cd9b42

Browse files
authored
Update llama.py
ntk rope
1 parent 275e5e8 commit 9cd9b42
Copy full SHA for 9cd9b42

File tree

Expand file tree / Collapse file tree

1 file changed

+12
-0
lines changed
Filter options
Expand file tree / Collapse file tree

1 file changed

+12
-0
lines changed

‎llama_cpp/llama.py

Copy file name to clipboard / Expand all lines: llama_cpp/llama.py
+12 lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1195,6 +1195,8 @@ def create_completion(
11951195
model: Optional[str] = None,
11961196
stopping_criteria: Optional[StoppingCriteriaList] = None,
11971197
logits_processor: Optional[LogitsProcessorList] = None,
1198+
rope_freq_base: int = 80000,
1199+
rope_freq_scale: float = 0.5,
11981200
) -> Union[Completion, Iterator[CompletionChunk]]:
11991201
"""Generate text from a prompt.
12001202
@@ -1239,6 +1241,8 @@ def create_completion(
12391241
model=model,
12401242
stopping_criteria=stopping_criteria,
12411243
logits_processor=logits_processor,
1244+
rope_freq_base=rope_freq_base,
1245+
rope_freq_scale=rope_freq_scale,
12421246
)
12431247
if stream:
12441248
chunks: Iterator[CompletionChunk] = completion_or_chunks
@@ -1268,6 +1272,8 @@ def __call__(
12681272
model: Optional[str] = None,
12691273
stopping_criteria: Optional[StoppingCriteriaList] = None,
12701274
logits_processor: Optional[LogitsProcessorList] = None,
1275+
rope_freq_base: int = 80000,
1276+
rope_freq_scale: float = 0.5,
12711277
) -> Union[Completion, Iterator[CompletionChunk]]:
12721278
"""Generate text from a prompt.
12731279
@@ -1312,6 +1318,8 @@ def __call__(
13121318
model=model,
13131319
stopping_criteria=stopping_criteria,
13141320
logits_processor=logits_processor,
1321+
rope_freq_base=rope_freq_base,
1322+
rope_freq_scale=rope_freq_scale,
13151323
)
13161324

13171325
def _convert_text_completion_to_chat(
@@ -1390,6 +1398,8 @@ def create_chat_completion(
13901398
mirostat_eta: float = 0.1,
13911399
model: Optional[str] = None,
13921400
logits_processor: Optional[LogitsProcessorList] = None,
1401+
rope_freq_base: int = 80000,
1402+
rope_freq_scale: float = 0.5,
13931403
) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
13941404
"""Generate a chat completion from a list of messages.
13951405
@@ -1432,6 +1442,8 @@ def create_chat_completion(
14321442
mirostat_eta=mirostat_eta,
14331443
model=model,
14341444
logits_processor=logits_processor,
1445+
rope_freq_base=rope_freq_base,
1446+
rope_freq_scale=rope_freq_scale,
14351447
)
14361448
if stream:
14371449
chunks: Iterator[CompletionChunk] = completion_or_chunks # type: ignore

0 commit comments

Comments: 0
Morty Proxy This is a proxified and sanitized view of the page, visit original site.