Commit a7c9e38

committed
Update variable name
1 parent a335292 commit a7c9e38

1 file changed: +6 -6 lines changed

llama_cpp/llama.py: 6 additions & 6 deletions
@@ -532,7 +532,7 @@ def create_embedding(self, input: str, model: Optional[str] = None) -> Embedding
             An embedding object.
         """
         assert self.ctx is not None
-        _model: str = model if model is not None else self.model_path
+        model_name: str = model if model is not None else self.model_path
 
         if self.params.embedding == False:
             raise RuntimeError(
@@ -562,7 +562,7 @@ def create_embedding(self, input: str, model: Optional[str] = None) -> Embedding
                     "index": 0,
                 }
             ],
-            "model": _model,
+            "model": model_name,
             "usage": {
                 "prompt_tokens": n_tokens,
                 "total_tokens": n_tokens,
@@ -612,7 +612,7 @@ def _create_completion(
         text: bytes = b""
         returned_characters: int = 0
         stop = stop if stop is not None else []
-        _model: str = model if model is not None else self.model_path
+        model_name: str = model if model is not None else self.model_path
 
         if self.verbose:
             llama_cpp.llama_reset_timings(self.ctx)
@@ -711,7 +711,7 @@ def _create_completion(
                     "id": completion_id,
                     "object": "text_completion",
                     "created": created,
-                    "model": _model,
+                    "model": model_name,
                     "choices": [
                         {
                             "text": text[start:].decode("utf-8", errors="ignore"),
@@ -740,7 +740,7 @@ def _create_completion(
                 "id": completion_id,
                 "object": "text_completion",
                 "created": created,
-                "model": _model,
+                "model": model_name,
                 "choices": [
                     {
                         "text": text[returned_characters:].decode(
@@ -810,7 +810,7 @@ def _create_completion(
             "id": completion_id,
             "object": "text_completion",
             "created": created,
-            "model": _model,
+            "model": model_name,
             "choices": [
                 {
                     "text": text_str,

0 commit comments
