Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit f7c5cfa

Browse files
committed
Format server options
1 parent 9c41a3e commit f7c5cfa
Copy full SHA for f7c5cfa

File tree

Expand file tree / Collapse file tree

1 file changed

+3
-3
lines changed
Filter options
Expand file tree / Collapse file tree

1 file changed

+3
-3
lines changed

‎llama_cpp/server/app.py

Copy file name to clipboard. Expand all lines: llama_cpp/server/app.py
+3 −3 lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -260,18 +260,18 @@ class CreateCompletionRequest(BaseModel):
260260
presence_penalty: Optional[float] = presence_penalty_field
261261
frequency_penalty: Optional[float] = frequency_penalty_field
262262
logit_bias: Optional[Dict[str, float]] = Field(None)
263-
logit_bias_type: Optional[Literal["input_ids", "tokens"]] = Field(None)
263+
logprobs: Optional[int] = Field(None)
264264

265265
# ignored or currently unsupported
266266
model: Optional[str] = model_field
267267
n: Optional[int] = 1
268-
logprobs: Optional[int] = Field(None)
269268
best_of: Optional[int] = 1
270269
user: Optional[str] = Field(None)
271270

272271
# llama.cpp specific parameters
273272
top_k: int = top_k_field
274273
repeat_penalty: float = repeat_penalty_field
274+
logit_bias_type: Optional[Literal["input_ids", "tokens"]] = Field(None)
275275

276276
class Config:
277277
schema_extra = {
@@ -424,7 +424,6 @@ class CreateChatCompletionRequest(BaseModel):
424424
presence_penalty: Optional[float] = presence_penalty_field
425425
frequency_penalty: Optional[float] = frequency_penalty_field
426426
logit_bias: Optional[Dict[str, float]] = Field(None)
427-
logit_bias_type: Optional[Literal["input_ids", "tokens"]] = Field(None)
428427

429428
# ignored or currently unsupported
430429
model: Optional[str] = model_field
@@ -434,6 +433,7 @@ class CreateChatCompletionRequest(BaseModel):
434433
# llama.cpp specific parameters
435434
top_k: int = top_k_field
436435
repeat_penalty: float = repeat_penalty_field
436+
logit_bias_type: Optional[Literal["input_ids", "tokens"]] = Field(None)
437437

438438
class Config:
439439
schema_extra = {

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.