Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit b345d60

Browse files
committed
Update llama.cpp
1 parent 91e86e5 commit b345d60
Copy full SHA for b345d60

File tree

Expand file tree / Collapse file tree

2 files changed

+33
-1
lines changed
Filter options
Expand file tree / Collapse file tree

2 files changed

+33
-1
lines changed

‎llama_cpp/llama_cpp.py

Copy file name to clipboard — Expand all lines: llama_cpp/llama_cpp.py

+32 lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -158,6 +158,25 @@ class llama_token_data_array(Structure):
158158
llama_progress_callback = ctypes.CFUNCTYPE(None, c_float, c_void_p)
159159

160160

161+
# enum llama_log_level {
#     LLAMA_LOG_LEVEL_ERROR = 2,
#     LLAMA_LOG_LEVEL_WARN  = 3,
#     LLAMA_LOG_LEVEL_INFO  = 4
# };
# Log severity levels mirroring the C enum above. Kept as ctypes.c_int so the
# values can be handed straight to the FFI layer; read `.value` for the raw int.
LLAMA_LOG_LEVEL_ERROR = c_int(2)
LLAMA_LOG_LEVEL_WARN = c_int(3)
LLAMA_LOG_LEVEL_INFO = c_int(4)


# // Signature for logging events
# // Note that text includes the new line character at the end for most events.
# // If your logging mechanism cannot handle that, check if the last character is '\n' and strip it
# // if it exists.
# // It might not exist for progress report where '.' is output repeatedly.
# typedef void (*llama_log_callback)(enum llama_log_level level, const char * text, void * user_data);
# ctypes factory for log callbacks: (level: c_int, text: c_char_p, user_data: c_void_p) -> None.
llama_log_callback = ctypes.CFUNCTYPE(None, c_int, c_char_p, c_void_p)
161180
# struct llama_context_params {
162181
# uint32_t seed; // RNG seed, -1 for random
163182
# int32_t n_ctx; // text context
@@ -351,6 +370,19 @@ class llama_timings(Structure):
351370
]
352371

353372

373+
# // Set callback for all future logging events.
# // If this is not called, or NULL is supplied, everything is output on stderr.
# LLAMA_API void llama_log_set(llama_log_callback log_callback, void * user_data);
def llama_log_set(
    log_callback: "ctypes._FuncPointer", user_data: c_void_p  # type: ignore
):
    """Install *log_callback* for all future llama.cpp logging events.

    If never called, or if ``NULL`` is supplied, output goes to stderr.
    ``user_data`` is passed through unchanged to every callback invocation.
    """
    return _lib.llama_log_set(log_callback, user_data)


# Register the ctypes signature so bad argument types fail fast at call time.
_lib.llama_log_set.argtypes = [llama_log_callback, c_void_p]
_lib.llama_log_set.restype = None
384+
385+
354386
# LLAMA_API int llama_max_devices();
355387
def llama_max_devices() -> int:
356388
return _lib.llama_max_devices()

‎vendor/llama.cpp

Copy file name to clipboard

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.