2 files changed: +8 -1 lines changed
@@ -10,17 +10,20 @@
 # GGML_LOG_LEVEL_WARN  = 2,
 # GGML_LOG_LEVEL_ERROR = 3,
 # GGML_LOG_LEVEL_DEBUG = 4,
+# GGML_LOG_LEVEL_CONT  = 5, // continue previous log
 # };
 GGML_LOG_LEVEL_TO_LOGGING_LEVEL = {
     0: logging.CRITICAL,
     1: logging.INFO,
     2: logging.WARNING,
     3: logging.ERROR,
     4: logging.DEBUG,
+    5: logging.DEBUG,
 }
 
 logger = logging.getLogger("llama-cpp-python")
 
+_last_log_level = GGML_LOG_LEVEL_TO_LOGGING_LEVEL[0]
 
 # typedef void (*ggml_log_callback)(enum ggml_log_level level, const char * text, void * user_data);
 @llama_cpp.llama_log_callback
@@ -29,8 +32,12 @@ def llama_log_callback(
     text: bytes,
     user_data: ctypes.c_void_p,
 ):
+    # TODO: Correctly implement continue previous log
+    global _last_log_level
+    log_level = GGML_LOG_LEVEL_TO_LOGGING_LEVEL[level] if level != 5 else _last_log_level
     if logger.level <= GGML_LOG_LEVEL_TO_LOGGING_LEVEL[level]:
         print(text.decode("utf-8"), end="", flush=True, file=sys.stderr)
+    _last_log_level = log_level
 
 
 llama_cpp.llama_log_set(llama_log_callback, ctypes.c_void_p(0))
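
The callback above resolves CONT (level 5) to the previous message's level but, as the TODO notes, still filters on the raw level, so a continuation fragment can be shown or hidden independently of the line it continues. Below is a minimal sketch, not part of the diff, of how both the resolution and the filtering check could use the resolved level. The standalone emit() helper is hypothetical, standing in for the body of llama_log_callback and kept free of ctypes for clarity; the module-level names are copied from the diff above.

    import logging
    import sys

    # Names below mirror the diff; emit() is a hypothetical stand-in for the
    # body of llama_log_callback.
    GGML_LOG_LEVEL_TO_LOGGING_LEVEL = {
        0: logging.CRITICAL,
        1: logging.INFO,
        2: logging.WARNING,
        3: logging.ERROR,
        4: logging.DEBUG,
        5: logging.DEBUG,
    }
    GGML_LOG_LEVEL_CONT = 5  # continuation of the previous message

    logger = logging.getLogger("llama-cpp-python")
    _last_log_level = GGML_LOG_LEVEL_TO_LOGGING_LEVEL[0]


    def emit(level: int, text: bytes) -> None:
        """Print a ggml log fragment, letting CONT inherit the previous level."""
        global _last_log_level
        # Resolve CONT to the level of the last non-CONT message.
        log_level = (
            _last_log_level
            if level == GGML_LOG_LEVEL_CONT
            else GGML_LOG_LEVEL_TO_LOGGING_LEVEL[level]
        )
        # Filter on the resolved level, so a continuation is printed exactly
        # when the line it continues was printed.
        if logger.level <= log_level:
            print(text.decode("utf-8"), end="", flush=True, file=sys.stderr)
        _last_log_level = log_level


    # Example: with the logger at WARNING, the warning and its CONT
    # continuation are printed; the DEBUG line and its continuation are not.
    if __name__ == "__main__":
        logger.setLevel(logging.WARNING)
        emit(2, b"warn: context size exceeded")
        emit(5, b", truncating\n")
        emit(4, b"debug: detail")
        emit(5, b" more detail\n")

Inside the real callback, the same idea would presumably amount to comparing logger.level against log_level rather than GGML_LOG_LEVEL_TO_LOGGING_LEVEL[level], which appears to be what the TODO is pointing at.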