Commit 2993936

Fix ctypes definitions of llama_kv_cache_view_update and llama_kv_cache_view_free. (abetlen#1028)
1 parent 5e863d8 commit 2993936

1 file changed: llama_cpp/llama_cpp.py (7 additions, 4 deletions)
@@ -1041,6 +1041,9 @@ class llama_kv_cache_view(Structure):
     ]
 
 
+llama_kv_cache_view_p = POINTER(llama_kv_cache_view)
+
+
 # // Create an empty KV cache view. (use only for debugging purposes)
 # LLAMA_API struct llama_kv_cache_view llama_kv_cache_view_init(const struct llama_context * ctx, int32_t n_max_seq);
 def llama_kv_cache_view_init(
@@ -1056,23 +1059,23 @@ def llama_kv_cache_view_init(
 
 # // Free a KV cache view. (use only for debugging purposes)
 # LLAMA_API void llama_kv_cache_view_free(struct llama_kv_cache_view * view);
-def llama_kv_cache_view_free(view: llama_kv_cache_view):
+def llama_kv_cache_view_free(view: llama_kv_cache_view_p):
     """Free a KV cache view. (use only for debugging purposes)"""
     return _lib.llama_kv_cache_view_free(view)
 
 
-_lib.llama_kv_cache_view_free.argtypes = [llama_kv_cache_view]
+_lib.llama_kv_cache_view_free.argtypes = [llama_kv_cache_view_p]
 _lib.llama_kv_cache_view_free.restype = None
 
 
 # // Update the KV cache view structure with the current state of the KV cache. (use only for debugging purposes)
 # LLAMA_API void llama_kv_cache_view_update(const struct llama_context * ctx, struct llama_kv_cache_view * view);
-def llama_kv_cache_view_update(ctx: llama_context_p, view: llama_kv_cache_view):
+def llama_kv_cache_view_update(ctx: llama_context_p, view: llama_kv_cache_view_p):
     """Update the KV cache view structure with the current state of the KV cache. (use only for debugging purposes)"""
     return _lib.llama_kv_cache_view_update(ctx, view)
 
 
-_lib.llama_kv_cache_view_update.argtypes = [llama_context_p, llama_kv_cache_view]
+_lib.llama_kv_cache_view_update.argtypes = [llama_context_p, llama_kv_cache_view_p]
 _lib.llama_kv_cache_view_update.restype = None
 
 
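With this fix, llama_kv_cache_view_free and llama_kv_cache_view_update declare their view argument as POINTER(llama_kv_cache_view) rather than the struct itself, matching the C prototypes quoted in the comments above. A minimal usage sketch of the corrected calling convention follows; it is an illustration rather than part of this commit, the model path is a placeholder, and the surrounding setup/teardown calls are the standard low-level llama_cpp bindings of this era.

# Sketch: inspect the KV cache via the debugging view (placeholder model path).
import ctypes
import llama_cpp

llama_cpp.llama_backend_init(False)
model = llama_cpp.llama_load_model_from_file(
    b"/path/to/model.gguf", llama_cpp.llama_model_default_params()
)
ctx = llama_cpp.llama_new_context_with_model(
    model, llama_cpp.llama_context_default_params()
)

# Create a KV cache view tracking at most 4 sequences per cell (debugging only).
view = llama_cpp.llama_kv_cache_view_init(ctx, 4)

# update/free now expect POINTER(llama_kv_cache_view), so pass the struct
# by reference (ctypes.byref) instead of by value.
llama_cpp.llama_kv_cache_view_update(ctx, ctypes.byref(view))
print(view.n_cells, view.used_cells)  # fields mirrored from struct llama_kv_cache_view

llama_cpp.llama_kv_cache_view_free(ctypes.byref(view))
llama_cpp.llama_free(ctx)
llama_cpp.llama_free_model(model)
llama_cpp.llama_backend_free()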
0 commit comments
