File tree — Expand file tree / Collapse file tree. 2 files changed: +9 −1 lines changed.
Filter options
Expand file tree / Collapse file tree. 2 files changed: +9 −1 lines changed.
Original file line number Diff line number Diff line change @@ -120,6 +120,8 @@ class llama_context_params(Structure):
120
120
# llama_ftype enum values (mirrors llama.h): model quantization formats.
# The "except 1d tensors" note means 1-dimensional tensors are never quantized.
LLAMA_FTYPE_MOSTLY_Q4_2 = ctypes.c_int(5)  # except 1d tensors
LLAMA_FTYPE_MOSTLY_Q4_3 = ctypes.c_int(6)  # except 1d tensors
LLAMA_FTYPE_MOSTLY_Q8_0 = ctypes.c_int(7)  # except 1d tensors
LLAMA_FTYPE_MOSTLY_Q5_0 = ctypes.c_int(8)  # except 1d tensors
LLAMA_FTYPE_MOSTLY_Q5_1 = ctypes.c_int(9)  # except 1d tensors

# Backward-compatible aliases for the historical "MOSTYL" misspelling, kept
# so existing callers that imported the typo'd names keep working.
LLAMA_FTYPE_MOSTYL_Q4_3 = LLAMA_FTYPE_MOSTLY_Q4_3
LLAMA_FTYPE_MOSTYL_Q8_0 = LLAMA_FTYPE_MOSTLY_Q8_0
LLAMA_FTYPE_MOSTYL_Q5_0 = LLAMA_FTYPE_MOSTLY_Q5_0
LLAMA_FTYPE_MOSTYL_Q5_1 = LLAMA_FTYPE_MOSTLY_Q5_1
123
125
124
126
# Functions
125
127
@@ -210,6 +212,12 @@ def llama_get_kv_cache_token_count(ctx: llama_context_p) -> c_int:
210
212
# ctypes signature registration for llama_get_kv_cache_token_count:
# takes a llama_context_p, returns the token count as a C int.
_lib.llama_get_kv_cache_token_count.argtypes = [llama_context_p]
_lib.llama_get_kv_cache_token_count.restype = c_int
212
214
215
# Sets the current rng seed.
def llama_set_rng_seed(ctx: llama_context_p, seed: c_int):
    """Set the random-number-generator seed for the given llama context.

    Thin wrapper over the native ``llama_set_rng_seed``; returns whatever
    the C call returns (declared ``restype = None`` below, so ``None``).
    """
    return _lib.llama_set_rng_seed(ctx, seed)


# ctypes signature registration: (llama_context_p, c_int) -> None.
_lib.llama_set_rng_seed.argtypes = [llama_context_p, c_int]
_lib.llama_set_rng_seed.restype = None
213
221
214
222
# Returns the size in bytes of the state (rng, logits, embedding and kv_cache)
215
223
def llama_get_state_size (ctx : llama_context_p ) -> c_size_t :
You can’t perform that action at this time.
0 commit comments