2 files changed, +3 −3 lines changed
@@ -445,7 +445,7 @@ class llama_model_params(Structure):
 # uint32_t n_batch; // prompt processing maximum batch size
 # uint32_t n_threads; // number of threads to use for generation
 # uint32_t n_threads_batch; // number of threads to use for batch processing
-# int8_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type`
+# int32_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type`

 # // ref: https://github.com/ggerganov/llama.cpp/pull/2054
 # float rope_freq_base; // RoPE base frequency, 0 = from model
@@ -502,7 +502,7 @@ class llama_context_params(Structure):
         ("n_batch", c_uint32),
         ("n_threads", c_uint32),
         ("n_threads_batch", c_uint32),
-        ("rope_scaling_type", c_int8),
+        ("rope_scaling_type", c_int32),
         ("rope_freq_base", c_float),
         ("rope_freq_scale", c_float),
         ("yarn_ext_factor", c_float),