Mirror of https://github.com/abetlen/llama-cpp-python.git
Update llama.cpp
@@ -297,21 +297,25 @@ class Llama:
                 ctx=self.ctx,
                 candidates=llama_cpp.ctypes.pointer(candidates),
                 k=top_k,
+                min_keep=llama_cpp.c_size_t(1),
             )
             llama_cpp.llama_sample_tail_free(
                 ctx=self.ctx,
                 candidates=llama_cpp.ctypes.pointer(candidates),
                 z=llama_cpp.c_float(1.0),
+                min_keep=llama_cpp.c_size_t(1),
             )
             llama_cpp.llama_sample_typical(
                 ctx=self.ctx,
                 candidates=llama_cpp.ctypes.pointer(candidates),
                 p=llama_cpp.c_float(1.0),
+                min_keep=llama_cpp.c_size_t(1),
             )
             llama_cpp.llama_sample_top_p(
                 ctx=self.ctx,
                 candidates=llama_cpp.ctypes.pointer(candidates),
                 p=top_p,
+                min_keep=llama_cpp.c_size_t(1),
             )
             llama_cpp.llama_sample_temperature(
                 ctx=self.ctx,
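The four min_keep arguments added above are required by the updated bindings further down, which no longer default that parameter. As orientation, below is a minimal sketch of the same filter chain driven through the low-level llama_cpp bindings directly; the sample_token helper, its default parameter values, and the way the candidates array is built from llama_get_logits are illustrative assumptions rather than the repository's exact _sample_top_p_top_k code.

import ctypes

import llama_cpp

def sample_token(ctx, top_k=40, top_p=0.95, temp=0.80):
    # Hypothetical helper (not part of the library): build a candidates
    # array from the current logits, then run the same filter chain as
    # in the hunk above.
    n_vocab = int(llama_cpp.llama_n_vocab(ctx))
    logits = llama_cpp.llama_get_logits(ctx)
    data = (llama_cpp.llama_token_data * n_vocab)(
        *[
            llama_cpp.llama_token_data(
                id=llama_cpp.llama_token(i),
                logit=logits[i],
                p=llama_cpp.c_float(0.0),
            )
            for i in range(n_vocab)
        ]
    )
    candidates = llama_cpp.llama_token_data_array(
        data=data,
        size=llama_cpp.c_size_t(n_vocab),
        sorted=False,
    )
    candidates_p = ctypes.pointer(candidates)

    # Every filtering sampler now takes min_keep explicitly.
    min_keep = llama_cpp.c_size_t(1)
    llama_cpp.llama_sample_top_k(ctx, candidates_p, llama_cpp.c_int(top_k), min_keep)
    llama_cpp.llama_sample_tail_free(ctx, candidates_p, llama_cpp.c_float(1.0), min_keep)
    llama_cpp.llama_sample_typical(ctx, candidates_p, llama_cpp.c_float(1.0), min_keep)
    llama_cpp.llama_sample_top_p(ctx, candidates_p, llama_cpp.c_float(top_p), min_keep)
    llama_cpp.llama_sample_temperature(ctx, candidates_p, llama_cpp.c_float(temp))
    return llama_cpp.llama_sample_token(ctx, candidates_p)

The ordering matches the hunk: top-k, tail-free, locally typical and top-p filtering first, then temperature scaling before the final token draw.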
@@ -515,7 +515,7 @@ def llama_sample_top_k(
     ctx: llama_context_p,
     candidates,  # type: _Pointer[llama_token_data_array]
     k: c_int,
-    min_keep: c_size_t = c_size_t(1),
+    min_keep: c_size_t,
 ):
     return _lib.llama_sample_top_k(ctx, candidates, k, min_keep)

@@ -534,7 +534,7 @@ def llama_sample_top_p(
     ctx: llama_context_p,
     candidates,  # type: _Pointer[llama_token_data_array]
     p: c_float,
-    min_keep: c_size_t = c_size_t(1),
+    min_keep: c_size_t,
 ):
     return _lib.llama_sample_top_p(ctx, candidates, p, min_keep)

@@ -553,7 +553,7 @@ def llama_sample_tail_free(
     ctx: llama_context_p,
     candidates,  # type: _Pointer[llama_token_data_array]
     z: c_float,
-    min_keep: c_size_t = c_size_t(1),
+    min_keep: c_size_t,
 ):
     return _lib.llama_sample_tail_free(ctx, candidates, z, min_keep)

@@ -572,7 +572,7 @@ def llama_sample_typical(
     ctx: llama_context_p,
     candidates,  # type: _Pointer[llama_token_data_array]
     p: c_float,
-    min_keep: c_size_t = c_size_t(1),
+    min_keep: c_size_t,
 ):
     return _lib.llama_sample_typical(ctx, candidates, p, min_keep)
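Each of the four wrapper changes above drops the c_size_t(1) default, so code that calls the low-level functions directly must now supply min_keep itself. A hedged before/after sketch, reusing ctx and candidates_p from the helper earlier; passing c_size_t(1) reproduces the previous behaviour:

# Previously the wrapper filled in min_keep for you (the removed line above):
#   llama_cpp.llama_sample_top_k(ctx, candidates_p, llama_cpp.c_int(40))
# Now the caller passes it explicitly.
llama_cpp.llama_sample_top_k(
    ctx=ctx,
    candidates=candidates_p,
    k=llama_cpp.c_int(40),
    min_keep=llama_cpp.c_size_t(1),
)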
Submodule vendor/llama.cpp updated: 2edbdb0f99...1b0fd45465