Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Fix temperature bug
@@ -287,7 +287,7 @@ class Llama:
             candidates=llama_cpp.ctypes.pointer(candidates),
             penalty=repeat_penalty,
         )
-        if temp == 0.0:
+        if float(temp) == 0.0:
             return llama_cpp.llama_sample_token_greedy(
                 ctx=self.ctx,
                 candidates=llama_cpp.ctypes.pointer(candidates),
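The whole change is the float() cast in the temperature check. As a hedged illustration only (not the library's actual sampling code), the sketch below shows the dispatch that branch implements: a temperature of zero selects the greedy (argmax) token, while any other value samples from temperature-scaled logits. The cast keeps the greedy branch working if temp arrives as an int or a numeric string; that motivation is an assumption drawn from the diff alone, and sample_token is a hypothetical helper.

# Minimal sketch, assuming nothing beyond the diff: dispatch between greedy
# decoding and temperature sampling over a plain list of logits.
import math
import random

def sample_token(logits, temp):
    # Cast first, mirroring the fix: temp might arrive as an int or a
    # numeric string (hypothetical cases), and the greedy branch should
    # still fire when its value is zero.
    if float(temp) == 0.0:
        # Greedy path: return the index of the highest logit.
        return max(range(len(logits)), key=lambda i: logits[i])
    # Stochastic path: softmax over temperature-scaled logits, then draw one id.
    scaled = [l / float(temp) for l in logits]
    peak = max(scaled)
    weights = [math.exp(s - peak) for s in scaled]
    return random.choices(range(len(logits)), weights=weights, k=1)[0]

print(sample_token([0.1, 2.5, -1.0], temp="0"))   # 1, via the greedy branch
print(sample_token([0.1, 2.5, -1.0], temp=0.8))   # sampled token id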