Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Fix type signature of token_to_str
@@ -187,7 +187,7 @@ _lib.llama_get_embeddings.restype = POINTER(c_float)
 
 
 # Token Id -> String. Uses the vocabulary in the provided context
-def llama_token_to_str(ctx: llama_context_p, token: int) -> bytes:
+def llama_token_to_str(ctx: llama_context_p, token: llama_token) -> bytes:
     return _lib.llama_token_to_str(ctx, token)
 
 
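For context, the commit only changes the Python type annotation in the ctypes wrapper; the underlying call is untouched. A minimal sketch of the surrounding binding pattern is shown below. The library path and the llama_token / llama_context_p aliases are assumptions for illustration, not taken from the diff itself.

# Illustrative sketch of the ctypes-binding pattern this commit adjusts.
import ctypes
from ctypes import c_char_p, c_int, c_void_p

llama_token = c_int         # assumed alias: token ids are plain C ints
llama_context_p = c_void_p  # assumed alias: opaque handle to a llama context

_lib = ctypes.CDLL("libllama.so")  # hypothetical library name/path

# ctypes needs explicit argument/return types for the exported C symbol.
_lib.llama_token_to_str.argtypes = [llama_context_p, llama_token]
_lib.llama_token_to_str.restype = c_char_p

def llama_token_to_str(ctx: llama_context_p, token: llama_token) -> bytes:
    # Annotating the parameter as llama_token (rather than int) keeps the
    # Python signature consistent with the argtypes declared above; ctypes
    # still accepts a plain Python int wherever c_int is expected.
    return _lib.llama_token_to_str(ctx, token)

At call sites nothing changes: a plain Python int is still accepted for the token argument, so the fix is purely about keeping the wrapper's annotation in sync with the declared C signature.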