Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Cache should raise KeyError when key is missing
@@ -33,12 +33,10 @@ class LlamaCache:
                 return k
         return None
 
-    def __getitem__(
-        self, key: Sequence[llama_cpp.llama_token]
-    ) -> Optional["LlamaState"]:
+    def __getitem__(self, key: Sequence[llama_cpp.llama_token]) -> "LlamaState":
         _key = self._find_key(tuple(key))
         if _key is None:
-            return None
+            raise KeyError(f"Key not found: {key}")
         return self.cache_state[_key]
 
     def __contains__(self, key: Sequence[llama_cpp.llama_token]) -> bool:
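For context, a minimal usage sketch of the changed behavior. This is not part of the commit; it assumes LlamaCache is importable from the llama_cpp package, that its constructor takes no arguments at this revision, and that plain integer ids stand in for llama_cpp.llama_token values.

from llama_cpp import LlamaCache  # import path is an assumption

cache = LlamaCache()
tokens = [1, 2, 3]  # placeholder token ids for illustration

# After this commit a cache miss raises KeyError instead of returning None,
# so callers either catch the exception ...
try:
    state = cache[tokens]
except KeyError:
    state = None  # previously __getitem__ returned None here

# ... or guard the lookup with the __contains__ check kept in the context lines.
if tokens in cache:
    state = cache[tokens]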