From 5be0efa5f8f98f4b889ca9869e5005ecb5f195d2 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 5 May 2023 12:21:49 -0400
Subject: [PATCH] Cache should raise KeyError when key is missing

---
 llama_cpp/llama.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 32d5424..4e03ed4 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -33,12 +33,10 @@ class LlamaCache:
                 return k
         return None
 
-    def __getitem__(
-        self, key: Sequence[llama_cpp.llama_token]
-    ) -> Optional["LlamaState"]:
+    def __getitem__(self, key: Sequence[llama_cpp.llama_token]) -> "LlamaState":
         _key = self._find_key(tuple(key))
         if _key is None:
-            return None
+            raise KeyError(f"Key not found: {key}")
         return self.cache_state[_key]
 
     def __contains__(self, key: Sequence[llama_cpp.llama_token]) -> bool:
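
For context, here is a minimal, self-contained sketch of the lookup behavior after this patch: a prefix-matching cache whose __getitem__ raises KeyError on a miss instead of returning None, so callers guard access with `in` (or try/except KeyError) rather than checking for None. The PrefixCache class name, the plain int token type, and the __setitem__ helper below are illustrative stand-ins, not the library's actual LlamaCache/LlamaState API; only the _find_key prefix matching and the KeyError behavior mirror the diff above.

```python
from typing import Dict, Optional, Sequence, Tuple


class PrefixCache:
    """Illustrative stand-in for LlamaCache: maps token prefixes to saved state."""

    def __init__(self) -> None:
        # Keys are token tuples; values are whatever state object the caller stores.
        self.cache_state: Dict[Tuple[int, ...], object] = {}

    def _find_key(self, key: Tuple[int, ...]) -> Optional[Tuple[int, ...]]:
        # Return the first stored key that is a prefix of `key`, if any.
        for k in self.cache_state.keys():
            if k == key[: len(k)]:
                return k
        return None

    def __getitem__(self, key: Sequence[int]) -> object:
        _key = self._find_key(tuple(key))
        if _key is None:
            # Patched behavior: a miss raises KeyError, matching dict semantics.
            raise KeyError(f"Key not found: {key}")
        return self.cache_state[_key]

    def __contains__(self, key: Sequence[int]) -> bool:
        return self._find_key(tuple(key)) is not None

    def __setitem__(self, key: Sequence[int], value: object) -> None:
        # Hypothetical setter, included only so the demo below runs end to end.
        self.cache_state[tuple(key)] = value


if __name__ == "__main__":
    cache = PrefixCache()
    cache[[1, 2, 3]] = "saved state for tokens [1, 2, 3]"

    tokens = [1, 2, 3, 4, 5]
    # Callers now test membership (or catch KeyError) instead of comparing to None.
    if tokens in cache:
        print(cache[tokens])  # prefix [1, 2, 3] matches

    try:
        cache[[9, 9]]
    except KeyError as err:
        print("cache miss:", err)
```

Raising KeyError keeps the cache consistent with standard mapping semantics: a miss fails fast at the lookup site, and callers that previously compared the result to None can rely on `in` checks or exception handling instead.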