mirror of
https://github.com/Textualize/textual.git
synced 2025-10-17 02:38:12 +03:00
Added FIFOCache
This commit is contained in:
@@ -9,7 +9,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
|
||||
|
||||
### Added
|
||||
|
||||
- Added textual.strip primitive
|
||||
- Added textual.strip.Strip primitive
|
||||
- Added textual._cache.FIFOCache
|
||||
|
||||
## [0.8.1] - 2022-12-25
|
||||
|
||||
|
||||
@@ -175,3 +175,83 @@ class LRUCache(Generic[CacheKey, CacheValue]):
|
||||
|
||||
def __contains__(self, key: CacheKey) -> bool:
    """Check whether *key* is currently stored in the cache."""
    return self._cache.__contains__(key)
|
||||
|
||||
|
||||
class FIFOCache(Generic[CacheKey, CacheValue]):
    """A simple cache that discards the least recently added key when full.

    This has a lower overhead than LRUCache, but won't manage a working set as efficiently.
    It is most suitable for a cache with a relatively low maximum size that is not expected to
    do many lookups.

    Args:
        maxsize (int): Maximum size of the cache.
    """

    __slots__ = [
        "_maxsize",
        "_cache",
        "_lock",
        "hits",
        "misses",
    ]

    def __init__(self, maxsize: int) -> None:
        self._maxsize = maxsize
        # dict preserves insertion order, so the first key is always the oldest.
        self._cache: dict[CacheKey, CacheValue] = {}
        self._lock = Lock()
        # Counters updated by __getitem__ only (get() does not track hits/misses).
        self.hits = 0
        self.misses = 0

    def __bool__(self) -> bool:
        return bool(self._cache)

    def __len__(self) -> int:
        return len(self._cache)

    def __repr__(self) -> str:
        return (
            f"<Cache maxsize={self._maxsize!r} hits={self.hits} misses={self.misses}>"
        )

    def clear(self) -> None:
        """Clear the cache."""
        self._cache.clear()

    def keys(self) -> KeysView[CacheKey]:
        """Get cache keys."""
        # Mostly for tests
        return self._cache.keys()

    def set(self, key: CacheKey, value: CacheValue) -> None:
        """Store a value, evicting the oldest key if the cache is full.

        Args:
            key (CacheKey): Cache key.
            value (CacheValue): Value to store.
        """
        with self._lock:
            if key not in self._cache and len(self._cache) == self._maxsize:
                # Evict the first-inserted (oldest) key to make room.
                self._cache.pop(next(iter(self._cache.keys())))
            self._cache[key] = value

    __setitem__ = set

    @overload
    def get(self, key: CacheKey) -> CacheValue | None:
        ...

    @overload
    def get(self, key: CacheKey, default: DefaultValue) -> CacheValue | DefaultValue:
        ...

    def get(
        self, key: CacheKey, default: DefaultValue | None = None
    ) -> CacheValue | DefaultValue | None:
        """Get a value from the cache, or a default if the key is absent.

        Args:
            key (CacheKey): Cache key.
            default (DefaultValue | None, optional): Value returned on a miss. Defaults to None.

        Returns:
            CacheValue | DefaultValue | None: The cached value, or ``default``.
        """
        return self._cache.get(key, default)

    def __getitem__(self, key: CacheKey) -> CacheValue:
        """Get a value, updating the hit/miss counters.

        Raises:
            KeyError: If the key is not in the cache.
        """
        try:
            value = self._cache[key]
        except KeyError:
            # Fix: the original incremented `hits` in a `finally:` block,
            # which also counted every miss as a hit.
            self.misses += 1
            raise KeyError(key) from None
        else:
            self.hits += 1
            return value

    def __contains__(self, key: CacheKey) -> bool:
        # Fix: was misspelled `__container__`, so Python never called it and
        # `key in cache` fell back to the integer __getitem__ protocol,
        # raising KeyError instead of returning a bool.
        return key in self._cache
|
||||
|
||||
@@ -8,7 +8,7 @@ from rich.cells import cell_len, set_cell_size
|
||||
from rich.segment import Segment
|
||||
from rich.style import Style
|
||||
|
||||
from ._cache import LRUCache
|
||||
from ._cache import FIFOCache
|
||||
from ._filter import LineFilter
|
||||
|
||||
|
||||
@@ -35,8 +35,8 @@ class Strip:
|
||||
) -> None:
|
||||
self._segments = list(segments)
|
||||
self._cell_length = cell_length
|
||||
self._divide_cache: LRUCache[tuple[int, ...], list[Strip]] = LRUCache(4)
|
||||
self._crop_cache: LRUCache[tuple[int, int], Strip] = LRUCache(4)
|
||||
self._divide_cache: FIFOCache[tuple[int, ...], list[Strip]] = FIFOCache(4)
|
||||
self._crop_cache: FIFOCache[tuple[int, int], Strip] = FIFOCache(4)
|
||||
|
||||
def __rich_repr__(self) -> rich.repr.Result:
|
||||
yield self._segments
|
||||
|
||||
@@ -3,7 +3,7 @@ from __future__ import unicode_literals
|
||||
|
||||
import pytest
|
||||
|
||||
from textual._cache import LRUCache
|
||||
from textual._cache import FIFOCache, LRUCache
|
||||
|
||||
|
||||
def test_lru_cache():
|
||||
@@ -61,6 +61,7 @@ def test_lru_cache_get():
|
||||
assert "egg" not in cache
|
||||
assert "eggegg" in cache
|
||||
|
||||
|
||||
def test_lru_cache_maxsize():
|
||||
cache = LRUCache(3)
|
||||
|
||||
@@ -74,7 +75,7 @@ def test_lru_cache_maxsize():
|
||||
assert cache.maxsize == 30, "Incorrect cache maxsize after setting it"
|
||||
|
||||
# Add more than maxsize items to the cache and be sure
|
||||
for spam in range(cache.maxsize+10):
|
||||
for spam in range(cache.maxsize + 10):
|
||||
cache[f"spam{spam}"] = spam
|
||||
|
||||
# Finally, check the cache is the max size we set.
|
||||
@@ -146,3 +147,27 @@ def test_lru_cache_len(keys: list[str], expected_len: int):
|
||||
for value, key in enumerate(keys):
|
||||
cache[key] = value
|
||||
assert len(cache) == expected_len
|
||||
|
||||
|
||||
def test_fifo_cache():
    """FIFOCache evicts the oldest key when full, ignoring access order."""
    cache = FIFOCache(4)
    assert not cache

    cache["foo"] = 1
    assert cache

    # Fill the remaining capacity in insertion order.
    for value, key in enumerate(["bar", "baz", "egg"], start=2):
        cache[key] = value
    assert list(cache.keys()) == ["foo", "bar", "baz", "egg"]
    assert len(cache) == 4

    # One more insert evicts the oldest key ("foo").
    cache["Paul"] = 100
    assert list(cache.keys()) == ["bar", "baz", "egg", "Paul"]
    assert len(cache) == 4

    # Lookups do not refresh a key's position (unlike an LRU cache) ...
    assert cache["baz"] == 3
    assert cache["bar"] == 2
    # ... so "bar" is still the oldest and gets evicted next.
    cache["Chani"] = 101
    assert list(cache.keys()) == ["baz", "egg", "Paul", "Chani"]
    assert len(cache) == 4

    cache.clear()
    assert len(cache) == 0
    assert list(cache.keys()) == []
|
||||
|
||||
Reference in New Issue
Block a user