cache

Full name: tenets.utils.cache

Caching utilities for tenets.

Provides thread-safe LRU caching with TTL support, plus specialized caches for expensive computations such as file contents, embeddings, and ranking scores.

Classes

CacheEntry (dataclass)

Python
CacheEntry(value: T, created_at: float = time.time(), last_accessed: float = time.time(), access_count: int = 0, size_bytes: int = 0)

Bases: Generic[T]

A single cache entry with value and metadata.

Functions
is_expired
Python
is_expired(ttl_seconds: float) -> bool

Check if entry has expired based on TTL.

Source code in tenets/utils/cache.py
Python
def is_expired(self, ttl_seconds: float) -> bool:
    """Check if entry has expired based on TTL."""
    if ttl_seconds <= 0:
        return False  # No TTL, never expires
    return time.time() - self.created_at > ttl_seconds
touch
Python
touch() -> None

Update last access time and increment counter.

Source code in tenets/utils/cache.py
Python
def touch(self) -> None:
    """Update last access time and increment counter."""
    self.last_accessed = time.time()
    self.access_count += 1
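
A minimal sketch of how an entry behaves, assuming CacheEntry is imported directly from tenets.utils.cache; created_at is passed explicitly here to pin the creation time:

Python
import time

from tenets.utils.cache import CacheEntry

entry = CacheEntry(value="hello", created_at=time.time())
entry.touch()                     # records an access
assert entry.access_count == 1
assert not entry.is_expired(60)   # well within a 60-second TTL
assert not entry.is_expired(0)    # ttl_seconds <= 0 means never expire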

LRUCache

Python
LRUCache(max_size: int = 1000, ttl_seconds: float = 0, name: str = 'cache')

Bases: Generic[T]

Thread-safe LRU cache with optional TTL.

Provides efficient caching with automatic eviction of least recently used entries when capacity is reached.

Attribute    Description
max_size     Maximum number of entries
ttl_seconds  Time-to-live in seconds (0 = no expiration)
hits         Number of cache hits
misses       Number of cache misses

Initialize the cache.

Parameter    Type   Default  Description
max_size     int    1000     Maximum number of entries to store
ttl_seconds  float  0        Time-to-live for entries (0 = no expiration)
name         str    'cache'  Name for logging/debugging

Source code in tenets/utils/cache.py
Python
def __init__(
    self,
    max_size: int = 1000,
    ttl_seconds: float = 0,
    name: str = "cache",
):
    """Initialize the cache.

    Args:
        max_size: Maximum number of entries to store
        ttl_seconds: Time-to-live for entries (0 = no expiration)
        name: Name for logging/debugging
    """
    self.max_size = max_size
    self.ttl_seconds = ttl_seconds
    self.name = name

    self._cache: Dict[str, CacheEntry[T]] = {}
    self._lock = threading.RLock()

    # Statistics
    self.hits = 0
    self.misses = 0
Attributes
size (property)
Python
size: int

Current number of entries.

hit_rate (property)
Python
hit_rate: float

Cache hit rate (0.0 to 1.0).

Functions
get
Python
get(key: str) -> Optional[T]

Get value from cache.

Parameter  Type  Description
key        str   Cache key

Returns      Description
Optional[T]  Cached value or None if not found/expired

Source code in tenets/utils/cache.py
Python
def get(self, key: str) -> Optional[T]:
    """Get value from cache.

    Args:
        key: Cache key

    Returns:
        Cached value or None if not found/expired
    """
    with self._lock:
        entry = self._cache.get(key)

        if entry is None:
            self.misses += 1
            return None

        if entry.is_expired(self.ttl_seconds):
            del self._cache[key]
            self.misses += 1
            return None

        entry.touch()
        self.hits += 1
        return entry.value
set
Python
set(key: str, value: T, size_bytes: int = 0) -> None

Set value in cache.

Parameter   Type  Default   Description
key         str   required  Cache key
value       T     required  Value to cache
size_bytes  int   0         Optional size estimate for memory tracking

Source code in tenets/utils/cache.py
Python
def set(self, key: str, value: T, size_bytes: int = 0) -> None:
    """Set value in cache.

    Args:
        key: Cache key
        value: Value to cache
        size_bytes: Optional size estimate for memory tracking
    """
    with self._lock:
        # Evict if at capacity
        if len(self._cache) >= self.max_size and key not in self._cache:
            self._evict_lru()

        self._cache[key] = CacheEntry(
            value=value,
            size_bytes=size_bytes,
        )
delete
Python
delete(key: str) -> bool

Delete entry from cache.

Parameter  Type  Description
key        str   Cache key

Returns  Description
bool     True if entry was deleted, False if not found

Source code in tenets/utils/cache.py
Python
def delete(self, key: str) -> bool:
    """Delete entry from cache.

    Args:
        key: Cache key

    Returns:
        True if entry was deleted, False if not found
    """
    with self._lock:
        if key in self._cache:
            del self._cache[key]
            return True
        return False
clear
Python
clear() -> int

Clear all entries.

Returns  Description
int      Number of entries cleared

Source code in tenets/utils/cache.py
Python
def clear(self) -> int:
    """Clear all entries.

    Returns:
        Number of entries cleared
    """
    with self._lock:
        count = len(self._cache)
        self._cache.clear()
        self.hits = 0
        self.misses = 0
        return count
stats
Python
stats() -> Dict[str, Any]

Get cache statistics.

Returns         Description
Dict[str, Any]  Dictionary with cache statistics

Source code in tenets/utils/cache.py
Python
def stats(self) -> Dict[str, Any]:
    """Get cache statistics.

    Returns:
        Dictionary with cache statistics
    """
    with self._lock:
        total_size = sum(e.size_bytes for e in self._cache.values())
        return {
            "name": self.name,
            "size": self.size,
            "max_size": self.max_size,
            "hits": self.hits,
            "misses": self.misses,
            "hit_rate": self.hit_rate,
            "total_bytes": total_size,
            "ttl_seconds": self.ttl_seconds,
        }
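
A minimal usage sketch; the keys, values, and cache name here are illustrative:

Python
from tenets.utils.cache import LRUCache

cache: LRUCache[str] = LRUCache(max_size=2, ttl_seconds=60, name="demo")
cache.set("a", "alpha")
cache.set("b", "beta")

assert cache.get("a") == "alpha"      # hit
assert cache.get("missing") is None   # miss

cache.set("c", "gamma")               # at capacity: least recently used entry is evicted
print(cache.stats())                  # e.g. {'name': 'demo', 'size': 2, 'hits': 1, 'misses': 1, ...}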

FileContentCache

Python
FileContentCache(max_size: int = 500, max_file_size: int = 1024 * 1024)

Cache for file contents with modification tracking.

Automatically invalidates cache entries when files are modified.

Attribute      Description
max_size       Maximum number of files to cache
max_file_size  Maximum file size to cache (bytes)

Initialize file content cache.

Parameter      Type  Default      Description
max_size       int   500          Maximum number of files to cache
max_file_size  int   1024 * 1024  Maximum file size to cache in bytes

Source code in tenets/utils/cache.py
Python
def __init__(
    self,
    max_size: int = 500,
    max_file_size: int = 1024 * 1024,  # 1MB default
):
    """Initialize file content cache.

    Args:
        max_size: Maximum number of files to cache
        max_file_size: Maximum file size to cache in bytes
    """
    self._cache: LRUCache[tuple[float, str]] = LRUCache(
        max_size=max_size,
        name="file_content",
    )
    self.max_file_size = max_file_size
Functions
get
Python
get(path: Path) -> Optional[str]

Get cached file content if still valid.

Parameter  Type  Description
path       Path  Path to file

Returns        Description
Optional[str]  File content or None if not cached/stale

Source code in tenets/utils/cache.py
Python
def get(self, path: Path) -> Optional[str]:
    """Get cached file content if still valid.

    Args:
        path: Path to file

    Returns:
        File content or None if not cached/stale
    """
    key = str(path.resolve())
    entry = self._cache.get(key)

    if entry is None:
        return None

    mtime, content = entry

    # Check if file was modified
    try:
        current_mtime = path.stat().st_mtime
        if current_mtime > mtime:
            self._cache.delete(key)
            return None
    except OSError:
        self._cache.delete(key)
        return None

    return content
set
Python
set(path: Path, content: str) -> bool

Cache file content.

Parameter  Type  Description
path       Path  Path to file
content    str   File content

Returns  Description
bool     True if cached, False if file too large

Source code in tenets/utils/cache.py
Python
def set(self, path: Path, content: str) -> bool:
    """Cache file content.

    Args:
        path: Path to file
        content: File content

    Returns:
        True if cached, False if file too large
    """
    if len(content) > self.max_file_size:
        return False

    key = str(path.resolve())
    try:
        mtime = path.stat().st_mtime
    except OSError:
        mtime = time.time()

    self._cache.set(key, (mtime, content), size_bytes=len(content))
    return True
invalidate
Python
invalidate(path: Path) -> bool

Invalidate cached content for a file.

Parameter  Type  Description
path       Path  Path to file

Returns  Description
bool     True if entry was removed, False if not cached

Source code in tenets/utils/cache.py
Python
def invalidate(self, path: Path) -> bool:
    """Invalidate cached content for a file.

    Args:
        path: Path to file

    Returns:
        True if entry was removed, False if not cached
    """
    return self._cache.delete(str(path.resolve()))
clear
Python
clear() -> int

Clear all cached content.

Returns  Description
int      Number of entries cleared

Source code in tenets/utils/cache.py
Python
def clear(self) -> int:
    """Clear all cached content.

    Returns:
        Number of entries cleared
    """
    return self._cache.clear()
stats
Python
stats() -> Dict[str, Any]

Get cache statistics.

Source code in tenets/utils/cache.py
Python
def stats(self) -> Dict[str, Any]:
    """Get cache statistics."""
    return self._cache.stats()
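
A usage sketch with a throwaway file; note that invalidation relies on the filesystem's mtime resolution, so a rewrite within the same mtime tick may still be served from cache:

Python
from pathlib import Path

from tenets.utils.cache import FileContentCache

cache = FileContentCache(max_size=100)
path = Path("example.txt")           # throwaway file for the demo
path.write_text("hello")

cache.set(path, path.read_text())    # returns False only if the file exceeds max_file_size
assert cache.get(path) == "hello"    # served from cache while the mtime is unchanged

path.write_text("changed")           # a later mtime drops the stale entry on the next get()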

EmbeddingCache

Python
EmbeddingCache(max_size: int = 2000, ttl_seconds: float = 3600)

Cache for text embeddings.

Stores computed embeddings to avoid re-computation for repeated queries or unchanged files.

Attribute    Description
max_size     Maximum number of embeddings to cache
ttl_seconds  Time-to-live for cached embeddings

Initialize embedding cache.

Parameter    Type   Default  Description
max_size     int    2000     Maximum number of embeddings to cache
ttl_seconds  float  3600     Time-to-live for cached embeddings

Source code in tenets/utils/cache.py
Python
def __init__(
    self,
    max_size: int = 2000,
    ttl_seconds: float = 3600,  # 1 hour default
):
    """Initialize embedding cache.

    Args:
        max_size: Maximum number of embeddings to cache
        ttl_seconds: Time-to-live for cached embeddings
    """
    self._cache: LRUCache[list[float]] = LRUCache(
        max_size=max_size,
        ttl_seconds=ttl_seconds,
        name="embeddings",
    )
Functions
get
Python
get(text: str, model: str = 'default') -> Optional[list[float]]

Get cached embedding.

Parameter  Type  Default    Description
text       str   required   Text that was embedded
model      str   'default'  Model used for embedding

Returns                Description
Optional[list[float]]  Cached embedding vector or None

Source code in tenets/utils/cache.py
Python
def get(self, text: str, model: str = "default") -> Optional[list[float]]:
    """Get cached embedding.

    Args:
        text: Text that was embedded
        model: Model used for embedding

    Returns:
        Cached embedding vector or None
    """
    key = cache_key(text[:500], model)  # Truncate for key
    return self._cache.get(key)
set
Python
set(text: str, embedding: list[float], model: str = 'default') -> None

Cache an embedding.

Parameter  Type         Default    Description
text       str          required   Text that was embedded
embedding  list[float]  required   Embedding vector
model      str          'default'  Model used for embedding

Source code in tenets/utils/cache.py
Python
def set(
    self,
    text: str,
    embedding: list[float],
    model: str = "default",
) -> None:
    """Cache an embedding.

    Args:
        text: Text that was embedded
        embedding: Embedding vector
        model: Model used for embedding
    """
    key = cache_key(text[:500], model)
    self._cache.set(key, embedding, size_bytes=len(embedding) * 8)
clear
Python
clear() -> int

Clear all cached embeddings.

Returns  Description
int      Number of entries cleared

Source code in tenets/utils/cache.py
Python
def clear(self) -> int:
    """Clear all cached embeddings.

    Returns:
        Number of entries cleared
    """
    return self._cache.clear()
stats
Python
stats() -> Dict[str, Any]

Get cache statistics.

Source code in tenets/utils/cache.py
Python
def stats(self) -> Dict[str, Any]:
    """Get cache statistics."""
    return self._cache.stats()
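
A usage sketch; the model name and vector below are placeholders, since any string identifying the embedding model will do:

Python
from tenets.utils.cache import EmbeddingCache

cache = EmbeddingCache(max_size=1000, ttl_seconds=3600)

text = "def parse(tokens): ..."
vector = [0.12, -0.34, 0.56]                   # stand-in for a real embedding

if cache.get(text, model="minilm") is None:    # "minilm" is a hypothetical model name
    cache.set(text, vector, model="minilm")

assert cache.get(text, model="minilm") == vector
assert cache.get(text, model="other") is None  # the model name is part of the key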

RankingScoreCache

Python
RankingScoreCache(max_size: int = 5000, ttl_seconds: float = 300)

Cache for file ranking scores.

Caches ranking scores for file-prompt pairs to speed up repeated queries on unchanged files.

Attribute    Description
max_size     Maximum number of scores to cache
ttl_seconds  Time-to-live for cached scores

Initialize ranking score cache.

Parameter    Type   Default  Description
max_size     int    5000     Maximum number of scores to cache
ttl_seconds  float  300      Time-to-live for cached scores

Source code in tenets/utils/cache.py
Python
def __init__(
    self,
    max_size: int = 5000,
    ttl_seconds: float = 300,  # 5 minutes default
):
    """Initialize ranking score cache.

    Args:
        max_size: Maximum number of scores to cache
        ttl_seconds: Time-to-live for cached scores
    """
    self._cache: LRUCache[Dict[str, Any]] = LRUCache(
        max_size=max_size,
        ttl_seconds=ttl_seconds,
        name="ranking_scores",
    )
Functions
get
Python
get(file_path: Path, prompt_hash: str, file_mtime: float, algorithm: str = 'balanced') -> Optional[Dict[str, Any]]

Get cached ranking score.

Parameter    Type   Default     Description
file_path    Path   required    Path to file
prompt_hash  str    required    Hash of the prompt
file_mtime   float  required    File modification time
algorithm    str    'balanced'  Ranking algorithm used

Returns                   Description
Optional[Dict[str, Any]]  Cached score data or None

Source code in tenets/utils/cache.py
Python
def get(
    self,
    file_path: Path,
    prompt_hash: str,
    file_mtime: float,
    algorithm: str = "balanced",
) -> Optional[Dict[str, Any]]:
    """Get cached ranking score.

    Args:
        file_path: Path to file
        prompt_hash: Hash of the prompt
        file_mtime: File modification time
        algorithm: Ranking algorithm used

    Returns:
        Cached score data or None
    """
    key = cache_key(str(file_path), prompt_hash, algorithm)
    entry = self._cache.get(key)

    if entry is None:
        return None

    # Check if file was modified since caching
    if entry.get("mtime", 0) < file_mtime:
        self._cache.delete(key)
        return None

    return entry
set
Python
set(file_path: Path, prompt_hash: str, file_mtime: float, score: float, factors: Dict[str, float], algorithm: str = 'balanced') -> None

Cache a ranking score.

Parameter    Type              Default     Description
file_path    Path              required    Path to file
prompt_hash  str               required    Hash of the prompt
file_mtime   float             required    File modification time
score        float             required    Computed relevance score
factors      Dict[str, float]  required    Individual ranking factors
algorithm    str               'balanced'  Ranking algorithm used

Source code in tenets/utils/cache.py
Python
def set(
    self,
    file_path: Path,
    prompt_hash: str,
    file_mtime: float,
    score: float,
    factors: Dict[str, float],
    algorithm: str = "balanced",
) -> None:
    """Cache a ranking score.

    Args:
        file_path: Path to file
        prompt_hash: Hash of the prompt
        file_mtime: File modification time
        score: Computed relevance score
        factors: Individual ranking factors
        algorithm: Ranking algorithm used
    """
    key = cache_key(str(file_path), prompt_hash, algorithm)
    self._cache.set(
        key,
        {
            "score": score,
            "factors": factors,
            "mtime": file_mtime,
            "algorithm": algorithm,
        },
    )
clear
Python
clear() -> int

Clear all cached scores.

Returns  Description
int      Number of entries cleared

Source code in tenets/utils/cache.py
Python
def clear(self) -> int:
    """Clear all cached scores.

    Returns:
        Number of entries cleared
    """
    return self._cache.clear()
stats
Python
stats() -> Dict[str, Any]

Get cache statistics.

Source code in tenets/utils/cache.py
Python
def stats(self) -> Dict[str, Any]:
    """Get cache statistics."""
    return self._cache.stats()
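
A usage sketch showing the mtime-based invalidation; the path, prompt, score, and factors are made up for illustration:

Python
from pathlib import Path

from tenets.utils.cache import RankingScoreCache, cache_key

cache = RankingScoreCache()
path = Path("src/app.py")                # hypothetical file
prompt_hash = cache_key("find the auth bug")
mtime = 1_700_000_000.0                  # the file's mtime when it was scored

cache.set(path, prompt_hash, mtime, score=0.82,
          factors={"keyword": 0.9, "path": 0.7})

entry = cache.get(path, prompt_hash, mtime)              # same mtime: still valid
assert entry is not None and entry["score"] == 0.82

assert cache.get(path, prompt_hash, mtime + 1) is None   # newer mtime invalidates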

Functions

cache_key

Python
cache_key(*args: Any, **kwargs: Any) -> str

Generate a cache key from arguments.

Creates a deterministic hash from the provided arguments that can be used as a cache key.

Parameter  Type  Default  Description
*args      Any   ()       Positional arguments to include in key
**kwargs   Any   {}       Keyword arguments to include in key

Returns  Description
str      A hexadecimal hash string

Source code in tenets/utils/cache.py
Python
def cache_key(*args: Any, **kwargs: Any) -> str:
    """Generate a cache key from arguments.

    Creates a deterministic hash from the provided arguments that can be
    used as a cache key.

    Args:
        *args: Positional arguments to include in key
        **kwargs: Keyword arguments to include in key

    Returns:
        A hexadecimal hash string
    """
    # Serialize arguments to JSON for consistent hashing
    key_data = {
        "args": [_serialize_arg(a) for a in args],
        "kwargs": {k: _serialize_arg(v) for k, v in sorted(kwargs.items())},
    }
    key_str = json.dumps(key_data, sort_keys=True, default=str)
    return hashlib.sha256(key_str.encode()).hexdigest()[:16]
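
Because keyword arguments are sorted before hashing, their order does not affect the key; a quick check:

Python
from tenets.utils.cache import cache_key

k1 = cache_key("query", limit=10, model="default")
k2 = cache_key("query", model="default", limit=10)   # same key: kwargs are sorted
assert k1 == k2
assert k1 != cache_key("query", limit=20, model="default")
assert len(k1) == 16                                 # truncated SHA-256 hex digest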

get_file_cache

Python
get_file_cache() -> FileContentCache

Get or create the global file content cache.

Source code in tenets/utils/cache.py
Python
def get_file_cache() -> FileContentCache:
    """Get or create the global file content cache."""
    global _file_cache
    with _cache_lock:
        if _file_cache is None:
            _file_cache = FileContentCache()
        return _file_cache

get_embedding_cache

Python
get_embedding_cache() -> EmbeddingCache

Get or create the global embedding cache.

Source code in tenets/utils/cache.py
Python
def get_embedding_cache() -> EmbeddingCache:
    """Get or create the global embedding cache."""
    global _embedding_cache
    with _cache_lock:
        if _embedding_cache is None:
            _embedding_cache = EmbeddingCache()
        return _embedding_cache

get_ranking_cache

Python
get_ranking_cache() -> RankingScoreCache

Get or create the global ranking score cache.

Source code in tenets/utils/cache.py
Python
def get_ranking_cache() -> RankingScoreCache:
    """Get or create the global ranking score cache."""
    global _ranking_cache
    with _cache_lock:
        if _ranking_cache is None:
            _ranking_cache = RankingScoreCache()
        return _ranking_cache

clear_all_caches

Python
clear_all_caches() -> Dict[str, int]

Clear all global caches.

Returns         Description
Dict[str, int]  Dictionary with count of cleared entries per cache

Source code in tenets/utils/cache.py
Python
def clear_all_caches() -> Dict[str, int]:
    """Clear all global caches.

    Returns:
        Dictionary with count of cleared entries per cache
    """
    global _file_cache, _embedding_cache, _ranking_cache

    with _cache_lock:
        results = {}

        if _file_cache is not None:
            results["file_content"] = _file_cache.clear()

        if _embedding_cache is not None:
            results["embeddings"] = _embedding_cache.clear()

        if _ranking_cache is not None:
            results["ranking_scores"] = _ranking_cache.clear()

        return results

get_all_cache_stats

Python
get_all_cache_stats() -> Dict[str, Dict[str, Any]]

Get statistics for all caches.

Returns                    Description
Dict[str, Dict[str, Any]]  Dictionary with stats for each cache type

Source code in tenets/utils/cache.py
Python
def get_all_cache_stats() -> Dict[str, Dict[str, Any]]:
    """Get statistics for all caches.

    Returns:
        Dictionary with stats for each cache type
    """
    stats = {}

    file_cache = get_file_cache()
    stats["file_content"] = file_cache.stats()

    embedding_cache = get_embedding_cache()
    stats["embeddings"] = embedding_cache.stats()

    ranking_cache = get_ranking_cache()
    stats["ranking_scores"] = ranking_cache.stats()

    return stats
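
A sketch of working with the process-wide caches; the cached text and vector are placeholders:

Python
from tenets.utils.cache import (
    clear_all_caches,
    get_all_cache_stats,
    get_embedding_cache,
)

emb = get_embedding_cache()              # lazily created, shared across the process
emb.set("some text", [0.1, 0.2, 0.3])    # placeholder vector

for name, stats in get_all_cache_stats().items():
    print(name, stats["size"], f"{stats['hit_rate']:.0%}")

print(clear_all_caches())                # e.g. {'file_content': 0, 'embeddings': 1, 'ranking_scores': 0}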