
Commit 28c7484: documentation cleanup
Parent: 704038b

2 files changed: +53, -24 lines

redisvl/extensions/llmcache/schema.py (+20)

@@ -8,14 +8,24 @@


 class CacheEntry(BaseModel):
+    """A single cache entry in Redis"""
+
     entry_id: Optional[str] = Field(default=None)
+    """Cache entry identifier"""
     prompt: str
+    """Input prompt or question cached in Redis"""
     response: str
+    """Response or answer to the question, cached in Redis"""
     prompt_vector: List[float]
+    """Text embedding representation of the prompt"""
     inserted_at: float = Field(default_factory=current_timestamp)
+    """Timestamp of when the entry was added to the cache"""
     updated_at: float = Field(default_factory=current_timestamp)
+    """Timestamp of when the entry was updated in the cache"""
     metadata: Optional[Dict[str, Any]] = Field(default=None)
+    """Optional metadata stored on the cache entry"""
     filters: Optional[Dict[str, Any]] = Field(default=None)
+    """Optional filter data stored on the cache entry for customizing retrieval"""

     @root_validator(pre=True)
     @classmethod

@@ -43,14 +53,24 @@ def to_dict(self) -> Dict:


 class CacheHit(BaseModel):
+    """A cache hit based on some input query"""
+
     entry_id: str
+    """Cache entry identifier"""
     prompt: str
+    """Input prompt or question cached in Redis"""
     response: str
+    """Response or answer to the question, cached in Redis"""
     vector_distance: float
+    """The semantic distance between the query vector and the stored prompt vector"""
     inserted_at: float
+    """Timestamp of when the entry was added to the cache"""
     updated_at: float
+    """Timestamp of when the entry was updated in the cache"""
     metadata: Optional[Dict[str, Any]] = Field(default=None)
+    """Optional metadata stored on the cache entry"""
     filters: Optional[Dict[str, Any]] = Field(default=None)
+    """Optional filter data stored on the cache entry for customizing retrieval"""

     @root_validator(pre=True)
     @classmethod
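For context, the attribute docstrings added above sit directly beneath the fields they describe. A minimal usage sketch (not part of this commit; the vector values are placeholders normally produced by a vectorizer) of constructing a CacheEntry and serializing it with its existing to_dict() helper:

from redisvl.extensions.llmcache.schema import CacheEntry

# Hypothetical sketch: field names follow the model above; the embedding
# values are illustrative placeholders, not real model output.
entry = CacheEntry(
    prompt="What is the capital of France?",
    response="Paris",
    prompt_vector=[0.12, -0.03, 0.88],
    metadata={"model": "gpt-4"},
    filters={"user_id": "abc123"},
)

# inserted_at/updated_at default to the current timestamp; entry_id is optional.
print(entry.to_dict())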

redisvl/extensions/llmcache/semantic.py (+33, -24)

@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional

 from redis import Redis

@@ -10,20 +10,15 @@
 )
 from redisvl.index import SearchIndex
 from redisvl.query import RangeQuery
-from redisvl.query.filter import FilterExpression, Tag
-from redisvl.redis.utils import array_to_buffer
-from redisvl.utils.utils import (
-    current_timestamp,
-    deserialize,
-    serialize,
-    validate_vector_dims,
-)
+from redisvl.query.filter import FilterExpression
+from redisvl.utils.utils import current_timestamp, serialize, validate_vector_dims
 from redisvl.utils.vectorize import BaseVectorizer, HFTextVectorizer


 class SemanticCache(BaseLLMCache):
     """Semantic Cache for Large Language Models."""

+    redis_key_field_name: str = "key"
     entry_id_field_name: str = "entry_id"
     prompt_field_name: str = "prompt"
     response_field_name: str = "response"

@@ -55,6 +50,8 @@ def __init__(
                 in Redis. Defaults to None.
             vectorizer (Optional[BaseVectorizer], optional): The vectorizer for the cache.
                 Defaults to HFTextVectorizer.
+            filterable_fields (Optional[List[Dict[str, Any]]]): An optional list of RedisVL fields
+                that can be used to customize cache retrieval with filters.
             redis_client(Optional[Redis], optional): A redis client connection instance.
                 Defaults to None.
             redis_url (str, optional): The redis url. Defaults to redis://localhost:6379.

@@ -81,9 +78,6 @@ def __init__(
                 model="sentence-transformers/all-mpnet-base-v2"
             )

-        # Create semantic cache schema
-        schema = SemanticCacheIndexSchema.from_params(name, prefix, vectorizer.dims)
-
         # Process fields
         self.return_fields = [
             self.entry_id_field_name,

@@ -94,18 +88,9 @@ def __init__(
             self.metadata_field_name,
         ]

-        if filterable_fields is not None:
-            for filter_field in filterable_fields:
-                if (
-                    filter_field["name"] in self.return_fields
-                    or filter_field["name"] == "key"
-                ):
-                    raise ValueError(
-                        f'{filter_field["name"]} is a reserved field name for the semantic cache schema'
-                    )
-                schema.add_field(filter_field)
-                # Add to return fields too
-                self.return_fields.append(filter_field["name"])
+        # Create semantic cache schema and index
+        schema = SemanticCacheIndexSchema.from_params(name, prefix, vectorizer.dims)
+        schema = self._modify_schema(schema, filterable_fields)

         self._index = SearchIndex(schema=schema)

@@ -120,6 +105,30 @@ def __init__(
         self.set_threshold(distance_threshold)
         self._index.create(overwrite=False)

+    def _modify_schema(
+        self,
+        schema: SemanticCacheIndexSchema,
+        filterable_fields: Optional[List[Dict[str, Any]]] = None,
+    ) -> SemanticCacheIndexSchema:
+        """Modify the base cache schema using the provided filterable fields"""
+
+        if filterable_fields is not None:
+            protected_field_names = set(
+                self.return_fields + [self.redis_key_field_name]
+            )
+            for filter_field in filterable_fields:
+                field_name = filter_field["name"]
+                if field_name in protected_field_names:
+                    raise ValueError(
+                        f"{field_name} is a reserved field name for the semantic cache schema"
+                    )
+                # Add to schema
+                schema.add_field(filter_field)
+                # Add to return fields too
+                self.return_fields.append(field_name)
+
+        return schema
+
     @property
     def index(self) -> SearchIndex:
         """The underlying SearchIndex for the cache.
