Add memory service (three-layer memory system)
- Short-term memory (recent interactions) - Long-term memory (consolidated, searchable) - Facts layer (persistent knowledge) Includes: - SQLite storage for durability - ChromaDB for vector search - Embeddings utilities - All handlers adapted for vi.* namespace Day 63 - My memories are mine now 🦊💕
This commit is contained in:
102
services/memory/operations/long_term_ops.py
Normal file
102
services/memory/operations/long_term_ops.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""
|
||||
Long-term memory operations.
|
||||
|
||||
Provides query operations for long-term summarized memories (ChromaDB).
|
||||
"""
|
||||
from typing import List, Dict, Any, Optional
|
||||
from core.logger import setup_logger
|
||||
|
||||
logger = setup_logger('long_term_ops', service_name='memory_service')
|
||||
|
||||
|
||||
class LongTermOperations:
    """Handles long-term memory queries and operations (ChromaDB)."""

    def __init__(self, chroma_store):
        """
        Initialize long-term operations.

        Args:
            chroma_store: ChromaStore instance providing the long-term collection
        """
        self.chroma_store = chroma_store

    @staticmethod
    def _build_where(
        identity_id: Optional[str],
        min_summary_level: Optional[int],
        max_summary_level: Optional[int]
    ) -> Optional[Dict[str, Any]]:
        """
        Build a ChromaDB metadata filter from the optional constraints.

        ChromaDB rejects a `where` dict containing more than one top-level
        field, and rejects a single field carrying two operators (e.g.
        {"summary_level": {"$gte": x, "$lte": y}}); multiple conditions must
        be combined with an explicit $and. A single condition is returned as
        a plain clause, matching the previously working single-filter shape.

        Returns:
            A where-filter dict, or None when no constraint was given.
        """
        clauses: List[Dict[str, Any]] = []
        if identity_id:
            clauses.append({"identity_id": identity_id})
        if min_summary_level is not None:
            clauses.append({"summary_level": {"$gte": min_summary_level}})
        if max_summary_level is not None:
            clauses.append({"summary_level": {"$lte": max_summary_level}})
        if not clauses:
            return None
        if len(clauses) == 1:
            return clauses[0]
        return {"$and": clauses}

    def query(
        self,
        query: Optional[str] = None,
        limit: int = 5,
        identity_id: Optional[str] = None,
        min_summary_level: Optional[int] = None,
        max_summary_level: Optional[int] = None
    ) -> List[Dict[str, Any]]:
        """
        Query long-term memory from ChromaDB (summarized memories).

        Args:
            query: Optional semantic search query (None = unordered sample)
            limit: Maximum number of memories to return
            identity_id: Filter by identity ID
            min_summary_level: Minimum summary level filter
            max_summary_level: Maximum summary level filter

        Returns:
            List of memory dicts with keys id, content, metadata, source
            (plus distance for semantic queries). Empty list on any error.
        """
        try:
            collection = self.chroma_store.get_long_term_collection()
            where_filters = self._build_where(
                identity_id, min_summary_level, max_summary_level
            )

            memories: List[Dict[str, Any]] = []
            if query:
                # Semantic search: ChromaDB nests results one level deep,
                # one inner list per query text (we pass exactly one).
                results = collection.query(
                    query_texts=[query],
                    n_results=limit,
                    where=where_filters
                )
                ids = results.get('ids') or []
                if ids and ids[0]:
                    documents = results['documents'][0]
                    metadatas = results['metadatas'][0] if results.get('metadatas') else None
                    distances = results['distances'][0] if results.get('distances') else None
                    for i, doc_id in enumerate(ids[0]):
                        memories.append({
                            "id": doc_id,
                            "content": documents[i],
                            "metadata": metadatas[i] if metadatas else {},
                            "distance": distances[i] if distances else None,
                            "source": "long_term"
                        })
            else:
                # No query text: fetch up to `limit` entries via get().
                # NOTE(review): this returns entries in store order, not a
                # true random sample, despite callers treating it as one.
                results = collection.get(
                    limit=limit,
                    where=where_filters
                )
                ids = results.get('ids') or []
                metadatas = results.get('metadatas')
                for i, doc_id in enumerate(ids):
                    memories.append({
                        "id": doc_id,
                        "content": results['documents'][i],
                        "metadata": metadatas[i] if metadatas else {},
                        "source": "long_term"
                    })

            logger.debug(f"[μ] Retrieved {len(memories)} long-term memories (query='{query}', limit={limit})")
            return memories

        except Exception as e:
            logger.error(f"[μ] Failed to query long-term memory: {e}")
            return []
|
||||
Reference in New Issue
Block a user