Quick Start
Basic Usage
from cmm.pipeline.conversation import CognitiveMemoryPipeline
pipeline = CognitiveMemoryPipeline()
# Ingest conversation turns — both sides captured
pipeline.ingest("user", "I'm allergic to peanuts. I carry an EpiPen.")
pipeline.ingest("assistant", "Noted, severe peanut allergy.")
pipeline.ingest("user", "My project deadline is April 15th.")
# Later... automatic recall
results = pipeline.recall("ordering food for the team lunch")
print(pipeline.format_recalled(results))
# → Recalls the peanut allergy automatically
# Save to disk
pipeline.save("./my_memory")
# Load and continue tomorrow
pipeline = CognitiveMemoryPipeline.load("./my_memory")
With the API Middleware
from integrations.middleware import MemoryMiddleware
# Wraps any LLM API call with automatic memory
mw = MemoryMiddleware(api_type="anthropic")
response = mw.chat("I'm allergic to peanuts.")
response = mw.chat("Order food for the team lunch.")
# ^ Automatically recalls the peanut allergy
With Claude Code Hooks
# Start the memory server
python integrations/claude-code/memory_server.py --data-dir ./memory
# Configure hooks in Claude Code settings
# That's it — memory happens automatically on every message
See the Integrations section for all integration options.