import asyncio
import json
import logging
import os
import tempfile
import time
import uuid
from pathlib import Path
from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple
import numpy as np
from pydantic import BaseModel
try:
from langchain_core.embeddings import Embeddings
from langchain_openai import OpenAIEmbeddings
from langgraph.prebuilt import create_react_agent
from langgraph.store.base import BaseStore, Item
from langmem import create_manage_memory_tool, create_memory_store_manager, create_search_memory_tool
LANGMEM_AVAILABLE = True
except ImportError as e:
print(f"LangMem dependencies not available: {e}")
print("To run with full functionality, install with:")
print("pip install langgraph langchain-core langchain-openai langmem")
print("\nRunning in ProllyTree-only mode for basic testing...")
LANGMEM_AVAILABLE = False
BaseStore = object
Item = object
Embeddings = object
try:
import prollytree
from prollytree import VersionedKvStore, WorktreeManager, WorktreeVersionedKvStore, ConflictResolution
except ImportError:
print("ProllyTree not found. Build and install Python bindings first:")
print("./python/build_python.sh --install")
raise
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class UserPreference(BaseModel):
    """Schema for a single extracted user-preference memory.

    Passed to LangMem's ``create_memory_store_manager`` (see the demo) so
    the memory manager can extract structured preferences from chats.
    """
    category: str        # e.g. "ui_preference", "interest"
    preference: str      # the preference itself, in natural language
    context: str         # where/why the preference was expressed
    confidence: float = 1.0  # extraction confidence in [0, 1]; defaults to certain
class ConversationMemory(BaseModel):
    """Schema for a summarized conversation memory.

    Used alongside ``UserPreference`` as an extraction schema for LangMem's
    memory store manager.
    """
    topic: str               # short label for what the conversation was about
    summary: str             # condensed description of the exchange
    participants: List[str]  # e.g. ["user", "assistant"]
    timestamp: str           # when the conversation happened (string-encoded)
class ProllyTreeLangMemStore(BaseStore):
    """LangMem-compatible memory store backed by a ProllyTree VersionedKvStore.

    Implements the LangGraph ``BaseStore`` interface (get / put / delete /
    list / search / batch) on top of a Git-backed, versioned key-value store.
    Every mutation is committed, producing an audit trail, and optional
    branch support isolates memory contexts (e.g. experimental memories kept
    apart from the main branch until merged).

    Namespaces are flattened into single keys of the form
    ``"ns/parts#key"`` (see ``_key_to_prolly_key``).
    """

    def __init__(self, repo_path: str, embeddings: Optional[Embeddings] = None,
                 enable_branching: bool = False):
        """Create or open a store rooted at *repo_path*.

        Args:
            repo_path: Directory that will hold the Git repository and the
                ProllyTree ``data`` subdirectory.
            embeddings: Optional embedding model.  When provided, ``put``
                attaches a vector to any value containing a ``"content"``
                field and ``search`` ranks results by cosine similarity.
            enable_branching: When True, a ``WorktreeManager`` is created and
                branch operations (create/switch/merge) are enabled.

        Raises:
            subprocess.CalledProcessError: If one-time Git initialization of
                a fresh repository fails.
        """
        import subprocess  # local import: only needed for one-time git bootstrap

        self.repo_path = Path(repo_path)
        self.embeddings = embeddings
        self.enable_branching = enable_branching
        self.repo_path.mkdir(parents=True, exist_ok=True)

        # VersionedKvStore needs an initialized Git repository with at least
        # one commit, so bootstrap one the first time this path is used.
        git_dir = self.repo_path / ".git"
        if not git_dir.exists():
            try:
                subprocess.run(['git', 'init', '--quiet'], cwd=str(self.repo_path), check=True, capture_output=True)
                subprocess.run(['git', 'config', 'user.name', 'LangMem Store'],
                               cwd=str(self.repo_path), check=True, capture_output=True)
                subprocess.run(['git', 'config', 'user.email', 'langmem@example.com'],
                               cwd=str(self.repo_path), check=True, capture_output=True)
                readme_path = self.repo_path / "README.md"
                with open(readme_path, "w") as f:
                    f.write("# ProllyTree LangMem Integration\n")
                subprocess.run(['git', 'add', '.'], cwd=str(self.repo_path), check=True, capture_output=True)
                subprocess.run(['git', 'commit', '-m', 'Initial commit'],
                               cwd=str(self.repo_path), check=True, capture_output=True)
                logger.info("Initialized Git repository for ProllyTree with initial commit")
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to initialize Git repository: {e}")
                raise

        data_dir = self.repo_path / "data"
        data_dir.mkdir(exist_ok=True)
        self.store = prollytree.VersionedKvStore(str(data_dir))

        if self.enable_branching:
            self.worktree_manager = WorktreeManager(str(self.repo_path))
            # Maps branch name -> dedicated VersionedKvStore checked out on
            # that branch; "main" always uses self.store directly.
            self.branches = {}
            self.current_branch = "main"
            logger.info("Initialized WorktreeManager for branch-based memory isolation")
        else:
            self.worktree_manager = None
            self.branches = {}
            self.current_branch = "main"
        logger.info(f"Initialized ProllyTree LangMem store at {repo_path}")

        # Ensure the data store has at least one commit so later
        # status/commit operations succeed on a brand-new store.
        try:
            self.store.status()
        except Exception:
            self.store.insert(b"_initialized", str(time.time()).encode())
            self.store.commit("Initialize ProllyTree LangMem store")
            logger.info("Created initial commit for empty ProllyTree store")

    def _key_to_prolly_key(self, namespace: Tuple[str, ...], key: str) -> str:
        """Flatten (namespace, key) into a single ``"ns/parts#key"`` string."""
        namespace_str = "/".join(namespace) if namespace else ""
        return f"{namespace_str}#{key}" if namespace_str else key

    def _prolly_key_to_namespace_key(self, prolly_key: str) -> Tuple[Tuple[str, ...], str]:
        """Inverse of ``_key_to_prolly_key``: split a flat key back apart."""
        if "#" in prolly_key:
            # rsplit keeps the key boundary at the *last* '#', so '#'
            # characters in namespace components do not shift it.
            namespace_str, key = prolly_key.rsplit("#", 1)
            namespace = tuple(namespace_str.split("/")) if namespace_str else ()
        else:
            namespace = ()
            key = prolly_key
        return namespace, key

    def _create_item(self, namespace: Tuple[str, ...], key: str, value: Dict[str, Any]) -> Item:
        """Wrap a stored value dict in a BaseStore ``Item``, sanitizing timestamps."""
        created_at = value.get("created_at", time.time())
        updated_at = value.get("updated_at", time.time())
        # Stored metadata may be missing or non-numeric (hand-written
        # values); fall back to "now" rather than failing.
        if not isinstance(created_at, (int, float)):
            created_at = time.time()
        if not isinstance(updated_at, (int, float)):
            updated_at = time.time()
        return Item(
            value=value,
            key=key,
            namespace=namespace,
            created_at=float(created_at),
            updated_at=float(updated_at)
        )

    def get(self, namespace: Tuple[str, ...], key: str) -> Optional[Item]:
        """Return the item stored under (namespace, key), or None if absent/unreadable."""
        prolly_key = self._key_to_prolly_key(namespace, key)
        active_store = self._get_active_store()
        try:
            value_bytes = active_store.get(prolly_key.encode())
            if value_bytes is None:
                return None
            value = json.loads(value_bytes.decode())
            return self._create_item(namespace, key, value)
        except Exception as e:
            # Best-effort read: a corrupt/undecodable entry behaves like a miss.
            logger.warning(f"Failed to get {prolly_key}: {e}")
            return None

    def put(self, namespace: Tuple[str, ...], key: str, value: Dict[str, Any]) -> None:
        """Store *value* under (namespace, key) and commit the change.

        Adds bookkeeping metadata (created_at/updated_at/namespace/key) and,
        when an embeddings model is configured and the value has a
        ``"content"`` field, an ``"embedding"`` vector used by ``search``.
        """
        prolly_key = self._key_to_prolly_key(namespace, key)
        active_store = self._get_active_store()
        value_with_metadata = {
            **value,
            "created_at": value.get("created_at", time.time()),
            "updated_at": time.time(),
            "namespace": list(namespace),
            "key": key
        }
        if self.embeddings and "content" in value:
            try:
                content = str(value["content"])
                embedding = self.embeddings.embed_query(content)
                value_with_metadata["embedding"] = embedding
                logger.debug(f"Added embedding for key {prolly_key}")
            except Exception as e:
                # Embedding failure must not block the write itself.
                logger.warning(f"Failed to generate embedding for {prolly_key}: {e}")
        payload = json.dumps(value_with_metadata).encode()
        try:
            # update() for existing keys, insert() for new ones.
            existing = active_store.get(prolly_key.encode())
            if existing is not None:
                active_store.update(prolly_key.encode(), payload)
            else:
                active_store.insert(prolly_key.encode(), payload)
        except Exception:
            # If the existence check itself fails, fall back to insert.
            active_store.insert(prolly_key.encode(), payload)
        commit_msg = f"Update memory: {'/'.join(namespace)}/{key} [{self.current_branch}]"
        try:
            active_store.commit(commit_msg)
            logger.debug(f"Committed memory update: {prolly_key}")
        except Exception as e:
            logger.warning(f"Failed to commit {prolly_key}: {e}")

    def delete(self, namespace: Tuple[str, ...], key: str) -> None:
        """Soft-delete (namespace, key) by writing a tombstone record.

        The history stays intact in Git; ``list``/``search`` skip entries
        whose value has ``"deleted": True``.
        """
        prolly_key = self._key_to_prolly_key(namespace, key)
        active_store = self._get_active_store()
        try:
            if active_store.get(prolly_key.encode()) is not None:
                tombstone = {
                    "deleted": True,
                    "deleted_at": time.time(),
                    "namespace": list(namespace),
                    "key": key
                }
                active_store.update(prolly_key.encode(), json.dumps(tombstone).encode())
                commit_msg = f"Delete memory: {'/'.join(namespace)}/{key} [{self.current_branch}]"
                active_store.commit(commit_msg)
                logger.debug(f"Deleted memory: {prolly_key}")
        except Exception as e:
            logger.warning(f"Failed to delete {prolly_key}: {e}")

    def list(self, namespace: Tuple[str, ...]) -> Iterator[Item]:
        """Yield all live (non-tombstoned) items in *namespace*.

        An empty namespace yields only root-level keys (those without a
        '#' separator), not every key in the store.
        """
        namespace_prefix = "/".join(namespace) + "#" if namespace else ""
        active_store = self._get_active_store()
        try:
            all_keys = active_store.list_keys()
            for prolly_key_bytes in all_keys:
                prolly_key = prolly_key_bytes.decode() if isinstance(prolly_key_bytes, bytes) else prolly_key_bytes
                if namespace_prefix and not prolly_key.startswith(namespace_prefix):
                    continue
                if not namespace_prefix and "#" in prolly_key:
                    continue
                try:
                    value_bytes = active_store.get(prolly_key_bytes)
                    if value_bytes is None:
                        continue
                    value_str = value_bytes.decode() if isinstance(value_bytes, bytes) else value_bytes
                    value = json.loads(value_str)
                    if value.get("deleted", False):
                        continue  # tombstoned by delete()
                    item_namespace, item_key = self._prolly_key_to_namespace_key(prolly_key)
                    yield self._create_item(item_namespace, item_key, value)
                except Exception as e:
                    # Skip unparseable entries instead of aborting the scan.
                    logger.warning(f"Failed to parse item {prolly_key}: {e}")
                    continue
        except Exception as e:
            logger.warning(f"Failed to list namespace {namespace}: {e}")

    def search(self, namespace: Tuple[str, ...], *, query: Optional[str] = None,
               filter: Optional[Dict[str, Any]] = None, limit: int = 10,
               offset: int = 0) -> List[Item]:
        """Search *namespace*, optionally filtering and ranking by similarity.

        Args:
            query: When set and embeddings are configured, results are
                ranked by cosine similarity to the query embedding;
                otherwise they are ordered by most-recent update.
            filter: Exact-match constraints on top-level value fields.
            limit / offset: Standard result pagination.

        Returns:
            Up to *limit* items starting at *offset* in ranked order.
        """
        items = []
        query_embedding = None
        if self.embeddings and query:
            try:
                query_embedding = self.embeddings.embed_query(query)
            except Exception as e:
                # Degrade to recency ordering if the embedding call fails.
                logger.warning(f"Failed to generate query embedding: {e}")
        for item in self.list(namespace):
            if filter:
                match = True
                for key, value in filter.items():
                    if key not in item.value or item.value[key] != value:
                        match = False
                        break
                if not match:
                    continue
            similarity_score = 0.0
            if query_embedding and "embedding" in item.value:
                try:
                    item_embedding = item.value["embedding"]
                    similarity_score = self._cosine_similarity(query_embedding, item_embedding)
                    # Stash the score on the value so the sort key can see it.
                    item.value["_similarity_score"] = similarity_score
                except Exception as e:
                    logger.warning(f"Failed to calculate similarity for {item.key}: {e}")
            items.append(item)
        if query_embedding:
            items.sort(key=lambda x: x.value.get("_similarity_score", 0), reverse=True)
        else:
            items.sort(key=lambda x: x.updated_at, reverse=True)
        return items[offset:offset + limit]

    @staticmethod
    def _cosine_similarity(a: List[float], b: List[float]) -> float:
        """Cosine similarity of two vectors; returns 0.0 on error or zero norm."""
        try:
            a_np = np.array(a)
            b_np = np.array(b)
            denom = float(np.linalg.norm(a_np) * np.linalg.norm(b_np))
            if denom == 0.0:
                # A zero vector has no direction; report "unrelated" rather
                # than letting 0/0 produce NaN (numpy does not raise here).
                return 0.0
            return float(np.dot(a_np, b_np) / denom)
        except Exception:
            return 0.0

    def get_commit_history(self, limit: int = 10) -> List[Dict[str, Any]]:
        """Return recent commit metadata.

        NOTE(review): placeholder implementation — returns a single synthetic
        entry instead of reading real Git history; wire up to the underlying
        store's log when the binding exposes it.
        """
        try:
            return [{"commit": "latest", "message": "Memory operations", "timestamp": time.time()}]
        except Exception as e:
            logger.warning(f"Failed to get commit history: {e}")
            return []

    def create_branch(self, branch_name: str, from_branch: str = "main") -> bool:
        """Create a new memory branch; returns True on success."""
        if not self.enable_branching:
            logger.warning("Branching not enabled. Initialize with enable_branching=True")
            return False
        if branch_name in self.branches:
            logger.warning(f"Branch {branch_name} already exists")
            return False
        try:
            self.store.create_branch(branch_name)
            # Open a second store on the same data directory and check out
            # the new branch so its writes stay isolated from main.
            branch_store = VersionedKvStore(str(self.repo_path / "data"))
            branch_store.checkout(branch_name)
            self.branches[branch_name] = branch_store
            logger.info(f"Created memory branch: {branch_name} using VersionedKvStore")
            return True
        except Exception as e:
            logger.error(f"Failed to create branch {branch_name}: {e}")
            return False

    def switch_branch(self, branch_name: str) -> bool:
        """Make *branch_name* the active branch for subsequent operations."""
        if not self.enable_branching:
            logger.warning("Branching not enabled")
            return False
        if branch_name == "main":
            self.current_branch = "main"
            logger.info(f"Switched to main branch")
            return True
        if branch_name not in self.branches:
            logger.warning(f"Branch {branch_name} does not exist")
            return False
        self.current_branch = branch_name
        logger.info(f"Switched to branch: {branch_name}")
        return True

    def list_branches(self) -> List[str]:
        """Return all known branch names; "main" is always present."""
        branches = ["main"]
        if self.enable_branching:
            branches.extend(self.branches.keys())
        return branches

    def get_current_branch(self) -> str:
        """Return the name of the currently active branch."""
        return self.current_branch

    def merge_branch(self, source_branch: str, target_branch: str = "main",
                     conflict_resolution: str = "ignore_conflicts") -> bool:
        """Merge *source_branch* into *target_branch*.

        Args:
            conflict_resolution: One of "ignore_conflicts", "take_source",
                "take_destination"; unknown values fall back to
                IgnoreConflicts.

        Returns:
            True on success, False on any failure (logged).
        """
        if not self.enable_branching:
            logger.warning("Branching not enabled")
            return False
        if source_branch not in self.branches:
            logger.error(f"Source branch {source_branch} does not exist")
            return False
        try:
            resolution_map = {
                "ignore_conflicts": ConflictResolution.IgnoreConflicts,
                "take_source": ConflictResolution.TakeSource,
                "take_destination": ConflictResolution.TakeDestination
            }
            resolution = resolution_map.get(conflict_resolution, ConflictResolution.IgnoreConflicts)
            if target_branch == "main":
                result = self.store.merge(source_branch, resolution)
            else:
                if target_branch not in self.branches:
                    logger.error(f"Target branch {target_branch} does not exist")
                    return False
                target_store = self.branches[target_branch]
                result = target_store.merge(source_branch, resolution)
            logger.info(f"Successfully merged {source_branch} into {target_branch}")
            return True
        except Exception as e:
            logger.error(f"Failed to merge {source_branch} into {target_branch}: {e}")
            return False

    def _get_active_store(self):
        """Return the VersionedKvStore for the current branch (main = self.store)."""
        if self.current_branch == "main":
            return self.store
        elif self.current_branch in self.branches:
            return self.branches[self.current_branch]
        else:
            logger.warning(f"Branch {self.current_branch} not found, using main")
            return self.store

    def batch(self, ops: List[Any]) -> List[Any]:
        """Execute a list of BaseStore ops; failed/unknown ops yield None."""
        results = []
        for op in ops:
            try:
                if hasattr(op, 'method') and hasattr(op, 'args'):
                    method = getattr(self, op.method)
                    result = method(*op.args)
                    results.append(result)
                else:
                    results.append(None)
            except Exception as e:
                logger.warning(f"Batch operation failed: {e}")
                results.append(None)
        return results

    async def abatch(self, ops: List[Any]) -> List[Any]:
        """Async facade over batch(); operations run synchronously."""
        return self.batch(ops)
async def demo_basic_prollytree_store():
    """Exercise raw ProllyTree VersionedKvStore operations without LangMem.

    Fallback demo used when the LangMem/LangGraph dependencies are not
    installed: initializes a throwaway Git repository, performs basic
    insert / commit / get / list operations, then smoke-tests branch
    creation via ProllyTreeLangMemStore with branching enabled.
    """
    print("🌳 Basic ProllyTree Store Demo (Without LangMem)")
    print("=" * 50)
    import subprocess
    with tempfile.TemporaryDirectory() as temp_dir:
        repo_path = os.path.join(temp_dir, "basic_store")
        os.makedirs(repo_path, exist_ok=True)
        print(f"📁 Creating ProllyTree store at: {repo_path}")
        # VersionedKvStore requires an initialized Git repository.
        try:
            subprocess.run(['git', 'init'], cwd=repo_path, check=True, capture_output=True)
            subprocess.run(['git', 'config', 'user.name', 'Demo User'], cwd=repo_path, check=True, capture_output=True)
            subprocess.run(['git', 'config', 'user.email', 'demo@example.com'], cwd=repo_path, check=True, capture_output=True)
            print("✅ Initialized Git repository")
        except subprocess.CalledProcessError as e:
            print(f"❌ Failed to initialize Git repository: {e}")
            print(" Git is required for VersionedKvStore functionality")
            return
        data_dir = os.path.join(repo_path, "data")
        os.makedirs(data_dir, exist_ok=True)
        store = prollytree.VersionedKvStore(data_dir)
        print("\n📝 Testing basic storage operations...")
        # Raw byte-level API: keys and JSON values are both bytes.
        store.insert(b"user_pref_1", b'{"content": "User prefers dark mode", "category": "ui_preference"}')
        store.insert(b"user_pref_2", b'{"content": "User likes Python programming", "category": "interest"}')
        store.commit("Add initial preferences")
        print("✅ Stored test memories")
        pref1 = store.get(b"user_pref_1")
        pref2 = store.get(b"user_pref_2")
        print(f"📄 Retrieved pref1: {pref1}")
        print(f"📄 Retrieved pref2: {pref2}")
        all_keys = store.list_keys()
        print(f"\n📋 Total items stored: {len(all_keys)}")
        for key in all_keys:
            print(f" - {key}")
        print("\n🎉 Basic ProllyTree functionality working!")
        print("Install LangMem dependencies for full integration features.")
        print("\n🌿 Testing basic branching functionality...")
        # Branching smoke test; failures are tolerated since branch support
        # may not be available in a minimal build.
        try:
            branching_store = ProllyTreeLangMemStore(repo_path + "_branching", enable_branching=True)
            print(f"📋 Initial branches: {branching_store.list_branches()}")
            success = branching_store.create_branch("test_branch")
            print(f"🌱 Created test branch: {success}")
            if success:
                print(f"📋 Updated branches: {branching_store.list_branches()}")
                branching_store.switch_branch("test_branch")
                print(f"📍 Current branch: {branching_store.get_current_branch()}")
            print("✅ Basic branching functionality working!")
        except Exception as e:
            print(f"⚠️ Branching test failed: {e}")
            print(" (This may be expected with basic dependencies)")
async def demo_langmem_with_prollytree():
    """Full integration demo: LangMem memory tools on a ProllyTree backend.

    Configures LangMem's memory manager and tools against a
    ProllyTreeLangMemStore, stores and searches memories (falling back to
    direct ProllyTree operations when no real LLM/API key is available),
    demonstrates branch-based memory isolation, and prints a summary of
    everything persisted.
    """
    # LangMem's tools need an API key just to construct clients; use a
    # dummy one so the demo can run (LLM-dependent steps then fall back
    # to direct ProllyTree operations).
    if not os.getenv("OPENAI_API_KEY"):
        logger.warning("OPENAI_API_KEY not set. Using dummy key for demonstration.")
        os.environ["OPENAI_API_KEY"] = "sk-dummy-key-for-demo"
    print("🌳 ProllyTree + LangMem Integration Demo")
    print("=" * 50)
    with tempfile.TemporaryDirectory() as temp_dir:
        repo_path = os.path.join(temp_dir, "langmem_store")
        print(f"📁 Creating ProllyTree store at: {repo_path}")
        try:
            embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
        except Exception as e:
            logger.warning(f"Using mock embeddings due to: {e}")
            embeddings = None
        store = ProllyTreeLangMemStore(repo_path, embeddings=embeddings, enable_branching=True)
        print("\n🧠 Setting up LangMem with ProllyTree backend...")
        memory_manager = create_memory_store_manager(
            "openai:gpt-4o-mini",
            schemas=[UserPreference, ConversationMemory],
            namespace=("memories", "user_001"),
            store=store
        )
        manage_tool = create_manage_memory_tool(
            namespace=("memories", "user_001"),
            schema=str, store=store,
            instructions="Store important user preferences, context, and conversation details in ProllyTree backend"
        )
        search_tool = create_search_memory_tool(
            namespace=("memories", "user_001"),
            store=store
        )
        print("✅ LangMem tools configured with ProllyTree backend")
        print(" - Memory Manager: Extracts memories from conversations")
        print(" - Manage Tool: Stores memories during agent interactions")
        print(" - Search Tool: Retrieves relevant memories for context")
        print("\n📝 Demonstrating LangMem-style memory operations...")
        namespace = ("memories", "user_001")
        print("🤖 Simulating agent using LangMem manage_memory_tool...")
        try:
            manage_result1 = manage_tool.invoke({
                "content": "User prefers dark mode in all applications and finds it easier on the eyes",
                "memory_type": "preference"
            })
            print(f" 📝 Stored user preference via LangMem: {manage_result1 if manage_result1 else 'Success'}")
            manage_result2 = manage_tool.invoke({
                "content": "User is working on machine learning projects and uses Python extensively",
                "memory_type": "context"
            })
            print(f" 📝 Stored user context via LangMem: {manage_result2 if manage_result2 else 'Success'}")
        except Exception as e:
            # Expected with a dummy key: seed equivalent memories directly
            # so the rest of the demo has data to work with.
            print(f" ⚠️ LangMem tool demo limited: {e}")
            print(" 📦 Falling back to direct ProllyTree storage...")
            memories_to_store = [
                {
                    "key": "user_pref_1",
                    "value": {
                        "content": "User prefers dark mode in all applications",
                        "category": "ui_preference",
                        "importance": "high"
                    }
                },
                {
                    "key": "conv_memory_1",
                    "value": {
                        "content": "Discussion about Python programming and data structures",
                        "category": "conversation",
                        "participants": ["user", "assistant"],
                        "summary": "Technical discussion covering trees, algorithms, and performance"
                    }
                },
                {
                    "key": "learning_1",
                    "value": {
                        "content": "User is interested in machine learning and AI applications",
                        "category": "interest",
                        "confidence": 0.9
                    }
                }
            ]
            namespace = ("memories", "user_001")
            for memory in memories_to_store:
                store.put(namespace, memory["key"], memory["value"])
                print(f"✅ Stored memory: {memory['key']}")
        print("\n🔍 Demonstrating LangMem memory search operations...")
        print("🤖 Simulating agent using LangMem search_memory_tool...")
        try:
            search_result = search_tool.invoke({"query": "user preferences and interests"})
            print(f" 🔍 LangMem search result: {search_result}")
        except Exception as e:
            # Fall back to the store's own similarity/recency search.
            print(f" ⚠️ LangMem search limited: {e}")
            print(" 📦 Falling back to direct ProllyTree search...")
            search_results = store.search(
                namespace,
                query="user preferences",
                limit=5
            )
            print(f"📊 Found {len(search_results)} memories matching 'user preferences':")
            for result in search_results:
                content = result.value.get("content", "No content")
                category = result.value.get("category", "uncategorized")
                print(f" - {result.key}: {content[:50]}... (category: {category})")
        print("\n🔄 Demonstrating versioned storage...")
        # Re-putting the same key creates a new commit; history is preserved.
        updated_memory = {
            "content": "User strongly prefers dark mode and high contrast themes",
            "category": "ui_preference",
            "importance": "high",
            "updated_reason": "User provided additional context"
        }
        store.put(namespace, "user_pref_1", updated_memory)
        print("✅ Updated user_pref_1 with additional context")
        print("\n📋 All stored memories:")
        all_memories = list(store.list(namespace))
        for item in all_memories:
            content = item.value.get("content", "No content")
            updated = item.updated_at
            print(f" - {item.key}: {content[:60]}... (updated: {updated})")
        print(f"\n📊 Total memories stored: {len(all_memories)}")
        print(f"🔍 Search functionality: {'✅ Enabled' if embeddings else '❌ Disabled (no API key)'}")
        print("🌳 All memories persisted in versioned ProllyTree storage")
        print("\n🌿 Demonstrating branch-based memory isolation...")
        branches = store.list_branches()
        print(f"📋 Available branches: {branches}")
        print(f"📍 Current branch: {store.get_current_branch()}")
        print("\n🌱 Creating 'experiment' branch for isolated context...")
        success = store.create_branch("experiment")
        if success:
            store.switch_branch("experiment")
            print(f"📍 Switched to branch: {store.get_current_branch()}")
            experiment_memory = {
                "content": "Experimental feature: voice command interface",
                "category": "experiment",
                "confidence": 0.3,
                "note": "This is isolated from main memories"
            }
            store.put(namespace, "experiment_1", experiment_memory)
            print("✅ Stored experimental memory in branch")
            exp_memories = list(store.list(namespace))
            print(f"📄 Experimental branch memories: {len(exp_memories)}")
            for item in exp_memories:
                content = item.value.get("content", "No content")
                print(f" - {item.key}: {content[:40]}...")
            print(f"\n🔄 Switching back to main branch...")
            store.switch_branch("main")
            print(f"📍 Current branch: {store.get_current_branch()}")
            main_memories = list(store.list(namespace))
            # Keys present only on the experiment branch demonstrate isolation.
            exp_only_keys = [item.key for item in exp_memories if item.key not in [m.key for m in main_memories]]
            print(f"📄 Main branch memories: {len(main_memories)}")
            print(f"🧪 Experimental memories isolated: {exp_only_keys}")
            print(f"\n🔀 Note: Experimental memories can be merged with:")
            print(f" store.merge_branch('experiment', 'main', 'take_source')")
        print(f"\n🌿 Updated branches: {store.list_branches()}")
        print("\n🎯 LangMem + ProllyTree Integration Features:")
        print(" ✅ BaseStore interface compatibility")
        print(" ✅ Git-like versioning for memory operations")
        print(" ✅ Namespace-based memory organization")
        print(" ✅ Vector embedding storage (when configured)")
        print(" ✅ Efficient search and retrieval")
        print(" ✅ Persistent memory across sessions")
        print(" ✅ Branch-based memory isolation")
        print(" ✅ WorktreeManager integration for parallel contexts")
        print("\n🤖 Testing LangMem memory tools with ProllyTree backend...")
        if LANGMEM_AVAILABLE:
            try:
                from langgraph.prebuilt import create_react_agent
                from langchain_core.messages import HumanMessage
                agent = create_react_agent(
                    "openai:gpt-4o-mini",
                    tools=[manage_tool, search_tool],
                    store=store
                )
                print("\n📚 Testing LangMem memory extraction...")
                test_messages = [HumanMessage(content="Remember that I prefer working with dark themes and I'm really interested in machine learning applications")]
                try:
                    response = agent.invoke(
                        {"messages": test_messages},
                        config={"configurable": {"thread_id": "user_001"}}
                    )
                    print(f"✅ LangMem memory extraction completed: {response}")
                except Exception as inner_e:
                    print(f"⚠️ LangMem tools limited due to: {inner_e}")
                    print(" (Expected with dummy API keys)")
                print("\n🔧 Testing LangMem memory tools directly...")
                manage_result = manage_tool.invoke({
                    "content": "User strongly prefers dark mode interfaces",
                    "importance": "high"
                })
                print(f"📝 Memory management result: {manage_result}")
                search_result = search_tool.invoke({"query": "user preferences"})
                print(f"🔍 Memory search result: {search_result}")
                print("✅ LangMem tools integration with ProllyTree successful!")
            except Exception as e:
                print(f"⚠️ LangMem integration limited: {e}")
                print(" Set OPENAI_API_KEY for full functionality")
        else:
            print("⚠️ LangMem not available - showing ProllyTree-only functionality")
        print("\n🧠 Testing LangMem memory store manager...")
        try:
            conversation_messages = [
                {"role": "user", "content": "I'm working on a machine learning project with Python and I prefer using Jupyter notebooks"},
                {"role": "assistant", "content": "That's great! Jupyter notebooks are excellent for ML experimentation. What kind of ML project are you working on?"},
                {"role": "user", "content": "I'm building a recommendation system using collaborative filtering"},
                {"role": "assistant", "content": "Interesting! Are you using pandas and scikit-learn for that?"}
            ]
            result = await memory_manager.ainvoke(
                {"messages": conversation_messages},
                config={"configurable": {"user_id": "user_001"}}
            )
            print("✅ LangMem conversation processing completed")
            print("📊 Extracted memories now stored in ProllyTree backend")
            extracted_memories = list(store.list(("memories", "user_001")))
            print(f"📈 Total memories after LangMem processing: {len(extracted_memories)}")
        except Exception as e:
            print(f"⚠️ Memory manager demo limited due to: {e}")
            print(" (This is expected with dummy API keys - LangMem needs real LLM)")
        print("\n🎉 Demo completed successfully!")
        print(f"\n📊 Integration Summary:")
        final_memories = list(store.list(("memories", "user_001")))
        # FIX: these two assignments were fused onto one line (SyntaxError);
        # split them. The >20-character heuristic separates LangMem's UUID
        # keys from the short manual demo keys.
        langmem_memories = [m for m in final_memories if len(m.key) > 20]
        manual_memories = [m for m in final_memories if len(m.key) <= 20]
        print(f" 📝 Total memories in ProllyTree: {len(final_memories)}")
        print(f" 🤖 LangMem-created memories: {len(langmem_memories)}")
        print(f" 📦 Manual memories: {len(manual_memories)}")
        print(f" 🌿 Available branches: {store.list_branches()}")
        if langmem_memories:
            print(f"\n✅ LangMem Integration Working:")
            print(f" - LangMem tools successfully stored memories in ProllyTree")
            print(f" - Memory UUIDs: {[m.key[:8] + '...' for m in langmem_memories[:3]]}")
            print(f" - All memories searchable via vector embeddings")
            print(f" - Complete Git-like versioning and audit trail")
        print("\n📋 Next Steps:")
        print("1. Set OPENAI_API_KEY to enable full LangMem functionality")
        print("2. Integrate with your LangGraph agents using these memory tools")
        print("3. Explore branch-based memory contexts for multi-user scenarios")
        print("4. Scale to production with persistent ProllyTree storage")
        print("5. Use LangMem memory extraction for automatic conversation analysis")
# Entry point: run the full LangMem demo when its dependencies imported
# successfully, otherwise fall back to the ProllyTree-only demo.
if __name__ == "__main__":
    demo = demo_langmem_with_prollytree if LANGMEM_AVAILABLE else demo_basic_prollytree_store
    asyncio.run(demo())