Initial commit: workspace setup with skills, memory, config

skills/qdrant-memory/scripts/consolidate_memories.py (executable file, 204 lines added)
@@ -0,0 +1,204 @@
#!/usr/bin/env python3
"""
Memory consolidation - weekly and monthly maintenance
Usage: consolidate_memories.py weekly|monthly|status
"""

import argparse
import json
import os
import re
import subprocess
import sys
from datetime import datetime, timedelta
from pathlib import Path

WORKSPACE = "/root/.openclaw/workspace"
MEMORY_DIR = f"{WORKSPACE}/memory"
MEMORY_FILE = f"{WORKSPACE}/MEMORY.md"

def get_recent_daily_logs(days=7):
    """Get daily log files from the last N days"""
    logs = []
    cutoff = datetime.now() - timedelta(days=days)

    for file in Path(MEMORY_DIR).glob("*.md"):
        # Extract date from filename (YYYY-MM-DD.md)
        match = re.match(r"(\d{4}-\d{2}-\d{2})\.md", file.name)
        if match:
            file_date = datetime.strptime(match.group(1), "%Y-%m-%d")
            if file_date >= cutoff:
                logs.append((file_date, file))

    return sorted(logs, reverse=True)

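# Illustrative example (dates hypothetical): with logs for Feb 1 and Feb 3
# on disk, get_recent_daily_logs(7) returns newest-first, since the
# (date, path) tuples sort by their datetime component:
#   [(datetime(2026, 2, 3, 0, 0), Path(".../memory/2026-02-03.md")),
#    (datetime(2026, 2, 1, 0, 0), Path(".../memory/2026-02-01.md"))]
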
def extract_key_memories(content):
    """Extract key memories from daily log content"""
    key_memories = []

    # Look for lesson learned sections
    lessons_pattern = r"(?:##?\s*Lessons?\s*Learned|###?\s*Mistakes?|###?\s*Fixes?)(.*?)(?=##?|$)"
    lessons_match = re.search(lessons_pattern, content, re.DOTALL | re.IGNORECASE)
    if lessons_match:
        lessons_section = lessons_match.group(1)
        # Extract bullet points
        for line in lessons_section.split('\n'):
            if line.strip().startswith('-') or line.strip().startswith('*'):
                key_memories.append({
                    "type": "lesson",
                    "content": line.strip()[1:].strip(),
                    "source": "daily_log"
                })

    # Look for preferences/decisions
    pref_pattern = r"(?:###?\s*Preferences?|###?\s*Decisions?|###?\s*Rules?)(.*?)(?=##?|$)"
    pref_match = re.search(pref_pattern, content, re.DOTALL | re.IGNORECASE)
    if pref_match:
        pref_section = pref_match.group(1)
        for line in pref_section.split('\n'):
            if line.strip().startswith('-') or line.strip().startswith('*'):
                key_memories.append({
                    "type": "preference",
                    "content": line.strip()[1:].strip(),
                    "source": "daily_log"
                })

    return key_memories

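# Illustrative example (log content hypothetical): given a daily log containing
#
#   ## Lessons Learned
#   - Always pin dependency versions
#
# extract_key_memories() returns:
#   [{"type": "lesson", "content": "Always pin dependency versions",
#     "source": "daily_log"}]
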
def update_memory_md(new_memories):
    """Update MEMORY.md with new consolidated memories"""
    today = datetime.now().strftime("%Y-%m-%d")

    # Read current MEMORY.md
    if os.path.exists(MEMORY_FILE):
        with open(MEMORY_FILE, 'r') as f:
            content = f.read()
    else:
        content = "# MEMORY.md — Long-Term Memory\n\n*Curated memories. The distilled essence, not raw logs.*\n"

    # Add a section for today unless one already exists
    consolidation_header = f"\n\n## Consolidated Memories - {today}\n\n"

    if consolidation_header.strip() not in content:
        content += consolidation_header

        for memory in new_memories:
            emoji = "📚" if memory["type"] == "lesson" else "⚙️"
            content += f"- {emoji} [{memory['type'].title()}] {memory['content']}\n"

        # Write back
        with open(MEMORY_FILE, 'w') as f:
            f.write(content)

        return len(new_memories)

    # Today's section already exists; nothing was added
    return 0

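# Illustrative result: the example lesson above would be appended to MEMORY.md
# under today's section as:
#   - 📚 [Lesson] Always pin dependency versions
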
def archive_old_logs(keep_days=30):
    """Archive daily logs older than N days"""
    archived = 0
    cutoff = datetime.now() - timedelta(days=keep_days)

    for file in Path(MEMORY_DIR).glob("*.md"):
        match = re.match(r"(\d{4}-\d{2}-\d{2})\.md", file.name)
        if match:
            file_date = datetime.strptime(match.group(1), "%Y-%m-%d")
            if file_date < cutoff:
                # Could move to an archive folder; for now, just count
                archived += 1

    return archived

def weekly_consolidation():
    """Weekly: extract key memories from the last 7 days of logs"""
    print("📅 Weekly Memory Consolidation")
    print("=" * 40)

    logs = get_recent_daily_logs(7)
    all_memories = []

    for file_date, log_file in logs:
        print(f"Processing {log_file.name}...")
        with open(log_file, 'r') as f:
            content = f.read()

        memories = extract_key_memories(content)
        all_memories.extend(memories)
        print(f" Found {len(memories)} key memories")

    if all_memories:
        count = update_memory_md(all_memories)
        print(f"\n✅ Consolidated {count} memories to MEMORY.md")
    else:
        print("\nℹ️ No new key memories to consolidate")

    return len(all_memories)

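# Illustrative run (filenames and counts hypothetical):
#   $ consolidate_memories.py weekly
#   📅 Weekly Memory Consolidation
#   ========================================
#   Processing 2026-02-03.md...
#    Found 2 key memories
#   ✅ Consolidated 2 memories to MEMORY.md
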
def monthly_cleanup():
    """Monthly: archive old logs, check MEMORY.md size"""
    print("📆 Monthly Memory Cleanup")
    print("=" * 40)

    # Archive logs older than 30 days
    archived = archive_old_logs(30)
    print(f"Found {archived} old log files to archive")

    # Flag MEMORY.md for review if it is getting too long
    if os.path.exists(MEMORY_FILE):
        with open(MEMORY_FILE, 'r') as f:
            lines = f.readlines()

        if len(lines) > 500:  # More than 500 lines
            print("⚠️ MEMORY.md is getting long - consider manual review")

    print("\n✅ Monthly cleanup complete")
    return archived

def search_qdrant_for_context():
    """Search Qdrant for high-value memories to add to MEMORY.md
    (not yet wired into the weekly/monthly flows)"""
    cmd = [
        "python3", f"{WORKSPACE}/skills/qdrant-memory/scripts/search_memories.py",
        "important preferences rules",
        "--limit", "10",
        "--json"
    ]

    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode == 0:
        try:
            memories = json.loads(result.stdout)
            # Filter for high importance
            high_importance = [m for m in memories if m.get("importance") == "high"]
            return high_importance
        except json.JSONDecodeError:
            return []
    return []

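# Assumed output shape from search_memories.py --json (not verified here):
#   [{"content": "...", "importance": "high", ...}, ...]
# Only entries whose "importance" field equals "high" are returned.
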
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Memory consolidation")
    parser.add_argument("action", choices=["weekly", "monthly", "status"])

    args = parser.parse_args()

    if args.action == "weekly":
        count = weekly_consolidation()
        sys.exit(0 if count >= 0 else 1)

    elif args.action == "monthly":
        archived = monthly_cleanup()

        # Also do the weekly tasks
        weekly_consolidation()

        sys.exit(0)

    elif args.action == "status":
        logs = get_recent_daily_logs(30)
        print("📊 Memory Status")
        print(f" Daily logs (last 30 days): {len(logs)}")
        if os.path.exists(MEMORY_FILE):
            with open(MEMORY_FILE, 'r') as f:
                lines = len(f.readlines())
            print(f" MEMORY.md lines: {lines}")
        print(f" Memory directory: {MEMORY_DIR}")
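A minimal scheduling sketch, assuming a standard crontab on the same host and the WORKSPACE path defined above (the times are illustrative, not part of this commit):

# Weekly consolidation every Monday, monthly cleanup on the 1st, both at 06:00
0 6 * * 1 python3 /root/.openclaw/workspace/skills/qdrant-memory/scripts/consolidate_memories.py weekly
0 6 1 * * python3 /root/.openclaw/workspace/skills/qdrant-memory/scripts/consolidate_memories.py monthly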