🧹 New features:
- Background thread cleans up old events automatically
- Deletes unannotated events older than EVENT_EXPIRY_HOURS (default: 2h)
- Also removes associated snapshot files
- Runs every CLEANUP_INTERVAL_MINUTES (default: 5m)

📡 New endpoint:
- POST /cleanup - Manually trigger cleanup

⚙️ Config (env vars):
- EVENT_EXPIRY_HOURS: How long to keep unannotated events (default: 2.0)
- CLEANUP_INTERVAL_MINUTES: How often to run cleanup (default: 5.0)

Annotated events are kept forever 🦊
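Quick sketch for poking the new endpoint once it's running (assumes the default port 8780 and uses only the stdlib; set `COLLECTOR_API_KEY` if you enabled auth):

```python
# Manually trigger a cleanup sweep (illustrative; collector host/port assumed).
import json
import os
import urllib.request

req = urllib.request.Request(
    "http://localhost:8780/cleanup",
    method="POST",
    headers={"X-API-Key": os.getenv("COLLECTOR_API_KEY", "")},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # e.g. {"status": "ok", "cleaned_up": 3}
```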
#!/usr/bin/env python3
"""
vixy-vision Event Collector

Receives motion events from camera servers and stores them
in a SQLite database with snapshots saved to disk.

Features:
- Auto-cleanup of unannotated events after a configurable time
- Snapshot storage with automatic cleanup

Runs as a service on the Mac mini, listens for POSTs from the Pis.
"""

import os
import sqlite3
import base64
import logging
import threading
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
from contextlib import contextmanager

from fastapi import FastAPI, HTTPException, Header
from pydantic import BaseModel

# Configuration
DATA_DIR = Path(os.getenv("VIXY_DATA_DIR", Path.home() / "Documents" / "Vixy" / "events"))
DB_PATH = DATA_DIR / "events.db"
SNAPSHOTS_DIR = DATA_DIR / "snapshots"
API_KEY = os.getenv("COLLECTOR_API_KEY", "")  # Optional auth
PORT = int(os.getenv("COLLECTOR_PORT", "8780"))

# Auto-cleanup config
EVENT_EXPIRY_HOURS = float(os.getenv("EVENT_EXPIRY_HOURS", "2.0"))
CLEANUP_INTERVAL_MINUTES = float(os.getenv("CLEANUP_INTERVAL_MINUTES", "5.0"))
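# Example (illustrative; assumes this file is saved as collector.py):
#   EVENT_EXPIRY_HOURS=6 CLEANUP_INTERVAL_MINUTES=10 python collector.py
# keeps unannotated events for 6 hours and sweeps every 10 minutes.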

# Ensure directories exist
DATA_DIR.mkdir(parents=True, exist_ok=True)
SNAPSHOTS_DIR.mkdir(parents=True, exist_ok=True)

# Logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# FastAPI app
app = FastAPI(
    title="vixy-vision Event Collector",
    description="Collects motion events for the fox 🦊",
    version="1.1.0"
)

# Cleanup thread control
_cleanup_thread: Optional[threading.Thread] = None
_cleanup_stop = threading.Event()
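# Design note: cleanup_loop() blocks on _cleanup_stop.wait() rather than
# time.sleep(), so stop_cleanup_thread() can interrupt a pending wait and
# shut the thread down promptly.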


# === Database ===

def init_db():
    """Initialize SQLite database"""
    with get_db() as conn:
        conn.execute("""
            CREATE TABLE IF NOT EXISTS events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                event_id TEXT UNIQUE NOT NULL,
                timestamp TEXT NOT NULL,
                camera_id TEXT NOT NULL,
                event_type TEXT NOT NULL,
                confidence REAL,
                area_percent REAL,
                snapshot_path TEXT,
                annotation TEXT,
                tags TEXT,
                created_at TEXT NOT NULL
            )
        """)
        conn.execute("CREATE INDEX IF NOT EXISTS idx_timestamp ON events(timestamp)")
        conn.execute("CREATE INDEX IF NOT EXISTS idx_camera ON events(camera_id)")
        conn.execute("CREATE INDEX IF NOT EXISTS idx_type ON events(event_type)")
        conn.commit()
    logger.info(f"Database initialized: {DB_PATH}")


@contextmanager
def get_db():
    """Database connection context manager"""
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    try:
        yield conn
    finally:
        conn.close()


# === Auto-Cleanup ===

def cleanup_old_events():
    """Delete unannotated events older than EVENT_EXPIRY_HOURS"""
    cutoff = datetime.utcnow() - timedelta(hours=EVENT_EXPIRY_HOURS)
    cutoff_str = cutoff.isoformat() + "Z"
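    # Note: created_at is written in the same ISO-8601 UTC format, so the plain
    # string comparison in the query below sorts chronologically.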

    try:
        with get_db() as conn:
            # Find events to delete (unannotated and old)
            rows = conn.execute("""
                SELECT event_id, snapshot_path FROM events
                WHERE annotation IS NULL
                  AND created_at < ?
            """, (cutoff_str,)).fetchall()

            if not rows:
                return 0

            # Delete snapshot files
            for row in rows:
                if row["snapshot_path"]:
                    snapshot_file = DATA_DIR / row["snapshot_path"]
                    try:
                        if snapshot_file.exists():
                            snapshot_file.unlink()
                            logger.debug(f"Deleted snapshot: {snapshot_file}")
                    except Exception as e:
                        logger.warning(f"Failed to delete snapshot {snapshot_file}: {e}")

            # Delete from database
            event_ids = [row["event_id"] for row in rows]
            placeholders = ",".join("?" * len(event_ids))
            conn.execute(f"DELETE FROM events WHERE event_id IN ({placeholders})", event_ids)
            conn.commit()

            logger.info(f"🧹 Cleaned up {len(rows)} old unannotated events")
            return len(rows)

    except Exception as e:
        logger.error(f"Cleanup error: {e}")
        return 0


def cleanup_loop():
    """Background cleanup loop"""
    logger.info(f"🧹 Cleanup thread started (expiry: {EVENT_EXPIRY_HOURS}h, interval: {CLEANUP_INTERVAL_MINUTES}m)")

    while not _cleanup_stop.is_set():
        cleanup_old_events()
        # Wait for interval or until stop signal
        _cleanup_stop.wait(timeout=CLEANUP_INTERVAL_MINUTES * 60)

    logger.info("🧹 Cleanup thread stopped")


def start_cleanup_thread():
    """Start the background cleanup thread"""
    global _cleanup_thread
    _cleanup_stop.clear()
    _cleanup_thread = threading.Thread(target=cleanup_loop, daemon=True)
    _cleanup_thread.start()


def stop_cleanup_thread():
    """Stop the background cleanup thread"""
    _cleanup_stop.set()
    if _cleanup_thread:
        _cleanup_thread.join(timeout=5.0)


# === Models ===

class EventData(BaseModel):
    timestamp: str
    camera_id: str
    event_type: str = "motion"
    confidence: float = 0.0
    region: str = "full"
    area_percent: float = 0.0


class IncomingEvent(BaseModel):
    event: EventData
    snapshot: Optional[str] = None  # Base64 encoded JPEG
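
# Example request body for POST /events (illustrative values):
# {
#   "event": {
#     "timestamp": "2025-01-01T12:00:00Z",
#     "camera_id": "pi-front-door",
#     "event_type": "motion",
#     "confidence": 0.85,
#     "region": "full",
#     "area_percent": 3.2
#   },
#   "snapshot": "<base64-encoded JPEG>"
# }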


# === Endpoints ===

@app.on_event("startup")
def startup():
    init_db()
    start_cleanup_thread()
    logger.info(f"🦊 Event collector started on port {PORT}")
    logger.info(f"  Data directory: {DATA_DIR}")
    logger.info(f"  Auto-cleanup: {EVENT_EXPIRY_HOURS}h expiry, {CLEANUP_INTERVAL_MINUTES}m interval")


@app.on_event("shutdown")
def shutdown():
    stop_cleanup_thread()
    logger.info("🦊 Event collector stopped")


@app.get("/")
def root():
    return {
        "service": "vixy-vision Event Collector",
        "version": "1.1.0",
        "data_dir": str(DATA_DIR),
        "event_expiry_hours": EVENT_EXPIRY_HOURS,
    }


@app.get("/health")
def health():
    return {"status": "ok"}


@app.post("/events")
def receive_event(
    incoming: IncomingEvent,
    x_api_key: Optional[str] = Header(None)
):
    """Receive motion event from camera server"""

    # Check API key if configured
    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    event = incoming.event
    now = datetime.utcnow()

    # Generate unique event ID
    event_id = f"{event.camera_id}-{now.strftime('%Y%m%d%H%M%S%f')}"
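    # e.g. "pi-front-door-20250101120000123456" (illustrative camera_id plus
    # a UTC timestamp with microseconds)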

    # Save snapshot if provided
    snapshot_path = None
    if incoming.snapshot:
        try:
            # Decode base64
            image_data = base64.b64decode(incoming.snapshot)

            # Create date-based subdirectory
            date_dir = SNAPSHOTS_DIR / now.strftime("%Y-%m-%d")
            date_dir.mkdir(exist_ok=True)

            # Save image
            filename = f"{event_id}.jpg"
            snapshot_file = date_dir / filename
            snapshot_file.write_bytes(image_data)

            # Store path relative to DATA_DIR
            snapshot_path = str(snapshot_file.relative_to(DATA_DIR))

            logger.info(f"Saved snapshot: {snapshot_path}")
        except Exception as e:
            logger.error(f"Failed to save snapshot: {e}")

    # Store in database
    try:
        with get_db() as conn:
            conn.execute("""
                INSERT INTO events
                    (event_id, timestamp, camera_id, event_type, confidence,
                     area_percent, snapshot_path, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                event_id,
                event.timestamp,
                event.camera_id,
                event.event_type,
                event.confidence,
                event.area_percent,
                snapshot_path,
                now.isoformat() + "Z"
            ))
            conn.commit()

        logger.info(f"✓ Event stored: {event_id} ({event.camera_id}, {event.event_type})")

        return {
            "status": "ok",
            "event_id": event_id,
            "snapshot_saved": snapshot_path is not None
        }

    except sqlite3.IntegrityError:
        raise HTTPException(status_code=409, detail="Duplicate event")
    except Exception as e:
        logger.error(f"Database error: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/events")
def list_events(
    since: Optional[str] = None,
    camera_id: Optional[str] = None,
    event_type: Optional[str] = None,
    limit: int = 50,
    x_api_key: Optional[str] = Header(None)
):
    """List recent events (for debugging; MCP uses direct DB access)"""

    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    query = "SELECT * FROM events WHERE 1=1"
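    # The constant "1=1" lets each optional filter below be appended uniformly
    # as "AND ..."; all values are bound via ? placeholders.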
    params = []

    if since:
        query += " AND timestamp >= ?"
        params.append(since)

    if camera_id:
        query += " AND camera_id = ?"
        params.append(camera_id)

    if event_type:
        query += " AND event_type = ?"
        params.append(event_type)

    query += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)

    with get_db() as conn:
        rows = conn.execute(query, params).fetchall()
        return {
            "count": len(rows),
            "events": [dict(row) for row in rows]
        }


@app.post("/cleanup")
def trigger_cleanup(x_api_key: Optional[str] = Header(None)):
    """Manually trigger cleanup of old events"""

    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    count = cleanup_old_events()
    return {"status": "ok", "cleaned_up": count}


@app.get("/stats")
def get_stats():
    """Get collector statistics"""
    with get_db() as conn:
        total = conn.execute("SELECT COUNT(*) FROM events").fetchone()[0]
        annotated = conn.execute("SELECT COUNT(*) FROM events WHERE annotation IS NOT NULL").fetchone()[0]
        by_camera = conn.execute("""
            SELECT camera_id, COUNT(*) as count
            FROM events GROUP BY camera_id
        """).fetchall()
        by_type = conn.execute("""
            SELECT event_type, COUNT(*) as count
            FROM events GROUP BY event_type
        """).fetchall()

    return {
        "total_events": total,
        "annotated": annotated,
        "unannotated": total - annotated,
        "by_camera": {row[0]: row[1] for row in by_camera},
        "by_type": {row[0]: row[1] for row in by_type},
        "data_dir": str(DATA_DIR),
        "db_path": str(DB_PATH),
        "event_expiry_hours": EVENT_EXPIRY_HOURS,
    }


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=PORT)
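
# Alternatively, run under the uvicorn CLI (assumes this file is saved as
# collector.py; the CLI example hardcodes the default port):
#   uvicorn collector:app --host 0.0.0.0 --port 8780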