Motion detection now optionally runs MobileNet V2 SSD (COCO, quantized) on frames that trigger motion, identifying objects like people, cats, and cars. Events without detected objects are suppressed by default. Snapshots include bounding box annotations. New MCP tool vision_get_detections() enables label-based queries. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
405 lines
12 KiB
Python
405 lines
12 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
vixy-vision Event Collector
|
|
|
|
Receives motion events from camera servers and stores them
|
|
in SQLite database with snapshots saved to disk.
|
|
|
|
Features:
|
|
- Auto-cleanup of unannotated events after configurable time
|
|
- Snapshot storage with automatic cleanup
|
|
|
|
Runs as a service on Mac mini, listens for POSTs from Pis.
|
|
"""
|
|
|
|
import base64
import json
import logging
import os
import sqlite3
import threading
import time
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import List, Optional

from fastapi import FastAPI, HTTPException, Header
from pydantic import BaseModel
|
|
|
|
# Configuration — all values overridable via environment variables.
DATA_DIR = Path(os.getenv("VIXY_DATA_DIR", Path.home() / "Documents" / "Vixy" / "events"))
DB_PATH = DATA_DIR / "events.db"
SNAPSHOTS_DIR = DATA_DIR / "snapshots"
API_KEY = os.getenv("COLLECTOR_API_KEY", "")  # Optional auth; empty string disables the check
PORT = int(os.getenv("COLLECTOR_PORT", "8780"))

# Auto-cleanup config: unannotated events older than EVENT_EXPIRY_HOURS are
# purged; the background loop wakes every CLEANUP_INTERVAL_MINUTES.
EVENT_EXPIRY_HOURS = float(os.getenv("EVENT_EXPIRY_HOURS", "2.0"))
CLEANUP_INTERVAL_MINUTES = float(os.getenv("CLEANUP_INTERVAL_MINUTES", "5.0"))

# Ensure directories exist before anything touches the DB or snapshots
DATA_DIR.mkdir(parents=True, exist_ok=True)
SNAPSHOTS_DIR.mkdir(parents=True, exist_ok=True)

# Logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# FastAPI app
app = FastAPI(
    title="vixy-vision Event Collector",
    description="Collects motion events for the fox 🦊",
    version="1.1.0"
)

# Cleanup thread control: setting the Event tells the loop to exit.
_cleanup_thread: Optional[threading.Thread] = None
_cleanup_stop = threading.Event()
|
|
|
|
|
|
# === Database ===
|
|
|
|
def init_db():
    """Create the events table and its lookup indexes if they are missing."""
    with get_db() as conn:
        conn.execute("""
            CREATE TABLE IF NOT EXISTS events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                event_id TEXT UNIQUE NOT NULL,
                timestamp TEXT NOT NULL,
                camera_id TEXT NOT NULL,
                event_type TEXT NOT NULL,
                confidence REAL,
                area_percent REAL,
                snapshot_path TEXT,
                annotation TEXT,
                tags TEXT,
                created_at TEXT NOT NULL
            )
        """)
        # Indexes on the columns the list/query endpoints filter by.
        for index_name, column in (
            ("idx_timestamp", "timestamp"),
            ("idx_camera", "camera_id"),
            ("idx_type", "event_type"),
        ):
            conn.execute(f"CREATE INDEX IF NOT EXISTS {index_name} ON events({column})")
        conn.commit()
        logger.info(f"Database initialized: {DB_PATH}")
|
|
|
|
|
|
def migrate_db():
    """Bring an existing database up to the current schema (idempotent)."""
    with get_db() as conn:
        existing = {row[1] for row in conn.execute("PRAGMA table_info(events)").fetchall()}
        if "detections" in existing:
            # Already migrated — nothing to do.
            return
        conn.execute("ALTER TABLE events ADD COLUMN detections TEXT")
        conn.commit()
        logger.info("Migration: added 'detections' column to events table")
|
|
|
|
|
|
@contextmanager
def get_db():
    """Yield a sqlite3 connection (Row factory); always closed on exit."""
    db = sqlite3.connect(DB_PATH)
    db.row_factory = sqlite3.Row
    try:
        yield db
    finally:
        db.close()
|
|
|
|
|
|
# === Auto-Cleanup ===
|
|
|
|
def cleanup_old_events():
    """Delete unannotated events older than EVENT_EXPIRY_HOURS.

    Removes both the snapshot files on disk and the database rows.
    Annotated events are never touched.

    Returns:
        int: number of events deleted (0 on error or when nothing is stale).
    """
    # datetime.utcnow() is deprecated since Python 3.12. Use an aware UTC
    # "now" and strip tzinfo so the ISO string keeps the exact
    # "YYYY-MM-DDTHH:MM:SS.ffffffZ" format used when writing created_at,
    # preserving lexicographic comparability with stored values.
    cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(hours=EVENT_EXPIRY_HOURS)
    cutoff_str = cutoff.isoformat() + "Z"

    try:
        with get_db() as conn:
            # Stale = unannotated AND created before the cutoff.
            rows = conn.execute("""
                SELECT event_id, snapshot_path FROM events
                WHERE annotation IS NULL
                AND created_at < ?
            """, (cutoff_str,)).fetchall()

            if not rows:
                return 0

            # Best-effort removal of snapshot files; a failure here must not
            # block deleting the database rows.
            for row in rows:
                if row["snapshot_path"]:
                    # snapshot_path is stored relative to DATA_DIR
                    snapshot_file = DATA_DIR / row["snapshot_path"]
                    try:
                        if snapshot_file.exists():
                            snapshot_file.unlink()
                            logger.debug(f"Deleted snapshot: {snapshot_file}")
                    except OSError as e:
                        logger.warning(f"Failed to delete snapshot {snapshot_file}: {e}")

            # Delete all stale rows in a single statement.
            event_ids = [row["event_id"] for row in rows]
            placeholders = ",".join("?" * len(event_ids))
            conn.execute(f"DELETE FROM events WHERE event_id IN ({placeholders})", event_ids)
            conn.commit()

            logger.info(f"🧹 Cleaned up {len(rows)} old unannotated events")
            return len(rows)

    except Exception as e:
        # Broad catch is deliberate: the background cleanup loop must
        # survive any single failed pass.
        logger.error(f"Cleanup error: {e}")
        return 0
|
|
|
|
|
|
def cleanup_loop():
    """Background loop: purge stale events until the stop event is set."""
    logger.info(f"🧹 Cleanup thread started (expiry: {EVENT_EXPIRY_HOURS}h, interval: {CLEANUP_INTERVAL_MINUTES}m)")

    interval_seconds = CLEANUP_INTERVAL_MINUTES * 60
    while not _cleanup_stop.is_set():
        cleanup_old_events()
        # wait() doubles as the sleep and the shutdown-signal check: it
        # returns early as soon as the stop event is set.
        _cleanup_stop.wait(timeout=interval_seconds)

    logger.info("🧹 Cleanup thread stopped")
|
|
|
|
|
|
def start_cleanup_thread():
    """Spawn the daemon thread that periodically purges stale events."""
    global _cleanup_thread
    _cleanup_stop.clear()
    worker = threading.Thread(target=cleanup_loop, daemon=True)
    worker.start()
    _cleanup_thread = worker
|
|
|
|
|
|
def stop_cleanup_thread():
    """Signal the cleanup thread to exit and wait briefly for it to finish."""
    _cleanup_stop.set()
    if _cleanup_thread is not None:
        # Bounded join so shutdown never hangs on a stuck cleanup pass.
        _cleanup_thread.join(timeout=5.0)
|
|
|
|
|
|
# === Models ===
|
|
|
|
class DetectionItem(BaseModel):
    """One object detection attached to a motion event."""

    # Object class label (e.g. "person") — presumably a COCO class name
    # per the detector; confirm against the camera-server sender
    label: str
    # Detector confidence score
    confidence: float
    # Bounding box coordinates; coordinate convention (corner order,
    # normalized vs pixel) is not visible here — confirm with the sender
    bbox: List[float]
|
|
|
|
|
|
class EventData(BaseModel):
    """Motion event payload as sent by a camera server."""

    # Event timestamp string (format chosen by the sender)
    timestamp: str
    # Identifier of the originating camera
    camera_id: str
    # Event category; defaults to plain motion
    event_type: str = "motion"
    # Detection confidence for the event as a whole
    confidence: float = 0.0
    # Frame region the event came from. NOTE(review): accepted but not
    # persisted by the /events handler
    region: str = "full"
    # Fraction of the frame area that changed
    area_percent: float = 0.0
    # Optional per-object detections from the on-device model
    detections: Optional[List[DetectionItem]] = None
|
|
|
|
|
|
class IncomingEvent(BaseModel):
    """Envelope POSTed to /events: event metadata plus optional snapshot."""

    # Structured event metadata
    event: EventData
    snapshot: Optional[str] = None  # Base64 encoded JPEG
|
|
|
|
|
|
# === Endpoints ===
|
|
|
|
# NOTE(review): @app.on_event is deprecated in newer FastAPI in favor of
# lifespan handlers — fine as long as the installed version supports it.
@app.on_event("startup")
def startup():
    """Initialize the DB schema, run migrations, and start background cleanup."""
    # Order matters: the schema must exist before migration, and both
    # before the first cleanup pass can run.
    init_db()
    migrate_db()
    start_cleanup_thread()
    logger.info(f"🦊 Event collector started on port {PORT}")
    logger.info(f" Data directory: {DATA_DIR}")
    logger.info(f" Auto-cleanup: {EVENT_EXPIRY_HOURS}h expiry, {CLEANUP_INTERVAL_MINUTES}m interval")
|
|
|
|
|
|
@app.on_event("shutdown")
def shutdown():
    """Stop the background cleanup thread before the process exits."""
    stop_cleanup_thread()
    logger.info("🦊 Event collector stopped")
|
|
|
|
|
|
@app.get("/")
def root():
    """Service banner with basic configuration details."""
    info = {
        "service": "vixy-vision Event Collector",
        "version": "1.1.0",
        "data_dir": str(DATA_DIR),
        "event_expiry_hours": EVENT_EXPIRY_HOURS,
    }
    return info
|
|
|
|
|
|
@app.get("/health")
|
|
def health():
    """Liveness probe: always reports ok while the process is up."""
    return dict(status="ok")
|
|
|
|
|
|
@app.post("/events")
def receive_event(
    incoming: IncomingEvent,
    x_api_key: Optional[str] = Header(None)
):
    """Receive a motion event from a camera server and persist it.

    Stores the event row in SQLite; when a base64 snapshot is attached,
    decodes it and writes the JPEG under a date-based subdirectory of
    SNAPSHOTS_DIR, recording the path relative to DATA_DIR.

    Raises:
        HTTPException: 403 on bad API key, 409 on duplicate event_id,
            500 on any other database error.
    """

    # Check API key if configured (empty API_KEY disables auth)
    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    event = incoming.event
    # datetime.utcnow() is deprecated since Python 3.12; an aware UTC now
    # stripped of tzinfo yields the same naive-UTC value and string format.
    now = datetime.now(timezone.utc).replace(tzinfo=None)

    # Unique, time-sortable ID: camera + microsecond-resolution timestamp
    event_id = f"{event.camera_id}-{now.strftime('%Y%m%d%H%M%S%f')}"

    # Save snapshot if provided. Best-effort: a failed snapshot write must
    # not drop the event itself.
    snapshot_path = None
    if incoming.snapshot:
        try:
            # Decode base64
            image_data = base64.b64decode(incoming.snapshot)

            # Date-based subdirectory; parents=True also recreates the
            # snapshots root if it was removed while the service runs
            date_dir = SNAPSHOTS_DIR / now.strftime("%Y-%m-%d")
            date_dir.mkdir(parents=True, exist_ok=True)

            # Save image (separate variable so snapshot_path stays a str)
            filename = f"{event_id}.jpg"
            snapshot_file = date_dir / filename
            snapshot_file.write_bytes(image_data)

            # Store path relative to DATA_DIR (cleanup re-joins it later)
            snapshot_path = str(snapshot_file.relative_to(DATA_DIR))

            logger.info(f"Saved snapshot: {snapshot_path}")
        except Exception as e:
            logger.error(f"Failed to save snapshot: {e}")

    # Serialize detections to JSON if present
    detections_json = None
    if event.detections:
        detections_json = json.dumps([d.model_dump() for d in event.detections])

    # Store in database
    try:
        with get_db() as conn:
            conn.execute("""
                INSERT INTO events
                (event_id, timestamp, camera_id, event_type, confidence,
                 area_percent, snapshot_path, detections, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                event_id,
                event.timestamp,
                event.camera_id,
                event.event_type,
                event.confidence,
                event.area_percent,
                snapshot_path,
                detections_json,
                now.isoformat() + "Z"
            ))
            conn.commit()

        logger.info(f"✓ Event stored: {event_id} ({event.camera_id}, {event.event_type})")

        return {
            "status": "ok",
            "event_id": event_id,
            "snapshot_saved": snapshot_path is not None
        }

    except sqlite3.IntegrityError:
        # event_id UNIQUE constraint: same camera within the same
        # microsecond, or a client retry
        raise HTTPException(status_code=409, detail="Duplicate event")
    except Exception as e:
        logger.error(f"Database error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@app.get("/events")
def list_events(
    since: Optional[str] = None,
    camera_id: Optional[str] = None,
    event_type: Optional[str] = None,
    limit: int = 50,
    x_api_key: Optional[str] = Header(None)
):
    """List recent events (for debugging, MCP uses direct DB access)"""

    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Build the WHERE clause from whichever filters were supplied.
    optional_filters = (
        (" AND timestamp >= ?", since),
        (" AND camera_id = ?", camera_id),
        (" AND event_type = ?", event_type),
    )
    query = "SELECT * FROM events WHERE 1=1"
    params = []
    for clause, value in optional_filters:
        if value:
            query += clause
            params.append(value)

    query += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)

    with get_db() as conn:
        rows = conn.execute(query, params).fetchall()
        return {
            "count": len(rows),
            "events": [dict(row) for row in rows]
        }
|
|
|
|
|
|
@app.post("/cleanup")
def trigger_cleanup(x_api_key: Optional[str] = Header(None)):
    """Run one cleanup pass on demand and report how many events were removed."""
    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API key")

    removed = cleanup_old_events()
    return {"status": "ok", "cleaned_up": removed}
|
|
|
|
|
|
@app.get("/stats")
def get_stats():
    """Aggregate collector statistics from the events table (no auth,
    like / and /health)."""
    with get_db() as conn:
        def scalar(sql):
            # Helper for single-value COUNT queries.
            return conn.execute(sql).fetchone()[0]

        total = scalar("SELECT COUNT(*) FROM events")
        annotated = scalar("SELECT COUNT(*) FROM events WHERE annotation IS NOT NULL")
        by_camera = dict(conn.execute("""
            SELECT camera_id, COUNT(*) as count
            FROM events GROUP BY camera_id
        """).fetchall())
        by_type = dict(conn.execute("""
            SELECT event_type, COUNT(*) as count
            FROM events GROUP BY event_type
        """).fetchall())

        with_detections = scalar(
            "SELECT COUNT(*) FROM events WHERE detections IS NOT NULL"
        )

        return {
            "total_events": total,
            "annotated": annotated,
            "unannotated": total - annotated,
            "with_detections": with_detections,
            "by_camera": by_camera,
            "by_type": by_type,
            "data_dir": str(DATA_DIR),
            "db_path": str(DB_PATH),
            "event_expiry_hours": EVENT_EXPIRY_HOURS,
        }
|
|
|
|
|
|
if __name__ == "__main__":
    # Run standalone under uvicorn; bind all interfaces so the camera
    # Pis on the LAN can reach the collector.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=PORT)
|