DreamTail v1.0.0 with IP-Adapter FaceID support
- SDXL image generation using RealVisXL_V4.0
- IP-Adapter FaceID integration for consistent face generation
- Simplified API (removed client_id requirement)
- New params: face_image, face_strength
- 'vixy' shortcut for face-locked generation
- Queue-based async job processing
- FastAPI with proper error handling

Co-authored-by: Alex <alex@k4zka.online>
This commit is contained in:
1
dreamtail_storage/__init__.py
Executable file
1
dreamtail_storage/__init__.py
Executable file
@@ -0,0 +1 @@
|
||||
"""Storage management for DreamTail."""
|
||||
111
dreamtail_storage/cleanup_task.py
Executable file
111
dreamtail_storage/cleanup_task.py
Executable file
@@ -0,0 +1,111 @@
|
||||
"""
|
||||
Cleanup Task
|
||||
|
||||
Periodically deletes images older than the retention period.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CleanupTask:
    """Background task that periodically deletes old generated images.

    Every ``interval_hours`` the task scans the images directory and removes
    files whose modification time is older than ``retention_days`` days.
    Configuration values are read from the project-level ``config`` module.
    """

    def __init__(self):
        # Retention policy and storage location come from the config module.
        self.images_dir = config.IMAGES_DIR
        self.retention_days = config.IMAGE_RETENTION_DAYS
        self.interval_hours = config.CLEANUP_INTERVAL_HOURS
        self.running = False
        self._task = None  # asyncio.Task running self._run(), or None

    async def start(self):
        """Start the background cleanup loop (idempotent)."""
        if self.running:
            logger.warning("Cleanup task already running")
            return

        self.running = True
        self._task = asyncio.create_task(self._run())
        logger.info(
            "Cleanup task started (retention: %s days, interval: %sh)",
            self.retention_days,
            self.interval_hours,
        )

    async def stop(self):
        """Stop the background loop and wait for it to finish."""
        if not self.running:
            return

        self.running = False
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
            # Drop the reference so the finished task can be garbage collected
            # and a later start() begins from a clean state.
            self._task = None

        logger.info("Cleanup task stopped")

    async def _run(self):
        """Main cleanup loop: clean, sleep, repeat until stopped."""
        while self.running:
            try:
                await self._cleanup_old_images()
                # Sleep for the configured interval
                await asyncio.sleep(self.interval_hours * 3600)
            except asyncio.CancelledError:
                break
            except Exception:
                # Log with traceback and retry later rather than killing the loop.
                logger.exception("Error in cleanup task")
                await asyncio.sleep(300)  # Wait 5 minutes before retry

    async def _cleanup_old_images(self):
        """Delete images whose mtime is older than the retention period."""
        try:
            cutoff_time = datetime.now() - timedelta(days=self.retention_days)
            cutoff_timestamp = cutoff_time.timestamp()

            deleted_count = 0
            deleted_size = 0

            # Find all image files matching the configured format.
            image_files = list(self.images_dir.glob(f"*.{config.IMAGE_FORMAT.lower()}"))

            for file_path in image_files:
                try:
                    # Single stat() call: the original stat()ed twice (mtime,
                    # then size), wasting a syscall and racing with unlink().
                    stat_result = file_path.stat()

                    if stat_result.st_mtime < cutoff_timestamp:
                        file_path.unlink()
                        deleted_count += 1
                        deleted_size += stat_result.st_size

                        logger.debug("Deleted old image: %s", file_path.name)

                except OSError as e:
                    # File may have disappeared concurrently; log and move on.
                    logger.error("Error deleting %s: %s", file_path.name, e)

            if deleted_count > 0:
                logger.info(
                    "Cleanup completed: deleted %s images (%.1f MB) older than %s days",
                    deleted_count,
                    deleted_size / 1024 / 1024,
                    self.retention_days,
                )
            else:
                logger.debug(
                    "Cleanup completed: no images older than %s days",
                    self.retention_days,
                )

        except Exception:
            logger.exception("Error during cleanup")

    async def cleanup_now(self):
        """Trigger an immediate cleanup pass (for testing or manual trigger)."""
        logger.info("Manual cleanup triggered")
        await self._cleanup_old_images()
|
||||
# Module-level singleton: the application shares one CleanupTask instance.
cleanup_task = CleanupTask()
|
||||
132
dreamtail_storage/file_manager.py
Executable file
132
dreamtail_storage/file_manager.py
Executable file
@@ -0,0 +1,132 @@
|
||||
"""
|
||||
File Storage Manager
|
||||
|
||||
Handles saving and retrieving generated images.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from PIL import Image
|
||||
import aiofiles
|
||||
import os
|
||||
|
||||
import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FileManager:
    """Manages image file storage and retrieval on the local filesystem.

    Files are named ``<job_id>.<ext>`` inside ``config.IMAGES_DIR``, where the
    extension is the lowercased ``config.IMAGE_FORMAT``.
    """

    def __init__(self):
        self.images_dir = config.IMAGES_DIR
        self.image_format = config.IMAGE_FORMAT

        # Ensure storage directory exists
        self.images_dir.mkdir(parents=True, exist_ok=True)
        logger.info("Image storage directory: %s", self.images_dir)

    async def save_image(self, job_id: str, image: Image.Image) -> str:
        """
        Save generated image to disk.

        Args:
            job_id: Job identifier (used as filename)
            image: PIL Image to save

        Returns:
            file_path: Absolute path to saved image

        Raises:
            IOError: If save fails
        """
        filename = f"{job_id}.{self.image_format.lower()}"
        file_path = self.images_dir / filename

        # Only JPEG understands the `quality` option; the original passed
        # quality=None to every other encoder, which is at best ignored —
        # build the keyword set conditionally instead.
        save_kwargs = {"format": self.image_format}
        if self.image_format == "JPEG":
            save_kwargs["quality"] = config.IMAGE_QUALITY

        try:
            # Run PIL's blocking file I/O in a thread pool so it doesn't
            # stall the event loop. Local import: this module does not
            # import asyncio at top level.
            import asyncio
            # get_running_loop() is the non-deprecated form; we are inside
            # a coroutine, so a running loop is guaranteed.
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(
                None,
                lambda: image.save(file_path, **save_kwargs),
            )

            logger.info(
                "Image saved: %s (%.1f KB)",
                file_path,
                file_path.stat().st_size / 1024,
            )
            return str(file_path)

        except Exception as e:
            logger.error("Failed to save image %s: %s", job_id, e)
            # Chain the original exception for easier debugging.
            raise IOError(f"Failed to save image: {e}") from e

    def get_image_path(self, job_id: str) -> Optional[Path]:
        """
        Get path to image file if it exists.

        Args:
            job_id: Job identifier

        Returns:
            Path to image file or None if not found
        """
        filename = f"{job_id}.{self.image_format.lower()}"
        file_path = self.images_dir / filename

        if file_path.exists():
            return file_path
        return None

    def image_exists(self, job_id: str) -> bool:
        """Check if image file exists."""
        return self.get_image_path(job_id) is not None

    async def delete_image(self, job_id: str) -> bool:
        """
        Delete an image file.

        Args:
            job_id: Job identifier

        Returns:
            True if deleted, False if not found or deletion failed
        """
        file_path = self.get_image_path(job_id)

        if file_path:
            try:
                file_path.unlink()
                logger.info("Deleted image: %s", file_path)
                return True
            except OSError as e:
                # unlink() failures are OS-level (permissions, races) — keep
                # the handler narrow so programming errors still surface.
                logger.error("Failed to delete image %s: %s", job_id, e)
                return False

        return False

    def get_storage_stats(self) -> dict:
        """Return storage statistics: image count, total size in MB, and path."""
        try:
            files = list(self.images_dir.glob(f"*.{self.image_format.lower()}"))
            total_size = sum(f.stat().st_size for f in files)

            return {
                "total_images": len(files),
                "total_size_mb": total_size / (1024 * 1024),
                "storage_path": str(self.images_dir)
            }
        except Exception as e:
            # Degrade gracefully: report an empty store rather than raising,
            # since stats are informational only.
            logger.error("Failed to get storage stats: %s", e)
            return {
                "total_images": 0,
                "total_size_mb": 0,
                "storage_path": str(self.images_dir)
            }
|
||||
# Module-level singleton: the application shares one FileManager instance.
file_manager = FileManager()
|
||||
Reference in New Issue
Block a user