DreamTail v1.0.0 with IP-Adapter FaceID support

- SDXL image generation using RealVisXL_V4.0
- IP-Adapter FaceID integration for consistent face generation
- Simplified API (removed client_id requirement)
- New params: face_image, face_strength
- 'vixy' shortcut for face-locked generation
- Queue-based async job processing
- FastAPI with proper error handling

Co-authored-by: Alex <alex@k4zka.online>
This commit is contained in:
2026-01-01 19:54:59 -06:00
commit e4294b57e6
18 changed files with 1895 additions and 0 deletions

19
scripts/build.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/bin/bash
# Build the DreamTail Docker image for NVIDIA Jetson AGX Orin (ARM64).
#
# Usage:    ./scripts/build.sh
# Requires: docker with --platform (buildx) support.
set -euo pipefail

echo "🎨 Building DreamTail Docker image..."

# Work from the repository root regardless of the caller's cwd.
cd "$(dirname "$0")/.."

# Build for ARM64 (Jetson architecture).
docker build \
  --platform linux/arm64 \
  -t dreamtail:latest \
  -f Dockerfile \
  .

echo "✅ Build complete!"
echo ""
echo "To run DreamTail:"
echo "  ./scripts/run.sh"

53
scripts/download-models.sh Executable file
View File

@@ -0,0 +1,53 @@
#!/bin/bash
# Download SDXL model weights for DreamTail via a throwaway L4T PyTorch
# container, caching them on the host so later builds/runs can mount them.
#
# Usage: ./scripts/download-models.sh
# Env:   DREAMTAIL_MODELS - host cache directory (default: /mnt/nvme/models)
set -euo pipefail

echo "📥 Downloading SDXL models..."
echo "This will download ~13GB of model weights"
echo ""

# Model cache directory (overridable via DREAMTAIL_MODELS).
MODELS_DIR="${DREAMTAIL_MODELS:-/mnt/nvme/models}"

# Create directory if it doesn't exist
mkdir -p "$MODELS_DIR"

echo "Models will be cached in: $MODELS_DIR"
echo ""
echo "Using Docker container to download models..."
echo ""

# Allocate a TTY only when we actually have one; unconditional `-it` makes
# docker abort under cron/CI with "the input device is not a TTY".
tty_args=()
if [ -t 0 ]; then
  tty_args+=(-it)
fi

# Use L4T PyTorch container to download models. The inline Python script is
# passed through bash -c; it contains no `$`, so the outer double quotes are
# safe from unwanted expansion.
docker run --rm "${tty_args[@]}" \
  -v "${MODELS_DIR}:/models" \
  dustynv/l4t-pytorch:r36.2.0-pth2.1-py3 \
  bash -c "
pip3 install -q diffusers transformers accelerate safetensors &&
python3 << 'PYEOF'
from diffusers import StableDiffusionXLPipeline
model_id = 'stabilityai/stable-diffusion-xl-base-1.0'
cache_dir = '/models'
print(f'Downloading {model_id}...')
print(f'Cache directory: {cache_dir}')
print('')
try:
    pipeline = StableDiffusionXLPipeline.from_pretrained(
        model_id,
        use_safetensors=True,
        cache_dir=cache_dir
    )
    print('✅ SDXL model downloaded successfully!')
except Exception as e:
    print(f'❌ Error downloading model: {e}')
    raise SystemExit(1)
PYEOF
"

echo ""
echo "✅ Model download complete!"
echo ""
echo "Models are cached in: $MODELS_DIR"
echo "You can now build and run DreamTail"

45
scripts/run.sh Executable file
View File

@@ -0,0 +1,45 @@
#!/bin/bash
# Run (or restart) the DreamTail container on Jetson AGX Orin.
#
# Usage:    ./scripts/run.sh
# Requires: nvidia container runtime; a previously built dreamtail:latest image.
set -euo pipefail

echo "🎨 Starting DreamTail..."

# Configuration
readonly CONTAINER_NAME="dreamtail"
readonly PORT=8765
readonly MODELS_DIR="/mnt/nvme/models"      # Models on NVMe
readonly STORAGE_DIR="/mnt/nvme/dreamtail"  # DreamTail storage on NVMe

# Create storage directory if it doesn't exist
mkdir -p "$STORAGE_DIR"

# Stop and remove any existing container so the name is free to reuse.
# Capture the name list first: piping `docker ps` straight into `grep -q`
# can trip pipefail via SIGPIPE when grep exits on the first match.
existing="$(docker ps -a --format '{{.Names}}' || true)"
if grep -qx -- "$CONTAINER_NAME" <<<"$existing"; then
  echo "Stopping existing DreamTail container..."
  docker stop "$CONTAINER_NAME" 2>/dev/null || true
  docker rm "$CONTAINER_NAME" 2>/dev/null || true
fi

# Run container
echo "Starting DreamTail container..."
docker run -d \
  --name "$CONTAINER_NAME" \
  --runtime=nvidia \
  --restart unless-stopped \
  -p "${PORT}:8765" \
  -v "${MODELS_DIR}:/app/models" \
  -v "${STORAGE_DIR}:/app/storage" \
  -e DREAMTAIL_STORAGE=/app/storage \
  -e DREAMTAIL_MODELS=/app/models \
  -e LOG_LEVEL=INFO \
  dreamtail:latest

echo "✅ DreamTail started!"
echo ""
echo "API available at: http://bigorin:${PORT}"
echo ""
echo "To check logs:"
echo "  docker logs -f ${CONTAINER_NAME}"
echo ""
echo "To stop:"
echo "  docker stop ${CONTAINER_NAME}"