From 5ca1b7a07dcd2e084104c1010b688e260125b28d Mon Sep 17 00:00:00 2001 From: Ryan Malloy Date: Fri, 5 Sep 2025 10:38:12 -0600 Subject: [PATCH] Migrate to Procrastinate 3.x with backward compatibility for 2.x MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add comprehensive compatibility layer supporting both Procrastinate 2.x and 3.x - Implement version-aware database migration system with pre/post migrations for 3.x - Create worker option mapping for seamless transition between versions - Add extensive test coverage for all compatibility features - Update dependency constraints to support both 2.x and 3.x simultaneously - Provide Docker containerization with uv caching and multi-service orchestration - Include demo applications and web interface for testing capabilities - Bump version to 0.2.0 reflecting new compatibility features Key Features: - Automatic version detection and feature flagging - Unified connector creation across PostgreSQL drivers - Worker option translation (timeout → fetch_job_polling_interval) - Database migration utilities with CLI and programmatic interfaces - Complete Docker Compose setup with PostgreSQL, Redis, workers, and demos Files Added: - src/video_processor/tasks/compat.py - Core compatibility layer - src/video_processor/tasks/migration.py - Migration utilities - src/video_processor/tasks/worker_compatibility.py - Worker CLI - tests/test_procrastinate_compat.py - Compatibility tests - tests/test_procrastinate_migration.py - Migration tests - Dockerfile - Multi-stage build with uv caching - docker-compose.yml - Complete development environment - examples/docker_demo.py - Containerized demo application - examples/web_demo.py - Flask web interface demo Migration Support: - Procrastinate 2.x: Single migration command compatibility - Procrastinate 3.x: Separate pre/post migration phases - Database URL validation and connection testing - Version-specific feature detection and graceful degradation --- Dockerfile | 84 +++++ README.md | 63 +++- docker-compose.yml | 158 +++++++++ docker/init-db.sql | 42 +++ examples/async_processing.py | 17 +- examples/docker_demo.py | 231 +++++++++++++ examples/web_demo.py | 254 ++++++++++++++ examples/worker_compatibility.py | 189 +++++++++++ .../caa085b6/caa085b6_360_front_5.jpg | Bin 0 -> 9550 bytes .../caa085b6/caa085b6_360_stereographic_5.jpg | Bin 0 -> 29498 bytes pyproject.toml | 9 +- src/video_processor/__init__.py | 4 +- src/video_processor/config.py | 13 +- src/video_processor/core/metadata.py | 2 +- src/video_processor/core/processor.py | 16 +- src/video_processor/core/thumbnails.py | 49 +-- src/video_processor/core/thumbnails_360.py | 141 ++++---- src/video_processor/tasks/compat.py | 190 +++++++++++ src/video_processor/tasks/migration.py | 253 ++++++++++++++ .../tasks/procrastinate_tasks.py | 34 +- .../tasks/worker_compatibility.py | 159 +++++++++ src/video_processor/utils/sprite_generator.py | 184 ++++++++++ src/video_processor/utils/video_360.py | 87 +++-- tests/test_procrastinate_compat.py | 314 ++++++++++++++++++ tests/test_procrastinate_migration.py | 216 ++++++++++++ 25 files changed, 2536 insertions(+), 173 deletions(-) create mode 100644 Dockerfile create mode 100644 docker-compose.yml create mode 100644 docker/init-db.sql create mode 100644 examples/docker_demo.py create mode 100644 examples/web_demo.py create mode 100644 examples/worker_compatibility.py create mode 100644 pipeline_360_only/caa085b6/caa085b6_360_front_5.jpg create mode 100644 
pipeline_360_only/caa085b6/caa085b6_360_stereographic_5.jpg create mode 100644 src/video_processor/tasks/compat.py create mode 100644 src/video_processor/tasks/migration.py create mode 100644 src/video_processor/tasks/worker_compatibility.py create mode 100644 src/video_processor/utils/sprite_generator.py create mode 100644 tests/test_procrastinate_compat.py create mode 100644 tests/test_procrastinate_migration.py diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..ae8c747 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,84 @@ +# Video Processor Dockerfile with uv caching optimization +# Based on uv Docker integration best practices +# https://docs.astral.sh/uv/guides/integration/docker/ + +FROM python:3.11-slim as base + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + ffmpeg \ + imagemagick \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv + +# Create app directory +WORKDIR /app + +# Create user for running the application +RUN groupadd -r app && useradd -r -g app app + +# Change to app user for dependency installation +USER app + +# Copy dependency files first for better caching +COPY --chown=app:app pyproject.toml uv.lock* ./ + +# Create virtual environment and install dependencies +# This layer will be cached if dependencies don't change +ENV UV_SYSTEM_PYTHON=1 +RUN uv sync --frozen --no-dev + +# Copy application code +COPY --chown=app:app . . + +# Install the application +RUN uv pip install -e . + +# Production stage +FROM base as production + +# Set environment variables +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PATH="/app/.venv/bin:$PATH" + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \ + CMD python -c "from video_processor import VideoProcessor; print('OK')" || exit 1 + +# Default command +CMD ["python", "-m", "video_processor.tasks.procrastinate_tasks"] + +# Development stage with dev dependencies +FROM base as development + +# Install development dependencies +RUN uv sync --frozen + +# Install pre-commit hooks +RUN uv run pre-commit install || true + +# Set development environment +ENV FLASK_ENV=development +ENV PYTHONPATH=/app + +# Default command for development +CMD ["bash"] + +# Worker stage for Procrastinate workers +FROM production as worker + +# Set worker-specific environment +ENV PROCRASTINATE_WORKER=1 + +# Command to run Procrastinate worker +CMD ["python", "-m", "video_processor.tasks.worker_compatibility", "worker"] + +# Migration stage for database migrations +FROM production as migration + +# Command to run migrations +CMD ["python", "-m", "video_processor.tasks.migration"] \ No newline at end of file diff --git a/README.md b/README.md index f0edd62..bc93b77 100644 --- a/README.md +++ b/README.md @@ -126,6 +126,67 @@ uv add "video-processor[video-360-full]" # Includes: All 360° dependencies + exifread ``` +### ⚡ Procrastinate Migration (2.x → 3.x) + +This library supports both **Procrastinate 2.x** and **3.x** for smooth migration: + +#### 🔄 Automatic Version Detection +```python +from video_processor.tasks.compat import get_version_info, IS_PROCRASTINATE_3_PLUS + +version_info = get_version_info() +print(f"Using Procrastinate {version_info['procrastinate_version']}") +print(f"Features available: {list(version_info['features'].keys())}") + +# Version-aware setup +if IS_PROCRASTINATE_3_PLUS: + # Use 3.x features like improved performance, graceful shutdown + pass +``` + +#### 📋 Migration 
Steps +1. **Install compatible version**: + ```bash + uv add "procrastinate>=3.5.2,<4.0.0" # Or keep 2.x support: ">=2.15.1,<4.0.0" + ``` + +2. **Apply database migrations**: + ```bash + # Procrastinate 3.x (two-step process) + procrastinate schema --apply --mode=pre # Before deploying + # Deploy new code + procrastinate schema --apply --mode=post # After deploying + + # Procrastinate 2.x (single step) + procrastinate schema --apply + ``` + +3. **Use migration helper**: + ```python + from video_processor.tasks.migration import migrate_database + + # Automatic version-aware migration + success = await migrate_database("postgresql://localhost/mydb") + ``` + +4. **Update worker configuration**: + ```python + from video_processor.tasks import get_worker_kwargs + + # Automatically normalizes options for your version + worker_options = get_worker_kwargs( + concurrency=4, + timeout=5, # Maps to fetch_job_polling_interval in 3.x + remove_error=True, # Maps to remove_failed in 3.x + ) + ``` + +#### 🆕 Procrastinate 3.x Benefits +- **Better performance** with improved job fetching +- **Graceful shutdown** with `shutdown_graceful_timeout` +- **Enhanced error handling** and job cancellation +- **Schema compatibility** improvements (3.5.2+) + ### Development Setup ```bash @@ -512,7 +573,7 @@ This project is licensed under the **MIT License** - see the [LICENSE](LICENSE) - ✨ **Multi-format encoding**: MP4, WebM, OGV support - 🖼️ **Thumbnail generation** with customizable timestamps - 🎞️ **Sprite sheet creation** with WebVTT files -- ⚡ **Background processing** with Procrastinate +- ⚡ **Background processing** with Procrastinate (2.x and 3.x compatible) - ⚙️ **Type-safe configuration** with Pydantic V2 - 🛠️ **Modern tooling**: uv, ruff, pytest integration - 📚 **Comprehensive documentation** and examples diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..30233de --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,158 @@ +# Docker Compose setup for Video Processor with Procrastinate +# Complete development and testing environment + +version: '3.8' + +services: + # PostgreSQL database for Procrastinate + postgres: + image: postgres:15-alpine + environment: + POSTGRES_DB: video_processor + POSTGRES_USER: video_user + POSTGRES_PASSWORD: video_password + POSTGRES_HOST_AUTH_METHOD: trust + volumes: + - postgres_data:/var/lib/postgresql/data + - ./docker/init-db.sql:/docker-entrypoint-initdb.d/init-db.sql + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U video_user -d video_processor"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - video_net + + # Redis for additional caching (optional) + redis: + image: redis:7-alpine + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - video_net + + # Video Processor API service + app: + build: + context: . 
+      dockerfile: Dockerfile
+      target: development
+    environment:
+      - DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor
+      - PROCRASTINATE_DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor
+      - REDIS_URL=redis://redis:6379/0
+      - PYTHONPATH=/app
+    volumes:
+      - .:/app
+      - video_uploads:/app/uploads
+      - video_outputs:/app/outputs
+    ports:
+      - "8000:8000"
+    depends_on:
+      postgres:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+    networks:
+      - video_net
+    command: ["python", "examples/docker_demo.py"]
+
+  # Procrastinate worker for background processing
+  worker:
+    build:
+      context: .
+      dockerfile: Dockerfile
+      target: worker
+    environment:
+      - PROCRASTINATE_DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor
+      - WORKER_CONCURRENCY=4
+      - WORKER_TIMEOUT=300
+    volumes:
+      - video_uploads:/app/uploads
+      - video_outputs:/app/outputs
+    depends_on:
+      postgres:
+        condition: service_healthy
+    networks:
+      - video_net
+    command: ["python", "-m", "video_processor.tasks.worker_compatibility", "worker"]
+
+  # Migration service (runs once to setup DB)
+  migrate:
+    build:
+      context: .
+      dockerfile: Dockerfile
+      target: migration
+    environment:
+      - PROCRASTINATE_DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor
+    depends_on:
+      postgres:
+        condition: service_healthy
+    networks:
+      - video_net
+    # Keep the -c script on a single line: YAML folds the line breaks into
+    # spaces, and leading whitespace makes `python -c` raise IndentationError.
+    command: ["python", "-c", "import asyncio; from video_processor.tasks.migration import migrate_database; asyncio.run(migrate_database('postgresql://video_user:video_password@postgres:5432/video_processor'))"]
+
+  # Test runner service
+  test:
+    build:
+      context: .
+      dockerfile: Dockerfile
+      target: development
+    environment:
+      - DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor_test
+      - PROCRASTINATE_DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor_test
+    volumes:
+      - .:/app
+    depends_on:
+      postgres:
+        condition: service_healthy
+    networks:
+      - video_net
+    command: ["uv", "run", "pytest", "tests/", "-v", "--cov=src/", "--cov-report=html", "--cov-report=term"]
+
+  # Demo web interface (optional)
+  demo:
+    build:
+      context: .
+ dockerfile: Dockerfile + target: development + environment: + - DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor + - PROCRASTINATE_DATABASE_URL=postgresql://video_user:video_password@postgres:5432/video_processor + ports: + - "8080:8080" + volumes: + - .:/app + - video_uploads:/app/uploads + - video_outputs:/app/outputs + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + networks: + - video_net + command: ["python", "examples/web_demo.py"] + +volumes: + postgres_data: + driver: local + video_uploads: + driver: local + video_outputs: + driver: local + +networks: + video_net: + driver: bridge \ No newline at end of file diff --git a/docker/init-db.sql b/docker/init-db.sql new file mode 100644 index 0000000..c8b4093 --- /dev/null +++ b/docker/init-db.sql @@ -0,0 +1,42 @@ +-- Database initialization for Video Processor +-- Creates necessary databases and extensions + +-- Create test database +CREATE DATABASE video_processor_test; + +-- Connect to main database +\c video_processor; + +-- Enable required extensions +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + +-- Create basic schema (Procrastinate will handle its own tables) +CREATE SCHEMA IF NOT EXISTS video_processor; + +-- Grant permissions +GRANT ALL PRIVILEGES ON DATABASE video_processor TO video_user; +GRANT ALL PRIVILEGES ON DATABASE video_processor_test TO video_user; +GRANT ALL PRIVILEGES ON SCHEMA video_processor TO video_user; + +-- Create a sample videos table for demo purposes +CREATE TABLE IF NOT EXISTS video_processor.videos ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + filename VARCHAR(255) NOT NULL, + original_path TEXT, + processed_path TEXT, + status VARCHAR(50) DEFAULT 'pending', + metadata JSONB, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Create index for efficient queries +CREATE INDEX IF NOT EXISTS idx_videos_status ON video_processor.videos(status); +CREATE INDEX IF NOT EXISTS idx_videos_created_at ON video_processor.videos(created_at); + +-- Insert sample data +INSERT INTO video_processor.videos (filename, status) VALUES + ('sample_video_1.mp4', 'pending'), + ('sample_video_2.mp4', 'processing'), + ('sample_video_3.mp4', 'completed') +ON CONFLICT DO NOTHING; \ No newline at end of file diff --git a/examples/async_processing.py b/examples/async_processing.py index 39c51be..0e0dd08 100644 --- a/examples/async_processing.py +++ b/examples/async_processing.py @@ -14,7 +14,8 @@ from pathlib import Path import procrastinate from video_processor import ProcessorConfig -from video_processor.tasks import setup_procrastinate +from video_processor.tasks import setup_procrastinate, get_worker_kwargs +from video_processor.tasks.compat import get_version_info, IS_PROCRASTINATE_3_PLUS async def async_processing_example(): @@ -25,8 +26,18 @@ async def async_processing_example(): database_url = "postgresql://localhost/procrastinate_test" try: - # Set up Procrastinate - app = setup_procrastinate(database_url) + # Print version information + version_info = get_version_info() + print(f"Using Procrastinate {version_info['procrastinate_version']}") + print(f"Version 3.x+: {version_info['is_v3_plus']}") + + # Set up Procrastinate with version-appropriate settings + connector_kwargs = {} + if IS_PROCRASTINATE_3_PLUS: + # Procrastinate 3.x specific settings + connector_kwargs["pool_size"] = 10 + + app = setup_procrastinate(database_url, connector_kwargs=connector_kwargs) with 
tempfile.TemporaryDirectory() as temp_dir: temp_path = Path(temp_dir) diff --git a/examples/docker_demo.py b/examples/docker_demo.py new file mode 100644 index 0000000..85ce58e --- /dev/null +++ b/examples/docker_demo.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python3 +""" +Docker Demo Application for Video Processor + +This demo shows how to use the video processor in a containerized environment +with Procrastinate background tasks and PostgreSQL. +""" + +import asyncio +import logging +import os +import tempfile +from pathlib import Path + +from video_processor import ProcessorConfig, VideoProcessor +from video_processor.tasks import setup_procrastinate +from video_processor.tasks.compat import get_version_info +from video_processor.tasks.migration import migrate_database + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + + +async def create_sample_video(output_path: Path) -> Path: + """Create a sample video using ffmpeg for testing.""" + video_file = output_path / "sample_test_video.mp4" + + # Create a simple test video using ffmpeg + import subprocess + + cmd = [ + "ffmpeg", "-y", + "-f", "lavfi", + "-i", "testsrc=duration=10:size=640x480:rate=30", + "-c:v", "libx264", + "-preset", "fast", + "-crf", "23", + str(video_file) + ] + + try: + result = subprocess.run(cmd, capture_output=True, text=True) + if result.returncode != 0: + logger.error(f"FFmpeg failed: {result.stderr}") + raise RuntimeError("Failed to create sample video") + + logger.info(f"Created sample video: {video_file}") + return video_file + + except FileNotFoundError: + logger.error("FFmpeg not found. Please install FFmpeg.") + raise + + +async def demo_sync_processing(): + """Demonstrate synchronous video processing.""" + logger.info("🎬 Starting Synchronous Processing Demo") + + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create sample video + sample_video = await create_sample_video(temp_path) + + # Configure processor + config = ProcessorConfig( + output_dir=temp_path / "outputs", + output_formats=["mp4", "webm"], + quality_preset="fast", + generate_thumbnails=True, + generate_sprites=True, + enable_360_processing=True, # Will be disabled if deps not available + ) + + # Process video + processor = VideoProcessor(config) + result = processor.process_video(sample_video) + + logger.info("✅ Synchronous processing completed!") + logger.info(f"📹 Processed video ID: {result.video_id}") + logger.info(f"📁 Output files: {len(result.encoded_files)} formats") + logger.info(f"🖼️ Thumbnails: {len(result.thumbnails)}") + + if result.sprite_file: + sprite_size = result.sprite_file.stat().st_size // 1024 + logger.info(f"🎯 Sprite sheet: {sprite_size}KB") + + if hasattr(result, 'thumbnails_360') and result.thumbnails_360: + logger.info(f"🌐 360° thumbnails: {len(result.thumbnails_360)}") + + +async def demo_async_processing(): + """Demonstrate asynchronous video processing with Procrastinate.""" + logger.info("⚡ Starting Asynchronous Processing Demo") + + # Get database URL from environment + database_url = os.environ.get( + 'PROCRASTINATE_DATABASE_URL', + 'postgresql://video_user:video_password@postgres:5432/video_processor' + ) + + try: + # Show version info + version_info = get_version_info() + logger.info(f"📦 Using Procrastinate {version_info['procrastinate_version']}") + + # Run migrations + logger.info("🔄 Running database migrations...") + migration_success = await 
migrate_database(database_url) + + if not migration_success: + logger.error("❌ Database migration failed") + return + + logger.info("✅ Database migrations completed") + + # Set up Procrastinate + app = setup_procrastinate(database_url) + + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create sample video + sample_video = await create_sample_video(temp_path) + + # Configure processing + config_dict = { + "base_path": str(temp_path), + "output_formats": ["mp4"], + "quality_preset": "fast", + "generate_thumbnails": True, + "sprite_interval": 5, + } + + async with app.open_async() as app_context: + # Submit video processing task + logger.info("📤 Submitting async video processing job...") + + job = await app_context.configure_task( + "process_video_async", + queue="video_processing" + ).defer_async( + input_path=str(sample_video), + output_dir=str(temp_path / "async_outputs"), + config_dict=config_dict + ) + + logger.info(f"✅ Job submitted with ID: {job.id}") + logger.info("🔄 Job will be processed by background worker...") + + # In a real app, you would monitor job status or use webhooks + # For demo purposes, we'll just show the job was submitted + + # Submit additional tasks + logger.info("📤 Submitting thumbnail generation job...") + + thumb_job = await app_context.configure_task( + "generate_thumbnail_async", + queue="thumbnail_generation" + ).defer_async( + video_path=str(sample_video), + output_dir=str(temp_path / "thumbnails"), + timestamp=5, + video_id="demo_thumb" + ) + + logger.info(f"✅ Thumbnail job submitted: {thumb_job.id}") + + except Exception as e: + logger.error(f"❌ Async processing demo failed: {e}") + raise + + +async def demo_migration_features(): + """Demonstrate migration utilities.""" + logger.info("🔄 Migration Features Demo") + + from video_processor.tasks.migration import ProcrastinateMigrationHelper + + database_url = os.environ.get( + 'PROCRASTINATE_DATABASE_URL', + 'postgresql://video_user:video_password@postgres:5432/video_processor' + ) + + # Show migration plan + helper = ProcrastinateMigrationHelper(database_url) + helper.print_migration_plan() + + # Show version-specific features + version_info = get_version_info() + logger.info("🆕 Available Features:") + for feature, available in version_info['features'].items(): + status = "✅" if available else "❌" + logger.info(f" {status} {feature}") + + +async def main(): + """Run all demo scenarios.""" + logger.info("🚀 Video Processor Docker Demo Starting...") + + try: + # Run demos in sequence + await demo_sync_processing() + await demo_async_processing() + await demo_migration_features() + + logger.info("🎉 All demos completed successfully!") + + # Keep the container running to show logs + logger.info("📋 Demo completed. Container will keep running for log inspection...") + logger.info("💡 Check the logs with: docker-compose logs app") + logger.info("🛑 Stop with: docker-compose down") + + # Keep running for log inspection + while True: + await asyncio.sleep(30) + logger.info("💓 Demo container heartbeat - still running...") + + except KeyboardInterrupt: + logger.info("🛑 Demo interrupted by user") + except Exception as e: + logger.error(f"❌ Demo failed: {e}") + raise + + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/examples/web_demo.py b/examples/web_demo.py new file mode 100644 index 0000000..7c12774 --- /dev/null +++ b/examples/web_demo.py @@ -0,0 +1,254 @@ +#!/usr/bin/env python3 +""" +Simple web demo interface for Video Processor. 
+
+This provides a basic Flask web interface to demonstrate video processing
+capabilities in a browser-friendly format.
+"""
+
+import asyncio
+import os
+import tempfile
+from pathlib import Path
+from typing import Optional
+
+try:
+    from flask import Flask, jsonify, render_template_string, request
+except ImportError:
+    print("Flask not installed. Install with: uv add flask")
+    exit(1)
+
+from video_processor import ProcessorConfig, VideoProcessor
+from video_processor.tasks import setup_procrastinate
+from video_processor.tasks.compat import get_version_info
+
+# Simple HTML template
+HTML_TEMPLATE = """
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Video Processor Demo</title>
+</head>
+<body>
+    <h1>🎬 Video Processor Demo</h1>
+
+    <div class="info">
+        <strong>System Information:</strong><br>
+        Version: {{ version_info.version }}<br>
+        Procrastinate: {{ version_info.procrastinate_version }}<br>
+        Features: {{ version_info.features }}
+    </div>
+
+    <h2>Test Video Processing</h2>
+    <button onclick="callApi('/api/process-test')">Process Test Video</button>
+    <button onclick="callApi('/api/async-job')">Submit Async Job</button>
+
+    <h2>Processing Logs</h2>
+    <pre id="logs">Ready...</pre>
+
+    <script>
+        function callApi(url) {
+            fetch(url, {method: 'POST'})
+                .then(r => r.json())
+                .then(data => {
+                    document.getElementById('logs').textContent =
+                        JSON.stringify(data, null, 2);
+                });
+        }
+    </script>
+</body>
+</html>
+ + + + +""" + +app = Flask(__name__) + + +async def create_test_video(output_dir: Path) -> Path: + """Create a simple test video for processing.""" + import subprocess + + video_file = output_dir / "web_demo_test.mp4" + + cmd = [ + "ffmpeg", "-y", + "-f", "lavfi", + "-i", "testsrc=duration=5:size=320x240:rate=15", + "-c:v", "libx264", + "-preset", "ultrafast", + "-crf", "30", + str(video_file) + ] + + try: + result = subprocess.run(cmd, capture_output=True, text=True) + if result.returncode != 0: + raise RuntimeError(f"FFmpeg failed: {result.stderr}") + return video_file + except FileNotFoundError: + raise RuntimeError("FFmpeg not found. Please install FFmpeg.") + + +@app.route('/') +def index(): + """Serve the demo web interface.""" + version_info = get_version_info() + return render_template_string(HTML_TEMPLATE, version_info=version_info) + + +@app.route('/api/info') +def api_info(): + """Get system information.""" + return jsonify(get_version_info()) + + +@app.route('/api/process-test', methods=['POST']) +def api_process_test(): + """Process a test video synchronously.""" + try: + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create test video + test_video = asyncio.run(create_test_video(temp_path)) + + # Configure processor for fast processing + config = ProcessorConfig( + output_dir=temp_path / "outputs", + output_formats=["mp4"], + quality_preset="ultrafast", + generate_thumbnails=True, + generate_sprites=False, # Skip sprites for faster demo + enable_360_processing=False, # Skip 360 for faster demo + ) + + # Process video + processor = VideoProcessor(config) + result = processor.process_video(test_video) + + return jsonify({ + "status": "success", + "video_id": result.video_id, + "encoded_files": len(result.encoded_files), + "thumbnails": len(result.thumbnails), + "processing_time": "< 30s (estimated)", + "message": "Test video processed successfully!" + }) + + except Exception as e: + return jsonify({"error": str(e)}), 500 + + +@app.route('/api/async-job', methods=['POST']) +def api_async_job(): + """Submit an async processing job.""" + try: + database_url = os.environ.get( + 'PROCRASTINATE_DATABASE_URL', + 'postgresql://video_user:video_password@postgres:5432/video_processor' + ) + + # Set up Procrastinate + app_context = setup_procrastinate(database_url) + + # In a real application, you would: + # 1. Accept file uploads + # 2. Store them temporarily + # 3. Submit processing jobs + # 4. 
Return job IDs for status tracking + + # For demo, we'll just simulate job submission + job_id = f"demo-job-{os.urandom(4).hex()}" + + return jsonify({ + "status": "submitted", + "job_id": job_id, + "queue": "video_processing", + "message": "Job submitted to background worker", + "note": "In production, this would submit a real Procrastinate job" + }) + + except Exception as e: + return jsonify({"error": str(e)}), 500 + + +def main(): + """Run the web demo server.""" + port = int(os.environ.get('PORT', 8080)) + debug = os.environ.get('FLASK_ENV') == 'development' + + print(f"🌐 Starting Video Processor Web Demo on port {port}") + print(f"📖 Open http://localhost:{port} in your browser") + + app.run(host='0.0.0.0', port=port, debug=debug) + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/examples/worker_compatibility.py b/examples/worker_compatibility.py new file mode 100644 index 0000000..e77808d --- /dev/null +++ b/examples/worker_compatibility.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python3 +""" +Procrastinate worker compatibility example. + +This example demonstrates how to run a Procrastinate worker that works +with both version 2.x and 3.x of Procrastinate. +""" + +import asyncio +import logging +import signal +import sys +from pathlib import Path + +from video_processor.tasks import setup_procrastinate, get_worker_kwargs +from video_processor.tasks.compat import get_version_info, IS_PROCRASTINATE_3_PLUS +from video_processor.tasks.migration import migrate_database + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +async def setup_and_run_worker(): + """Set up and run a Procrastinate worker with version compatibility.""" + + # Database connection + database_url = "postgresql://localhost/procrastinate_dev" + + try: + # Print version information + version_info = get_version_info() + logger.info(f"Starting worker with Procrastinate {version_info['procrastinate_version']}") + logger.info(f"Available features: {list(version_info['features'].keys())}") + + # Optionally run database migration + migrate_success = await migrate_database(database_url) + if not migrate_success: + logger.error("Database migration failed") + return + + # Set up Procrastinate app + connector_kwargs = {} + if IS_PROCRASTINATE_3_PLUS: + # Procrastinate 3.x connection pool settings + connector_kwargs.update({ + "pool_size": 20, + "max_pool_size": 50, + }) + + app = setup_procrastinate(database_url, connector_kwargs=connector_kwargs) + + # Configure worker options with version compatibility + worker_options = { + "concurrency": 4, + "name": "video-processor-worker", + } + + # Add version-specific options + if IS_PROCRASTINATE_3_PLUS: + # Procrastinate 3.x options + worker_options.update({ + "fetch_job_polling_interval": 5, # Renamed from "timeout" in 2.x + "shutdown_graceful_timeout": 30, # New in 3.x + "remove_failed": True, # Renamed from "remove_error" + "include_failed": False, # Renamed from "include_error" + }) + else: + # Procrastinate 2.x options + worker_options.update({ + "timeout": 5, + "remove_error": True, + "include_error": False, + }) + + # Normalize options for the current version + normalized_options = get_worker_kwargs(**worker_options) + + logger.info(f"Worker options: {normalized_options}") + + # Create and configure worker + async with app.open_async() as app_context: + worker = app_context.create_worker( + queues=["video_processing", "thumbnail_generation", "sprite_generation"], + **normalized_options + ) + + # Set up signal handlers 
for graceful shutdown + if IS_PROCRASTINATE_3_PLUS: + # Procrastinate 3.x has improved graceful shutdown + def signal_handler(sig, frame): + logger.info(f"Received signal {sig}, shutting down gracefully...") + worker.stop() + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + logger.info("Starting Procrastinate worker...") + logger.info("Queues: video_processing, thumbnail_generation, sprite_generation") + logger.info("Press Ctrl+C to stop") + + # Run the worker + await worker.run_async() + + except KeyboardInterrupt: + logger.info("Worker interrupted by user") + except Exception as e: + logger.error(f"Worker error: {e}") + raise + + +async def test_task_submission(): + """Test task submission with both Procrastinate versions.""" + + database_url = "postgresql://localhost/procrastinate_dev" + + try: + app = setup_procrastinate(database_url) + + # Test video processing task + with Path("test_video.mp4").open("w") as f: + f.write("") # Create dummy file for testing + + async with app.open_async() as app_context: + # Submit test task + job = await app_context.configure_task( + "process_video_async", + queue="video_processing" + ).defer_async( + input_path="test_video.mp4", + output_dir="/tmp/test_output", + config_dict={"quality_preset": "fast"} + ) + + logger.info(f"Submitted test job: {job.id}") + + # Clean up + Path("test_video.mp4").unlink(missing_ok=True) + + except Exception as e: + logger.error(f"Task submission test failed: {e}") + + +def show_migration_help(): + """Show migration help for upgrading from Procrastinate 2.x to 3.x.""" + + print("\nProcrastinate Migration Guide") + print("=" * 40) + + version_info = get_version_info() + + if version_info['is_v3_plus']: + print("✅ You are running Procrastinate 3.x") + print("\nMigration steps for 3.x:") + print("1. Apply pre-migration: python -m video_processor.tasks.migration --pre") + print("2. Deploy new application code") + print("3. Apply post-migration: python -m video_processor.tasks.migration --post") + print("4. Verify: procrastinate schema --check") + else: + print("📦 You are running Procrastinate 2.x") + print("\nTo upgrade to 3.x:") + print("1. Update dependencies: uv add 'procrastinate>=3.0,<4.0'") + print("2. Apply pre-migration: python -m video_processor.tasks.migration --pre") + print("3. Deploy new code") + print("4. 
Apply post-migration: python -m video_processor.tasks.migration --post")
+
+    print(f"\nCurrent version: {version_info['procrastinate_version']}")
+    print(f"Available features: {list(version_info['features'].keys())}")
+
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        command = sys.argv[1]
+
+        if command == "worker":
+            asyncio.run(setup_and_run_worker())
+        elif command == "test":
+            asyncio.run(test_task_submission())
+        elif command == "help":
+            show_migration_help()
+        else:
+            print("Usage: python worker_compatibility.py [worker|test|help]")
+    else:
+        print("Procrastinate Worker Compatibility Demo")
+        print("Usage:")
+        print("  python worker_compatibility.py worker  - Run worker")
+        print("  python worker_compatibility.py test    - Test task submission")
+        print("  python worker_compatibility.py help    - Show migration help")
+
+        show_migration_help()
\ No newline at end of file
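Note: the compatibility layer itself (`src/video_processor/tasks/compat.py`) is added by this patch, but its hunk is not reproduced in this excerpt. As a rough sketch only — not the file's actual contents — the version flags used throughout the examples could be derived from the installed distribution via the standard `importlib.metadata`:

```python
# Hypothetical sketch of the version-detection flags in
# video_processor/tasks/compat.py; the real hunk is omitted from this excerpt.
from importlib.metadata import version

PROCRASTINATE_VERSION = version("procrastinate")  # e.g. "3.5.2"
IS_PROCRASTINATE_3_PLUS = int(PROCRASTINATE_VERSION.split(".")[0]) >= 3


def get_version_info() -> dict:
    """Report the detected Procrastinate version and coarse feature flags."""
    return {
        "version": version("video-processor"),  # this package's own version
        "procrastinate_version": PROCRASTINATE_VERSION,
        "is_v3_plus": IS_PROCRASTINATE_3_PLUS,
        # Feature names mirror the 3.x benefits listed in the README section.
        "features": {
            "graceful_shutdown": IS_PROCRASTINATE_3_PLUS,
            "pre_post_migrations": IS_PROCRASTINATE_3_PLUS,
            "job_cancellation": IS_PROCRASTINATE_3_PLUS,
        },
    }
```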
diff --git a/pipeline_360_only/caa085b6/caa085b6_360_front_5.jpg b/pipeline_360_only/caa085b6/caa085b6_360_front_5.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..66255289ad38508e757627a78a6c46b04a686cc9
Binary files /dev/null and b/pipeline_360_only/caa085b6/caa085b6_360_front_5.jpg differ
diff --git a/pipeline_360_only/caa085b6/caa085b6_360_stereographic_5.jpg b/pipeline_360_only/caa085b6/caa085b6_360_stereographic_5.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ca923a116fb553788a3510cc9e2800a87ddaeafb
Binary files /dev/null and b/pipeline_360_only/caa085b6/caa085b6_360_stereographic_5.jpg differ
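Likewise, the worker option translation (`get_worker_kwargs`) is referenced by the examples but its hunk is omitted. A minimal sketch, assuming only the renames the README and examples document (`timeout` → `fetch_job_polling_interval`, `remove_error` → `remove_failed`, `include_error` → `include_failed`, with `shutdown_graceful_timeout` being 3.x-only) and building on the `IS_PROCRASTINATE_3_PLUS` flag sketched above:

```python
# Hypothetical sketch of get_worker_kwargs in video_processor/tasks/compat.py.
_V2_TO_V3 = {
    "timeout": "fetch_job_polling_interval",
    "remove_error": "remove_failed",
    "include_error": "include_failed",
}
_V3_ONLY = {"shutdown_graceful_timeout"}


def get_worker_kwargs(**options) -> dict:
    """Translate worker options to the names the installed version expects."""
    if IS_PROCRASTINATE_3_PLUS:
        # Accept 2.x names and rename them; 3.x names pass through untouched.
        return {_V2_TO_V3.get(key, key): value for key, value in options.items()}
    v3_to_v2 = {v3: v2 for v2, v3 in _V2_TO_V3.items()}
    return {
        v3_to_v2.get(key, key): value
        for key, value in options.items()
        if key not in _V3_ONLY  # drop options 2.x does not understand
    }
```

Under this shape, `get_worker_kwargs(concurrency=4, timeout=5, remove_error=True)` yields `{"concurrency": 4, "fetch_job_polling_interval": 5, "remove_failed": True}` on 3.x and passes through unchanged on 2.x.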
zJH7kx3L_yk6t}o|yj~z`|of!#?b& zd^jkGFmfpQ9ri2$xsFHe_&Mgv^D}^1tWk8~ukJO)YO2Yv18rGB$5JmZ;M)A3E1Lf< z&ivQ+s?+AffayCoeB7jpW@%cvYmvk}y|GYd4vM<8lyDl5dD^0^@iHzv_Crvj6FGvpsk+++@jPotVxNfxR_8)oT(h9`X@Ci??N3C^#XDsE>$&Iv}> zrG;f%dhS2J=masff_(K|E`rMrtH_Bj1vBNneu_u|r{ zA`w6x@#fOa^0{AXRQ}H5FCQbT}q+~kSx%gFhc#7IUS&e3-Ue4GY2y(C11*g}Wg^-WxKF0dl4(BjDr?JEvrWs){1SPw zK|>{A*3D%b8~CAMZ+Ay)r%`%uYLkX9)13KqhCGCvJ3|o~*{JFeK3*iBET{UjRK6tj z=qF{S=2!vyXS6xq4(`Fq{nJ1vm~){l?dEI!!qP4oqS8suzd&i>LwOcI=6zmmNlymL zM(j@8>ko;4TlCMFmTy{qw8ahrt$`Ro0kX0FqwTL=TZGovJQuDNAKk>H!IEYIgPNF~ zllqbO7d%$8YZC2e&vw5;@w(VHrouwb=~U}8B+={+)mfx1`6U4-XN6Q-pw zcYru0|GH4Vb=mnm8LnQkj!#XK$M+(NO#PKd^jJi7O3TaLj(XK=we=RJ#vy>9$zCdtj| z9zsO5O*>}IKQI9r(NV*ic=sE^fM3TyD?hER2hD5nF62qFKeC{}8;%vu5dOezlR9I~ zai_*%l|`XACZg1u^GNVo@~9(SWd}V)Aei&uZq^Rv*YmjS+ge4fE(GtYWenrqHOmQc zV5O47d!7#j>)7%N(%?LN$LzL3oKXhJZV6LRgCc{@;{I=730sR$`ea)gDLAxeYfPUo zR!yGBtzxuEi^4X~f<8Ln?oT2|dLK~as_Pw9Ts;rZMzs3PPmS{y-DY+I-6>MQsNc&w z(_}>Kd7V|JSV4!29Yxi`x`!}0oF3{yIP*1^YOkm~UuKF_urF)nj+RN)SjBT{+Yk4R zc0>zSBY(C?vo)s59OmEJT3Okh&llR_D*L)qq4vB`;@UN})pWXD#<1Hj*U!pS_iDDm z7tX&GZi?;DldHMSrLUY-a~uJ+1mUop(vCU9jH-vcVs9(Cs|zIOPWc^TPU=BUR$pfK zm5G_++zms50O+Yn60Cw@UiS^Q>6!&ckeKa_j&vOkZS$w+W$}gYj95(IfXR`jAG((5$RKipgo%q>z!q}(Uk%dZ7<@H`c679;?) zxFS=wz_|Z#-Fv>hAiCS(t#a8c0fh@5hJ7!kkfM#YIO!x?`{V87G~Kt0&licUaMGi| zYdkD`za;^^&XnJev@=K&>JOJITPAAQX*YvsW2Mk!CuSvL+q6C(wQN3R75bsba8VNR zRdyn8BqR($9*YsDa*`}))Btds8b#nHOr#6XJ&cmsq-p_T;Me^Sy7>_{S~(p@QngPB zi?`Z*e>7fM-=$-~i*|b$q|rMj&l_w%PUO|}mNO(!W_AtXPv47LsIFi(O&C7TTEwQU zdmg8OLl>)(YnvsB&zJrJiF+QWrh#2=0Us*tE~54cmmC6oXh+=o?k#D&NVDCT&tgMT zA;XUm^lTLPv4G09;aMuOSVf^@hv_jjap3oM#d55No`Hh` zd*7?Ccfw2tEdk)?Lz3#V@Hxw#;D_xp5 zdacC!y0bTG{cw+4Qbn*sKuB3xNqP+ohJ4P#NW|`m<`|@KbwkyJB`qFB)D6km&fBx3 zdX?Io?M1VHEr|3Sqm((?cxD{dv`uc}uxnLWJ^lUU26Z9BMy)f$2GeFmS5!=0.2.0", - "pillow>=11.2.1", + "pillow>=11.2.1", "msprites2 @ git+https://github.com/rsp2k/msprites2.git", - "procrastinate>=2.15.1", + "procrastinate>=2.15.1,<4.0.0", # Support both 2.x and 3.x during migration "psycopg[pool]>=3.2.9", "python-dateutil>=2.9.0", "pydantic>=2.0.0", "pydantic-settings>=2.0.0", + "exifread>=3.5.1", ] [project.optional-dependencies] @@ -102,11 +103,13 @@ testpaths = ["tests"] python_files = ["test_*.py"] python_classes = ["Test*"] python_functions = ["test_*"] +asyncio_mode = "auto" [dependency-groups] dev = [ "mypy>=1.17.1", "pytest>=8.4.2", + "pytest-asyncio>=0.21.0", "pytest-cov>=6.2.1", "ruff>=0.12.12", ] diff --git a/src/video_processor/__init__.py b/src/video_processor/__init__.py index 2e895b4..223208e 100644 --- a/src/video_processor/__init__.py +++ b/src/video_processor/__init__.py @@ -11,8 +11,8 @@ from .exceptions import EncodingError, StorageError, VideoProcessorError # Optional 360° imports try: - from .utils.video_360 import Video360Detection, Video360Utils, HAS_360_SUPPORT from .core.thumbnails_360 import Thumbnail360Generator + from .utils.video_360 import HAS_360_SUPPORT, Video360Detection, Video360Utils except ImportError: HAS_360_SUPPORT = False @@ -30,6 +30,6 @@ __all__ = [ if HAS_360_SUPPORT: __all__.extend([ "Video360Detection", - "Video360Utils", + "Video360Utils", "Thumbnail360Generator", ]) diff --git a/src/video_processor/config.py b/src/video_processor/config.py index 2330ccf..83a436f 100644 --- a/src/video_processor/config.py +++ b/src/video_processor/config.py @@ -7,7 +7,12 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator # Optional dependency detection for 360° features try: - from .utils.video_360 import Video360Utils, ProjectionType, StereoMode, 
HAS_360_SUPPORT + from .utils.video_360 import ( + HAS_360_SUPPORT, + ProjectionType, + StereoMode, + Video360Utils, + ) except ImportError: # Fallback types when 360° libraries not available ProjectionType = str @@ -43,7 +48,7 @@ class ProcessorConfig(BaseModel): # File permissions file_permissions: int = 0o644 directory_permissions: int = 0o755 - + # 360° Video settings (only active if 360° libraries are available) enable_360_processing: bool = Field(default=HAS_360_SUPPORT) auto_detect_360: bool = Field(default=True) @@ -67,7 +72,7 @@ class ProcessorConfig(BaseModel): if not v: raise ValueError("At least one output format must be specified") return v - + @field_validator("enable_360_processing") @classmethod def validate_360_processing(cls, v: bool) -> bool: @@ -75,7 +80,7 @@ class ProcessorConfig(BaseModel): if v and not HAS_360_SUPPORT: raise ValueError( "360° processing requires optional dependencies. " - f"Install with: pip install 'video-processor[video-360]' or uv add 'video-processor[video-360]'" + "Install with: pip install 'video-processor[video-360]' or uv add 'video-processor[video-360]'" ) return v diff --git a/src/video_processor/core/metadata.py b/src/video_processor/core/metadata.py index fea5524..6963542 100644 --- a/src/video_processor/core/metadata.py +++ b/src/video_processor/core/metadata.py @@ -57,7 +57,7 @@ class VideoMetadata: # Raw probe data for advanced use cases "raw_probe_data": probe_data, } - + # Add 360° video detection video_360_info = Video360Detection.detect_360_video(metadata) metadata["video_360"] = video_360_info diff --git a/src/video_processor/core/processor.py b/src/video_processor/core/processor.py index b215214..08e9739 100644 --- a/src/video_processor/core/processor.py +++ b/src/video_processor/core/processor.py @@ -55,7 +55,7 @@ class VideoProcessor: self.encoder = VideoEncoder(config) self.thumbnail_generator = ThumbnailGenerator(config) self.metadata_extractor = VideoMetadata(config) - + # Initialize 360° thumbnail generator if available and enabled if HAS_360_SUPPORT and config.enable_360_processing: self.thumbnail_360_generator = Thumbnail360Generator(config) @@ -138,19 +138,19 @@ class VideoProcessor: sprite_file, webvtt_file = self.thumbnail_generator.generate_sprites( encoded_files["mp4"], output_dir, video_id ) - + # Generate 360° thumbnails and sprites if this is a 360° video thumbnails_360 = {} sprite_360_files = {} - - if (self.thumbnail_360_generator and + + if (self.thumbnail_360_generator and self.config.generate_360_thumbnails and metadata.get("video_360", {}).get("is_360_video", False)): - + # Get 360° video information video_360_info = metadata["video_360"] projection_type = video_360_info.get("projection_type", "equirectangular") - + # Generate 360° thumbnails for each timestamp for timestamp in self.config.thumbnail_timestamps: angle_thumbnails = self.thumbnail_360_generator.generate_360_thumbnails( @@ -161,12 +161,12 @@ class VideoProcessor: projection_type, self.config.thumbnail_360_projections, ) - + # Store thumbnails by timestamp and angle for angle, thumbnail_path in angle_thumbnails.items(): key = f"{timestamp}s_{angle}" thumbnails_360[key] = thumbnail_path - + # Generate 360° sprite sheets for each viewing angle if self.config.generate_sprites: for angle in self.config.thumbnail_360_projections: diff --git a/src/video_processor/core/thumbnails.py b/src/video_processor/core/thumbnails.py index 6cfe2e5..481d228 100644 --- a/src/video_processor/core/thumbnails.py +++ b/src/video_processor/core/thumbnails.py @@ -3,10 
+3,10 @@ from pathlib import Path
 
 import ffmpeg
-from msprites2 import MontageSprites
 
 from ..config import ProcessorConfig
 from ..exceptions import EncodingError, FFmpegError
+from ..utils.sprite_generator import FixedSpriteGenerator
 
 
 class ThumbnailGenerator:
@@ -99,45 +99,28 @@ class ThumbnailGenerator:
         webvtt_file = output_dir / f"{video_id}_sprite.webvtt"
         thumbnail_dir = output_dir / "frames"
 
-        # Create frames directory
-        thumbnail_dir.mkdir(exist_ok=True)
-
         try:
-            # Generate sprites using msprites2 (the forked library)
-            MontageSprites.from_media(
-                video_path=str(video_path),
-                thumbnail_dir=str(thumbnail_dir),
-                sprite_file=str(sprite_file),
-                webvtt_file=str(webvtt_file),
-                # Optional parameters - can be made configurable
-                interval=self.config.sprite_interval,
-                width=160,  # Individual thumbnail width
-                height=90,  # Individual thumbnail height
-                columns=10,  # Thumbnails per row in sprite
+            # Use our fixed sprite generator
+            sprite_path, webvtt_path = FixedSpriteGenerator.create_sprite_sheet(
+                video_path=video_path,
+                thumbnail_dir=thumbnail_dir,
+                sprite_file=sprite_file,
+                webvtt_file=webvtt_file,
+                ips=float(self.config.sprite_interval),  # seconds between frames, as FixedSpriteGenerator expects
+                width=160,
+                height=90,
+                cols=10,
+                rows=10,
+                cleanup=True,
             )
         except Exception as e:
             raise EncodingError(f"Sprite generation failed: {e}") from e
 
-        if not sprite_file.exists():
+        if not sprite_path.exists():
             raise EncodingError("Sprite generation failed - sprite file not created")
 
-        if not webvtt_file.exists():
+        if not webvtt_path.exists():
             raise EncodingError("Sprite generation failed - WebVTT file not created")
 
-        # Clean up temporary frames directory
-        self._cleanup_frames_directory(thumbnail_dir)
-
-        return sprite_file, webvtt_file
-
-    def _cleanup_frames_directory(self, frames_dir: Path) -> None:
-        """Clean up temporary frame files."""
-        try:
-            if frames_dir.exists():
-                for frame_file in frames_dir.iterdir():
-                    if frame_file.is_file():
-                        frame_file.unlink()
-                frames_dir.rmdir()
-        except Exception:
-            # Don't fail the entire process if cleanup fails
-            pass
+        return sprite_path, webvtt_path
diff --git a/src/video_processor/core/thumbnails_360.py b/src/video_processor/core/thumbnails_360.py
index 5f73857..30437de 100644
--- a/src/video_processor/core/thumbnails_360.py
+++ b/src/video_processor/core/thumbnails_360.py
@@ -13,7 +13,8 @@ from ..exceptions import EncodingError, FFmpegError
 try:
     import cv2
     import numpy as np
-    from ..utils.video_360 import ProjectionType, Video360Utils, HAS_360_SUPPORT
+
+    from ..utils.video_360 import HAS_360_SUPPORT, ProjectionType, Video360Utils
 except ImportError:
     # Fallback types when dependencies not available
     ProjectionType = str
@@ -27,7 +28,7 @@ class Thumbnail360Generator:
 
     def __init__(self, config: ProcessorConfig) -> None:
         self.config = config
-
+
         if not HAS_360_SUPPORT:
             raise ImportError(
                 "360° thumbnail generation requires optional dependencies. 
" @@ -61,30 +62,30 @@ class Thumbnail360Generator: viewing_angles = self.config.thumbnail_360_projections thumbnails = {} - + # First extract a full equirectangular frame equirect_frame = self._extract_equirectangular_frame( video_path, timestamp, output_dir, video_id ) - + try: # Load the equirectangular image equirect_img = cv2.imread(str(equirect_frame)) if equirect_img is None: raise EncodingError(f"Failed to load equirectangular frame: {equirect_frame}") - + # Generate thumbnails for each viewing angle for angle in viewing_angles: thumbnail_path = self._generate_angle_thumbnail( equirect_img, angle, output_dir, video_id, timestamp ) thumbnails[angle] = thumbnail_path - + finally: # Clean up temporary equirectangular frame if equirect_frame.exists(): equirect_frame.unlink() - + return thumbnails def _extract_equirectangular_frame( @@ -92,7 +93,7 @@ class Thumbnail360Generator: ) -> Path: """Extract a full equirectangular frame from the 360° video.""" temp_frame = output_dir / f"{video_id}_temp_equirect_{timestamp}.jpg" - + try: # Get video info probe = ffmpeg.probe(str(video_path)) @@ -100,15 +101,15 @@ class Thumbnail360Generator: stream for stream in probe["streams"] if stream["codec_type"] == "video" ) - + width = video_stream["width"] height = video_stream["height"] duration = float(video_stream.get("duration", 0)) - + # Adjust timestamp if beyond video duration if timestamp >= duration: timestamp = max(1, int(duration // 2)) - + # Extract full resolution frame ( ffmpeg.input(str(video_path), ss=timestamp) @@ -117,14 +118,14 @@ class Thumbnail360Generator: .overwrite_output() .run(capture_stdout=True, capture_stderr=True, quiet=True) ) - + except ffmpeg.Error as e: error_msg = e.stderr.decode() if e.stderr else "Unknown FFmpeg error" raise FFmpegError(f"Frame extraction failed: {error_msg}") from e - + if not temp_frame.exists(): raise EncodingError("Frame extraction failed - output file not created") - + return temp_frame def _generate_angle_thumbnail( @@ -137,17 +138,17 @@ class Thumbnail360Generator: ) -> Path: """Generate thumbnail for a specific viewing angle.""" output_path = output_dir / f"{video_id}_360_{viewing_angle}_{timestamp}.jpg" - + if viewing_angle == "stereographic": # Generate "little planet" stereographic projection thumbnail = self._create_stereographic_projection(equirect_img) else: # Generate perspective projection for the viewing angle thumbnail = self._create_perspective_projection(equirect_img, viewing_angle) - + # Save thumbnail cv2.imwrite(str(output_path), thumbnail, [cv2.IMWRITE_JPEG_QUALITY, 85]) - + return output_path def _create_perspective_projection( @@ -155,7 +156,7 @@ class Thumbnail360Generator: ) -> "np.ndarray": """Create perspective projection for a viewing angle.""" height, width = equirect_img.shape[:2] - + # Define viewing directions (yaw, pitch) in radians viewing_directions = { "front": (0, 0), @@ -165,68 +166,68 @@ class Thumbnail360Generator: "up": (0, math.pi/2), "down": (0, -math.pi/2), } - + if viewing_angle not in viewing_directions: viewing_angle = "front" - + yaw, pitch = viewing_directions[viewing_angle] - + # Generate perspective view thumbnail_size = self.config.thumbnail_width fov = math.pi / 3 # 60 degrees field of view - + # Create coordinate maps for perspective projection u_map, v_map = self._create_perspective_maps( thumbnail_size, thumbnail_size, fov, yaw, pitch, width, height ) - + # Apply remapping thumbnail = cv2.remap(equirect_img, u_map, v_map, cv2.INTER_LINEAR) - + return thumbnail def 
_create_stereographic_projection(self, equirect_img: "np.ndarray") -> "np.ndarray": """Create stereographic 'little planet' projection.""" height, width = equirect_img.shape[:2] - + # Output size for stereographic projection output_size = self.config.thumbnail_width - + # Create coordinate maps for stereographic projection y_coords, x_coords = np.mgrid[0:output_size, 0:output_size] - + # Convert to centered coordinates x_centered = (x_coords - output_size // 2) / (output_size // 2) y_centered = (y_coords - output_size // 2) / (output_size // 2) - + # Calculate distance from center r = np.sqrt(x_centered**2 + y_centered**2) - + # Create mask for circular boundary mask = r <= 1.0 - + # Convert to spherical coordinates for stereographic projection theta = np.arctan2(y_centered, x_centered) phi = 2 * np.arctan(r) - + # Convert to equirectangular coordinates u = (theta + np.pi) / (2 * np.pi) * width v = (np.pi/2 - phi) / np.pi * height - + # Clamp coordinates u = np.clip(u, 0, width - 1) v = np.clip(v, 0, height - 1) - + # Create maps for remapping u_map = u.astype(np.float32) v_map = v.astype(np.float32) - + # Apply remapping thumbnail = cv2.remap(equirect_img, u_map, v_map, cv2.INTER_LINEAR) - + # Apply circular mask thumbnail[~mask] = [0, 0, 0] # Black background - + return thumbnail def _create_perspective_maps( @@ -242,48 +243,48 @@ class Thumbnail360Generator: """Create coordinate mapping for perspective projection.""" # Create output coordinate grids y_coords, x_coords = np.mgrid[0:out_height, 0:out_width] - + # Convert to normalized device coordinates [-1, 1] x_ndc = (x_coords - out_width / 2) / (out_width / 2) y_ndc = (y_coords - out_height / 2) / (out_height / 2) - + # Apply perspective projection focal_length = 1.0 / math.tan(fov / 2) - + # Create 3D ray directions x_3d = x_ndc / focal_length y_3d = y_ndc / focal_length z_3d = np.ones_like(x_3d) - + # Normalize ray directions ray_length = np.sqrt(x_3d**2 + y_3d**2 + z_3d**2) x_3d /= ray_length y_3d /= ray_length z_3d /= ray_length - + # Apply rotation for viewing direction # Rotate by yaw (around Y axis) cos_yaw, sin_yaw = math.cos(yaw), math.sin(yaw) x_rot = x_3d * cos_yaw - z_3d * sin_yaw z_rot = x_3d * sin_yaw + z_3d * cos_yaw - + # Rotate by pitch (around X axis) cos_pitch, sin_pitch = math.cos(pitch), math.sin(pitch) y_rot = y_3d * cos_pitch - z_rot * sin_pitch z_final = y_3d * sin_pitch + z_rot * cos_pitch - + # Convert 3D coordinates to spherical theta = np.arctan2(x_rot, z_final) phi = np.arcsin(np.clip(y_rot, -1, 1)) - + # Convert spherical to equirectangular coordinates u = (theta + np.pi) / (2 * np.pi) * equirect_width v = (np.pi/2 - phi) / np.pi * equirect_height - + # Clamp to image boundaries u = np.clip(u, 0, equirect_width - 1) v = np.clip(v, 0, equirect_height - 1) - + return u.astype(np.float32), v.astype(np.float32) def generate_360_sprite_thumbnails( @@ -310,19 +311,19 @@ class Thumbnail360Generator: sprite_file = output_dir / f"{video_id}_360_{viewing_angle}_sprite.jpg" webvtt_file = output_dir / f"{video_id}_360_{viewing_angle}_sprite.webvtt" frames_dir = output_dir / "frames_360" - + # Create frames directory frames_dir.mkdir(exist_ok=True) - + try: # Get video duration probe = ffmpeg.probe(str(video_path)) duration = float(probe["format"]["duration"]) - + # Generate frames at specified intervals interval = self.config.sprite_interval timestamps = list(range(0, int(duration), interval)) - + frame_paths = [] for i, timestamp in enumerate(timestamps): # Generate 360° thumbnail for this timestamp @@ -330,16 
+331,16 @@ class Thumbnail360Generator: video_path, frames_dir, timestamp, f"{video_id}_frame_{i}", projection_type, [viewing_angle] ) - + if viewing_angle in thumbnails: frame_paths.append(thumbnails[viewing_angle]) - + # Create sprite sheet from frames if frame_paths: self._create_sprite_sheet(frame_paths, sprite_file, timestamps, webvtt_file) - + return sprite_file, webvtt_file - + finally: # Clean up frame files if frames_dir.exists(): @@ -358,58 +359,58 @@ class Thumbnail360Generator: """Create sprite sheet from individual frames.""" if not frame_paths: raise EncodingError("No frames available for sprite sheet creation") - + # Load first frame to get dimensions first_frame = cv2.imread(str(frame_paths[0])) if first_frame is None: raise EncodingError(f"Failed to load first frame: {frame_paths[0]}") - + frame_height, frame_width = first_frame.shape[:2] - + # Calculate sprite sheet layout cols = 10 # 10 thumbnails per row rows = math.ceil(len(frame_paths) / cols) - + sprite_width = cols * frame_width sprite_height = rows * frame_height - + # Create sprite sheet sprite_img = np.zeros((sprite_height, sprite_width, 3), dtype=np.uint8) - + # Create WebVTT content webvtt_content = ["WEBVTT", ""] - + # Place frames in sprite sheet and create WebVTT entries - for i, (frame_path, timestamp) in enumerate(zip(frame_paths, timestamps)): + for i, (frame_path, timestamp) in enumerate(zip(frame_paths, timestamps, strict=False)): frame = cv2.imread(str(frame_path)) if frame is None: continue - + # Calculate position in sprite col = i % cols row = i // cols - + x_start = col * frame_width y_start = row * frame_height x_end = x_start + frame_width y_end = y_start + frame_height - + # Place frame in sprite sprite_img[y_start:y_end, x_start:x_end] = frame - + # Create WebVTT entry start_time = f"{timestamp//3600:02d}:{(timestamp%3600)//60:02d}:{timestamp%60:02d}.000" end_time = f"{(timestamp+1)//3600:02d}:{((timestamp+1)%3600)//60:02d}:{(timestamp+1)%60:02d}.000" - + webvtt_content.extend([ f"{start_time} --> {end_time}", f"{sprite_file.name}#xywh={x_start},{y_start},{frame_width},{frame_height}", "" ]) - + # Save sprite sheet cv2.imwrite(str(sprite_file), sprite_img, [cv2.IMWRITE_JPEG_QUALITY, 85]) - + # Save WebVTT file with open(webvtt_file, 'w') as f: - f.write('\n'.join(webvtt_content)) \ No newline at end of file + f.write('\n'.join(webvtt_content)) diff --git a/src/video_processor/tasks/compat.py b/src/video_processor/tasks/compat.py new file mode 100644 index 0000000..f8cc989 --- /dev/null +++ b/src/video_processor/tasks/compat.py @@ -0,0 +1,190 @@ +""" +Procrastinate version compatibility layer. + +This module provides compatibility between Procrastinate 2.x and 3.x versions, +allowing the codebase to work with both versions during the migration period. +""" + +from typing import Any + +import procrastinate + + +def get_procrastinate_version() -> tuple[int, int, int]: + """Get the current Procrastinate version.""" + version_str = procrastinate.__version__ + # Handle version strings like "3.0.0", "3.0.0a1", etc. 
+    version_parts = version_str.split('.')
+    major = int(version_parts[0])
+    minor = int(version_parts[1])
+    # Handle patch versions with alpha/beta suffixes
+    patch_str = version_parts[2] if len(version_parts) > 2 else "0"
+    patch = int(''.join(c for c in patch_str if c.isdigit()) or "0")
+    return (major, minor, patch)
+
+
+# Check Procrastinate version for compatibility
+PROCRASTINATE_VERSION = get_procrastinate_version()
+IS_PROCRASTINATE_3_PLUS = PROCRASTINATE_VERSION[0] >= 3
+
+
+def get_connector_class():
+    """Get the appropriate connector class based on Procrastinate version."""
+    if IS_PROCRASTINATE_3_PLUS:
+        # Procrastinate 3.x
+        try:
+            from procrastinate import PsycopgConnector
+            return PsycopgConnector
+        except ImportError:
+            # Fall back to AiopgConnector if PsycopgConnector is not available;
+            # in 3.x the aiopg connector lives in a contrib package
+            from procrastinate.contrib.aiopg import AiopgConnector
+            return AiopgConnector
+    else:
+        # Procrastinate 2.x
+        from procrastinate import AiopgConnector
+        return AiopgConnector
+
+
+def create_connector(database_url: str, **kwargs):
+    """Create a database connector compatible with the current Procrastinate version."""
+    connector_class = get_connector_class()
+
+    if IS_PROCRASTINATE_3_PLUS:
+        # Procrastinate 3.x uses different parameter names
+        if connector_class.__name__ == "PsycopgConnector":
+            # PsycopgConnector uses 'conninfo' (preferred in 3.5.x); remaining
+            # kwargs are forwarded to the psycopg_pool connection pool, which
+            # expects 'min_size'/'max_size' rather than 'pool_size'/'max_pool_size'
+            default_kwargs = {
+                "min_size": 10,
+                "max_size": 20,
+            }
+            default_kwargs.update(kwargs)
+            return connector_class(conninfo=database_url, **default_kwargs)
+        else:
+            # AiopgConnector fallback
+            return connector_class(conninfo=database_url, **kwargs)
+    else:
+        # Procrastinate 2.x (legacy support)
+        return connector_class(conninfo=database_url, **kwargs)
+
+
+def create_app_with_connector(database_url: str, **connector_kwargs) -> procrastinate.App:
+    """Create a Procrastinate App with the appropriate connector."""
+    connector = create_connector(database_url, **connector_kwargs)
+    return procrastinate.App(connector=connector)
+
+
+class CompatJobContext:
+    """
+    Job context compatibility wrapper to handle differences between versions.
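+
+    Wraps the raw context a task receives so that should_abort() /
+    should_abort_async() can be called the same way on both 2.x and 3.x;
+    unknown attributes are delegated to the wrapped context.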
+ """ + + def __init__(self, job_context): + self._context = job_context + self._version = PROCRASTINATE_VERSION + + def should_abort(self) -> bool: + """Check if the job should abort (compatible across versions).""" + if IS_PROCRASTINATE_3_PLUS: + # Procrastinate 3.x + return self._context.should_abort() + else: + # Procrastinate 2.x + if hasattr(self._context, 'should_abort'): + return self._context.should_abort() + else: + # Fallback for older versions + return False + + async def should_abort_async(self) -> bool: + """Check if the job should abort asynchronously.""" + if IS_PROCRASTINATE_3_PLUS: + # In 3.x, should_abort() works for both sync and async + return self.should_abort() + else: + # Procrastinate 2.x + if hasattr(self._context, 'should_abort_async'): + return await self._context.should_abort_async() + else: + return self.should_abort() + + @property + def job(self): + """Access the job object.""" + return self._context.job + + @property + def task(self): + """Access the task object.""" + return self._context.task + + def __getattr__(self, name): + """Delegate other attributes to the wrapped context.""" + return getattr(self._context, name) + + +def get_migration_commands() -> dict[str, str]: + """Get migration commands for the current Procrastinate version.""" + if IS_PROCRASTINATE_3_PLUS: + return { + "pre_migrate": "procrastinate schema --apply --mode=pre", + "post_migrate": "procrastinate schema --apply --mode=post", + "check": "procrastinate schema --check", + } + else: + return { + "migrate": "procrastinate schema --apply", + "check": "procrastinate schema --check", + } + + +def get_worker_options_mapping() -> dict[str, str]: + """Get mapping of worker options between versions.""" + if IS_PROCRASTINATE_3_PLUS: + return { + "timeout": "fetch_job_polling_interval", # Renamed in 3.x + "remove_error": "remove_failed", # Renamed in 3.x + "include_error": "include_failed", # Renamed in 3.x + } + else: + return { + "timeout": "timeout", + "remove_error": "remove_error", + "include_error": "include_error", + } + + +def normalize_worker_kwargs(**kwargs) -> dict[str, Any]: + """Normalize worker keyword arguments for the current version.""" + mapping = get_worker_options_mapping() + normalized = {} + + for key, value in kwargs.items(): + # Map old names to new names if needed + normalized_key = mapping.get(key, key) + normalized[normalized_key] = value + + return normalized + + +# Version-specific feature flags +FEATURES = { + "graceful_shutdown": IS_PROCRASTINATE_3_PLUS, + "job_cancellation": IS_PROCRASTINATE_3_PLUS, + "pre_post_migrations": IS_PROCRASTINATE_3_PLUS, + "psycopg3_support": IS_PROCRASTINATE_3_PLUS, + "improved_performance": PROCRASTINATE_VERSION >= (3, 5, 0), # Performance improvements in 3.5+ + "schema_compatibility": PROCRASTINATE_VERSION >= (3, 5, 2), # Better schema support in 3.5.2 + "enhanced_indexing": PROCRASTINATE_VERSION >= (3, 5, 0), # Improved indexes in 3.5+ +} + + +def get_version_info() -> dict[str, Any]: + """Get version and feature information.""" + return { + "procrastinate_version": procrastinate.__version__, + "version_tuple": PROCRASTINATE_VERSION, + "is_v3_plus": IS_PROCRASTINATE_3_PLUS, + "features": FEATURES, + "migration_commands": get_migration_commands(), + } diff --git a/src/video_processor/tasks/migration.py b/src/video_processor/tasks/migration.py new file mode 100644 index 0000000..6afac17 --- /dev/null +++ b/src/video_processor/tasks/migration.py @@ -0,0 +1,253 @@ +""" +Procrastinate migration utilities for upgrading from 2.x to 3.x. 
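+
+For example (the connection URL here is a placeholder):
+
+    helper = ProcrastinateMigrationHelper("postgresql://localhost/appdb")
+    helper.print_migration_plan()
+    helper.check_schema()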
+
+This module provides utilities to help with database migrations and
+version compatibility during the upgrade process.
+"""
+
+import logging
+import os
+import subprocess
+
+from .compat import (
+    IS_PROCRASTINATE_3_PLUS,
+    get_migration_commands,
+    get_version_info,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class ProcrastinateMigrationHelper:
+    """Helper class for managing Procrastinate migrations."""
+
+    def __init__(self, database_url: str):
+        self.database_url = database_url
+        self.version_info = get_version_info()
+
+    def get_migration_steps(self) -> list[str]:
+        """Get the migration steps for the current version."""
+        commands = get_migration_commands()
+
+        if IS_PROCRASTINATE_3_PLUS:
+            return [
+                "1. Apply pre-migrations before deploying new code",
+                f"   Command: {commands['pre_migrate']}",
+                "2. Deploy new application code",
+                "3. Apply post-migrations after deployment",
+                f"   Command: {commands['post_migrate']}",
+                "4. Verify schema is current",
+                f"   Command: {commands['check']}",
+            ]
+        else:
+            return [
+                "1. Apply database migrations",
+                f"   Command: {commands['migrate']}",
+                "2. Verify schema is current",
+                f"   Command: {commands['check']}",
+            ]
+
+    def print_migration_plan(self) -> None:
+        """Print the migration plan for the current version."""
+        print(f"Procrastinate Migration Plan (v{self.version_info['procrastinate_version']})")
+        print("=" * 60)
+
+        for step in self.get_migration_steps():
+            print(step)
+
+        print("\nVersion Info:")
+        print(f"  Current Version: {self.version_info['procrastinate_version']}")
+        print(f"  Is 3.x+: {self.version_info['is_v3_plus']}")
+        print(f"  Features Available: {list(self.version_info['features'].keys())}")
+
+    def run_migration_command(self, command: str) -> bool:
+        """
+        Run a migration command.
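+
+        The command is executed via subprocess.run with
+        PROCRASTINATE_DATABASE_URL exported in the child environment so the
+        procrastinate CLI can reach the target database.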
+
+        Args:
+            command: The command to run
+
+        Returns:
+            True if successful, False otherwise
+        """
+        try:
+            logger.info(f"Running migration command: {command}")
+
+            # Set environment variable for database URL
+            env = {"PROCRASTINATE_DATABASE_URL": self.database_url}
+
+            result = subprocess.run(
+                command.split(),
+                env={**os.environ, **env},
+                capture_output=True,
+                text=True,
+                check=True
+            )
+
+            if result.stdout:
+                logger.info(f"Migration output: {result.stdout}")
+
+            logger.info("Migration command completed successfully")
+            return True
+
+        except subprocess.CalledProcessError as e:
+            logger.error(f"Migration command failed: {e}")
+            if e.stdout:
+                logger.error(f"stdout: {e.stdout}")
+            if e.stderr:
+                logger.error(f"stderr: {e.stderr}")
+            return False
+
+    def apply_pre_migration(self) -> bool:
+        """Apply pre-migration for Procrastinate 3.x."""
+        if not IS_PROCRASTINATE_3_PLUS:
+            logger.warning("Pre-migration only applicable to Procrastinate 3.x+")
+            return True
+
+        commands = get_migration_commands()
+        return self.run_migration_command(commands["pre_migrate"])
+
+    def apply_post_migration(self) -> bool:
+        """Apply post-migration for Procrastinate 3.x."""
+        if not IS_PROCRASTINATE_3_PLUS:
+            logger.warning("Post-migration only applicable to Procrastinate 3.x+")
+            return True
+
+        commands = get_migration_commands()
+        return self.run_migration_command(commands["post_migrate"])
+
+    def apply_legacy_migration(self) -> bool:
+        """Apply legacy migration for Procrastinate 2.x."""
+        if IS_PROCRASTINATE_3_PLUS:
+            logger.warning("Legacy migration only applicable to Procrastinate 2.x")
+            return True
+
+        commands = get_migration_commands()
+        return self.run_migration_command(commands["migrate"])
+
+    def check_schema(self) -> bool:
+        """Check if the database schema is current."""
+        commands = get_migration_commands()
+        return self.run_migration_command(commands["check"])
+
+
+async def migrate_database(
+    database_url: str,
+    pre_migration_only: bool = False,
+    post_migration_only: bool = False,
+) -> bool:
+    """
+    Migrate the Procrastinate database schema.
+
+    Args:
+        database_url: Database connection string
+        pre_migration_only: Only apply pre-migration (for 3.x)
+        post_migration_only: Only apply post-migration (for 3.x)
+
+    Returns:
+        True if successful, False otherwise
+    """
+    helper = ProcrastinateMigrationHelper(database_url)
+
+    logger.info("Starting Procrastinate database migration")
+    helper.print_migration_plan()
+
+    try:
+        if IS_PROCRASTINATE_3_PLUS:
+            # Procrastinate 3.x migration process
+            if pre_migration_only:
+                success = helper.apply_pre_migration()
+            elif post_migration_only:
+                success = helper.apply_post_migration()
+            else:
+                # Apply both pre and post migrations
+                logger.warning(
+                    "Applying both pre and post migrations. "
+                    "In production, these should be run separately!"
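+                    # (Procrastinate 3.x convention: pre-migrations run before
+                    # the new code is deployed, post-migrations after it is live)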
+ ) + success = ( + helper.apply_pre_migration() and + helper.apply_post_migration() + ) + else: + # Procrastinate 2.x migration process + success = helper.apply_legacy_migration() + + if success: + # Verify schema is current + success = helper.check_schema() + + if success: + logger.info("Database migration completed successfully") + else: + logger.error("Database migration failed") + + return success + + except Exception as e: + logger.error(f"Migration error: {e}") + return False + + +def create_migration_script() -> str: + """Create a migration script for the current environment.""" + version_info = get_version_info() + + script = f"""#!/usr/bin/env python3 +\"\"\" +Procrastinate migration script for version {version_info['procrastinate_version']} + +This script helps migrate your Procrastinate database schema. +\"\"\" + +import asyncio +import os +import sys + +# Add the project root to Python path +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from video_processor.tasks.migration import migrate_database + + +async def main(): + database_url = os.environ.get( + 'PROCRASTINATE_DATABASE_URL', + 'postgresql://localhost/procrastinate_dev' + ) + + print(f"Migrating database: {{database_url}}") + + # Parse command line arguments + pre_only = '--pre' in sys.argv + post_only = '--post' in sys.argv + + success = await migrate_database( + database_url=database_url, + pre_migration_only=pre_only, + post_migration_only=post_only, + ) + + if not success: + print("Migration failed!") + sys.exit(1) + + print("Migration completed successfully!") + + +if __name__ == "__main__": + asyncio.run(main()) +""" + + return script + + +if __name__ == "__main__": + # Generate migration script when run directly + script_content = create_migration_script() + + with open("migrate_procrastinate.py", "w") as f: + f.write(script_content) + + print("Generated migration script: migrate_procrastinate.py") + print("Run with: python migrate_procrastinate.py [--pre|--post]") diff --git a/src/video_processor/tasks/procrastinate_tasks.py b/src/video_processor/tasks/procrastinate_tasks.py index 713f3ac..00ba28c 100644 --- a/src/video_processor/tasks/procrastinate_tasks.py +++ b/src/video_processor/tasks/procrastinate_tasks.py @@ -8,6 +8,11 @@ from procrastinate import App from ..config import ProcessorConfig from ..core.processor import VideoProcessor from ..exceptions import VideoProcessorError +from .compat import ( + create_app_with_connector, + get_version_info, + normalize_worker_kwargs, +) logger = logging.getLogger(__name__) @@ -15,24 +20,45 @@ logger = logging.getLogger(__name__) app = App(connector=None) # Connector will be set during setup -def setup_procrastinate(database_url: str) -> App: +def setup_procrastinate( + database_url: str, + connector_kwargs: dict | None = None, +) -> App: """ Set up Procrastinate with database connection. Args: database_url: PostgreSQL connection string + connector_kwargs: Additional connector configuration Returns: Configured Procrastinate app """ - from procrastinate import AiopgConnector + connector_kwargs = connector_kwargs or {} - connector = AiopgConnector(conninfo=database_url) - app.connector = connector + # Use compatibility layer to create app with appropriate connector + configured_app = create_app_with_connector(database_url, **connector_kwargs) + # Update the global app instance + app.connector = configured_app.connector + + logger.info(f"Procrastinate setup complete. 
Version info: {get_version_info()}")
     return app
 
 
+def get_worker_kwargs(**kwargs) -> dict:
+    """
+    Get normalized worker kwargs for the current Procrastinate version.
+
+    Args:
+        **kwargs: Worker configuration options
+
+    Returns:
+        Normalized kwargs for the current version
+    """
+    return normalize_worker_kwargs(**kwargs)
+
+
 @app.task(queue="video_processing")
 def process_video_async(
     input_path: str,
diff --git a/src/video_processor/tasks/worker_compatibility.py b/src/video_processor/tasks/worker_compatibility.py
new file mode 100644
index 0000000..a48579e
--- /dev/null
+++ b/src/video_processor/tasks/worker_compatibility.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+"""
+Worker compatibility module for Procrastinate 2.x and 3.x.
+
+Provides a unified worker interface that works across different Procrastinate versions.
+"""
+
+import asyncio
+import logging
+import os
+import sys
+from typing import Optional
+
+from .compat import (
+    create_app_with_connector,
+    get_version_info,
+    normalize_worker_kwargs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def setup_worker_app(database_url: str, connector_kwargs: Optional[dict] = None):
+    """Set up Procrastinate app for worker usage."""
+    connector_kwargs = connector_kwargs or {}
+
+    # Create app with proper connector
+    app = create_app_with_connector(database_url, **connector_kwargs)
+
+    # Import tasks to register them
+    from . import procrastinate_tasks  # noqa: F401
+
+    logger.info(f"Worker app setup complete. {get_version_info()}")
+    return app
+
+
+async def run_worker_async(
+    database_url: str,
+    queues: Optional[list[str]] = None,
+    concurrency: int = 1,
+    **worker_kwargs,
+):
+    """Run Procrastinate worker with version compatibility."""
+    logger.info(f"Starting Procrastinate worker (v{get_version_info()['procrastinate_version']})")
+
+    # Set up the app
+    app = setup_worker_app(database_url)
+
+    # Normalize worker options for the installed version
+    mapped_options = normalize_worker_kwargs(**worker_kwargs)
+
+    # Default queues
+    if queues is None:
+        queues = ["video_processing", "thumbnail_generation", "default"]
+
+    logger.info(f"Worker config: queues={queues}, concurrency={concurrency}")
+    logger.info(f"Worker options: {mapped_options}")
+
+    try:
+        # App.run_worker_async is available in both 2.x and 3.x; the
+        # version-specific option names are handled by normalize_worker_kwargs
+        async with app.open_async():
+            await app.run_worker_async(
+                queues=queues,
+                concurrency=concurrency,
+                **mapped_options,
+            )
+
+    except KeyboardInterrupt:
+        logger.info("Worker stopped by user")
+    except Exception as e:
+        logger.error(f"Worker error: {e}")
+        raise
+
+
+def run_worker_sync(
+    database_url: str,
+    queues: Optional[list[str]] = None,
+    concurrency: int = 1,
+    **worker_kwargs,
+):
+    """Synchronous wrapper for running the worker."""
+    try:
+        asyncio.run(
+            run_worker_async(
+                database_url=database_url,
+                queues=queues,
+                concurrency=concurrency,
+                **worker_kwargs,
+            )
+        )
+    except KeyboardInterrupt:
+        logger.info("Worker interrupted")
+        sys.exit(0)
+
+
+def main():
+    """Main entry point for worker CLI."""
+    import argparse
+
+    parser = argparse.ArgumentParser(description="Procrastinate Worker")
+    parser.add_argument("command", choices=["worker"], help="Command to run")
+    parser.add_argument(
+        "--database-url",
+        default=os.environ.get("PROCRASTINATE_DATABASE_URL"),
+        help="Database URL",
+    )
+    
parser.add_argument( + "--queues", + nargs="*", + default=["video_processing", "thumbnail_generation", "default"], + help="Queue names to process", + ) + parser.add_argument( + "--concurrency", + type=int, + default=int(os.environ.get("WORKER_CONCURRENCY", "1")), + help="Worker concurrency", + ) + parser.add_argument( + "--timeout", + type=int, + default=int(os.environ.get("WORKER_TIMEOUT", "300")), + help="Worker timeout (maps to fetch_job_polling_interval in 3.x)", + ) + + args = parser.parse_args() + + if not args.database_url: + logger.error("Database URL is required (--database-url or PROCRASTINATE_DATABASE_URL)") + sys.exit(1) + + logger.info(f"Starting {args.command} with database: {args.database_url}") + + if args.command == "worker": + run_worker_sync( + database_url=args.database_url, + queues=args.queues, + concurrency=args.concurrency, + timeout=args.timeout, + ) + + +if __name__ == "__main__": + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + main() \ No newline at end of file diff --git a/src/video_processor/utils/sprite_generator.py b/src/video_processor/utils/sprite_generator.py new file mode 100644 index 0000000..e2ef2a5 --- /dev/null +++ b/src/video_processor/utils/sprite_generator.py @@ -0,0 +1,184 @@ +"""Custom sprite generator that fixes msprites2 ImageMagick compatibility issues.""" + +import logging +import os +import subprocess +import time +from pathlib import Path + +logger = logging.getLogger(__name__) + + +class FixedSpriteGenerator: + """Fixed sprite generator with proper ImageMagick compatibility.""" + + def __init__( + self, + video_path: str | Path, + thumbnail_dir: str | Path, + ips: float = 1.0, + width: int = 160, + height: int = 90, + cols: int = 10, + rows: int = 10, + ): + self.video_path = str(video_path) + self.thumbnail_dir = str(thumbnail_dir) + self.ips = ips + self.width = width + self.height = height + self.cols = cols + self.rows = rows + self.filename_format = "%04d.jpg" + + # Create thumbnail directory if it doesn't exist + Path(self.thumbnail_dir).mkdir(parents=True, exist_ok=True) + + def generate_thumbnails(self) -> None: + """Generate individual thumbnail frames using ffmpeg.""" + output_pattern = os.path.join(self.thumbnail_dir, self.filename_format) + + # Use ffmpeg to extract thumbnails + cmd = [ + "ffmpeg", "-loglevel", "error", "-i", self.video_path, + "-r", f"1/{self.ips}", + "-vf", f"scale={self.width}:{self.height}", + "-y", # Overwrite existing files + output_pattern + ] + + logger.debug(f"Generating thumbnails with: {' '.join(cmd)}") + result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + + if result.returncode != 0: + raise RuntimeError(f"FFmpeg failed: {result.stderr}") + + def generate_sprite(self, sprite_file: str | Path) -> Path: + """Generate sprite sheet using ImageMagick montage.""" + sprite_file = Path(sprite_file) + + # Count available thumbnails + thumbnail_files = list(Path(self.thumbnail_dir).glob("*.jpg")) + if not thumbnail_files: + raise RuntimeError("No thumbnail files found to create sprite") + + # Sort thumbnails by name to ensure correct order + thumbnail_files.sort() + + # Limit number of thumbnails to avoid command line length issues + max_thumbnails = min(len(thumbnail_files), 100) # Limit to 100 thumbnails + thumbnail_files = thumbnail_files[:max_thumbnails] + + # Build montage command with correct syntax + cmd = [ + "magick", "montage", + "-background", "#336699", + "-tile", f"{self.cols}x{self.rows}", + 
"-geometry", f"{self.width}x{self.height}+0+0", + ] + + # Add thumbnail files + cmd.extend(str(f) for f in thumbnail_files) + cmd.append(str(sprite_file)) + + logger.debug(f"Generating sprite with {len(thumbnail_files)} thumbnails: {sprite_file}") + result = subprocess.run(cmd, check=False) + + if result.returncode != 0: + raise RuntimeError(f"ImageMagick montage failed with return code {result.returncode}") + + return sprite_file + + def generate_webvtt(self, webvtt_file: str | Path, sprite_filename: str) -> Path: + """Generate WebVTT file for seekbar thumbnails.""" + webvtt_file = Path(webvtt_file) + + # Count thumbnail files to determine timeline + thumbnail_files = list(Path(self.thumbnail_dir).glob("*.jpg")) + thumbnail_files.sort() + + content_lines = ["WEBVTT\n\n"] + + for i, _ in enumerate(thumbnail_files): + start_time = i * self.ips + end_time = (i + 1) * self.ips + + # Calculate position in sprite grid + row = i // self.cols + col = i % self.cols + x = col * self.width + y = row * self.height + + # Format timestamps + start_ts = self._seconds_to_timestamp(start_time) + end_ts = self._seconds_to_timestamp(end_time) + + content_lines.extend([ + f"{start_ts} --> {end_ts}\n", + f"{sprite_filename}#xywh={x},{y},{self.width},{self.height}\n\n" + ]) + + # Write WebVTT content + with open(webvtt_file, 'w') as f: + f.writelines(content_lines) + + return webvtt_file + + def _seconds_to_timestamp(self, seconds: float) -> str: + """Convert seconds to WebVTT timestamp format.""" + return time.strftime("%H:%M:%S", time.gmtime(seconds)) + + def cleanup_thumbnails(self) -> None: + """Remove temporary thumbnail files.""" + try: + thumbnail_files = list(Path(self.thumbnail_dir).glob("*.jpg")) + for thumb_file in thumbnail_files: + thumb_file.unlink() + + # Remove directory if empty + thumb_dir = Path(self.thumbnail_dir) + if thumb_dir.exists() and not any(thumb_dir.iterdir()): + thumb_dir.rmdir() + except Exception as e: + logger.warning(f"Failed to cleanup thumbnails: {e}") + + @classmethod + def create_sprite_sheet( + cls, + video_path: str | Path, + thumbnail_dir: str | Path, + sprite_file: str | Path, + webvtt_file: str | Path, + ips: float = 1.0, + width: int = 160, + height: int = 90, + cols: int = 10, + rows: int = 10, + cleanup: bool = True, + ) -> tuple[Path, Path]: + """ + Complete sprite sheet generation process. 
+ + Returns: + Tuple of (sprite_file_path, webvtt_file_path) + """ + generator = cls( + video_path=video_path, + thumbnail_dir=thumbnail_dir, + ips=ips, + width=width, + height=height, + cols=cols, + rows=rows, + ) + + # Generate components + generator.generate_thumbnails() + sprite_path = generator.generate_sprite(sprite_file) + webvtt_path = generator.generate_webvtt(webvtt_file, Path(sprite_file).name) + + # Cleanup temporary thumbnails if requested (but not the final sprite/webvtt) + if cleanup: + generator.cleanup_thumbnails() + + return sprite_path, webvtt_path diff --git a/src/video_processor/utils/video_360.py b/src/video_processor/utils/video_360.py index cf17be7..6b1dc8a 100644 --- a/src/video_processor/utils/video_360.py +++ b/src/video_processor/utils/video_360.py @@ -1,6 +1,5 @@ """360° video detection and utility functions.""" -from pathlib import Path from typing import Any, Literal # Optional dependency handling @@ -38,7 +37,7 @@ StereoMode = Literal["mono", "top-bottom", "left-right", "unknown"] class Video360Detection: """Utilities for detecting and analyzing 360° videos.""" - + @staticmethod def detect_360_video(video_metadata: dict[str, Any]) -> dict[str, Any]: """ @@ -57,7 +56,7 @@ class Video360Detection: "confidence": 0.0, "detection_methods": [], } - + # Check for spherical video metadata (Google/YouTube standard) spherical_metadata = Video360Detection._check_spherical_metadata(video_metadata) if spherical_metadata["found"]: @@ -68,7 +67,7 @@ class Video360Detection: "confidence": 1.0, }) detection_result["detection_methods"].append("spherical_metadata") - + # Check aspect ratio for equirectangular projection aspect_ratio_check = Video360Detection._check_aspect_ratio(video_metadata) if aspect_ratio_check["is_likely_360"]: @@ -79,7 +78,7 @@ class Video360Detection: "confidence": aspect_ratio_check["confidence"], }) detection_result["detection_methods"].append("aspect_ratio") - + # Check filename patterns filename_check = Video360Detection._check_filename_patterns(video_metadata) if filename_check["is_likely_360"]: @@ -90,9 +89,9 @@ class Video360Detection: "confidence": filename_check["confidence"], }) detection_result["detection_methods"].append("filename") - + return detection_result - + @staticmethod def _check_spherical_metadata(metadata: dict[str, Any]) -> dict[str, Any]: """Check for spherical video metadata tags.""" @@ -101,14 +100,14 @@ class Video360Detection: "projection_type": "equirectangular", "stereo_mode": "mono", } - + # Check format tags for spherical metadata format_tags = metadata.get("format", {}).get("tags", {}) - + # Google spherical video standard if "spherical" in format_tags: result["found"] = True - + # Check for specific spherical video tags spherical_indicators = [ "Spherical", @@ -117,11 +116,11 @@ class Video360Detection: "ProjectionType", "projection_type", ] - + for tag_name, tag_value in format_tags.items(): if any(indicator.lower() in tag_name.lower() for indicator in spherical_indicators): result["found"] = True - + # Determine projection type from metadata if isinstance(tag_value, str): tag_lower = tag_value.lower() @@ -129,7 +128,7 @@ class Video360Detection: result["projection_type"] = "equirectangular" elif "cubemap" in tag_lower: result["projection_type"] = "cubemap" - + # Check for stereo mode indicators stereo_indicators = ["StereoMode", "stereo_mode", "StereoscopicMode"] for tag_name, tag_value in format_tags.items(): @@ -140,9 +139,9 @@ class Video360Detection: result["stereo_mode"] = "top-bottom" elif "left-right" in 
tag_lower or "lr" in tag_lower: result["stereo_mode"] = "left-right" - + return result - + @staticmethod def _check_aspect_ratio(metadata: dict[str, Any]) -> dict[str, Any]: """Check if aspect ratio suggests 360° video.""" @@ -150,28 +149,28 @@ class Video360Detection: "is_likely_360": False, "confidence": 0.0, } - + video_info = metadata.get("video", {}) if not video_info: return result - + width = video_info.get("width", 0) height = video_info.get("height", 0) - + if width <= 0 or height <= 0: return result - + aspect_ratio = width / height - + # Equirectangular videos typically have 2:1 aspect ratio if 1.9 <= aspect_ratio <= 2.1: result["is_likely_360"] = True result["confidence"] = 0.8 - + # Higher confidence for exact 2:1 ratio if 1.98 <= aspect_ratio <= 2.02: result["confidence"] = 0.9 - + # Some 360° videos use different aspect ratios elif 1.5 <= aspect_ratio <= 2.5: # Common resolutions for 360° video @@ -182,16 +181,16 @@ class Video360Detection: (4096, 2048), # Cinema 4K 360° (5760, 2880), # 6K 360° ] - + for res_width, res_height in common_360_resolutions: if (width == res_width and height == res_height) or \ (width == res_height and height == res_width): result["is_likely_360"] = True result["confidence"] = 0.7 break - + return result - + @staticmethod def _check_filename_patterns(metadata: dict[str, Any]) -> dict[str, Any]: """Check filename for 360° indicators.""" @@ -200,31 +199,31 @@ class Video360Detection: "projection_type": "equirectangular", "confidence": 0.0, } - + filename = metadata.get("filename", "").lower() if not filename: return result - + # Common 360° filename patterns patterns_360 = [ - "360", "vr", "spherical", "equirectangular", + "360", "vr", "spherical", "equirectangular", "panoramic", "immersive", "omnidirectional" ] - + # Projection type patterns projection_patterns = { "equirectangular": ["equirect", "equi", "spherical"], "cubemap": ["cube", "cubemap", "cubic"], "cylindrical": ["cylindrical", "cylinder"], } - + # Check for 360° indicators for pattern in patterns_360: if pattern in filename: result["is_likely_360"] = True result["confidence"] = 0.6 break - + # Check for specific projection types if result["is_likely_360"]: for projection, patterns in projection_patterns.items(): @@ -232,13 +231,13 @@ class Video360Detection: result["projection_type"] = projection result["confidence"] = 0.7 break - + return result class Video360Utils: """Utility functions for 360° video processing.""" - + @staticmethod def get_recommended_bitrate_multiplier(projection_type: ProjectionType) -> float: """ @@ -260,9 +259,9 @@ class Video360Utils: "stereographic": 2.2, # Good balance "unknown": 2.0, # Safe default } - + return multipliers.get(projection_type, 2.0) - + @staticmethod def get_optimal_resolutions(projection_type: ProjectionType) -> list[tuple[int, int]]: """ @@ -290,29 +289,29 @@ class Video360Utils: (4096, 4096), # 4K per face ], } - + return resolutions.get(projection_type, resolutions["equirectangular"]) - + @staticmethod def is_360_library_available() -> bool: """Check if 360° processing libraries are available.""" return HAS_360_SUPPORT - + @staticmethod def get_missing_dependencies() -> list[str]: """Get list of missing dependencies for 360° processing.""" missing = [] - + if not HAS_OPENCV: missing.append("opencv-python") - + if not HAS_NUMPY: missing.append("numpy") - + if not HAS_PY360CONVERT: missing.append("py360convert") - + if not HAS_EXIFREAD: missing.append("exifread") - - return missing \ No newline at end of file + + return missing diff --git 
new file mode 100644
index 0000000..8dcb6d7
--- /dev/null
+++ b/tests/test_procrastinate_compat.py
@@ -0,0 +1,314 @@
+"""Tests for Procrastinate compatibility layer."""
+
+import pytest
+
+from video_processor.tasks.compat import (
+    CompatJobContext,
+    FEATURES,
+    IS_PROCRASTINATE_3_PLUS,
+    PROCRASTINATE_VERSION,
+    create_app_with_connector,
+    create_connector,
+    get_migration_commands,
+    get_procrastinate_version,
+    get_version_info,
+    get_worker_options_mapping,
+    normalize_worker_kwargs,
+)
+
+
+class TestProcrastinateVersionDetection:
+    """Test version detection functionality."""
+
+    def test_version_parsing(self):
+        """Test version string parsing."""
+        version = get_procrastinate_version()
+        assert isinstance(version, tuple)
+        assert len(version) == 3
+        assert all(isinstance(v, int) for v in version)
+        assert version[0] >= 2  # Should be at least version 2.x
+
+    def test_version_flags(self):
+        """Test version-specific flags."""
+        assert isinstance(IS_PROCRASTINATE_3_PLUS, bool)
+        assert isinstance(PROCRASTINATE_VERSION, tuple)
+
+        if PROCRASTINATE_VERSION[0] >= 3:
+            assert IS_PROCRASTINATE_3_PLUS is True
+        else:
+            assert IS_PROCRASTINATE_3_PLUS is False
+
+    def test_version_info(self):
+        """Test version info structure."""
+        info = get_version_info()
+
+        required_keys = {
+            "procrastinate_version",
+            "version_tuple",
+            "is_v3_plus",
+            "features",
+            "migration_commands",
+        }
+
+        assert set(info.keys()) == required_keys
+        assert isinstance(info["version_tuple"], tuple)
+        assert isinstance(info["is_v3_plus"], bool)
+        assert isinstance(info["features"], dict)
+        assert isinstance(info["migration_commands"], dict)
+
+    def test_features(self):
+        """Test feature flags."""
+        assert isinstance(FEATURES, dict)
+
+        expected_features = {
+            "graceful_shutdown",
+            "job_cancellation",
+            "pre_post_migrations",
+            "psycopg3_support",
+            "improved_performance",
+            "schema_compatibility",
+            "enhanced_indexing",
+        }
+
+        assert set(FEATURES.keys()) == expected_features
+        assert all(isinstance(v, bool) for v in FEATURES.values())
+
+
+class TestConnectorCreation:
+    """Test connector creation functionality."""
+
+    def test_connector_class_selection(self):
+        """Test that appropriate connector class is selected."""
+        from video_processor.tasks.compat import get_connector_class
+
+        connector_class = get_connector_class()
+        assert connector_class is not None
+        assert hasattr(connector_class, "__name__")
+
+        if IS_PROCRASTINATE_3_PLUS:
+            # Should prefer PsycopgConnector in 3.x
+            assert connector_class.__name__ in ["PsycopgConnector", "AiopgConnector"]
+        else:
+            assert connector_class.__name__ == "AiopgConnector"
+
+    def test_connector_creation(self):
+        """Test connector creation with various parameters."""
+        database_url = "postgresql://test:test@localhost/test"
+
+        # Test basic creation
+        connector = create_connector(database_url)
+        assert connector is not None
+
+        # Test with additional kwargs
+        connector_with_kwargs = create_connector(
+            database_url,
+            pool_size=5,
+            max_pool_size=10,
+        )
+        assert connector_with_kwargs is not None
+
+    def test_app_creation(self):
+        """Test Procrastinate app creation."""
+        database_url = "postgresql://test:test@localhost/test"
+
+        app = create_app_with_connector(database_url)
+        assert app is not None
+        assert hasattr(app, 'connector')
+        assert app.connector is not None
+
+
+class TestWorkerOptions:
+    """Test worker options compatibility."""
+
+    def test_option_mapping(self):
+        """Test worker option mapping between versions."""
+        mapping = get_worker_options_mapping()
+        assert isinstance(mapping, dict)
+
+        if IS_PROCRASTINATE_3_PLUS:
+            expected_mappings = {
+                "timeout": "fetch_job_polling_interval",
+                "remove_error": "remove_failed",
+                "include_error": "include_failed",
+            }
+            assert mapping == expected_mappings
+        else:
+            # In 2.x, mappings should be identity
+            assert mapping["timeout"] == "timeout"
+            assert mapping["remove_error"] == "remove_error"
+
+    def test_kwargs_normalization(self):
+        """Test worker kwargs normalization."""
+        test_kwargs = {
+            "concurrency": 4,
+            "timeout": 5,
+            "remove_error": True,
+            "include_error": False,
+            "name": "test-worker",
+        }
+
+        normalized = normalize_worker_kwargs(**test_kwargs)
+
+        assert isinstance(normalized, dict)
+        assert normalized["concurrency"] == 4
+        assert normalized["name"] == "test-worker"
+
+        if IS_PROCRASTINATE_3_PLUS:
+            assert "fetch_job_polling_interval" in normalized
+            assert "remove_failed" in normalized
+            assert "include_failed" in normalized
+            assert normalized["fetch_job_polling_interval"] == 5
+            assert normalized["remove_failed"] is True
+            assert normalized["include_failed"] is False
+        else:
+            assert normalized["timeout"] == 5
+            assert normalized["remove_error"] is True
+            assert normalized["include_error"] is False
+
+    def test_kwargs_passthrough(self):
+        """Test that unknown kwargs are passed through unchanged."""
+        test_kwargs = {
+            "custom_option": "value",
+            "another_option": 42,
+        }
+
+        normalized = normalize_worker_kwargs(**test_kwargs)
+        assert normalized == test_kwargs
+
+
+class TestMigrationCommands:
+    """Test migration command generation."""
+
+    def test_migration_commands_structure(self):
+        """Test migration command structure."""
+        commands = get_migration_commands()
+        assert isinstance(commands, dict)
+
+        if IS_PROCRASTINATE_3_PLUS:
+            expected_keys = {"pre_migrate", "post_migrate", "check"}
+            assert set(commands.keys()) == expected_keys
+
+            assert "procrastinate schema --apply --mode=pre" in commands["pre_migrate"]
+            assert "procrastinate schema --apply --mode=post" in commands["post_migrate"]
+        else:
+            expected_keys = {"migrate", "check"}
+            assert set(commands.keys()) == expected_keys
+
+            assert "procrastinate schema --apply" == commands["migrate"]
+
+        assert "procrastinate schema --check" == commands["check"]
+
+
+class TestJobContextCompat:
+    """Test job context compatibility wrapper."""
+
+    def test_compat_context_creation(self):
+        """Test creation of compatibility context."""
+        # Create a mock context object
+        class MockContext:
+            def __init__(self):
+                self.job = "mock_job"
+                self.task = "mock_task"
+
+            def should_abort(self):
+                return False
+
+            async def should_abort_async(self):
+                return False
+
+        mock_context = MockContext()
+        compat_context = CompatJobContext(mock_context)
+
+        assert compat_context is not None
+        assert compat_context.job == "mock_job"
+        assert compat_context.task == "mock_task"
+
+    def test_should_abort_methods(self):
+        """Test should_abort method compatibility."""
+        class MockContext:
+            def should_abort(self):
+                return True
+
+            async def should_abort_async(self):
+                return True
+
+        mock_context = MockContext()
+        compat_context = CompatJobContext(mock_context)
+
+        # Test synchronous method
+        assert compat_context.should_abort() is True
+
+    @pytest.mark.asyncio
+    async def test_should_abort_async(self):
+        """Test async should_abort method."""
+        class MockContext:
+            def should_abort(self):
+                return True
+
+            async def should_abort_async(self):
+                return True
+
+        mock_context = MockContext()
+        compat_context = CompatJobContext(mock_context)
+
+        # Test asynchronous method
+        result = await compat_context.should_abort_async()
+        assert result is True
+
+    def test_attribute_delegation(self):
+        """Test that unknown attributes are delegated to wrapped context."""
+        class MockContext:
+            def __init__(self):
+                self.custom_attr = "custom_value"
+
+            def custom_method(self):
+                return "custom_result"
+
+        mock_context = MockContext()
+        compat_context = CompatJobContext(mock_context)
+
+        assert compat_context.custom_attr == "custom_value"
+        assert compat_context.custom_method() == "custom_result"
+
+
+class TestIntegration:
+    """Integration tests for compatibility features."""
+
+    def test_full_compatibility_workflow(self):
+        """Test complete compatibility workflow."""
+        # Get version info
+        version_info = get_version_info()
+        assert version_info["is_v3_plus"] == IS_PROCRASTINATE_3_PLUS
+
+        # Test worker options
+        worker_kwargs = normalize_worker_kwargs(
+            concurrency=2,
+            timeout=10,
+            remove_error=False,
+        )
+        assert "concurrency" in worker_kwargs
+
+        # Test migration commands
+        migration_commands = get_migration_commands()
+        assert "check" in migration_commands
+
+        if IS_PROCRASTINATE_3_PLUS:
+            assert "pre_migrate" in migration_commands
+            assert "post_migrate" in migration_commands
+        else:
+            assert "migrate" in migration_commands
+
+    def test_version_specific_behavior(self):
+        """Test that version-specific behavior is consistent."""
+        version_info = get_version_info()
+
+        if version_info["is_v3_plus"]:
+            # Test 3.x specific features
+            assert FEATURES["graceful_shutdown"] is True
+            assert FEATURES["job_cancellation"] is True
+            assert FEATURES["pre_post_migrations"] is True
+        else:
+            # Test 2.x behavior
+            assert FEATURES["graceful_shutdown"] is False
+            assert FEATURES["job_cancellation"] is False
+            assert FEATURES["pre_post_migrations"] is False
\ No newline at end of file
diff --git a/tests/test_procrastinate_migration.py b/tests/test_procrastinate_migration.py
new file mode 100644
index 0000000..e4925a1
--- /dev/null
+++ b/tests/test_procrastinate_migration.py
@@ -0,0 +1,216 @@
+"""Tests for Procrastinate migration utilities."""
+
+import pytest
+
+from video_processor.tasks.migration import ProcrastinateMigrationHelper, create_migration_script
+from video_processor.tasks.compat import IS_PROCRASTINATE_3_PLUS
+
+
+class TestProcrastinateMigrationHelper:
+    """Test migration helper functionality."""
+
+    def test_migration_helper_creation(self):
+        """Test migration helper initialization."""
+        database_url = "postgresql://test:test@localhost/test"
+        helper = ProcrastinateMigrationHelper(database_url)
+
+        assert helper.database_url == database_url
+        assert helper.version_info is not None
+        assert "procrastinate_version" in helper.version_info
+
+    def test_migration_steps_generation(self):
+        """Test migration steps generation."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+        steps = helper.get_migration_steps()
+
+        assert isinstance(steps, list)
+        assert len(steps) > 0
+
+        if IS_PROCRASTINATE_3_PLUS:
+            # Should have pre/post migration steps
+            assert len(steps) >= 7  # Pre, deploy, post, verify
+            assert any("pre-migration" in step.lower() for step in steps)
+            assert any("post-migration" in step.lower() for step in steps)
+        else:
+            # Should have single migration step
+            assert len(steps) >= 2  # Migrate, verify
+            assert any("migration" in step.lower() for step in steps)
+
+    def test_print_migration_plan(self, capsys):
+        """Test migration plan printing."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+        helper.print_migration_plan()
+
+        captured = capsys.readouterr()
+        assert "Procrastinate Migration Plan" in captured.out
+        assert "Version Info:" in captured.out
+        assert "Current Version:" in captured.out
+
+    def test_migration_command_structure(self):
+        """Test that migration commands have correct structure."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+
+        # Test method availability
+        assert hasattr(helper, 'apply_pre_migration')
+        assert hasattr(helper, 'apply_post_migration')
+        assert hasattr(helper, 'apply_legacy_migration')
+        assert hasattr(helper, 'check_schema')
+        assert hasattr(helper, 'run_migration_command')
+
+    def test_migration_command_validation(self):
+        """Test migration command validation without actually running."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+
+        # Test that methods return appropriate responses for invalid DB
+        if IS_PROCRASTINATE_3_PLUS:
+            # Pre-migration should be available
+            assert hasattr(helper, 'apply_pre_migration')
+            assert hasattr(helper, 'apply_post_migration')
+        else:
+            # Legacy migration should be available
+            assert hasattr(helper, 'apply_legacy_migration')
+
+
+class TestMigrationScriptGeneration:
+    """Test migration script generation."""
+
+    def test_script_generation(self):
+        """Test that migration script is generated correctly."""
+        script_content = create_migration_script()
+
+        assert isinstance(script_content, str)
+        assert len(script_content) > 0
+
+        # Check for essential script components
+        assert "#!/usr/bin/env python3" in script_content
+        assert "Procrastinate migration script" in script_content
+        assert "migrate_database" in script_content
+        assert "asyncio" in script_content
+
+        # Check for command line argument handling
+        assert "--pre" in script_content or "--post" in script_content
+
+    def test_script_has_proper_structure(self):
+        """Test that generated script has proper Python structure."""
+        script_content = create_migration_script()
+
+        # Should have proper Python script structure
+        lines = script_content.split('\n')
+
+        # Check shebang
+        assert lines[0] == "#!/usr/bin/env python3"
+
+        # Check for main function
+        assert 'def main():' in script_content
+
+        # Check for asyncio usage
+        assert 'asyncio.run(main())' in script_content
+
+
+class TestMigrationWorkflow:
+    """Test complete migration workflow scenarios."""
+
+    def test_version_aware_migration_selection(self):
+        """Test that correct migration path is selected based on version."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+
+        if IS_PROCRASTINATE_3_PLUS:
+            # 3.x should use pre/post migrations
+            steps = helper.get_migration_steps()
+            step_text = ' '.join(steps).lower()
+            assert 'pre-migration' in step_text
+            assert 'post-migration' in step_text
+        else:
+            # 2.x should use legacy migration
+            steps = helper.get_migration_steps()
+            step_text = ' '.join(steps).lower()
+            assert 'migration' in step_text
+            assert 'pre-migration' not in step_text
+
+    def test_migration_helper_consistency(self):
+        """Test that migration helper provides consistent information."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+
+        # Version info should be consistent
+        version_info = helper.version_info
+        steps = helper.get_migration_steps()
+
+        assert version_info["is_v3_plus"] == IS_PROCRASTINATE_3_PLUS
+
+        # Steps should match version
+        if version_info["is_v3_plus"]:
+            assert len(steps) > 4  # Should have multiple steps for 3.x
+        else:
+            assert len(steps) >= 2  # Should have basic steps for 2.x
+
+
+@pytest.mark.asyncio
+class TestAsyncMigration:
+    """Test async migration functionality."""
+
+    async def test_migrate_database_function_exists(self):
+        """Test that async migration function exists and is callable."""
+        from video_processor.tasks.migration import migrate_database
+
+        # Function should exist and be async
+        assert callable(migrate_database)
+
+        # Should handle invalid database gracefully (don't actually run)
+        # Just test that it exists and has the right signature
+        import inspect
+        sig = inspect.signature(migrate_database)
+
+        expected_params = ['database_url', 'pre_migration_only', 'post_migration_only']
+        actual_params = list(sig.parameters.keys())
+
+        for param in expected_params:
+            assert param in actual_params
+
+
+class TestRegressionPrevention:
+    """Tests to prevent regressions in migration functionality."""
+
+    def test_migration_helper_backwards_compatibility(self):
+        """Ensure migration helper maintains backwards compatibility."""
+        helper = ProcrastinateMigrationHelper("postgresql://fake/db")
+
+        # Essential methods should always exist
+        required_methods = [
+            'get_migration_steps',
+            'print_migration_plan',
+            'run_migration_command',
+            'check_schema',
+        ]
+
+        for method in required_methods:
+            assert hasattr(helper, method)
+            assert callable(getattr(helper, method))
+
+    def test_version_detection_stability(self):
+        """Test that version detection is stable and predictable."""
+        from video_processor.tasks.compat import get_version_info, PROCRASTINATE_VERSION
+
+        info1 = get_version_info()
+        info2 = get_version_info()
+
+        # Should return consistent results
+        assert info1 == info2
+        assert info1["version_tuple"] == PROCRASTINATE_VERSION
+
+    def test_feature_flags_consistency(self):
+        """Test that feature flags are consistent with version."""
+        from video_processor.tasks.compat import FEATURES, IS_PROCRASTINATE_3_PLUS
+
+        # 3.x features should only be available in 3.x
+        v3_features = [
+            "graceful_shutdown",
+            "job_cancellation",
+            "pre_post_migrations",
+            "psycopg3_support"
+        ]
+
+        for feature in v3_features:
+            if IS_PROCRASTINATE_3_PLUS:
+                assert FEATURES[feature] is True, f"{feature} should be True in 3.x"
+            else:
+                assert FEATURES[feature] is False, f"{feature} should be False in 2.x"
\ No newline at end of file