import asyncio
import os
import shutil
import functools

if not hasattr(asyncio, 'to_thread'):
    async def to_thread(func, *args, **kwargs):
        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, functools.partial(func, *args, **kwargs))
    asyncio.to_thread = to_thread
import subprocess
import uuid
from datetime import datetime
from pathlib import Path
from typing import Dict, Optional

import boto3
from google.cloud import storage
from fastapi import FastAPI, File, Form, HTTPException, UploadFile, Header
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi import Depends
import uvicorn

import firebase_admin
from firebase_admin import credentials
from firebase_admin import messaging

from botocore.exceptions import ClientError

cred = credentials.Certificate("durlabhdarshan-eff42-firebase-adminsdk-jo207-dd9fa8b9d1.json")
firebase_admin.initialize_app(cred)


app = FastAPI(title="Video Processing Server")
security = HTTPBearer()

# Authentication configuration
API_PASSKEY = "asdlkbcalwb232h3u2bdib29"  # Change this to your desired passkey

# Configuration
AWS_ACCESS_KEY_ID = "AKIA4ALHXKCHCYRYRQCQ"
AWS_SECRET_ACCESS_KEY = "RJ/6OfVxjNGCKZzCw9Bm3l1MstdjXzlQMzO+EHyc"
AWS_BUCKET_NAME = "co.techxr.system.backend.upload.dev"
AWS_REGION = "ap-south-1"

# GCP Configuration
GCP_CREDENTIALS_FILE = "durlabhdarshan-cdn-63bbf0b295f1.json"
GCP_BUCKET_NAME = "durlabh_darshan"

# Local directories
UPLOAD_DIR = Path("uploads")
PROCESSING_DIR = Path("processing")
HLS_OUTPUT_DIR = Path("hls_output")

# Ensure directories exist
UPLOAD_DIR.mkdir(exist_ok=True)
PROCESSING_DIR.mkdir(exist_ok=True)
HLS_OUTPUT_DIR.mkdir(exist_ok=True)

# S3 client
s3_client = boto3.client(
    's3',
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=AWS_REGION
)

# GCP client
try:
    gcp_client = storage.Client.from_service_account_json(GCP_CREDENTIALS_FILE)
except Exception as e:
    print(f"Warning: Failed to initialize GCP storage client: {e}")
    gcp_client = None

# Task tracking
conversion_tasks: Dict[str, Dict] = {}
server_busy = False

def verify_passkey(credentials: HTTPAuthorizationCredentials = Depends(security)):
    """Verify the passkey from Authorization header"""
    if credentials.credentials != API_PASSKEY:
        raise HTTPException(
            status_code=401,
            detail="Invalid passkey",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return credentials.credentials

from botocore.exceptions import ClientError

async def _cloud_object_exists(key: str) -> bool:
    def check_s3():
        try:
            s3_client.head_object(Bucket=AWS_BUCKET_NAME, Key=key)
            return True
        except ClientError as e:
            code = e.response.get("Error", {}).get("Code")
            if code in ("404", "NoSuchKey", "NotFound"):
                return False
            return False
            
    def check_gcp():
        try:
            if gcp_client:
                return gcp_client.bucket(GCP_BUCKET_NAME).blob(key).exists()
        except:
            pass
        return False
        
    s3_exists, gcp_exists = await asyncio.gather(
        asyncio.to_thread(check_s3),
        asyncio.to_thread(check_gcp)
    )
    return s3_exists or gcp_exists

async def _move_object_in_clouds(src_key: str, dst_key: str) -> dict:
    def move_s3() -> dict:
        out = {"from": src_key, "to": dst_key, "copied": False, "deleted": False, "error": None}
        try:
            s3_client.copy({"Bucket": AWS_BUCKET_NAME, "Key": src_key}, AWS_BUCKET_NAME, dst_key)
            out["copied"] = True
            s3_client.delete_object(Bucket=AWS_BUCKET_NAME, Key=src_key)
            try:
                s3_client.head_object(Bucket=AWS_BUCKET_NAME, Key=src_key)
                out["error"] = "delete_failed_or_blocked"
            except ClientError as e:
                code = e.response.get("Error", {}).get("Code")
                if code in ("404", "NoSuchKey", "NotFound"):
                    out["deleted"] = True
                else:
                    out["error"] = f"head_after_delete_error: {code}"
        except Exception as e:
            out["error"] = str(e)
        return out
        
    def move_gcp() -> dict:
        out = {"from": src_key, "to": dst_key, "copied": False, "deleted": False, "error": None}
        if not gcp_client:
            out["error"] = "gcp_client_not_initialized"
            return out
        try:
            bucket = gcp_client.bucket(GCP_BUCKET_NAME)
            src_blob = bucket.blob(src_key)
            if src_blob.exists():
                bucket.copy_blob(src_blob, bucket, dst_key)
                out["copied"] = True
                src_blob.delete()
                out["deleted"] = True
            else:
                out["error"] = "gcp_not_found"
        except Exception as e:
            out["error"] = str(e)
        return out
        
    s3_res, gcp_res = await asyncio.gather(
        asyncio.to_thread(move_s3),
        asyncio.to_thread(move_gcp)
    )
    
    return {
        "from": src_key,
        "to": dst_key,
        "deleted": s3_res.get("deleted") or gcp_res.get("deleted"),
        "error": s3_res.get("error") if s3_res.get("error") else gcp_res.get("error"),
        "s3": s3_res,
        "gcp": gcp_res,
    }

async def _move_prefix_in_clouds(src_prefix: str, dst_prefix: str) -> dict:
    moved, skipped = [], []
    def get_s3_keys():
        keys = []
        try:
            paginator = s3_client.get_paginator("list_objects_v2")
            for page in paginator.paginate(Bucket=AWS_BUCKET_NAME, Prefix=src_prefix):
                for item in page.get("Contents", []):
                    keys.append(item["Key"])
        except:
            pass
        return keys
        
    def get_gcp_keys():
        keys = []
        try:
            if gcp_client:
                blobs = gcp_client.list_blobs(GCP_BUCKET_NAME, prefix=src_prefix)
                for blob in blobs:
                    keys.append(blob.name)
        except:
            pass
        return keys

    s3_keys, gcp_keys = await asyncio.gather(
        asyncio.to_thread(get_s3_keys),
        asyncio.to_thread(get_gcp_keys)
    )
    all_keys = set(s3_keys + gcp_keys)
    
    move_tasks = []
    for key in all_keys:
        dst_key = dst_prefix + key[len(src_prefix):]
        move_tasks.append(_move_object_in_clouds(key, dst_key))
        
    results = await asyncio.gather(*move_tasks)
    
    for key, res in zip(all_keys, results):
        if res.get("deleted"):
            moved.append(res)
        else:
            skipped.append(res)
            
    return {"moved": moved, "skipped": skipped}

async def _check_prefix_has_objects(prefix: str) -> bool:
    def check_s3():
        try:
            head = s3_client.list_objects_v2(Bucket=AWS_BUCKET_NAME, Prefix=prefix, MaxKeys=1)
            return head.get("KeyCount", 0) > 0
        except:
            return False
            
    def check_gcp():
        try:
            if gcp_client:
                blobs = gcp_client.list_blobs(GCP_BUCKET_NAME, prefix=prefix, max_results=1)
                for _ in blobs:
                    return True
        except:
            pass
        return False
        
    s3_has, gcp_has = await asyncio.gather(
        asyncio.to_thread(check_s3),
        asyncio.to_thread(check_gcp)
    )
    return s3_has or gcp_has

async def _dry_run_prefix(src_prefix: str, dst_prefix: str) -> list:
    def get_s3_keys():
        keys = []
        try:
            paginator = s3_client.get_paginator("list_objects_v2")
            for page in paginator.paginate(Bucket=AWS_BUCKET_NAME, Prefix=src_prefix):
                for item in page.get("Contents", []):
                    keys.append(item["Key"])
        except:
            pass
        return keys
        
    def get_gcp_keys():
        keys = []
        try:
            if gcp_client:
                blobs = gcp_client.list_blobs(GCP_BUCKET_NAME, prefix=src_prefix)
                for blob in blobs:
                    keys.append(blob.name)
        except:
            pass
        return keys

    s3_keys, gcp_keys = await asyncio.gather(
        asyncio.to_thread(get_s3_keys),
        asyncio.to_thread(get_gcp_keys)
    )
    all_keys = set(s3_keys + gcp_keys)
    
    items = []
    for key in all_keys:
        dst_key = dst_prefix + key[len(src_prefix):]
        items.append({"from": key, "to": dst_key, "note": "dry_run"})
    return items

async def _upload_to_clouds(file_path: Path, cloud_key: str, content_type: str):
    def upload_s3():
        try:
            with open(file_path, 'rb') as f:
                s3_client.upload_fileobj(
                    f,
                    AWS_BUCKET_NAME,
                    cloud_key,
                    ExtraArgs={'ContentType': content_type}
                )
        except Exception as e:
            print(f"S3 upload error for {cloud_key}: {e}")

    def upload_gcp():
        if not gcp_client:
            return
        try:
            bucket = gcp_client.bucket(GCP_BUCKET_NAME)
            blob = bucket.blob(cloud_key)
            blob.upload_from_filename(str(file_path), content_type=content_type)
        except Exception as e:
            print(f"GCP upload error for {cloud_key}: {e}")

    await asyncio.gather(
        asyncio.to_thread(upload_s3),
        asyncio.to_thread(upload_gcp)
    )


@app.post("/promote-unapproved-slot")
async def promote_unapproved_slot(
    temple: str = Form(...),
    date: str = Form(...),   # "yyyy-MM-dd"
    slot: str = Form(...),   # "morning"/"evening"/etc.
    move_jpg: bool = Form(True),
    move_json: bool = Form(True),
    move_hls: bool = Form(True),
    dry_run: bool = Form(False),
    passkey: str = Depends(verify_passkey),
):
    """
    Move objects from:
      DurlabhDarshanAssets/App/aaj-ke-darshan/{temple}/{date}/unapproved/{slot}[.jpg|.json|/*]
    to:
      DurlabhDarshanAssets/App/aaj-ke-darshan/{temple}/{date}/{slot}[.jpg|.json|/*]
    """
    base = f"DurlabhDarshanAssets/App/aaj-ke-darshan/{temple}/{date}"
    results = {
        "moved": [],
        "skipped": [],
        "hls": {"moved": [], "skipped": []},
        "dry_run": dry_run,
        "bucket": AWS_BUCKET_NAME,
    }

    # Image (.jpg)
    if move_jpg:
        src_jpg = f"{base}/unapproved/{slot}.jpg"
        dst_jpg = f"{base}/{slot}.jpg"
        if await _cloud_object_exists(src_jpg):
            if dry_run:
                results["moved"].append({"from": src_jpg, "to": dst_jpg, "note": "dry_run"})
            else:
                try:
                    res = await _move_object_in_clouds(src_jpg, dst_jpg)
                    if res.get("deleted"):
                        results["moved"].append(res)
                    else:
                        results["skipped"].append(res)
                except Exception as e:
                    results["skipped"].append({"from": src_jpg, "error": str(e)})
        else:
            results["skipped"].append({"from": src_jpg, "error": "not_found"})

    # JSON (.json)
    if move_json:
        src_json = f"{base}/unapproved/{slot}.json"
        dst_json = f"{base}/{slot}.json"
        if await _cloud_object_exists(src_json):
            if dry_run:
                results["moved"].append({"from": src_json, "to": dst_json, "note": "dry_run"})
            else:
                try:
                    res = await _move_object_in_clouds(src_json, dst_json)
                    if res.get("deleted"):
                        results["moved"].append(res)
                    else:
                        results["skipped"].append(res)
                except Exception as e:
                    results["skipped"].append({"from": src_json, "error": str(e)})
        else:
            results["skipped"].append({"from": src_json, "error": "not_found"})

    # HLS folder (prefix)
    if move_hls:
        src_prefix = f"{base}/unapproved/{slot}/"
        dst_prefix = f"{base}/{slot}/"
        
        has_objects = await _check_prefix_has_objects(src_prefix)

        if has_objects:
            if dry_run:
                results["hls"]["moved"] = await _dry_run_prefix(src_prefix, dst_prefix)
            else:
                out = await _move_prefix_in_clouds(src_prefix, dst_prefix)
                results["hls"]["moved"] = out["moved"]
                results["hls"]["skipped"] = out["skipped"]
        else:
            results["hls"]["skipped"].append({"from_prefix": src_prefix, "error": "not_found"})

    return JSONResponse(content={"success": True, "message": "Promotion attempted", "result": results})

class ConversionStatus:
    PROCESSING = "processing"
    UPLOADING = "uploading"
    COMPLETED = "completed"
    FAILED = "failed"
    SERVER_BUSY = "server_busy"

@app.post("/notify-image-upload")
async def notify_image_upload(
    temple: str = Form(...),
    slot: str = Form(...),
    url: str = Form(...),
    authorization: str = Header(None)
):
    if authorization != f"Bearer {API_PASSKEY}":
        return {"success": False, "message": "Unauthorized"}

    send_push_notification(temple, slot, url)
    return {"success": True, "message": "Notification sent"}

@app.post("/upload-video")
async def upload_video(
    video: UploadFile = File(...),
    temple: str = Form(...),
    date: str = Form(...),
    slot: str = Form(...),
    passkey: str = Depends(verify_passkey)
):
    global server_busy
    
    if server_busy:
        return JSONResponse(
            content={
                "success": False,
                "message": "Server is busy processing another video. Please try again later.",
                "task_id": None
            },
            status_code=202
        )
    
    try:
        # Generate unique task ID
        task_id = str(uuid.uuid4())
        
        # Save uploaded file
        file_path = UPLOAD_DIR / f"{task_id}_{video.filename}"
        with open(file_path, "wb") as buffer:
            content = await video.read()
            buffer.write(content)
        
        # Initialize task tracking
        conversion_tasks[task_id] = {
            "status": ConversionStatus.PROCESSING,
            "temple": temple,
            "date": date,
            "slot": slot,
            "file_path": str(file_path),
            "error": None,
            "s3_path": None
        }
        
        # Start background processing
        asyncio.create_task(process_video(task_id))
        
        return JSONResponse(content={
            "success": True,
            "message": "Video uploaded successfully. Processing started.",
            "task_id": task_id
        })
        
    except Exception as e:
        return JSONResponse(
            content={
                "success": False,
                "message": f"Upload failed: {str(e)}",
                "task_id": None
            },
            status_code=500
        )

@app.post("/upload-unapproved-video")
async def upload_unapproved_video(
    video: UploadFile = File(...),
    temple: str = Form(...),
    date: str = Form(...),
    slot: str = Form(...),
    passkey: str = Depends(verify_passkey)
):
    global server_busy
    
    if server_busy:
        return JSONResponse(
            content={
                "success": False,
                "message": "Server is busy processing another video. Please try again later.",
                "task_id": None
            },
            status_code=202
        )
    
    try:
        # Generate unique task ID
        task_id = str(uuid.uuid4())
        
        # Save uploaded file
        file_path = UPLOAD_DIR / f"{task_id}_{video.filename}"
        with open(file_path, "wb") as buffer:
            content = await video.read()
            buffer.write(content)
        
        # Initialize task tracking
        conversion_tasks[task_id] = {
            "status": ConversionStatus.PROCESSING,
            "temple": temple,
            "date": date,
            "slot": slot,
            "file_path": str(file_path),
            "error": None,
            "s3_path": None
        }
        
        # Start background processing
        asyncio.create_task(process_unapproved_video(task_id))
        
        return JSONResponse(content={
            "success": True,
            "message": "Video uploaded successfully. Processing started.",
            "task_id": task_id
        })
        
    except Exception as e:
        return JSONResponse(
            content={
                "success": False,
                "message": f"Upload failed: {str(e)}",
                "task_id": None
            },
            status_code=500
        )

@app.get("/conversion-status/{task_id}")
async def get_conversion_status(task_id: str, passkey: str = Depends(verify_passkey)):
    if task_id not in conversion_tasks:
        raise HTTPException(status_code=404, detail="Task not found")
    
    task = conversion_tasks[task_id]
    
    response = {
        "status": task["status"],
        "error": task.get("error"),
        "s3_path": task.get("s3_path")
    }
    
    # Clean up completed or failed tasks after returning status
    if task["status"] in [ConversionStatus.COMPLETED, ConversionStatus.FAILED]:
        # Keep task for a bit longer in case client polls again
        pass
    
    return JSONResponse(content=response)
def send_push_notification(temple, slot, s3_url):
    slot_mapping = {
        "morning": "प्रातः",
        "evening": "सायंकाल"
    }
    slot_hindi = slot_mapping.get(slot.lower(), slot)
    print(slot_hindi)
    temple_mapping = {
        "mahakaleshwar": "महाकालेश्वर",
        "kashivishwanath": "काशी विश्वनाथ",
        "omkareshwar" : "ओंकारेश्वर",
        "somnath": "सोमनाथ",
        "vaishnodevi" : "वैष्णोदेवी",
        "dwarkadhishtemple" : "द्वारकाधीश",
        "siddhivinayak" : "सिद्धि विनायक",
        "grishneshwar" : "घृष्णेश्वर",
        "trimbkeshwar" : "त्र्यंबकेश्वर"
    }
    temple_hindi = temple_mapping.get(temple.lower(), temple)
    print(temple_hindi)

    title = f"आज के दर्शन – श्री {temple_hindi}"
    body = f"आज {slot_hindi} के दर्शन उपलब्ध हैं।"
    print(s3_url)
    message = messaging.Message(
        notification=messaging.Notification(
            title=title,
            body=body,
            image=s3_url
        ),
        topic="all_users"
    )

    try:
        response = messaging.send(message)
        print("✅ Notification sent:", response)
    except Exception as e:
        print("❌ Error sending notification:", e)

async def process_video(task_id: str):
    global server_busy
    server_busy = True
    
    try:
        task = conversion_tasks[task_id]
        file_path = Path(task["file_path"])
        temple = task["temple"]
        date = task["date"]
        slot = task["slot"]
        
        # Create processing directory for this task
        task_processing_dir = PROCESSING_DIR / task_id
        task_processing_dir.mkdir(exist_ok=True)
        
        # Move file to processing directory
        processing_file_path = task_processing_dir / file_path.name
        shutil.move(str(file_path), str(processing_file_path))
        
        # Create HLS output directory
        hls_output_path = HLS_OUTPUT_DIR / task_id
        hls_output_path.mkdir(exist_ok=True)
        
        # Convert to HLS using FFmpeg
        m3u8_file = hls_output_path / "playlist.m3u8"
        
        ffmpeg_command = [
            "ffmpeg",
            "-i", str(processing_file_path),
            "-codec:", "copy",
            "-start_number", "0",
            "-hls_time", "10",
            "-hls_list_size", "0",
            "-f", "hls",
            str(m3u8_file)
        ]
        
        # Run FFmpeg conversion
        process = await asyncio.create_subprocess_exec(
            *ffmpeg_command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        
        stdout, stderr = await process.communicate()
        
        if process.returncode != 0:
            raise Exception(f"FFmpeg conversion failed: {stderr.decode()}")
        
        # Update status to uploading
        conversion_tasks[task_id]["status"] = ConversionStatus.UPLOADING
        
        # Upload HLS files to S3
        s3_base_path = f"DurlabhDarshanAssets/App/aaj-ke-darshan/{temple}/{date}/{slot}"
        
        # Upload all files in the HLS output directory concurrently to both clouds
        upload_tasks = []
        for file_item in hls_output_path.iterdir():
            if file_item.is_file():
                s3_key = f"{s3_base_path}/{file_item.name}"
                
                # Determine content type
                content_type = "application/vnd.apple.mpegurl" if file_item.suffix == ".m3u8" else "video/MP2T"
                
                upload_tasks.append(_upload_to_clouds(file_item, s3_key, content_type))
        
        # Execute uploads
        await asyncio.gather(*upload_tasks)
        
        # Update task status to completed
        conversion_tasks[task_id]["status"] = ConversionStatus.COMPLETED
        conversion_tasks[task_id]["s3_path"] = s3_base_path
        
        # Cleanup local files
        cleanup_task_files(task_id, task_processing_dir, hls_output_path)
    except Exception as e:
        conversion_tasks[task_id]["status"] = ConversionStatus.FAILED
        conversion_tasks[task_id]["error"] = str(e)
        cleanup_task_files(task_id, task_processing_dir, hls_output_path)
    finally:
        server_busy = False

async def process_unapproved_video(task_id: str):
    global server_busy
    server_busy = True
    
    try:
        task = conversion_tasks[task_id]
        file_path = Path(task["file_path"])
        temple = task["temple"]
        date = task["date"]
        slot = task["slot"]
        
        # Create processing directory for this task
        task_processing_dir = PROCESSING_DIR / task_id
        task_processing_dir.mkdir(exist_ok=True)
        
        # Move file to processing directory
        processing_file_path = task_processing_dir / file_path.name
        shutil.move(str(file_path), str(processing_file_path))
        
        # Create HLS output directory
        hls_output_path = HLS_OUTPUT_DIR / task_id
        hls_output_path.mkdir(exist_ok=True)
        
        # Convert to HLS using FFmpeg
        m3u8_file = hls_output_path / "playlist.m3u8"
        
        ffmpeg_command = [
            "ffmpeg",
            "-i", str(processing_file_path),
            "-codec:", "copy",
            "-start_number", "0",
            "-hls_time", "10",
            "-hls_list_size", "0",
            "-f", "hls",
            str(m3u8_file)
        ]
        
        # Run FFmpeg conversion
        process = await asyncio.create_subprocess_exec(
            *ffmpeg_command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        
        stdout, stderr = await process.communicate()
        
        if process.returncode != 0:
            raise Exception(f"FFmpeg conversion failed: {stderr.decode()}")
        
        # Update status to uploading
        conversion_tasks[task_id]["status"] = ConversionStatus.UPLOADING
        
        # Upload HLS files to S3
        s3_base_path = f"DurlabhDarshanAssets/App/aaj-ke-darshan/{temple}/{date}/unapproved/{slot}"
        
        # Upload all files in the HLS output directory concurrently to both clouds
        upload_tasks = []
        for file_item in hls_output_path.iterdir():
            if file_item.is_file():
                s3_key = f"{s3_base_path}/{file_item.name}"
                
                # Determine content type
                content_type = "application/vnd.apple.mpegurl" if file_item.suffix == ".m3u8" else "video/MP2T"
                
                upload_tasks.append(_upload_to_clouds(file_item, s3_key, content_type))
                
        # Execute uploads
        await asyncio.gather(*upload_tasks)
        
        # Update task status to completed
        conversion_tasks[task_id]["status"] = ConversionStatus.COMPLETED
        conversion_tasks[task_id]["s3_path"] = s3_base_path
        
        # Cleanup local files
        cleanup_task_files(task_id, task_processing_dir, hls_output_path)

        
    except Exception as e:
        conversion_tasks[task_id]["status"] = ConversionStatus.FAILED
        conversion_tasks[task_id]["error"] = str(e)
        
        # Cleanup on error
        cleanup_task_files(task_id, task_processing_dir, hls_output_path)
        
    finally:
        server_busy = False

def cleanup_task_files(task_id: str, processing_dir: Path, hls_dir: Path):
    """Clean up all files related to a task"""
    try:
        # Remove processing directory
        if processing_dir.exists():
            shutil.rmtree(processing_dir)
        
        # Remove HLS output directory
        if hls_dir.exists():
            shutil.rmtree(hls_dir)
            
        # Remove original upload file if it exists
        original_file = UPLOAD_DIR / f"{task_id}_*.mp4"
        for file_path in UPLOAD_DIR.glob(f"{task_id}_*"):
            if file_path.is_file():
                file_path.unlink()
                
    except Exception as e:
        print(f"Error cleaning up files for task {task_id}: {e}")

@app.get("/")
async def root():
    return {"message": "Video Processing Server is running"}

@app.get("/health")
async def health_check():
    return {
        "status": "healthy",
        "server_busy": server_busy,
        "active_tasks": len([t for t in conversion_tasks.values() if t["status"] not in [ConversionStatus.COMPLETED, ConversionStatus.FAILED]])
    }

if __name__ == "__main__":
    # Check if FFmpeg is available
    try:
        subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True)
        print("✓ FFmpeg is available")
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("✗ FFmpeg is not available. Please install FFmpeg first.")
        exit(1)
    
    print("Starting Video Processing Server...")
    print("Make sure to update the serverBaseUrl in Unity script to match this server's address")
    
    uvicorn.run(app, host="0.0.0.0", port=3019)
