from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse, Response
from pathlib import Path
from typing import List, Dict
from pydantic import BaseModel
import os
from datetime import datetime
import aiofiles
import asyncio
import json
from sse_starlette.sse import EventSourceResponse
from compression import CompressionManager

app = FastAPI(title="Drone Footage Manager API")

# Initialize compression manager
compression_manager = CompressionManager()

# CORS middleware for frontend communication
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify your frontend domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Base path for footages
FOOTAGES_PATH = Path("/footages")

# Supported media extensions, lowercase; always compared against
# Path.suffix.lower() so mixed-case names (e.g. ".Mp4") are accepted too.
VIDEO_EXTENSIONS = {".mp4", ".mov", ".avi"}
IMAGE_EXTENSIONS = {".jpg", ".jpeg", ".png"}

# Content types for video streaming; unknown extensions fall back to
# video/mp4, which matches the previous hard-coded behavior.
VIDEO_MEDIA_TYPES = {
    ".mp4": "video/mp4",
    ".mov": "video/quicktime",
    ".avi": "video/x-msvideo",
}


def is_media_file(filename: str) -> bool:
    """Return True if *filename* has a supported video or image extension."""
    ext = Path(filename).suffix.lower()
    return ext in VIDEO_EXTENSIONS or ext in IMAGE_EXTENSIONS


def resolve_safe_path(*parts: str) -> Path:
    """Join user-supplied path segments under FOOTAGES_PATH safely.

    The location/date/filename segments come straight from the URL, so a
    client could attempt traversal ("..", absolute paths, symlinks). The
    resolved path must stay inside the footage root.

    Raises:
        HTTPException: 404 if the resolved path escapes FOOTAGES_PATH.
    """
    candidate = FOOTAGES_PATH.joinpath(*parts).resolve()
    root = FOOTAGES_PATH.resolve()
    try:
        candidate.relative_to(root)
    except ValueError:
        # Report traversal as a plain 404 so we don't leak filesystem layout.
        raise HTTPException(status_code=404, detail="File not found") from None
    return candidate


def get_file_info(file_path: Path) -> Dict:
    """Return name/size/mtime metadata plus media-kind flags for a file."""
    stat = file_path.stat()
    suffix = file_path.suffix.lower()
    return {
        "name": file_path.name,
        "size": stat.st_size,
        "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
        "is_video": suffix in VIDEO_EXTENSIONS,
        "is_image": suffix in IMAGE_EXTENSIONS,
    }


def list_subdirectories(path: Path) -> List[Dict]:
    """Return name + modified-time entries for each subdirectory of *path*."""
    entries = []
    for item in path.iterdir():
        if item.is_dir():
            stat = item.stat()
            entries.append({
                "name": item.name,
                "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
            })
    return entries


@app.get("/")
async def root():
    return {"message": "Drone Footage Manager API", "status": "running"}


@app.get("/api/locations")
async def get_locations() -> List[Dict]:
    """Get list of all location folders with metadata."""
    if not FOOTAGES_PATH.exists():
        raise HTTPException(status_code=500, detail="Footages directory not found")
    return list_subdirectories(FOOTAGES_PATH)


@app.get("/api/locations/{location}/dates")
async def get_dates(location: str) -> List[Dict]:
    """Get list of date folders for a location with metadata."""
    location_path = resolve_safe_path(location)
    if not location_path.exists() or not location_path.is_dir():
        raise HTTPException(status_code=404, detail="Location not found")
    return list_subdirectories(location_path)


@app.get("/api/files/{location}/{date}")
async def get_files(location: str, date: str) -> List[Dict]:
    """Get list of media files for a location and date, sorted by name."""
    files_path = resolve_safe_path(location, date)
    if not files_path.exists() or not files_path.is_dir():
        raise HTTPException(status_code=404, detail="Path not found")
    return [
        get_file_info(item)
        for item in sorted(files_path.iterdir())
        if item.is_file() and is_media_file(item.name)
    ]


@app.get("/api/stream/{location}/{date}/{filename}")
async def stream_video(location: str, date: str, filename: str, request: Request):
    """Stream a video file with HTTP range request support for fast seeking.

    Returns 206 Partial Content with a Content-Range header when the client
    sends a Range header, otherwise the full file with Accept-Ranges set.
    """
    file_path = resolve_safe_path(location, date, filename)
    if not file_path.exists() or not file_path.is_file():
        raise HTTPException(status_code=404, detail="File not found")

    suffix = file_path.suffix.lower()
    if suffix not in VIDEO_EXTENSIONS:
        raise HTTPException(status_code=400, detail="Not a video file")

    media_type = VIDEO_MEDIA_TYPES.get(suffix, "video/mp4")
    file_size = file_path.stat().st_size

    range_header = request.headers.get("range")
    if range_header:
        # Parse "bytes=start-end"; either bound may be omitted.
        try:
            start_str, _, end_str = range_header.replace("bytes=", "").partition("-")
            start = int(start_str) if start_str else 0
            end = int(end_str) if end_str else file_size - 1
        except ValueError:
            # Malformed Range header: answer 416 instead of an unhandled 500.
            raise HTTPException(status_code=416, detail="Invalid Range header") from None

        end = min(end, file_size - 1)
        if start > end or start >= file_size:
            raise HTTPException(status_code=416, detail="Range not satisfiable")

        content_length = end - start + 1

        async def iterfile():
            # Stream only the requested byte window, in 1 MB chunks.
            async with aiofiles.open(file_path, mode="rb") as f:
                await f.seek(start)
                remaining = content_length
                chunk_size = 1024 * 1024
                while remaining > 0:
                    chunk = await f.read(min(chunk_size, remaining))
                    if not chunk:
                        break
                    remaining -= len(chunk)
                    yield chunk

        headers = {
            "Content-Range": f"bytes {start}-{end}/{file_size}",
            "Accept-Ranges": "bytes",
            "Content-Length": str(content_length),
            "Content-Type": media_type,
        }
        return StreamingResponse(
            iterfile(),
            status_code=206,
            headers=headers,
            media_type=media_type,
        )

    # No Range header - return the full file.
    return FileResponse(
        file_path,
        media_type=media_type,
        headers={"Accept-Ranges": "bytes"},
    )


@app.get("/api/image/{location}/{date}/{filename}")
async def get_image(location: str, date: str, filename: str):
    """Serve an image file with the correct JPEG/PNG content type."""
    file_path = resolve_safe_path(location, date, filename)
    if not file_path.exists() or not file_path.is_file():
        raise HTTPException(status_code=404, detail="File not found")

    suffix = file_path.suffix.lower()
    if suffix not in IMAGE_EXTENSIONS:
        raise HTTPException(status_code=400, detail="Not an image file")

    media_type = "image/jpeg" if suffix in {".jpg", ".jpeg"} else "image/png"
    return FileResponse(file_path, media_type=media_type)


# ========== COMPRESSION API ENDPOINTS ==========

class CompressionRequest(BaseModel):
    # Path components of the source video plus the requested size
    # reduction as a percentage (validated to 1-90 by the endpoint).
    location: str
    date: str
    filename: str
    reduce_percentage: int


@app.post("/api/compress/start")
async def start_compression(request: CompressionRequest):
    """Start a compression job for an existing video file."""
    if not 1 <= request.reduce_percentage <= 90:
        raise HTTPException(status_code=400, detail="Percentage must be between 1-90")

    file_path = resolve_safe_path(request.location, request.date, request.filename)
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File not found")
    if file_path.suffix.lower() not in VIDEO_EXTENSIONS:
        raise HTTPException(status_code=400, detail="File is not a video")

    job_id = await compression_manager.start_compression(str(file_path), request.reduce_percentage)
    return {"job_id": job_id, "status": "started"}


def _job_summary(job) -> Dict:
    """Serialize a compression job to the full JSON shape of the jobs list."""
    return {
        "job_id": job.job_id,
        "file_path": job.file_path,
        "file_name": Path(job.file_path).name,
        "reduce_percentage": job.reduce_percentage,
        "status": job.status,
        "progress": round(job.progress, 1),
        "eta_seconds": job.eta_seconds,
        "current_pass": job.current_pass,
        "current_size_mb": round(job.current_size_mb, 2) if job.current_size_mb else None,
        "target_size_mb": round(job.target_size_mb, 2) if job.target_size_mb else None,
        "video_bitrate": job.video_bitrate,
        "created_at": job.created_at.isoformat() if job.created_at else None,
        "output_file": Path(job.output_file).name if job.output_file else None,
        "error": job.error,
    }


@app.get("/api/compress/jobs")
async def get_all_jobs():
    """Get all compression jobs."""
    return [_job_summary(job) for job in compression_manager.jobs.values()]


@app.get("/api/compress/jobs/{job_id}")
async def get_job_status(job_id: str):
    """Get status of a specific compression job."""
    if job_id not in compression_manager.jobs:
        raise HTTPException(status_code=404, detail="Job not found")
    job = compression_manager.jobs[job_id]
    return {
        "job_id": job.job_id,
        "status": job.status,
        "progress": round(job.progress, 1),
        "eta_seconds": job.eta_seconds,
        "current_pass": job.current_pass,
        "output_file": Path(job.output_file).name if job.output_file else None,
        "error": job.error,
    }


@app.delete("/api/compress/jobs/{job_id}")
async def cancel_job(job_id: str):
    """Cancel a compression job."""
    if job_id not in compression_manager.jobs:
        raise HTTPException(status_code=404, detail="Job not found")
    await compression_manager.cancel_job(job_id)
    return {"status": "cancelled"}


@app.get("/api/compress/events")
async def compression_events(request: Request):
    """Server-Sent Events endpoint for real-time progress updates."""
    async def event_generator():
        try:
            while True:
                # Stop pushing updates once the client disconnects.
                if await request.is_disconnected():
                    break

                # Snapshot every job that is still making progress.
                active_jobs = [
                    {
                        "job_id": job.job_id,
                        "status": job.status,
                        "progress": round(job.progress, 1),
                        "eta_seconds": job.eta_seconds,
                        "current_pass": job.current_pass,
                    }
                    for job in compression_manager.jobs.values()
                    if job.status in ("pending", "processing", "validating")
                ]

                if active_jobs:
                    yield {
                        "event": "progress",
                        "data": json.dumps(active_jobs),
                    }

                await asyncio.sleep(0.5)  # Update every 500ms
        except asyncio.CancelledError:
            pass

    return EventSourceResponse(event_generator())


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)