Compare commits

...

4 Commits

Author SHA1 Message Date
Alihan
7db829d10c bugfix and security harden 2025-10-13 00:10:02 +03:00
Alihan
dd5fe1617a Fix 9 critical bugs: security, race conditions, precision, and UX improvements
Security fixes:
- Add filename path traversal validation (missing "/" check)
- Prevents attacks like filename="../../../etc/passwd"

Race condition and concurrency fixes:
- Add async locking to get_jobs_snapshot() to prevent dictionary iteration errors
- Fix watchdog loop to detect process completion immediately (move sleep to end)
- Fix EventSource ref updates during SSE reconnection to prevent memory leaks

Precision and calculation fixes:
- Keep duration as float instead of int for accurate bitrate calculations (~1% improvement)
- Prevents cumulative rounding errors in compression

Type safety improvements:
- Import and use Any from typing module instead of lowercase "any"
- Fixes Python type hints for proper static analysis

Media handling improvements:
- Determine MIME types dynamically using mimetypes module
- Supports MOV (video/quicktime), AVI, PNG properly instead of hardcoded types

UX fixes:
- Fix formatETA() to handle 0 seconds correctly (was showing "--" instead of "0m 0s")
- Use stable key for React video element (prevents unnecessary remounts)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-13 00:07:54 +03:00
Alihan
2acb4d9f4e Fix compression for files in root directory
- Handle __root__ special case in compression start endpoint
- Allow compression of videos not organized in date folders
2025-10-12 23:47:15 +03:00
Alihan
a7b7ad41e9 Fix video playback for files in root directory
- Handle __root__ special case in video streaming and image endpoints
- Support locations with files not organized by date folders
- Add visual indicator for root-level file collections in UI
2025-10-12 23:42:21 +03:00
7 changed files with 75 additions and 29 deletions

CLAUDE.md Normal file
View File

@@ -0,0 +1 @@
- app is running as docker container, so everytime there are need to reflect changes we have to docker compose down & build & up -d

View File

@@ -80,7 +80,7 @@ class CompressionManager:
stderr=asyncio.subprocess.PIPE
)
stdout, _ = await result.communicate()
duration = int(float(stdout.decode().strip()))
duration = float(stdout.decode().strip())
return {
'size_mb': file_size_mb,
@@ -89,7 +89,7 @@ class CompressionManager:
def calculate_bitrates(self, current_size_mb: float,
target_size_mb: float,
duration_seconds: int,
duration_seconds: float,
audio_bitrate: int = DEFAULT_AUDIO_BITRATE) -> int:
"""Calculate video bitrate based on target size"""
# Total bitrate in kbps
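
The int-to-float change above matters because the bitrate budget is the size budget divided by the duration. A minimal sketch of that arithmetic, assuming the conventional 1 MB = 8192 kilobits conversion and a 128 kbps default audio bitrate (both constants are assumptions, not confirmed by the diff):

```python
# Illustrative only: the signature mirrors the hunk above, the body is a guess.
def calculate_bitrates(current_size_mb: float,
                       target_size_mb: float,
                       duration_seconds: float,
                       audio_bitrate: int = 128) -> int:
    total_kbps = target_size_mb * 8192 / duration_seconds  # size budget -> kbps
    return max(int(total_kbps - audio_bitrate), 1)         # leave room for audio

# Truncating a 5400.9 s duration to 5400 overstates the per-second budget;
# keeping the float removes that bias from every compression job.
```
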
@@ -277,8 +277,6 @@ class CompressionManager:
"""Monitor for stuck jobs - kills process if no progress"""
nonlocal last_progress_update
while process.returncode is None:
await asyncio.sleep(WATCHDOG_CHECK_INTERVAL)
if job.status == "cancelled":
process.kill()
return
@@ -292,6 +290,8 @@ class CompressionManager:
process.kill()
raise Exception(error_msg)
await asyncio.sleep(WATCHDOG_CHECK_INTERVAL)
# Run stdout, stderr readers and watchdog concurrently to prevent deadlock
try:
await asyncio.gather(
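
Moving the sleep from the top of the watchdog loop to the bottom changes when completion is noticed. Reduced to its shape, with the job-status details elided, the fixed loop looks like this (a sketch, not the file's exact body):

```python
import asyncio

WATCHDOG_CHECK_INTERVAL = 5  # seconds; the real constant's value is not shown

async def watchdog(process: asyncio.subprocess.Process) -> None:
    while process.returncode is None:
        # Check cancellation and stalled progress first...
        #   if job.status == "cancelled": process.kill(); return
        #   if no progress for too long:  process.kill(); raise
        # ...and only then sleep, so a process that has already exited is
        # caught by the next loop test instead of one full interval late.
        await asyncio.sleep(WATCHDOG_CHECK_INTERVAL)
```
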
@@ -386,9 +386,10 @@ class CompressionManager:
for jid, _ in inactive[:num_to_remove]:
self.jobs.pop(jid, None)
def get_jobs_snapshot(self) -> list:
async def get_jobs_snapshot(self) -> list:
"""Get a safe snapshot of all jobs for iteration"""
return list(self.jobs.values())
async with self._jobs_lock:
return list(self.jobs.values())
def get_pending_count(self) -> int:
"""Get count of pending jobs"""

View File

@@ -2,7 +2,7 @@ from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse, Response
from pathlib import Path
from typing import List, Dict, Optional
from typing import List, Dict, Optional, Any
from pydantic import BaseModel
import os
from datetime import datetime
@@ -12,6 +12,7 @@ import asyncio
import json
import time
import logging
import mimetypes
from sse_starlette.sse import EventSourceResponse
from compression import CompressionManager
from filesystem_health import FilesystemHealthChecker
@@ -32,10 +33,10 @@ FOOTAGES_PATH = Path("/footages")
# Simple in-memory cache for directory listings
class SimpleCache:
def __init__(self, ttl_seconds: int = CACHE_TTL_SECONDS):
self.cache: Dict[str, tuple[float, any]] = {}
self.cache: Dict[str, tuple[float, Any]] = {}
self.ttl = ttl_seconds
def get(self, key: str) -> Optional[any]:
def get(self, key: str) -> Optional[Any]:
if key in self.cache:
timestamp, value = self.cache[key]
if time.time() - timestamp < self.ttl:
@@ -44,7 +45,7 @@ class SimpleCache:
del self.cache[key]
return None
def set(self, key: str, value: any):
def set(self, key: str, value: Any):
self.cache[key] = (time.time(), value)
def clear(self):
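
The lowercase/uppercase distinction here is easy to miss: `any` is the builtin function, so annotations like `Optional[any]` silently defeat static analysis, while `typing.Any` is the actual wildcard type. A condensed sketch of the corrected cache typing (the TTL value is assumed):

```python
import time
from typing import Any, Dict, Optional

class SimpleCache:
    def __init__(self, ttl_seconds: int = 300) -> None:  # TTL assumed
        self.cache: Dict[str, tuple[float, Any]] = {}
        self.ttl = ttl_seconds

    def get(self, key: str) -> Optional[Any]:
        entry = self.cache.get(key)
        if entry and time.time() - entry[0] < self.ttl:
            return entry[1]
        self.cache.pop(key, None)  # expired or absent
        return None

    def set(self, key: str, value: Any) -> None:
        self.cache[key] = (time.time(), value)
```
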
@@ -176,6 +177,8 @@ async def get_dates(location: str) -> List[Dict]:
raise HTTPException(status_code=404, detail="Location not found")
dates = []
has_files_in_root = False
for item in location_path.iterdir():
if item.is_dir():
stat = await aiofiles.os.stat(item)
@@ -183,6 +186,16 @@ async def get_dates(location: str) -> List[Dict]:
"name": item.name,
"modified": datetime.fromtimestamp(stat.st_mtime).isoformat()
})
elif item.is_file():
has_files_in_root = True
# If no date folders but has files in root, return special marker
if not dates and has_files_in_root:
dates.append({
"name": "__root__",
"modified": None,
"message": "📁 Files not organized by date"
})
# Cache the result
directory_cache.set(cache_key, dates)
@@ -202,7 +215,11 @@ async def get_files(location: str, date: str) -> List[Dict]:
if ".." in location or ".." in date or "/" in location or "/" in date:
raise HTTPException(status_code=400, detail="Invalid path characters")
files_path = (FOOTAGES_PATH / location / date).resolve()
# Handle special __root__ marker for locations with files in root
if date == "__root__":
files_path = (FOOTAGES_PATH / location).resolve()
else:
files_path = (FOOTAGES_PATH / location / date).resolve()
# Ensure resolved path is still within FOOTAGES_PATH
try:
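
The same `__root__` branch appears in four endpoints in this diff (file listing, video streaming, images, compression start). A hypothetical helper showing the shared pattern; `resolve_media_path` is an illustrative name, not a function in the codebase:

```python
from pathlib import Path

FOOTAGES_PATH = Path("/footages")

def resolve_media_path(location: str, date: str, filename: str = "") -> Path:
    # "__root__" is a sentinel emitted by the dates endpoint when a location
    # keeps files directly in its folder instead of date subdirectories,
    # so the date segment is simply skipped.
    base = FOOTAGES_PATH / location if date == "__root__" else FOOTAGES_PATH / location / date
    return (base / filename).resolve() if filename else base.resolve()
```
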
@@ -227,10 +244,14 @@ async def get_files(location: str, date: str) -> List[Dict]:
async def stream_video(location: str, date: str, filename: str, request: Request):
"""Stream video file with HTTP range request support for fast seeking"""
# Sanitize path components to prevent traversal
if ".." in location or ".." in date or ".." in filename or "/" in location or "/" in date:
if ".." in location or ".." in date or ".." in filename or "/" in location or "/" in date or "/" in filename:
raise HTTPException(status_code=400, detail="Invalid path characters")
file_path = (FOOTAGES_PATH / location / date / filename).resolve()
# Handle __root__ case (files not in date subdirectories)
if date == "__root__":
file_path = (FOOTAGES_PATH / location / filename).resolve()
else:
file_path = (FOOTAGES_PATH / location / date / filename).resolve()
# Ensure resolved path is still within FOOTAGES_PATH
try:
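
This is the traversal fix called out in the commit message: `filename` previously lacked the `/` check that `location` and `date` already had. A condensed sketch of the two layers now in place (the helper names and the 403 detail string are illustrative, not from the codebase; the containment check is assumed to use `relative_to`):

```python
from pathlib import Path
from fastapi import HTTPException

FOOTAGES_PATH = Path("/footages")

def check_components(*segments: str) -> None:
    # Layer 1: reject ".." and "/" in every user-supplied path segment,
    # including the previously under-checked filename.
    for seg in segments:
        if ".." in seg or "/" in seg:
            raise HTTPException(status_code=400, detail="Invalid path characters")

def confine_to_root(candidate: Path) -> Path:
    # Layer 2 (already present, kept as defense in depth): resolve symlinks
    # and "..", then require the result to sit under FOOTAGES_PATH.
    resolved = candidate.resolve()
    try:
        resolved.relative_to(FOOTAGES_PATH)
    except ValueError:
        raise HTTPException(status_code=403, detail="Access denied")
    return resolved
```
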
@@ -279,20 +300,20 @@ async def stream_video(location: str, date: str, filename: str, request: Request
"Content-Range": f"bytes {start}-{end}/{file_size}",
"Accept-Ranges": "bytes",
"Content-Length": str(content_length),
"Content-Type": "video/mp4",
"Content-Type": mimetypes.guess_type(file_path)[0] or "video/mp4",
}
return StreamingResponse(
iterfile(),
status_code=206,
headers=headers,
media_type="video/mp4"
media_type=mimetypes.guess_type(file_path)[0] or "video/mp4"
)
# No range header - return full file
return FileResponse(
file_path,
media_type="video/mp4",
media_type=mimetypes.guess_type(file_path)[0] or "video/mp4",
headers={"Accept-Ranges": "bytes"}
)
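
The hardcoded `video/mp4` was only correct for MP4s; `mimetypes.guess_type` maps by file extension using Python's builtin table, with `video/mp4` kept as the fallback for unknown extensions. Expected values from the standard table:

```python
import mimetypes

mimetypes.guess_type("clip.mov")     # ('video/quicktime', None)
mimetypes.guess_type("clip.avi")     # ('video/x-msvideo', None)
mimetypes.guess_type("frame.png")    # ('image/png', None)
mimetypes.guess_type("mystery.xyz")  # (None, None)

# The pattern used throughout the diff: first tuple element, or the old default.
media_type = mimetypes.guess_type("clip.mov")[0] or "video/mp4"
assert media_type == "video/quicktime"
```
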
@@ -301,10 +322,14 @@ async def stream_video(location: str, date: str, filename: str, request: Request
async def get_image(location: str, date: str, filename: str):
"""Serve image file"""
# Sanitize path components to prevent traversal
if ".." in location or ".." in date or ".." in filename or "/" in location or "/" in date:
if ".." in location or ".." in date or ".." in filename or "/" in location or "/" in date or "/" in filename:
raise HTTPException(status_code=400, detail="Invalid path characters")
file_path = (FOOTAGES_PATH / location / date / filename).resolve()
# Handle __root__ case (files not in date subdirectories)
if date == "__root__":
file_path = (FOOTAGES_PATH / location / filename).resolve()
else:
file_path = (FOOTAGES_PATH / location / date / filename).resolve()
# Ensure resolved path is still within FOOTAGES_PATH
try:
@@ -319,8 +344,8 @@ async def get_image(location: str, date: str, filename: str):
if file_path.suffix not in IMAGE_EXTENSIONS:
raise HTTPException(status_code=400, detail="Not an image file")
# Determine media type
media_type = "image/jpeg" if file_path.suffix.lower() in {".jpg", ".jpeg"} else "image/png"
# Determine media type dynamically
media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
return FileResponse(file_path, media_type=media_type)
@@ -340,7 +365,11 @@ async def start_compression(request: CompressionRequest):
if not 1 <= request.reduce_percentage <= 90:
raise HTTPException(status_code=400, detail="Percentage must be between 1-90")
file_path = FOOTAGES_PATH / request.location / request.date / request.filename
# Handle __root__ case (files not in date subdirectories)
if request.date == "__root__":
file_path = FOOTAGES_PATH / request.location / request.filename
else:
file_path = FOOTAGES_PATH / request.location / request.date / request.filename
if not file_path.exists():
raise HTTPException(status_code=404, detail="File not found")
@@ -358,7 +387,7 @@ async def get_all_jobs():
"""Get all compression jobs"""
jobs = []
# Use snapshot to avoid race condition during iteration
for job in compression_manager.get_jobs_snapshot():
for job in await compression_manager.get_jobs_snapshot():
jobs.append({
"job_id": job.job_id,
"file_path": job.file_path,
@@ -418,7 +447,7 @@ async def compression_events(request: Request):
# Send status of all active jobs (use snapshot to avoid race condition)
active_jobs = []
for job in compression_manager.get_jobs_snapshot():
for job in await compression_manager.get_jobs_snapshot():
if job.status in ["pending", "processing", "validating"]:
active_jobs.append({
"job_id": job.job_id,

View File

@@ -261,7 +261,7 @@ function App() {
{selectedFile.is_video ? (
<video
ref={videoRef}
key={getMediaUrl(selectedFile)}
key={selectedFile.name}
controls
preload="metadata"
className="w-full max-h-[70vh]"
@@ -409,15 +409,26 @@ function App() {
<ul className="space-y-1">
{getSortedDates().map((date) => {
const dateName = date.name || date
const isRootFiles = dateName === "__root__"
const displayName = isRootFiles ? "All Files" : dateName
const message = date.message || null
return (
<li key={dateName}>
<button
onClick={() => handleDateClick(dateName)}
className={`w-full text-left px-3 py-2 rounded hover:bg-blue-50 transition ${
className={`w-full text-left px-3 py-2 rounded transition ${
isRootFiles
? 'bg-yellow-50 border border-yellow-200 hover:bg-yellow-100'
: 'hover:bg-blue-50'
} ${
selectedDate === dateName ? 'bg-blue-100 font-semibold' : ''
}`}
>
{dateName}
<div className="font-medium">{displayName}</div>
{message && (
<div className="text-xs text-yellow-700 mt-1">{message}</div>
)}
</button>
</li>
)

View File

@@ -44,7 +44,9 @@ export function useCompressionJobs() {
eventSource.onerror = () => {
console.error('SSE connection error')
eventSource.close()
setTimeout(connectSSE, 5000) // Reconnect after 5s
setTimeout(() => {
connectSSE()
}, 5000)
}
eventSourceRef.current = eventSource

View File

@@ -53,7 +53,9 @@ export function useSystemHealth() {
console.error('System health SSE connection error')
eventSource.close()
// Reconnect after 10 seconds
setTimeout(connectSSE, 10000)
setTimeout(() => {
connectSSE()
}, 10000)
}
eventSourceRef.current = eventSource

View File

@@ -11,7 +11,7 @@ export const formatFileSize = (bytes) => {
}
export const formatETA = (seconds) => {
if (!seconds) return '--'
if (seconds == null || seconds === undefined) return '--'
const mins = Math.floor(seconds / 60)
const secs = seconds % 60
return `${mins}m ${secs}s`