Fast-Whisper-MCP-Server/run_api_server.sh

#!/bin/bash
set -eo pipefail  # exit on error; keep the server's exit status through the tee pipeline below
datetime_prefix() {
    date "+[%Y-%m-%d %H:%M:%S]"
}
# Set Python path to include src directory
export PYTHONPATH="/home/uad/agents/tools/mcp-transcriptor/src:$PYTHONPATH"
# Set CUDA library path
export LD_LIBRARY_PATH="/usr/local/cuda-12.4/targets/x86_64-linux/lib:$LD_LIBRARY_PATH"
# Set proxy for model downloads
export HTTP_PROXY=http://192.168.1.212:8080
export HTTPS_PROXY=http://192.168.1.212:8080
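# Optional (sketch, not part of the original config): keep local traffic off the proxy
# so requests to the API itself are not routed through 192.168.1.212. Uncomment if needed.
# export NO_PROXY="localhost,127.0.0.1"
# export no_proxy="localhost,127.0.0.1"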
# GPU, model, and transcription defaults
export CUDA_VISIBLE_DEVICES=1
export WHISPER_MODEL_DIR="/home/uad/agents/tools/mcp-transcriptor/data/models"
export TRANSCRIPTION_OUTPUT_DIR="/media/raid/agents/tools/mcp-transcriptor/outputs"
export TRANSCRIPTION_BATCH_OUTPUT_DIR="/media/raid/agents/tools/mcp-transcriptor/outputs/batch"
export TRANSCRIPTION_MODEL="large-v3"
export TRANSCRIPTION_DEVICE="cuda"
export TRANSCRIPTION_COMPUTE_TYPE="float16"
export TRANSCRIPTION_OUTPUT_FORMAT="txt"
export TRANSCRIPTION_BEAM_SIZE="5"
export TRANSCRIPTION_TEMPERATURE="0.0"
export TRANSCRIPTION_USE_TIMESTAMP="false"
export TRANSCRIPTION_FILENAME_PREFIX=""
# API server configuration
export API_HOST="0.0.0.0"
export API_PORT="33767"
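# Example (hypothetical endpoint path; adjust to whatever api_server.py actually exposes):
# once the server is up, a quick reachability check could look like
#   curl -s "http://127.0.0.1:${API_PORT}/health"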
# GPU Auto-Reset Configuration
export GPU_RESET_COOLDOWN_MINUTES=5 # Minimum time between GPU reset attempts
# Job Queue Configuration
export JOB_QUEUE_MAX_SIZE=5
export JOB_METADATA_DIR="/media/raid/agents/tools/mcp-transcriptor/outputs/jobs"
export JOB_RETENTION_DAYS=7
# GPU Health Monitoring
export GPU_HEALTH_CHECK_ENABLED=true
export GPU_HEALTH_CHECK_INTERVAL_MINUTES=10
export GPU_HEALTH_TEST_MODEL="tiny"
# Log start of the script
echo "$(datetime_prefix) Starting Whisper REST API server..."
echo "$(datetime_prefix) Model directory: $WHISPER_MODEL_DIR"
echo "$(datetime_prefix) API server: http://$API_HOST:$API_PORT"
# Warn if the Whisper model directory is missing
if [ ! -d "$WHISPER_MODEL_DIR" ]; then
    echo "$(datetime_prefix) Warning: Whisper model directory does not exist: $WHISPER_MODEL_DIR"
    echo "$(datetime_prefix) Models will be downloaded to the default cache directory"
fi
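# Sanity check (sketch): list the GPUs the driver can see before starting
# (the server itself uses CUDA_VISIBLE_DEVICES=1).
# Assumes nvidia-smi is installed on the host; drop this block if it is not.
if command -v nvidia-smi >/dev/null 2>&1; then
    nvidia-smi --query-gpu=index,name,memory.free --format=csv,noheader ||
        echo "$(datetime_prefix) Warning: nvidia-smi query failed; GPU may be unavailable"
fi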
# Ensure output directories exist
mkdir -p "$TRANSCRIPTION_OUTPUT_DIR"
mkdir -p "$TRANSCRIPTION_BATCH_OUTPUT_DIR"
mkdir -p "$JOB_METADATA_DIR"
# Run the API server
/home/uad/agents/tools/mcp-transcriptor/venv/bin/python -u \
    /home/uad/agents/tools/mcp-transcriptor/src/servers/api_server.py 2>&1 |
    tee /home/uad/agents/tools/mcp-transcriptor/api.logs
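# Server output is mirrored to api.logs by the tee above; follow it with:
#   tail -f /home/uad/agents/tools/mcp-transcriptor/api.logs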