From f7d26a2325f20bca1effbd30c658a8399be347f5 Mon Sep 17 00:00:00 2001
From: tcsenpai
Date: Fri, 23 May 2025 11:39:45 +0200
Subject: [PATCH] updated reqs for whisper and cuda

---
 app.py           | 51 ++++++++++++++++++++++++++++++++++++++++++++++++
 requirements.txt | 12 ++++++------
 2 files changed, 57 insertions(+), 6 deletions(-)

diff --git a/app.py b/app.py
index da38461..da62849 100644
--- a/app.py
+++ b/app.py
@@ -7,6 +7,8 @@ from typing import List, Tuple, Optional
 import youtube_handler as yt
 from ollama_handler import OllamaHandler
 import logging
+import subprocess
+import sys
 
 # Configure logging
 logging.basicConfig(
@@ -487,8 +489,57 @@ def create_interface():
     return app
 
 
+def check_cuda_compatibility():
+    """Check if the current CUDA setup is compatible with Whisper."""
+    logger.info("Checking CUDA compatibility...")
+
+    # Check PyTorch CUDA
+    if not torch.cuda.is_available():
+        logger.warning("CUDA is not available in PyTorch")
+        return False
+
+    cuda_version = torch.version.cuda
+    cudnn_version = torch.backends.cudnn.version()
+    device_name = torch.cuda.get_device_name(0)
+
+    logger.info(f"CUDA Version: {cuda_version}")
+    logger.info(f"cuDNN Version: {cudnn_version}")
+    logger.info(f"GPU Device: {device_name}")
+
+    # Check CUDA version
+    try:
+        cuda_major = int(cuda_version.split(".")[0])
+        if cuda_major > 11:
+            logger.warning(
+                f"CUDA {cuda_version} might not be fully compatible with Whisper. Recommended: CUDA 11.x"
+            )
+            logger.info(
+                "Consider creating a new environment with CUDA 11.x if you encounter issues"
+            )
+    except Exception as e:
+        logger.error(f"Error parsing CUDA version: {str(e)}")
+
+    # Check if faster-whisper is installed
+    try:
+        import faster_whisper
+
+        logger.info(f"faster-whisper version: {faster_whisper.__version__}")
+    except ImportError:
+        logger.error("faster-whisper is not installed")
+        return False
+
+    return True
+
+
 if __name__ == "__main__":
     logger.info("Starting Whisper Transcription Web App")
+
+    # Check CUDA compatibility before starting
+    if not check_cuda_compatibility():
+        logger.warning(
+            "CUDA compatibility check failed. The application might not work as expected."
+        )
+
     logger.info(f"Server will be available at http://{SERVER_NAME}:{SERVER_PORT}")
     app = create_interface()
     app.launch(share=SHARE, server_name=SERVER_NAME, server_port=SERVER_PORT)
diff --git a/requirements.txt b/requirements.txt
index 2fc964b..b3b0c24 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,9 +1,9 @@
 gradio>=4.0.0
 faster-whisper>=0.9.0
+torch>=2.0.0,<2.1.0
+torchvision>=0.15.0,<0.16.0
+torchaudio>=2.0.0,<2.1.0
+yt-dlp>=2023.0.0
 python-dotenv>=1.0.0
-torch>=2.0.0
-torchaudio>=2.0.0
-yt-dlp>=2023.12.30
-pytube>=15.0.0
-requests>=2.31.0
-ollama>=0.3.0
\ No newline at end of file
+requests>=2.31.0
+ollama>=0.1.0
\ No newline at end of file