AMD graphics card support

This commit is contained in:
Yaroslav P
2025-03-05 16:23:10 +02:00
parent 56a925463f
commit 049a168c81

View File

@@ -2,7 +2,7 @@ import os
 import datetime
 from glob import glob
 import whisper
-from torch import cuda, Generator
+from torch import backends, cuda, Generator
 import colorama
 from colorama import Back,Fore
 colorama.init(autoreset=True)
@@ -39,12 +39,15 @@ def transcribe(path, glob_file, model=None, language=None, verbose=False):
     - The transcribed text files will be saved in a "transcriptions" folder
       within the specified path.
     """
     # Check for GPU acceleration
-    if cuda.is_available():
+    if backends.mps.is_available():
+        Generator('mps').manual_seed(42)
+    elif cuda.is_available():
         Generator('cuda').manual_seed(42)
     else:
         Generator().manual_seed(42)
     # Load model
     model = whisper.load_model(model)
     # Start main loop