From 2a2c865770e5ffa2df75a3e8371012b406514c5c Mon Sep 17 00:00:00 2001
From: jhj0517 <97279763+jhj0517@users.noreply.github.com>
Date: Thu, 21 Nov 2024 23:39:40 +0900
Subject: [PATCH] Set conditional `local_files_only`

---
 modules/whisper/faster_whisper_inference.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/modules/whisper/faster_whisper_inference.py b/modules/whisper/faster_whisper_inference.py
index bc1e8ed..e497f9b 100644
--- a/modules/whisper/faster_whisper_inference.py
+++ b/modules/whisper/faster_whisper_inference.py
@@ -127,12 +127,21 @@ def update_model(self,
         """
         progress(0, desc="Initializing Model..")
         self.current_model_size = self.model_paths[model_size]
+
+        local_files_only = False
+        hf_prefix = "models--Systran--faster-whisper-"
+        official_model_path = os.path.join(self.model_dir, hf_prefix+model_size)
+        if ((os.path.isdir(self.current_model_size) and os.path.exists(self.current_model_size)) or
+            (model_size in faster_whisper.available_models() and os.path.exists(official_model_path))):
+            local_files_only = True
+
         self.current_compute_type = compute_type
         self.model = faster_whisper.WhisperModel(
             device=self.device,
             model_size_or_path=self.current_model_size,
             download_root=self.model_dir,
-            compute_type=self.current_compute_type
+            compute_type=self.current_compute_type,
+            local_files_only=local_files_only
         )
 
     def get_model_paths(self):
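
For reference, the conditional check introduced by this patch can be read as a standalone helper: force offline loading only when the model files are already on disk, either as a local converted model directory or as an official model snapshot cached under the Hugging Face layout (`models--Systran--faster-whisper-<size>`) inside the download root. The sketch below is a minimal reproduction of that logic outside the class, under those assumptions; the helper name `should_use_local_files_only` and the path in the usage example are illustrative and not part of the patch.

import os

import faster_whisper


def should_use_local_files_only(model_size_or_path: str, model_dir: str) -> bool:
    # Case 1: the caller passed a path to an already-converted local model
    # directory, so nothing needs to be downloaded.
    if os.path.isdir(model_size_or_path):
        return True

    # Case 2: an official faster-whisper model size whose snapshot already
    # exists in the download root under the HF cache naming scheme.
    hf_prefix = "models--Systran--faster-whisper-"
    official_model_path = os.path.join(model_dir, hf_prefix + model_size_or_path)
    if model_size_or_path in faster_whisper.available_models() and os.path.exists(official_model_path):
        return True

    # Otherwise allow faster-whisper to contact the Hugging Face Hub.
    return False


# Usage sketch (placeholder path): decide the flag before constructing
# WhisperModel, then pass it through as local_files_only=...
if __name__ == "__main__":
    print(should_use_local_files_only("large-v2", "/path/to/models"))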