From 365f3d790ae705ce7af51dc4400d45a180231652 Mon Sep 17 00:00:00 2001
From: henryruhs
Date: Tue, 30 May 2023 09:01:03 +0200
Subject: [PATCH] Introduce pre_check(), Cleanup requirements.txt

---
 core/processor.py |  5 +----
 requirements.txt  |  3 ++-
 run.py            | 40 ++++++++++++++++++++++++++++------------
 3 files changed, 31 insertions(+), 17 deletions(-)

diff --git a/core/processor.py b/core/processor.py
index d67b1cf..82d61c1 100644
--- a/core/processor.py
+++ b/core/processor.py
@@ -5,10 +5,7 @@ import core.globals
 from core.config import get_face
 from core.utils import rreplace
 
-if os.path.isfile('inswapper_128.onnx'):
-    face_swapper = insightface.model_zoo.get_model('inswapper_128.onnx', providers=core.globals.providers)
-else:
-    quit('File "inswapper_128.onnx" does not exist!')
+face_swapper = insightface.model_zoo.get_model('inswapper_128.onnx', providers=core.globals.providers)
 
 
 def process_video(source_img, frame_paths):
diff --git a/requirements.txt b/requirements.txt
index 14bb741..a38b874 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,5 +4,6 @@ onnx==1.14.0
 insightface==0.7.3
 psutil==5.9.5
 tk==0.1.0
-pillow==9.0.1
+pillow==9.5.0
 torch==2.0.1
+onnxruntime-gpu==1.15.0
\ No newline at end of file
diff --git a/run.py b/run.py
index adde8d9..525d9a5 100644
--- a/run.py
+++ b/run.py
@@ -1,19 +1,9 @@
 #!/usr/bin/env python3
 import sys
 import time
+import torch
 import shutil
 import core.globals
-
-if not shutil.which('ffmpeg'):
-    print('ffmpeg is not installed. Read the docs: https://github.com/s0md3v/roop#installation.\n' * 10)
-    quit()
-if '--gpu' not in sys.argv:
-    core.globals.providers = ['CPUExecutionProvider']
-elif 'ROCMExecutionProvider' not in core.globals.providers:
-    import torch
-    if not torch.cuda.is_available():
-        quit("You are using --gpu flag but CUDA isn't available or properly installed on your system.")
-
 import glob
 import argparse
 import multiprocessing as mp
@@ -45,12 +35,35 @@ parser.add_argument('--keep-frames', help='keep frames directory', dest='keep_fr
 for name, value in vars(parser.parse_args()).items():
     args[name] = value
 
-
 sep = "/"
 if os.name == "nt":
     sep = "\\"
 
 
+def pre_check():
+    if not shutil.which('ffmpeg'):
+        quit('ffmpeg is not installed!')
+    if not os.path.isfile('inswapper_128.onnx'):
+        quit('File "inswapper_128.onnx" does not exist!')
+    if '--gpu' in sys.argv:
+        CUDA_VERSION = torch.version.cuda
+        CUDNN_VERSION = torch.backends.cudnn.version()
+
+        if 'ROCMExecutionProvider' not in core.globals.providers:
+            if CUDA_VERSION > '11.8':
+                quit(f"CUDA version {CUDA_VERSION} is not supported - please downgrade to 11.8.")
+            if CUDA_VERSION < '11.6':
+                quit(f"CUDA version {CUDA_VERSION} is not supported - please upgrade to 11.8.")
+            if CUDNN_VERSION < 8220:
+                quit(f"CUDNN version {CUDNN_VERSION} is not supported - please upgrade to 8.9.1")
+            if CUDNN_VERSION > 8910:
+                quit(f"CUDNN version {CUDNN_VERSION} is not supported - please downgrade to 8.9.1")
+            if not torch.cuda.is_available():
+                quit("You are using --gpu flag but CUDA isn't available or properly installed on your system.")
+    else:
+        core.globals.providers = ['CPUExecutionProvider']
+
+
 def start_processing():
     start_time = time.time()
     if args['gpu']:
@@ -73,6 +86,7 @@ def start_processing():
     print(flush=True)
     print(f"Processing time: {end_time - start_time:.2f} seconds", flush=True)
 
+
 def preview_image(image_path):
     img = Image.open(image_path)
     img = img.resize((180, 180), Image.ANTIALIAS)
@@ -183,6 +197,8 @@ def start():
 
 if __name__ == "__main__":
     global status_label, window
+
+    pre_check()
     if args['source_img']:
         start()
     quit()
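
A note on the version gate added above: pre_check() compares torch.version.cuda as a raw string and assumes it is set, so lexicographic comparison can misorder versions and torch.version.cuda is None on CPU-only torch builds. The following minimal sketch shows the same gate written against parsed version tuples; the helper names parse_version and check_cuda_environment are assumptions for illustration, not part of this commit.

# Minimal sketch only (assumed helper names, not from the patch): the same CUDA/cuDNN
# gate as pre_check(), but comparing parsed version tuples instead of raw strings.
import torch


def parse_version(version):
    # '11.8' -> (11, 8); ignores non-numeric fragments such as release suffixes
    return tuple(int(part) for part in version.split('.') if part.isdigit())


def check_cuda_environment():
    cuda_version = torch.version.cuda                # None on CPU-only torch builds
    cudnn_version = torch.backends.cudnn.version()   # e.g. 8902, or None
    if cuda_version is None or not torch.cuda.is_available():
        quit("You are using --gpu flag but CUDA isn't available or properly installed on your system.")
    if not ((11, 6) <= parse_version(cuda_version) <= (11, 8)):
        quit(f"CUDA version {cuda_version} is not supported - please install 11.6-11.8.")
    if cudnn_version is None or not (8220 <= cudnn_version <= 8910):
        quit(f"CUDNN version {cudnn_version} is not supported - please install 8.9.1.")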