Introduce pre_check(); clean up requirements.txt

This commit is contained in:
henryruhs 2023-05-30 09:01:03 +02:00
parent 1d17cf1307
commit 365f3d790a
3 changed files with 31 additions and 17 deletions

View File

@ -5,10 +5,7 @@ import core.globals
from core.config import get_face
from core.utils import rreplace
if os.path.isfile('inswapper_128.onnx'):
face_swapper = insightface.model_zoo.get_model('inswapper_128.onnx', providers=core.globals.providers)
else:
quit('File "inswapper_128.onnx" does not exist!')
face_swapper = insightface.model_zoo.get_model('inswapper_128.onnx', providers=core.globals.providers)
def process_video(source_img, frame_paths):

View File

@ -4,5 +4,6 @@ onnx==1.14.0
insightface==0.7.3
psutil==5.9.5
tk==0.1.0
pillow==9.0.1
pillow==9.5.0
torch==2.0.1
onnxruntime-gpu==1.15.0

40
run.py
View File

@ -1,19 +1,9 @@
#!/usr/bin/env python3
import sys
import time
import torch
import shutil
import core.globals
if not shutil.which('ffmpeg'):
print('ffmpeg is not installed. Read the docs: https://github.com/s0md3v/roop#installation.\n' * 10)
quit()
if '--gpu' not in sys.argv:
core.globals.providers = ['CPUExecutionProvider']
elif 'ROCMExecutionProvider' not in core.globals.providers:
import torch
if not torch.cuda.is_available():
quit("You are using --gpu flag but CUDA isn't available or properly installed on your system.")
import glob
import argparse
import multiprocessing as mp
@ -45,12 +35,35 @@ parser.add_argument('--keep-frames', help='keep frames directory', dest='keep_fr
for name, value in vars(parser.parse_args()).items():
args[name] = value
sep = "/"
if os.name == "nt":
sep = "\\"
def pre_check():
    """Validate runtime prerequisites before any processing starts.

    Terminates the program (via quit) with a readable message when ffmpeg,
    the face-swap model file, or — when --gpu is requested — a supported
    CUDA/cuDNN stack is missing. On a CPU run, forces the CPU execution
    provider so onnxruntime never tries to use CUDA.
    """
    if not shutil.which('ffmpeg'):
        quit('ffmpeg is not installed!')
    # BUGFIX: the original condition was inverted — it quit when the model
    # file DID exist. The path is also 'inswapper_128.onnx' (not
    # '../inswapper_128.onnx'), matching the check in core/swapper.py.
    if not os.path.isfile('inswapper_128.onnx'):
        quit('File "inswapper_128.onnx" does not exist!')
    if '--gpu' in sys.argv:
        # ROCm builds report their own provider; the CUDA/cuDNN checks
        # below only apply to NVIDIA stacks.
        if 'ROCMExecutionProvider' not in core.globals.providers:
            # Check availability FIRST: on CPU-only torch builds
            # torch.version.cuda is None and the version comparisons below
            # would raise TypeError instead of printing a useful message.
            if not torch.cuda.is_available():
                quit("You are using --gpu flag but CUDA isn't available or properly installed on your system.")
            CUDA_VERSION = torch.version.cuda
            CUDNN_VERSION = torch.backends.cudnn.version()
            # BUGFIX: compare versions numerically; the original string
            # comparison misorders e.g. '11.10' vs '11.8'.
            cuda_tuple = tuple(int(part) for part in CUDA_VERSION.split('.'))
            if cuda_tuple > (11, 8):
                quit(f"CUDA version {CUDA_VERSION} is not supported - please downgrade to 11.8.")
            if cuda_tuple < (11, 6):
                quit(f"CUDA version {CUDA_VERSION} is not supported - please upgrade to 11.8.")
            # cudnn.version() returns an int (e.g. 8902 for 8.9.2).
            if CUDNN_VERSION < 8220:
                quit(f"CUDNN version {CUDNN_VERSION} is not supported - please upgrade to 8.9.1")
            if CUDNN_VERSION > 8910:
                quit(f"CUDNN version {CUDNN_VERSION} is not supported - please downgrade to 8.9.1")
    else:
        core.globals.providers = ['CPUExecutionProvider']
def start_processing():
start_time = time.time()
if args['gpu']:
@ -73,6 +86,7 @@ def start_processing():
print(flush=True)
print(f"Processing time: {end_time - start_time:.2f} seconds", flush=True)
def preview_image(image_path):
img = Image.open(image_path)
img = img.resize((180, 180), Image.ANTIALIAS)
@ -183,6 +197,8 @@ def start():
if __name__ == "__main__":
global status_label, window
pre_check()
if args['source_img']:
start()
quit()