Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
77 changes: 77 additions & 0 deletions install.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Install requirements for WD14-tagger."""
import os
import sys
import subprocess

from launch import run # pylint: disable=import-error

Expand All @@ -11,3 +12,79 @@
run(f'"{sys.executable}" -m pip install -q -r "{req_file}"',
f"Checking {NAME} requirements.",
f"Couldn't install {NAME} requirements.")

def check_rocm_version():
    """Return the installed HIP/ROCm version string (e.g. '6.3.1'), or None.

    First runs `rocminfo` to confirm a usable ROCm runtime, then parses the
    'HIP version:' line from `hipcc --version`. Prints a notice and returns
    None when ROCm is missing or the version cannot be determined.
    """
    try:
        # Non-zero exit from rocminfo means the ROCm runtime is unusable.
        if subprocess.run(['rocminfo'], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, text=True).returncode != 0:
            print("ROCm is not installed.")
            return None
        result = subprocess.run(['hipcc', '--version'], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, text=True)
        for line in result.stdout.split('\n'):
            if 'HIP version:' in line:
                # Version is the last whitespace-separated token on the line.
                return line.split()[-1]
        # rocminfo succeeded but hipcc output had no version line: previously
        # this fell through silently; report it explicitly.
        print("Could not determine the HIP version from hipcc output.")
        return None
    except FileNotFoundError:
        # rocminfo and/or hipcc binary not on PATH.
        print("ROCm is not installed.")
        return None

def install_tensorflow_rocm(version):
    """Install tensorflow-rocm and onnxruntime-rocm wheels matching *version*.

    version: ROCm/HIP version string such as '6.2.4'.
    Returns 1 when the ROCm series is supported (install attempted),
    0 when it is unsupported so the caller can fall back to CPU wheels.
    """
    # Wheels published on repo.radeon.com, keyed by ROCm release series:
    # series prefix -> (tf_version, ort_version, tf_wheel_url, ort_wheel_url).
    wheels = {
        '6.3.': ('2.17.0', '1.19.0',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.3/tensorflow_rocm-2.17.0-cp310-cp310-manylinux_2_28_x86_64.whl',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.3/onnxruntime_rocm-1.19.0-cp310-cp310-linux_x86_64.whl'),
        '6.2.': ('2.16.1', '1.18.0',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.2/tensorflow_rocm-2.16.1-cp310-cp310-manylinux_2_28_x86_64.whl',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.2/onnxruntime_rocm-1.18.0-cp310-cp310-linux_x86_64.whl'),
        '6.1.': ('2.15.0', '1.17.0',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.1/tensorflow_rocm-2.15.0-cp310-cp310-manylinux2014_x86_64.whl',
                 'https://repo.radeon.com/rocm/manylinux/rocm-rel-6.1/onnxruntime_rocm-inference-1.17.0-cp310-cp310-linux_x86_64.whl'),
    }
    for series, (tf_version, ort_version, tf_url, ort_url) in wheels.items():
        if version.startswith(series):
            # Message fixed: the original repeated {tf_version} where the
            # onnxruntime version was meant.
            run(f'"{sys.executable}" -m pip install {tf_url} {ort_url}',
                f"Installing tensorflow-rocm {tf_version} and onnxruntime-rocm {ort_version} for ROCm {version}.",
                f"Couldn't install tensorflow-rocm {tf_version} or onnxruntime-rocm {ort_version}.")
            return 1
    if version.startswith('6.0.'):
        # onnxruntime-rocm has no wheel for ROCm 6.0.x: use CPU onnxruntime.
        tf_version = '2.14.0'
        run(f'"{sys.executable}" -m pip install tensorflow-rocm=={tf_version} onnxruntime',
            f"Installing tensorflow-rocm {tf_version} for ROCm {version} and onnxruntime for CPU, because onnxruntime-rocm doesn't support ROCm 6.0.x.",
            f"Couldn't install tensorflow-rocm {tf_version} or onnxruntime.")
        return 1
    print(f"Unsupported ROCm version: {version}. Please upgrade to ROCm 6.0.x, 6.1.x, 6.2.x, or 6.3.x")
    return 0

# Check if the system has an AMD GPU and ROCm installed.
# lspci is Linux-only: treat a missing binary or OS error as "no AMD GPU"
# instead of crashing the whole install (original raised FileNotFoundError
# on Windows/macOS).
try:
    has_amd_gpu = 'Radeon' in subprocess.run(
        ['lspci'], stdout=subprocess.PIPE, text=True).stdout
except (FileNotFoundError, OSError):
    has_amd_gpu = False

if has_amd_gpu:
    rocm_version = check_rocm_version()
    if rocm_version:
        if install_tensorflow_rocm(rocm_version) == 0:
            # Unsupported ROCm series: fall back to CPU wheels.
            run(f'"{sys.executable}" -m pip install tensorflow onnxruntime',
                "Now install tensorflow for CPU instead.",
                "Couldn't install tensorflow.")
        else:
            print("Successfully installed tensorflow-rocm.")
    else:
        print("Please install ROCm to use AMD GPUs if you want to use GPU acceleration.")
        # ROCm absent: fall back to CPU wheels.
        run(f'"{sys.executable}" -m pip install tensorflow onnxruntime',
            "Now install tensorflow for CPU instead.",
            "Couldn't install tensorflow.")
else:
    # Default path (NVIDIA or no discrete GPU): CUDA-enabled wheels.
    run(f'"{sys.executable}" -m pip install tensorflow onnxruntime-gpu',
        "Installing tensorflow for CUDA.",
        "Couldn't install tensorflow.")
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,4 @@ opencv_python_headless
packaging
pandas
Pillow
tensorflow
tqdm
18 changes: 15 additions & 3 deletions tagger/interrogator.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
from pathlib import Path
import io
import json
import re
import subprocess
import inspect
from re import match as re_match
from platform import system, uname
Expand All @@ -27,21 +29,29 @@

# https://onnxruntime.ai/docs/execution-providers/
# https://github.com/toriato/stable-diffusion-webui-wd14-tagger/commit/e4ec460122cf674bbf984df30cdb10b4370c1224#r92654958
# Provider preference for onnxruntime; CPU stays last as the fallback.
# (Diff residue removed: the page showed both the old two-provider line and
# the new three-provider line — only the updated list is kept.)
onnxrt_providers = ['CUDAExecutionProvider', 'ROCMExecutionProvider', 'CPUExecutionProvider']

# Select the TF device and trim the onnxruntime provider list.
# (Diff residue removed: the page showed both the old `pop(0)` lines and the
# new `[2:]` lines with no +/- markers; applying both would also drop the
# CPU provider. Only the updated `[2:]` form is kept.)
if shared.cmd_opts.additional_device_ids is not None:
    # --device-id must look like cpu:<nr> or gpu:<nr>.
    m = re_match(r'([cg])pu:\d+$', shared.cmd_opts.additional_device_ids)
    if m is None:
        raise ValueError('--device-id is not cpu:<nr> or gpu:<nr>')
    if m.group(1) == 'c':
        # CPU forced: keep only the CPU provider (drop CUDA and ROCm).
        onnxrt_providers = onnxrt_providers[2:]
    TF_DEVICE_NAME = f'/{shared.cmd_opts.additional_device_ids}'
elif use_cpu:
    TF_DEVICE_NAME = '/cpu:0'
    onnxrt_providers = onnxrt_providers[2:]
else:
    TF_DEVICE_NAME = '/gpu:0'

# Check ROCm installation: when a working ROCm runtime is present, prefer
# the ROCm execution provider over CUDA.
try:
    rocm_check = subprocess.run(['rocminfo'], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE).returncode
except FileNotFoundError:
    # rocminfo not on PATH -> no ROCm.
    rocm_check = 1
if rocm_check == 0:
    # Remove CUDA by name rather than pop(0): when an earlier branch has
    # already trimmed the list to CPU-only, pop(0) would wrongly remove the
    # CPU provider and leave onnxruntime with no provider at all.
    if 'CUDAExecutionProvider' in onnxrt_providers:
        onnxrt_providers.remove('CUDAExecutionProvider')

print(f'== WD14 tagger {TF_DEVICE_NAME}, {uname()} ==')


Expand Down Expand Up @@ -347,6 +357,8 @@ def get_onnxrt():
if not is_installed('onnxruntime'):
if system() == "Darwin":
package_name = "onnxruntime-silicon"
elif rocm_check == 0:
package_name = "https://repo.radeon.com/rocm/manylinux/rocm-rel-6.3/onnxruntime_rocm-1.19.0-cp310-cp310-linux_x86_64.whl"
else:
package_name = "onnxruntime-gpu"
package = os.environ.get(
Expand Down