|
|
|
# NOTE(review): this chunk arrived as a whitespace-mangled unified-diff hunk
# ("@@ -12,15 +12,22 @@"); the code below is the reconstructed post-patch
# state. The hunk's superseded pre-patch copy of the low-vram check (which
# tested sys.argv inline and ran before total_ram existed) has been dropped
# in favor of the replacement version that also auto-enables highvram.

total_vram = 0
total_vram_available_mb = -1

import sys

import psutil

# Default VRAM strategy; NORMAL_VRAM / LOW_VRAM / HIGH_VRAM are declared
# earlier in this module (outside this chunk) — TODO confirm.
set_vram_to = NORMAL_VRAM

try:
    import torch

    # mem_get_info() returns (free, total); [1] is total device memory of
    # the current CUDA device, converted to MiB.
    total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
    # Total system RAM, in MiB.
    total_ram = psutil.virtual_memory().total / (1024 * 1024)

    # --normalvram on the command line disables both automatic overrides.
    forced_normal_vram = "--normalvram" in sys.argv
    if not forced_normal_vram:
        if total_vram <= 4096:
            print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. If you don't want this use: --normalvram")
            set_vram_to = LOW_VRAM
        # Only auto-enable highvram on genuinely large cards (>14336 MiB)
        # whose VRAM clearly exceeds system RAM (20% margin).
        elif total_vram > total_ram * 1.2 and total_vram > 14336:
            print("Enabling highvram mode because your GPU has more vram than your computer has ram. If you don't want this use: --normalvram")
            # NOTE(review): this branch writes vram_state directly while the
            # low-vram branch writes set_vram_to — presumably intentional,
            # verify against the code following this chunk.
            vram_state = HIGH_VRAM
except Exception:
    # Best-effort probe: torch may be missing or CUDA unavailable at import
    # time, in which case the defaults above stand. Narrowed from a bare
    # `except:` so KeyboardInterrupt/SystemExit still propagate.
    pass
|
|
|
|
|
|
|
|
|
|