my-sd/modules_forge/initialization.py

import os
import sys
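
# Debug switch: when MONITOR_MODEL_MOVING is True, monitor_module_moving() wraps
# torch.nn.Module.to so every model move between devices prints a stack trace.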
MONITOR_MODEL_MOVING = False


def monitor_module_moving():
    if not MONITOR_MODEL_MOVING:
        return

    import torch
    import traceback

    old_to = torch.nn.Module.to

    def new_to(*args, **kwargs):
        traceback.print_stack()
        print('Model Movement')
        return old_to(*args, **kwargs)

    torch.nn.Module.to = new_to
    return


def initialize_forge():
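    # These legacy memory-management flags were removed in Forge; warn users who still pass them.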
    bad_list = ['--lowvram', '--medvram', '--medvram-sdxl']

    for bad in bad_list:
        if bad in sys.argv:
            print(f'Arg {bad} is removed in Forge.')
            print(f'Now memory management is fully automatic and you do not need any command flags.')
            print(f'Please just remove this flag.')
            print(f'In extreme cases, if you want to force previous lowvram/medvram behaviors, '
                  f'please use --always-offload-from-vram')

    from ldm_patched.modules import args_parser

    args_parser.args, _ = args_parser.parser.parse_known_args()

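    # Pin the process to the requested GPU (if any) by limiting CUDA device visibility.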
    if args_parser.args.gpu_device_id is not None:
        os.environ['CUDA_VISIBLE_DEVICES'] = str(args_parser.args.gpu_device_id)
        print("Set device to:", args_parser.args.gpu_device_id)

    import ldm_patched.modules.model_management as model_management
    import torch

    monitor_module_moving()

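    # Move a tiny tensor to the torch device (forcing device initialization), then empty the cache.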
    device = model_management.get_torch_device()
    torch.zeros((1, 1)).to(device, torch.float32)
    model_management.soft_empty_cache()

    import modules_forge.patch_basic
    modules_forge.patch_basic.patch_all_basics()

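    # Point every Hugging Face cache location at Forge's diffusers directory
    # unless the user has already configured one via environment variables.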
    from modules_forge.shared import diffusers_dir

    if 'TRANSFORMERS_CACHE' not in os.environ:
        os.environ['TRANSFORMERS_CACHE'] = diffusers_dir

    if 'HF_HOME' not in os.environ:
        os.environ['HF_HOME'] = diffusers_dir

    if 'HF_DATASETS_CACHE' not in os.environ:
        os.environ['HF_DATASETS_CACHE'] = diffusers_dir

    if 'HUGGINGFACE_HUB_CACHE' not in os.environ:
        os.environ['HUGGINGFACE_HUB_CACHE'] = diffusers_dir

    if 'HUGGINGFACE_ASSETS_CACHE' not in os.environ:
        os.environ['HUGGINGFACE_ASSETS_CACHE'] = diffusers_dir

    if 'HF_HUB_CACHE' not in os.environ:
        os.environ['HF_HUB_CACHE'] = diffusers_dir

    return