my-sd/modules/xpu_specific.py

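"""Intel XPU support helpers (Intel GPUs via intel_extension_for_pytorch).

Detects whether IPEX and an XPU device are available and, if so, patches a few
torch entry points through CondFunc to work around known XPU limitations.
"""
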
from modules import shared
from modules.sd_hijack_utils import CondFunc

has_ipex = False
try:
    import torch
    import intel_extension_for_pytorch as ipex # noqa: F401
    has_ipex = True
except Exception:
    pass
def check_for_xpu():
    return has_ipex and hasattr(torch, 'xpu') and torch.xpu.is_available()


def get_xpu_device_string():
    if shared.cmd_opts.device_id is not None:
        return f"xpu:{shared.cmd_opts.device_id}"
    return "xpu"


def torch_xpu_gc():
    with torch.xpu.device(get_xpu_device_string()):
        torch.xpu.empty_cache()


has_xpu = check_for_xpu()
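
# Note: CondFunc(path, sub_func, cond_func) from modules.sd_hijack_utils patches the
# callable named by `path`; when cond_func(orig_func, *args, **kwargs) is truthy the
# call is routed to sub_func (which receives the original function as its first
# argument), otherwise the original implementation runs unchanged.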
if has_xpu:
    # W/A for https://github.com/intel/intel-extension-for-pytorch/issues/452: torch.Generator API doesn't support XPU device
    CondFunc('torch.Generator',
        lambda orig_func, device=None: torch.xpu.Generator(device),
        lambda orig_func, device=None: device is not None and device.type == "xpu")
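    # With the hijack above, torch.Generator(device) for a device whose type is "xpu"
    # returns a torch.xpu.Generator instead of failing inside the stock torch API.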

    # W/A for some OPs that could not handle different input dtypes
    CondFunc('torch.nn.functional.layer_norm',
        lambda orig_func, input, normalized_shape=None, weight=None, *args, **kwargs:
        orig_func(input.to(weight.data.dtype), normalized_shape, weight, *args, **kwargs),
        lambda orig_func, input, normalized_shape=None, weight=None, *args, **kwargs:
        weight is not None and input.dtype != weight.data.dtype)
    CondFunc('torch.nn.modules.GroupNorm.forward',
        lambda orig_func, self, input: orig_func(self, input.to(self.weight.data.dtype)),
        lambda orig_func, self, input: input.dtype != self.weight.data.dtype)
    CondFunc('torch.nn.modules.linear.Linear.forward',
        lambda orig_func, self, input: orig_func(self, input.to(self.weight.data.dtype)),
        lambda orig_func, self, input: input.dtype != self.weight.data.dtype)
    CondFunc('torch.nn.modules.conv.Conv2d.forward',
        lambda orig_func, self, input: orig_func(self, input.to(self.weight.data.dtype)),
        lambda orig_func, self, input: input.dtype != self.weight.data.dtype)
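
# Illustrative usage sketch (not part of the original module): callers elsewhere in
# the webui are expected to consult `has_xpu` and call `torch_xpu_gc()` when they want
# to release cached XPU memory, roughly like:
#
#     from modules import xpu_specific
#
#     if xpu_specific.has_xpu:
#         print("running on", xpu_specific.get_xpu_device_string())
#         xpu_specific.torch_xpu_gc()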