Revert "safer device"

This reverts commit 1204d490d9.
lllyasviel 2024-02-06 05:27:07 -08:00
parent 1204d490d9
commit 7359740f36


@@ -6,10 +6,7 @@ from modules.shared import opts
 
 class CLIP_SD_15_L(FrozenCLIPEmbedderWithCustomWords):
     def encode_with_transformers(self, tokens):
-        model_management.load_model_gpu(self.forge_objects.clip.patcher)
-        current_device = self.wrapped.transformer.text_model.embeddings.token_embedding.weight.device
-        tokens = tokens.to(current_device)
         self.wrapped.transformer.text_model.embeddings.to(tokens.device)
         outputs = self.wrapped.transformer(input_ids=tokens, output_hidden_states=-opts.CLIP_stop_at_last_layers)
 
         if opts.CLIP_stop_at_last_layers > 1:
@@ -35,10 +32,7 @@ class CLIP_SD_21_H(FrozenCLIPEmbedderWithCustomWords):
 
     def encode_with_transformers(self, tokens):
-        model_management.load_model_gpu(self.forge_objects.clip.patcher)
-        current_device = self.wrapped.transformer.text_model.embeddings.token_embedding.weight.device
-        tokens = tokens.to(current_device)
         self.wrapped.transformer.text_model.embeddings.to(tokens.device)
         outputs = self.wrapped.transformer(tokens, output_hidden_states=self.wrapped.layer == "hidden")
 
         if self.wrapped.layer == "last":
@@ -55,9 +49,7 @@ class CLIP_SD_XL_L(FrozenCLIPEmbedderWithCustomWords):
         super().__init__(wrapped, hijack)
 
     def encode_with_transformers(self, tokens):
-        current_device = self.wrapped.transformer.text_model.embeddings.token_embedding.weight.device
-        tokens = tokens.to(current_device)
         self.wrapped.transformer.text_model.embeddings.to(tokens.device)
         outputs = self.wrapped.transformer(tokens, output_hidden_states=self.wrapped.layer == "hidden")
 
         if self.wrapped.layer == "last":
@@ -81,9 +73,7 @@ class CLIP_SD_XL_G(FrozenCLIPEmbedderWithCustomWords):
         self.id_pad = 0
 
     def encode_with_transformers(self, tokens):
-        current_device = self.wrapped.transformer.text_model.embeddings.token_embedding.weight.device
-        tokens = tokens.to(current_device)
         self.wrapped.transformer.text_model.embeddings.to(tokens.device)
         outputs = self.wrapped.transformer(tokens, output_hidden_states=self.wrapped.layer == "hidden")
 
         if self.wrapped.layer == "last":
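For context, all four hunks undo the same device-handling change: the reverted "safer device" code read the device off the token-embedding weight and moved the token ids onto it, while the restored code moves the embedding module to the tokens' device before running the transformer. A minimal sketch of the two patterns, using a toy nn.Embedding as a stand-in for text_model.embeddings (names and sizes here are illustrative, not Forge's actual objects):

import torch
import torch.nn as nn

# Toy stand-ins: a small embedding table in the role of text_model.embeddings
# and a batch of token ids. Sizes are arbitrary; this is not Forge code.
embeddings = nn.Embedding(1000, 64)        # weights live wherever the module is
tokens = torch.randint(0, 1000, (1, 77))   # token ids, possibly on another device

# Reverted "safer device" pattern: move the *tokens* to the device the
# embedding weights already live on.
current_device = embeddings.weight.device
out_a = embeddings(tokens.to(current_device))

# Restored pattern: move the *embedding module* to the tokens' device.
embeddings.to(tokens.device)
out_b = embeddings(tokens)

Moving the token ids is the cheaper transfer (a small integer tensor rather than the embedding weights), while moving the module keeps the lookup wherever the tokens were prepared; the commit message does not say which trade-off motivated the revert.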