diff --git a/comfy/model_patcher.py b/comfy/model_patcher.py
index e8c859689897..c0b68fb8cff7 100644
--- a/comfy/model_patcher.py
+++ b/comfy/model_patcher.py
@@ -130,17 +130,21 @@ def __init__(self, key, patches, convert_func=None, set_func=None):
         self.set_func = set_func
 
     def __call__(self, weight):
+        intermediate_dtype = weight.dtype
         if self.convert_func is not None:
             weight = self.convert_func(weight.to(dtype=torch.float32, copy=True), inplace=True)
 
-        intermediate_dtype = weight.dtype
-        if self.set_func is None and intermediate_dtype not in [torch.float32, torch.float16, torch.bfloat16]: #intermediate_dtype has to be one that is supported in math ops
+        if intermediate_dtype not in [torch.float32, torch.float16, torch.bfloat16]: #intermediate_dtype has to be one that is supported in math ops
             intermediate_dtype = torch.float32
-            return comfy.float.stochastic_rounding(comfy.lora.calculate_weight(self.patches[self.key], weight.to(intermediate_dtype), self.key, intermediate_dtype=intermediate_dtype), weight.dtype, seed=string_to_seed(self.key))
+            out = comfy.lora.calculate_weight(self.patches[self.key], weight.to(intermediate_dtype), self.key, intermediate_dtype=intermediate_dtype)
+            if self.set_func is None:
+                return comfy.float.stochastic_rounding(out, weight.dtype, seed=string_to_seed(self.key))
+            else:
+                return self.set_func(out, seed=string_to_seed(self.key), return_weight=True)
 
         out = comfy.lora.calculate_weight(self.patches[self.key], weight, self.key, intermediate_dtype=intermediate_dtype)
         if self.set_func is not None:
-            return self.set_func(out, seed=string_to_seed(self.key), return_weight=True)
+            return self.set_func(out, seed=string_to_seed(self.key), return_weight=True).to(dtype=intermediate_dtype)
         else:
             return out
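
For context, a minimal self-contained sketch of the control flow this hunk produces. The `calculate_weight`, `stochastic_rounding`, and `set_func`/`convert_func` callables below are toy stand-ins (plain casts and adds), not the real `comfy.lora`/`comfy.float` implementations; only the branching mirrors the patch.

```python
import torch

def calculate_weight(patches, weight, key, intermediate_dtype):
    # Toy stand-in: apply each patch as an additive delta in intermediate_dtype.
    for delta in patches:
        weight = weight + delta.to(intermediate_dtype)
    return weight

def stochastic_rounding(t, dtype, seed=0):
    # Toy stand-in: plain cast; the real helper rounds probabilistically so
    # repeated patch/unpatch cycles do not bias low-precision weights.
    return t.to(dtype)

class LowVramPatch:
    def __init__(self, key, patches, convert_func=None, set_func=None):
        self.key = key
        self.patches = patches
        self.convert_func = convert_func
        self.set_func = set_func

    def __call__(self, weight):
        # Capture the storage dtype *before* convert_func may upcast the weight.
        intermediate_dtype = weight.dtype
        if self.convert_func is not None:
            weight = self.convert_func(weight.to(dtype=torch.float32, copy=True))

        if intermediate_dtype not in (torch.float32, torch.float16, torch.bfloat16):
            # Storage dtype unsupported for math ops: compute in fp32, then either
            # stochastically round back or let set_func requantize the result.
            intermediate_dtype = torch.float32
            out = calculate_weight(self.patches[self.key], weight.to(intermediate_dtype), self.key, intermediate_dtype)
            if self.set_func is None:
                return stochastic_rounding(out, weight.dtype, seed=hash(self.key))
            return self.set_func(out)

        out = calculate_weight(self.patches[self.key], weight, self.key, intermediate_dtype)
        if self.set_func is not None:
            return self.set_func(out).to(dtype=intermediate_dtype)
        return out

patches = {"w": [torch.full((2, 2), 0.5)]}
patch = LowVramPatch("w", patches)
print(patch(torch.zeros(2, 2, dtype=torch.float16)))  # math-friendly dtype: direct path
print(patch(torch.zeros(2, 2, dtype=torch.float64)))  # not in the list: fp32 branch
```

The behavioral change the diff makes is that `intermediate_dtype` is now read before `convert_func` runs, so the weight's original storage dtype drives the branch, and when a `set_func` is supplied it handles the write-back in both branches instead of only the math-friendly one.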