Error when trying to load LoRA
Expected Behavior
The workflow finishes.
Actual Behavior
An error occurs. CLIP Text Encode also takes a very long time before the error pops up.
Steps to Reproduce
Just add a Load LoRA node to the workflow.
Debug Logs
Error occurred when executing KSampler:
ModelPatcher.calculate_weight() got an unexpected keyword argument 'intermediate_dtype'
File "D:\ComfyUI\execution.py", line 316, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\ComfyUI\execution.py", line 191, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\ComfyUI\execution.py", line 168, in _map_node_over_list
process_inputs(input_dict, i)
File "D:\ComfyUI\execution.py", line 157, in process_inputs
results.append(getattr(obj, func)(**inputs))
File "D:\ComfyUI\nodes.py", line 1429, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
File "D:\ComfyUI\nodes.py", line 1396, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
File "D:\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
File "D:\ComfyUI\custom_nodes\ComfyUI-Advanced-ControlNet\adv_control\sampling.py", line 116, in acn_sample
return orig_comfy_sample(model, *args, **kwargs)
File "D:\ComfyUI\custom_nodes\ComfyUI-Advanced-ControlNet\adv_control\utils.py", line 116, in uncond_multiplier_check_cn_sample
return orig_comfy_sample(model, *args, **kwargs)
File "D:\ComfyUI\comfy\sample.py", line 43, in sample
samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "D:\ComfyUI\comfy\samplers.py", line 829, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "D:\ComfyUI\comfy\samplers.py", line 729, in sample
return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
File "D:\ComfyUI\comfy\samplers.py", line 716, in sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
File "D:\ComfyUI\comfy\samplers.py", line 695, in inner_sample
samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
File "D:\ComfyUI\comfy\samplers.py", line 600, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "D:\ComfyUI\comfy\k_diffusion\sampling.py", line 161, in sample_euler_ancestral
denoised = model(x, sigmas[i] * s_in, **extra_args)
File "D:\ComfyUI\comfy\samplers.py", line 299, in __call__
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
File "D:\ComfyUI\comfy\samplers.py", line 682, in __call__
return self.predict_noise(*args, **kwargs)
File "D:\ComfyUI\comfy\samplers.py", line 685, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
File "D:\ComfyUI\comfy\samplers.py", line 279, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
File "D:\ComfyUI\comfy\samplers.py", line 228, in calc_cond_batch
output = model.apply_model(input_x, timestep_, **c).chunk(batch_chunks)
File "D:\ComfyUI\custom_nodes\ComfyUI-Advanced-ControlNet\adv_control\utils.py", line 68, in apply_model_uncond_cleanup_wrapper
return orig_apply_model(self, *args, **kwargs)
File "D:\ComfyUI\comfy\model_base.py", line 145, in apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "D:\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 887, in forward
h = forward_timestep_embed(module, h, emb, context, transformer_options, output_shape, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)
File "D:\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed
x = layer(x, context, transformer_options)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 694, in forward
x = block(x, context=context[i], transformer_options=transformer_options)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "D:\ComfyUI\custom_nodes\ComfyUI-layerdiffuse\lib_layerdiffusion\attention_sharing.py", line 253, in forward
return func(self, x, context, transformer_options)
File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 621, in forward
n = self.attn2(n, context=context_attn2, value=value_attn2)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 472, in forward
v = self.to_v(context)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\stasd\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "D:\ComfyUI\comfy\ops.py", line 65, in forward
return self.forward_comfy_cast_weights(*args, **kwargs)
File "D:\ComfyUI\comfy\ops.py", line 60, in forward_comfy_cast_weights
weight, bias = cast_bias_weight(self, input)
File "D:\ComfyUI\comfy\ops.py", line 46, in cast_bias_weight
weight = s.weight_function(weight)
File "D:\ComfyUI\comfy\model_patcher.py", line 99, in __call__
return self.model_patcher.calculate_weight(self.model_patcher.patches[self.key], weight, self.key, intermediate_dtype=weight.dtype)
Other
No response
Same error here after update.
Update to the latest version, disable all custom nodes, and test it.
If you still encounter the same error, please attach the comfyui.log file.
Yeah, it's broken. Just updated and tested again.
same
I have no problem loading the official LoRA, but the following error occurs when loading a LoRA from Civitai.
!!! Exception during processing !!! ModelPatcher.calculate_weight() got an unexpected keyword argument 'intermediate_dtype'
Traceback (most recent call last):
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\execution.py", line 316, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\execution.py", line 191, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\execution.py", line 168, in _map_node_over_list
process_inputs(input_dict, i)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\execution.py", line 157, in process_inputs
results.append(getattr(obj, func)(**inputs))
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy_extras\nodes_custom_sampler.py", line 612, in sample
samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 716, in sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 695, in inner_sample
samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 600, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\utils\_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 144, in sample_euler
denoised = model(x, sigma_hat * s_in, **extra_args)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 299, in __call__
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 682, in __call__
return self.predict_noise(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 685, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 279, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\samplers.py", line 228, in calc_cond_batch
output = model.apply_model(input_x, timestep_, **c).chunk(batch_chunks)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\model_base.py", line 142, in apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ldm\flux\model.py", line 159, in forward
out = self.forward_orig(img, img_ids, context, txt_ids, timestep, y, guidance, control)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ldm\flux\model.py", line 130, in forward_orig
img = block(img, vec=vec, pe=pe)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ldm\flux\layers.py", line 223, in forward
mod, _ = self.modulation(vec)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ldm\flux\layers.py", line 110, in forward
out = self.lin(nn.functional.silu(vec))[:, None, :].chunk(self.multiplier, dim=-1)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ops.py", line 67, in forward
return self.forward_comfy_cast_weights(*args, **kwargs)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ops.py", line 62, in forward_comfy_cast_weights
weight, bias = cast_bias_weight(self, input)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\ops.py", line 48, in cast_bias_weight
weight = s.weight_function(weight)
File "D:\Projects\Procedure\Stable-Diffusion-ComfyUI\ComfyUI\comfy\model_patcher.py", line 99, in __call__
return self.model_patcher.calculate_weight(self.model_patcher.patches[self.key], weight, self.key, intermediate_dtype=weight.dtype)
TypeError: ModelPatcher.calculate_weight() got an unexpected keyword argument 'intermediate_dtype'
Prompt executed in 64.33 seconds
got prompt
Failed to validate prompt for output 686:
-
BasicScheduler 105:
- Required input is missing: model
-
CLIPTextEncodeFlux 1855:
- Required input is missing: clip
-
BasicGuider 106:
- Required input is missing: model
Output will be ignored
invalid prompt: {'type': 'prompt_outputs_failed_validation', 'message': 'Prompt outputs failed validation', 'details': '', 'extra_info': {}}
got prompt
Failed to validate prompt for output 686:
-
BasicScheduler 105:
- Required input is missing: model
-
CLIPTextEncodeFlux 1855:
- Required input is missing: clip
-
BasicGuider 106:
- Required input is missing: model
Output will be ignored
invalid prompt: {'type': 'prompt_outputs_failed_validation', 'message': 'Prompt outputs failed validation', 'details': '', 'extra_info': {}}
got prompt
Failed to validate prompt for output 686:
-
BasicScheduler 105:
- Required input is missing: model
-
CLIPTextEncodeFlux 1855:
- Required input is missing: clip
-
BasicGuider 106:
- Required input is missing: model
Output will be ignored
invalid prompt: {'type': 'prompt_outputs_failed_validation', 'message': 'Prompt outputs failed validation', 'details': '', 'extra_info': {}}
Can confirm that. My custom LoRAs are not working, but realsim is working.
The problem comes from the Load LoRA node; replacing it with a LoRA loader from a custom pack resolved the issue.
Update to the latest version, disable all custom nodes, and test it. If you still encounter the same error, please attach the comfyui.log file.
Some custom nodes replace ModelPatcher.calculate_weight() with a version that does not accept the 'intermediate_dtype' argument. For example: ComfyUI-Easy-Use, ComfyUI-IC-Light, comfyui-inpaint-nodes, ComfyUI-layerdiffuse, etc. Some of these have already been updated. This kind of monkey-patching is fragile...
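To illustrate the mismatch, here is a minimal, self-contained sketch (the class is a simplified stand-in, not the real comfy.model_patcher code, and the pack shown is hypothetical): an override that hard-codes the old three-argument signature fails the moment core starts passing the new keyword, while an override that forwards extra arguments keeps working.

```python
# Simplified stand-in, for illustration only -- not the real comfy.model_patcher code.
class ModelPatcher:
    def calculate_weight(self, patches, weight, key, intermediate_dtype=None):
        # Newer core versions pass this extra keyword argument.
        return weight

# Old-style monkey-patch from a hypothetical custom node pack,
# written before 'intermediate_dtype' existed.
def old_calculate_weight(self, patches, weight, key):
    return weight  # custom weight math would go here

ModelPatcher.calculate_weight = old_calculate_weight

patcher = ModelPatcher()
try:
    # Mirrors the call site shown in the traceback (comfy/model_patcher.py, line 99).
    patcher.calculate_weight([], 1.0, "some.key", intermediate_dtype=None)
except TypeError as err:
    print(err)  # ... got an unexpected keyword argument 'intermediate_dtype'

# A forward-compatible override forwards unknown arguments instead of pinning the signature:
def compatible_calculate_weight(self, patches, weight, key, *args, **kwargs):
    return weight  # custom weight math would go here

ModelPatcher.calculate_weight = compatible_calculate_weight
patcher.calculate_weight([], 1.0, "some.key", intermediate_dtype=None)  # no TypeError now
```

So the fix has to come from the custom node side (accept and forward the new arguments, or stop replacing the method), which is why updating the offending packs resolves it.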
Getting the same error. Posting here to bump this topic; I need an answer!
As noted by allmelgr above, the most likely cause is custom nodes that replace internal functions. Update ComfyUI and remove your custom nodes, then try it that way. There is likely one node pack (or a few) you need to update or remove (and report the bug to whichever pack breaks after you update).
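If it isn't obvious which pack is responsible, one way to narrow it down is to search the custom_nodes folder for packs that touch calculate_weight. A rough sketch (the install path is just an example; adjust it to your setup):

```python
# Rough helper to list custom node files that mention calculate_weight,
# i.e. candidates that may monkey-patch ModelPatcher with an old signature.
import pathlib

custom_nodes = pathlib.Path(r"D:\ComfyUI\custom_nodes")  # example path, adjust to your install

for py_file in custom_nodes.rglob("*.py"):
    try:
        text = py_file.read_text(encoding="utf-8", errors="ignore")
    except OSError:
        continue
    if "calculate_weight" in text:
        print(py_file)
```

Any pack printed by this is worth updating (or temporarily removing) first.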
Same here. I tried the rgthree-comfy Power Lora Loader and the WAS Node Suite LoRA loader, but neither resolves the issue.