RuntimeError: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
I am using a GGUF SDXL model. Removing --fast from the startup arguments did not help, and neither did changing the script (swapping the Conv2d ops class in the input_blocks lists of both branches):
if is_SDXL:
    input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d],
    input_blocks = [[0, comfy.ops.manual_cast.Conv2d],
    .....
else:
    input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d],
    input_blocks = [[0, comfy.ops.manual_cast.Conv2d],
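For context on that edit: the input_blocks entries appear to pair a block index with the layer class that BrushNet searches for when it patches the UNet, and the log line further down ("BrushNet can't find <class 'comfy.ops.manual_cast.Conv2d'> layer in 0 input block: None") shows that search coming back empty for the GGUF-loaded model. A minimal sketch of that kind of exact-type lookup, simplified and hypothetical rather than the actual ComfyUI-BrushNet code:

    import torch.nn as nn

    def find_layer_by_class(block: nn.Module, layer_cls):
        # Return the first sub-module whose exact class is layer_cls, else None.
        for module in block.modules():
            if type(module) is layer_cls:
                return module
        return None

    # Hypothetical usage mirroring the log message: if the UNet was loaded through the
    # GGUF loader, its layers may use the loader's own ops classes, so an exact-type
    # search for comfy.ops.manual_cast.Conv2d (or disable_weight_init.Conv2d) finds nothing.
    # conv = find_layer_by_class(unet.input_blocks[0], comfy.ops.manual_cast.Conv2d)
    # print(conv)  # -> None, as in "BrushNet can't find ... layer in 0 input block: None"

If that is what is happening here, swapping between disable_weight_init.Conv2d and manual_cast.Conv2d would not be enough, because neither exact class matches a GGUF-wrapped layer.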
ComfyUI Error Report
Error Details
Node ID: 52
Node Type: KSampler
Exception Type: RuntimeError
Exception Message: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
Stack Trace
File "F:\ComfyUI\ComfyUI\execution.py", line 323, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 198, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "F:\ComfyUI\ComfyUI\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1465, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1432, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 22, in informative_sample
raise e
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 126, in modified_sample
return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 904, in sample
output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 873, in outer_sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 857, in inner_sample
samples = executor.execute(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 714, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 861, in sample_dpmpp_2m_sde_gpu
return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 764, in sample_dpmpp_2m_sde
denoised = model(x, sigmas[i] * s_in, **extra_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 384, in call
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 839, in call
return self.predict_noise(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 842, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 364, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 200, in calc_cond_batch
return executor.execute(model, conds, x_in, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 311, in calc_cond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 58, in brushnet_model_function_wrapper
return apply_model_method(x, timestep, **options_dict['c'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 128, in apply_model
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 157, in _apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 831, in forward
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 873, in _forward
h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed
x = layer(x, context, transformer_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\brushnet_nodes.py", line 1060, in forward_patched_by_brushnet
h += to_add.to(h.dtype).to(h.device)
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch_tensor.py", line 1512, in torch_function
ret = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
System Information
ComfyUI Version: v0.3.7-11-ga220d11e6b
Arguments: ComfyUI\main.py --multi-user --disable-xformers --auto-launch
OS: nt
Python Version: 3.11.9 (tags/v3.11.9:de54cf5, Apr 2 2024, 10:12:12) [MSC v.1938 64 bit (AMD64)]
Embedded Python: true
PyTorch Version: 2.5.1+cu124
Devices
Name: cuda:0 NVIDIA GeForce RTX 3060 Laptop GPU : cudaMallocAsync
Type: cuda
VRAM Total: 12884246528
VRAM Free: 920282668
Torch VRAM Total: 5435817984
Torch VRAM Free: 149931564
Logs
2024-12-14T09:59:42.827792 - File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 944, in _create_connection
2024-12-14T09:59:42.827792 -     _, proto = await self._create_direct_connection(req, traces, timeout)
2024-12-14T09:59:42.832793 - File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1257, in _create_direct_connection
2024-12-14T09:59:42.833793 -     raise last_exc
2024-12-14T09:59:42.833793 - File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1226, in _create_direct_connection
2024-12-14T09:59:42.834793 -     transp, proto = await self._wrap_create_connection(
2024-12-14T09:59:42.836794 - File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1033, in _wrap_create_connection
2024-12-14T09:59:42.837794 -     raise client_error(req.connection_key, exc) from exc
2024-12-14T09:59:42.838794 - aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host raw.githubusercontent.com:443 ssl:default [The semaphore timeout period has expired]
2024-12-14T09:59:48.147822 - []
2024-12-14T09:59:48.147822 - []
2024-12-14T09:59:48.287854 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T09:59:54.193816 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T09:59:56.621112 - got prompt
2024-12-14T09:59:56.646117 - WARNING: [Errno 2] No such file or directory: 'F:\ComfyUI\ComfyUI\input\test_mask3 (1).png'
2024-12-14T09:59:56.741138 - SELECTED: input1
2024-12-14T09:59:56.754141 - BrushNet model type: Loading SDXL
2024-12-14T09:59:56.776146 - BrushNet model file: F:\ComfyUI\ComfyUI\models\inpaint\brushnet\brushnet_segmentation_mask_XL.safetensors
2024-12-14T09:59:56.803152 - We will use 90% of the memory on device 0 for storing the model, and 10% for the buffer to avoid OOM. You can set max_memory in to a higher value to use more memory (at your own risk).
2024-12-14T09:59:58.246023 - BrushNet SDXL model is loaded
2024-12-14T09:59:58.282031 - Using pytorch attention in VAE
2024-12-14T09:59:58.284032 - Using pytorch attention in VAE
2024-12-14T09:59:58.491078 - Requested to load SDXLClipModel
2024-12-14T09:59:58.501081 - loaded completely 9.5367431640625e+25 1560.802734375 True
2024-12-14T09:59:59.113744 - clip missing: ['text_projection.weight']
2024-12-14T09:59:59.260777 - ggml_sd_loader:
2024-12-14T09:59:59.261777 -    1   912
2024-12-14T09:59:59.262777 -    0   2
2024-12-14T09:59:59.262777 -    8   766
2024-12-14T09:59:59.325791 - model weight dtype torch.float16, manual cast: None
2024-12-14T09:59:59.326792 - model_type EPS
2024-12-14T09:59:59.541839 - SELECTED: input1
2024-12-14T09:59:59.813900 - Base model type: SDXL
2024-12-14T09:59:59.814901 - BrushNet image.shape = torch.Size([1, 900, 600, 3]) mask.shape = torch.Size([1, 900, 600])
2024-12-14T09:59:59.818901 - Requested to load AutoencoderKL
2024-12-14T09:59:59.858910 - loaded completely 9.5367431640625e+25 159.55708122253418 True
2024-12-14T10:00:00.213990 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T10:00:00.261000 - BrushNet CL: image_latents shape = torch.Size([1, 4, 112, 75]) interpolated_mask shape = torch.Size([1, 1, 112, 75])
2024-12-14T10:00:00.297009 - Requested to load SDXL
2024-12-14T10:00:01.447680 - loaded completely 9.5367431640625e+25 2630.7519607543945 True
2024-12-14T10:00:01.570708 -   0%| | 0/20 [00:00<?, ?it/s]
2024-12-14T10:00:01.580709 - BrushNet inference: do_classifier_free_guidance is True
2024-12-14T10:00:01.581709 - BrushNet inference, step = 0: image batch = 1, got 2 latents, starting from 0
2024-12-14T10:00:01.582710 - BrushNet inference: sample torch.Size([2, 4, 112, 75]), CL torch.Size([2, 5, 112, 75]), dtype torch.float16
2024-12-14T10:00:01.766751 - BrushNet can't find <class 'comfy.ops.manual_cast.Conv2d'> layer in 0 input block: None
2024-12-14T10:00:01.808761 -   0%| | 0/20 [00:00<?, ?it/s]
2024-12-14T10:00:01.818763 - !!! Exception during processing !!! The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
2024-12-14T10:00:01.823765 - Traceback (most recent call last):
File "F:\ComfyUI\ComfyUI\execution.py", line 323, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 198, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "F:\ComfyUI\ComfyUI\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1465, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1432, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 22, in informative_sample
raise e
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\sample.py", line 43, in sample
samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 1020, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 126, in modified_sample
return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 904, in sample
output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 873, in outer_sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 857, in inner_sample
samples = executor.execute(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 714, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 861, in sample_dpmpp_2m_sde_gpu
return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 764, in sample_dpmpp_2m_sde
denoised = model(x, sigmas[i] * s_in, **extra_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 384, in call
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 839, in call
return self.predict_noise(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 842, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 364, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 200, in calc_cond_batch
return executor.execute(model, conds, x_in, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 311, in calc_cond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 58, in brushnet_model_function_wrapper
return apply_model_method(x, timestep, **options_dict['c'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 128, in apply_model
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 157, in _apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 831, in forward
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 873, in _forward
h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed
x = layer(x, context, transformer_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\brushnet_nodes.py", line 1060, in forward_patched_by_brushnet
h += to_add.to(h.dtype).to(h.device)
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch_tensor.py", line 1512, in torch_function
ret = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
RuntimeError: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
2024-12-14T10:00:01.828765 - Prompt executed in 5.20 seconds
Attached Workflow
Please make sure that workflow does not contain any sensitive information such as API keys or passwords.
{"last_node_id":63,"last_link_id":134,"nodes":[{"id":54,"type":"VAEDecode","pos":[1921,38],"size":[210,46],"flags":{},"order":9,"mode":0,"inputs":[{"name":"samples","type":"LATENT","link":91},{"name":"vae","type":"VAE","link":133}],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[93],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"VAEDecode"},"widgets_values":[]},{"id":12,"type":"PreviewImage","pos":[1515,419],"size":[617.4000244140625,673.7999267578125],"flags":{},"order":10,"mode":0,"inputs":[{"name":"images","type":"IMAGE","link":93}],"outputs":[],"properties":{"Node name for S&R":"PreviewImage"},"widgets_values":[]},{"id":52,"type":"KSampler","pos":[1564,101],"size":[315,262],"flags":{},"order":8,"mode":0,"inputs":[{"name":"model","type":"MODEL","link":118},{"name":"positive","type":"CONDITIONING","link":119},{"name":"negative","type":"CONDITIONING","link":120},{"name":"latent_image","type":"LATENT","link":121,"slot_index":3}],"outputs":[{"name":"LATENT","type":"LATENT","links":[91],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"KSampler"},"widgets_values":[2,"fixed",20,5,"dpmpp_2m_sde_gpu","karras",1]},{"id":49,"type":"CLIPTextEncode","pos":[649,21],"size":[339.20001220703125,96.39999389648438],"flags":{},"order":5,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":130}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[123],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"CLIPTextEncode"},"widgets_values":["a vase"],"color":"#232","bgcolor":"#353"},{"id":50,"type":"CLIPTextEncode","pos":[651,168],"size":[339.20001220703125,96.39999389648438],"flags":{},"order":6,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":131}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[124],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"CLIPTextEncode"},"widgets_values":[""],"color":"#322","bgcolor":"#533"},{"id":59,"type":"LoadImageMask","pos":[689,601],"size":[315,318],"flags":{},"order":0,"mode":0,"inputs":[],"outputs":[{"name":"MASK","type":"MASK","links":[],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"LoadImageMask"},"widgets_values":["test_mask3 (1).png","red","image"]},{"id":62,"type":"BrushNet","pos":[1130,102],"size":[315,226],"flags":{},"order":7,"mode":0,"inputs":[{"name":"model","type":"MODEL","link":132},{"name":"vae","type":"VAE","link":134},{"name":"image","type":"IMAGE","link":126},{"name":"mask","type":"MASK","link":129},{"name":"brushnet","type":"BRMODEL","link":125},{"name":"positive","type":"CONDITIONING","link":123},{"name":"negative","type":"CONDITIONING","link":124}],"outputs":[{"name":"model","type":"MODEL","links":[118],"slot_index":0,"shape":3},{"name":"positive","type":"CONDITIONING","links":[119],"slot_index":1,"shape":3},{"name":"negative","type":"CONDITIONING","links":[120],"slot_index":2,"shape":3},{"name":"latent","type":"LATENT","links":[121],"slot_index":3,"shape":3}],"properties":{"Node name for S&R":"BrushNet"},"widgets_values":[1,0,10000]},{"id":58,"type":"LoadImage","pos":[10,404],"size":[646.0000610351562,703.5999755859375],"flags":{},"order":1,"mode":0,"inputs":[],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[126],"slot_index":0,"shape":3},{"name":"MASK","type":"MASK","links":[129],"slot_index":1,"shape":3}],"properties":{"Node name for S&R":"LoadImage"},"widgets_values":["clipspace/clipspace-mask-169552.09999999998.png 
[input]","image"]},{"id":63,"type":"VIV_Subgraph","pos":[30,-390],"size":[300,646],"flags":{},"order":2,"mode":0,"inputs":[],"outputs":[{"name":"bus","type":"BUS","links":[],"slot_index":0},{"name":"model","type":"MODEL","links":[132],"slot_index":1},{"name":"clip","type":"CLIP","links":[130,131]},{"name":"vae","type":"VAE","links":[133,134],"slot_index":3}],"properties":{"XL":"05XL\fustercluck_v2.safetensors","3.5":"3.5\35LTurbo_q51.gguf","Flux":"flux\小红书风格模型丨日常旅拍,极致真实_GGUF_Q4.gguf","Select":1,"positive":"","nagitive":"","XL(gguf)":"05XL\fustercluck_v2-Q8_0.gguf","lora1":"Lcm\HyperL-XL-加速器-PAseer_V1.safetensors","loraS1":1,"lora2":"3.5\可爱扁平风格_v2.0.safetensors","loraS2":0,"flux":"05XL\2gbImprovedSDXLFP32_ggufFP32Q8.gguf","lora3":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS3":1,"clipL":"clip_l.safetensors","clipG":"clip_g.safetensors","t5":"t5xxl_flan_latest-fp8_e4m3fn.safetensors","pos":"","neg":"","1XL2(3.5)3flux":1,"ckpt":"00pick\AWPainting_v1.3.safetensors","lora4":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS4":1,"1XL2(3.5)3flux4Ckpt":1,"loraS":1,"♈XL♈":"05XL\fustercluck_v2-Q8_0.gguf","✌3.5✌":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","♉flux♉":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","✍1XL2(3.5)3flux4Ckpt5Pony✍":1,"♓ckpt♓":"00pick\AWPainting_v1.3.safetensors","☝pony☝":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","lora5":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS5":1,"clipGPony":"CLIP-G_XL.safetensors","clipLPony":"CLIP-G_XL.safetensors"},"widgets_values":["BusCktpSel3.json",null,"05XL\fustercluck_v2-Q8_0.gguf","Lcm\HyperL-XL-加速器-PAseer_V1.safetensors",1,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","3.5\可爱扁平风格_v2.0.safetensors",0,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"clip_g.safetensors","clip_l.safetensors","t5xxl_flan_latest-fp8_e4m3fn.safetensors",1,"00pick\AWPainting_v1.3.safetensors","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"CLIP-G_XL.safetensors","CLIP-G_XL.safetensors"]},{"id":47,"type":"CheckpointLoaderSimple","pos":[-570,120],"size":[481,158],"flags":{},"order":3,"mode":4,"inputs":[],"outputs":[{"name":"MODEL","type":"MODEL","links":[],"slot_index":0,"shape":3},{"name":"CLIP","type":"CLIP","links":[],"slot_index":1,"shape":3},{"name":"VAE","type":"VAE","links":[],"slot_index":2,"shape":3}],"properties":{"Node name for S&R":"CheckpointLoaderSimple"},"widgets_values":["Juggernaut-XL_v9_RunDiffusionPhoto_v2.safetensors"]},{"id":45,"type":"BrushNetLoader","pos":[10,260],"size":[576.2000122070312,104],"flags":{},"order":4,"mode":0,"inputs":[],"outputs":[{"name":"brushnet","type":"BRMODEL","links":[125],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"BrushNetLoader"},"widgets_values":["brushnet\brushnet_segmentation_mask_XL.safetensors","float16"]}],"links":[[91,52,0,54,0,"LATENT"],[93,54,0,12,0,"IMAGE"],[118,62,0,52,0,"MODEL"],[119,62,1,52,1,"CONDITIONING"],[120,62,2,52,2,"CONDITIONING"],[121,62,3,52,3,"LATENT"],[123,49,0,62,5,"CONDITIONING"],[124,50,0,62,6,"CONDITIONING"],[125,45,0,62,4,"BRMODEL"],[126,58,0,62,2,"IMAGE"],[129,58,1,62,3,"MASK"],[130,63,2,49,0,"CLIP"],[131,63,2,50,0,"CLIP"],[132,63,1,62,0,"MODEL"],[133,63,3,54,1,"VAE"],[134,63,3,62,1,"VAE"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.7972024500000005,"offset":[195.02571742847988,273.94518383169316]},"ue_links":[]},"version":0.4}
Additional Context
(Please add any additional context or steps to reproduce the error here)
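For reference, the failure itself is the elementwise add h += to_add in forward_patched_by_brushnet at the bottom of the trace: PyTorch raises exactly this message when the operands disagree on a non-broadcastable dimension, here the channel dimension (dim 1). A tiny stand-alone repro with stand-in shapes (112x75 is the latent size from the log; 640 and 320 are the channel counts from the error message):

    import torch

    h = torch.zeros(2, 640, 112, 75)       # hidden states of a 640-channel UNet block
    to_add = torch.zeros(2, 320, 112, 75)   # residual sized for a 320-channel block

    # RuntimeError: The size of tensor a (640) must match the size of tensor b (320)
    # at non-singleton dimension 1
    h += to_add

In other words, a residual computed for a 320-channel block is being added to a 640-channel one, which is consistent with BrushNet not finding the layer it expected in the GGUF-loaded model and attaching its residuals at the wrong block.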
The text was updated successfully, but these errors were encountered:
RuntimeError: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
use gguf xl
remove the --fast for startup, also change the script: not work
if is_SDXL:
input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d],
input_blocks = [[0, comfy.ops.manual_cast.Conv2d],
.....
else:
input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d],
input_blocks = [[0, comfy.ops.manual_cast.Conv2d],
ComfyUI Error Report
Error Details
Node ID: 52
Node Type: KSampler
Exception Type: RuntimeError
Exception Message: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
Stack Trace
File "F:\ComfyUI\ComfyUI\execution.py", line 323, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 198, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "F:\ComfyUI\ComfyUI\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1465, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1432, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 22, in informative_sample
raise e
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\sample.py", line 43, in sample
samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 1020, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 126, in modified_sample
return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 904, in sample
output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 873, in outer_sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 857, in inner_sample
samples = executor.execute(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 714, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 861, in sample_dpmpp_2m_sde_gpu
return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 764, in sample_dpmpp_2m_sde
denoised = model(x, sigmas[i] * s_in, **extra_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 384, in call
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 839, in call
return self.predict_noise(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 842, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 364, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 200, in calc_cond_batch
return executor.execute(model, conds, x_in, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 311, in calc_cond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 58, in brushnet_model_function_wrapper
return apply_model_method(x, timestep, **options_dict['c'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 128, in apply_model
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 157, in _apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 831, in forward
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 873, in _forward
h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed
x = layer(x, context, transformer_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\brushnet_nodes.py", line 1060, in forward_patched_by_brushnet
h += to_add.to(h.dtype).to(h.device)
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch_tensor.py", line 1512, in torch_function
ret = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
System Information
ComfyUI Version: v0.3.7-11-ga220d11e6b
Arguments: ComfyUI\main.py --multi-user --disable-xformers --auto-launch
OS: nt
Python Version: 3.11.9 (tags/v3.11.9:de54cf5, Apr 2 2024, 10:12:12) [MSC v.1938 64 bit (AMD64)]
Embedded Python: true
PyTorch Version: 2.5.1+cu124
Devices
Name: cuda:0 NVIDIA GeForce RTX 3060 Laptop GPU : cudaMallocAsync
Type: cuda
VRAM Total: 12884246528
VRAM Free: 920282668
Torch VRAM Total: 5435817984
Torch VRAM Free: 149931564
Logs
2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.825791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 - ^2024-12-14T09:59:42.826791 -
2024-12-14T09:59:42.827792 - 2024-12-14T09:59:42.827792 - File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 944, in _create_connection
2024-12-14T09:59:42.827792 - 2024-12-14T09:59:42.827792 - 2024-12-14T09:59:42.827792 - _, proto = await self._create_direct_connection(req, traces, timeout)2024-12-14T09:59:42.827792 -
2024-12-14T09:59:42.832793 -   File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1257, in _create_direct_connection
2024-12-14T09:59:42.833793 -     raise last_exc
2024-12-14T09:59:42.833793 -   File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1226, in _create_direct_connection
2024-12-14T09:59:42.834793 -     transp, proto = await self._wrap_create_connection(
2024-12-14T09:59:42.837794 -   File "F:\ComfyUI\python_embeded\Lib\site-packages\aiohttp\connector.py", line 1033, in _wrap_create_connection
2024-12-14T09:59:42.837794 -     raise client_error(req.connection_key, exc) from exc
2024-12-14T09:59:42.838794 - aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host raw.githubusercontent.com:443 ssl:default [The semaphore timeout period has expired]
2024-12-14T09:59:48.147822 - []
2024-12-14T09:59:48.147822 - []
2024-12-14T09:59:48.287854 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T09:59:54.193816 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T09:59:56.621112 - got prompt
2024-12-14T09:59:56.646117 - WARNING: [Errno 2] No such file or directory: 'F:\ComfyUI\ComfyUI\input\test_mask3 (1).png'
2024-12-14T09:59:56.741138 - SELECTED: input1
2024-12-14T09:59:56.754141 - BrushNet model type: Loading SDXL
2024-12-14T09:59:56.777146 - BrushNet model file: F:\ComfyUI\ComfyUI\models\inpaint\brushnet\brushnet_segmentation_mask_XL.safetensors
2024-12-14T09:59:56.803152 - We will use 90% of the memory on device 0 for storing the model, and 10% for the buffer to avoid OOM. You can set max_memory to a higher value to use more memory (at your own risk).
2024-12-14T09:59:58.246023 - BrushNet SDXL model is loaded
2024-12-14T09:59:58.282031 - Using pytorch attention in VAE
2024-12-14T09:59:58.284032 - Using pytorch attention in VAE
2024-12-14T09:59:58.491078 - Requested to load SDXLClipModel
2024-12-14T09:59:58.501081 - loaded completely 9.5367431640625e+25 1560.802734375 True
2024-12-14T09:59:59.113744 - clip missing: ['text_projection.weight']
2024-12-14T09:59:59.260777 - ggml_sd_loader:
2024-12-14T09:59:59.262777 -   1  912
2024-12-14T09:59:59.262777 -   0    2
2024-12-14T09:59:59.262777 -   8  766
2024-12-14T09:59:59.325791 - model weight dtype torch.float16, manual cast: None
2024-12-14T09:59:59.326792 - model_type EPS
2024-12-14T09:59:59.541839 - SELECTED: input1
2024-12-14T09:59:59.813900 - Base model type: SDXL
2024-12-14T09:59:59.814901 - BrushNet image.shape = torch.Size([1, 900, 600, 3]) mask.shape = torch.Size([1, 900, 600])
2024-12-14T09:59:59.818901 - Requested to load AutoencoderKL
2024-12-14T09:59:59.858910 - loaded completely 9.5367431640625e+25 159.55708122253418 True
2024-12-14T10:00:00.213990 - WARNING: request with non matching host and origin 127.0.0.1:8188 != 127.0.0.1:8000, returning 403
2024-12-14T10:00:00.261000 - BrushNet CL: image_latents shape = torch.Size([1, 4, 112, 75]) interpolated_mask shape = torch.Size([1, 1, 112, 75])
2024-12-14T10:00:00.297009 - Requested to load SDXL
2024-12-14T10:00:01.447680 - loaded completely 9.5367431640625e+25 2630.7519607543945 True
2024-12-14T10:00:01.570708 -   0%|          | 0/20 [00:00<?, ?it/s]
2024-12-14T10:00:01.580709 - BrushNet inference: do_classifier_free_guidance is True
2024-12-14T10:00:01.581709 - BrushNet inference, step = 0: image batch = 1, got 2 latents, starting from 0
2024-12-14T10:00:01.582710 - BrushNet inference: sample torch.Size([2, 4, 112, 75]), CL torch.Size([2, 5, 112, 75]), dtype torch.float16
2024-12-14T10:00:01.766751 - BrushNet can't find <class 'comfy.ops.manual_cast.Conv2d'> layer in 0 input block: None
2024-12-14T10:00:01.808761 -   0%|          | 0/20 [00:00<?, ?it/s]
2024-12-14T10:00:01.818763 - !!! Exception during processing !!! The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
2024-12-14T10:00:01.823765 - Traceback (most recent call last):
File "F:\ComfyUI\ComfyUI\execution.py", line 323, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 198, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "F:\ComfyUI\ComfyUI\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1465, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\nodes.py", line 1432, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 22, in informative_sample
raise e
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\sample.py", line 43, in sample
samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 1020, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 126, in modified_sample
return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 904, in sample
output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 873, in outer_sample
output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 857, in inner_sample
samples = executor.execute(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 714, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 861, in sample_dpmpp_2m_sde_gpu
return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\k_diffusion\sampling.py", line 764, in sample_dpmpp_2m_sde
denoised = model(x, sigmas[i] * s_in, **extra_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 384, in call
out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 839, in call
return self.predict_noise(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 842, in predict_noise
return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 364, in sampling_function
out = calc_cond_batch(model, conds, x, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 200, in calc_cond_batch
return executor.execute(model, conds, x_in, timestep, model_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\samplers.py", line 311, in calc_cond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\model_patch.py", line 58, in brushnet_model_function_wrapper
return apply_model_method(x, timestep, **options_dict['c'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 128, in apply_model
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\model_base.py", line 157, in _apply_model
model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 831, in forward
return comfy.patcher_extension.WrapperExecutor.new_class_executor(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\patcher_extension.py", line 110, in execute
return self.original(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 873, in _forward
h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed
x = layer(x, context, transformer_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "F:\ComfyUI\ComfyUI\custom_nodes\ComfyUI-BrushNet\brushnet_nodes.py", line 1060, in forward_patched_by_brushnet
h += to_add.to(h.dtype).to(h.device)
File "F:\ComfyUI\python_embeded\Lib\site-packages\torch_tensor.py", line 1512, in torch_function
ret = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
RuntimeError: The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1
2024-12-14T10:00:01.828765 - Prompt executed in 5.20 seconds
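For context on the failure: the crash happens at the in-place add in BrushNet's patched UNet forward (brushnet_nodes.py line 1060, h += to_add). The earlier log line "BrushNet can't find <class 'comfy.ops.manual_cast.Conv2d'> layer in 0 input block: None" suggests that with the GGUF-loaded SDXL UNet the patch cannot locate the layer types it expects, so a residual sized for a 320-channel block ends up added to a 640-channel hidden state. A minimal sketch (hypothetical shapes, not BrushNet's code) that reproduces the same RuntimeError:

import torch

h = torch.randn(2, 640, 56, 38)       # hidden state of a 640-channel UNet block (shapes assumed for illustration)
to_add = torch.randn(2, 320, 56, 38)  # residual sized for a 320-channel block

try:
    h += to_add.to(h.dtype).to(h.device)  # same add pattern as forward_patched_by_brushnet
except RuntimeError as e:
    print(e)  # -> The size of tensor a (640) must match the size of tensor b (320) at non-singleton dimension 1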
Attached Workflow
Please make sure that workflow does not contain any sensitive information such as API keys or passwords.
{"last_node_id":63,"last_link_id":134,"nodes":[{"id":54,"type":"VAEDecode","pos":[1921,38],"size":[210,46],"flags":{},"order":9,"mode":0,"inputs":[{"name":"samples","type":"LATENT","link":91},{"name":"vae","type":"VAE","link":133}],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[93],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"VAEDecode"},"widgets_values":[]},{"id":12,"type":"PreviewImage","pos":[1515,419],"size":[617.4000244140625,673.7999267578125],"flags":{},"order":10,"mode":0,"inputs":[{"name":"images","type":"IMAGE","link":93}],"outputs":[],"properties":{"Node name for S&R":"PreviewImage"},"widgets_values":[]},{"id":52,"type":"KSampler","pos":[1564,101],"size":[315,262],"flags":{},"order":8,"mode":0,"inputs":[{"name":"model","type":"MODEL","link":118},{"name":"positive","type":"CONDITIONING","link":119},{"name":"negative","type":"CONDITIONING","link":120},{"name":"latent_image","type":"LATENT","link":121,"slot_index":3}],"outputs":[{"name":"LATENT","type":"LATENT","links":[91],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"KSampler"},"widgets_values":[2,"fixed",20,5,"dpmpp_2m_sde_gpu","karras",1]},{"id":49,"type":"CLIPTextEncode","pos":[649,21],"size":[339.20001220703125,96.39999389648438],"flags":{},"order":5,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":130}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[123],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"CLIPTextEncode"},"widgets_values":["a vase"],"color":"#232","bgcolor":"#353"},{"id":50,"type":"CLIPTextEncode","pos":[651,168],"size":[339.20001220703125,96.39999389648438],"flags":{},"order":6,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":131}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[124],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"CLIPTextEncode"},"widgets_values":[""],"color":"#322","bgcolor":"#533"},{"id":59,"type":"LoadImageMask","pos":[689,601],"size":[315,318],"flags":{},"order":0,"mode":0,"inputs":[],"outputs":[{"name":"MASK","type":"MASK","links":[],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"LoadImageMask"},"widgets_values":["test_mask3 (1).png","red","image"]},{"id":62,"type":"BrushNet","pos":[1130,102],"size":[315,226],"flags":{},"order":7,"mode":0,"inputs":[{"name":"model","type":"MODEL","link":132},{"name":"vae","type":"VAE","link":134},{"name":"image","type":"IMAGE","link":126},{"name":"mask","type":"MASK","link":129},{"name":"brushnet","type":"BRMODEL","link":125},{"name":"positive","type":"CONDITIONING","link":123},{"name":"negative","type":"CONDITIONING","link":124}],"outputs":[{"name":"model","type":"MODEL","links":[118],"slot_index":0,"shape":3},{"name":"positive","type":"CONDITIONING","links":[119],"slot_index":1,"shape":3},{"name":"negative","type":"CONDITIONING","links":[120],"slot_index":2,"shape":3},{"name":"latent","type":"LATENT","links":[121],"slot_index":3,"shape":3}],"properties":{"Node name for S&R":"BrushNet"},"widgets_values":[1,0,10000]},{"id":58,"type":"LoadImage","pos":[10,404],"size":[646.0000610351562,703.5999755859375],"flags":{},"order":1,"mode":0,"inputs":[],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[126],"slot_index":0,"shape":3},{"name":"MASK","type":"MASK","links":[129],"slot_index":1,"shape":3}],"properties":{"Node name for S&R":"LoadImage"},"widgets_values":["clipspace/clipspace-mask-169552.09999999998.png 
[input]","image"]},{"id":63,"type":"VIV_Subgraph","pos":[30,-390],"size":[300,646],"flags":{},"order":2,"mode":0,"inputs":[],"outputs":[{"name":"bus","type":"BUS","links":[],"slot_index":0},{"name":"model","type":"MODEL","links":[132],"slot_index":1},{"name":"clip","type":"CLIP","links":[130,131]},{"name":"vae","type":"VAE","links":[133,134],"slot_index":3}],"properties":{"XL":"05XL\fustercluck_v2.safetensors","3.5":"3.5\35LTurbo_q51.gguf","Flux":"flux\小红书风格模型丨日常旅拍,极致真实_GGUF_Q4.gguf","Select":1,"positive":"","nagitive":"","XL(gguf)":"05XL\fustercluck_v2-Q8_0.gguf","lora1":"Lcm\HyperL-XL-加速器-PAseer_V1.safetensors","loraS1":1,"lora2":"3.5\可爱扁平风格_v2.0.safetensors","loraS2":0,"flux":"05XL\2gbImprovedSDXLFP32_ggufFP32Q8.gguf","lora3":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS3":1,"clipL":"clip_l.safetensors","clipG":"clip_g.safetensors","t5":"t5xxl_flan_latest-fp8_e4m3fn.safetensors","pos":"","neg":"","1XL2(3.5)3flux":1,"ckpt":"00pick\AWPainting_v1.3.safetensors","lora4":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS4":1,"1XL2(3.5)3flux4Ckpt":1,"loraS":1,"♈XL♈":"05XL\fustercluck_v2-Q8_0.gguf","✌3.5✌":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","♉flux♉":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","✍1XL2(3.5)3flux4Ckpt5Pony✍":1,"♓ckpt♓":"00pick\AWPainting_v1.3.safetensors","☝pony☝":"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","lora5":"2.1\DesuZenkaiV21beta2-lyco.safetensors","loraS5":1,"clipGPony":"CLIP-G_XL.safetensors","clipLPony":"CLIP-G_XL.safetensors"},"widgets_values":["BusCktpSel3.json",null,"05XL\fustercluck_v2-Q8_0.gguf","Lcm\HyperL-XL-加速器-PAseer_V1.safetensors",1,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","3.5\可爱扁平风格_v2.0.safetensors",0,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"clip_g.safetensors","clip_l.safetensors","t5xxl_flan_latest-fp8_e4m3fn.safetensors",1,"00pick\AWPainting_v1.3.safetensors","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"05XL\STOIQOAfroditeFLUXXL_XL31-Q8_0.gguf","2.1\DesuZenkaiV21beta2-lyco.safetensors",1,"CLIP-G_XL.safetensors","CLIP-G_XL.safetensors"]},{"id":47,"type":"CheckpointLoaderSimple","pos":[-570,120],"size":[481,158],"flags":{},"order":3,"mode":4,"inputs":[],"outputs":[{"name":"MODEL","type":"MODEL","links":[],"slot_index":0,"shape":3},{"name":"CLIP","type":"CLIP","links":[],"slot_index":1,"shape":3},{"name":"VAE","type":"VAE","links":[],"slot_index":2,"shape":3}],"properties":{"Node name for S&R":"CheckpointLoaderSimple"},"widgets_values":["Juggernaut-XL_v9_RunDiffusionPhoto_v2.safetensors"]},{"id":45,"type":"BrushNetLoader","pos":[10,260],"size":[576.2000122070312,104],"flags":{},"order":4,"mode":0,"inputs":[],"outputs":[{"name":"brushnet","type":"BRMODEL","links":[125],"slot_index":0,"shape":3}],"properties":{"Node name for S&R":"BrushNetLoader"},"widgets_values":["brushnet\brushnet_segmentation_mask_XL.safetensors","float16"]}],"links":[[91,52,0,54,0,"LATENT"],[93,54,0,12,0,"IMAGE"],[118,62,0,52,0,"MODEL"],[119,62,1,52,1,"CONDITIONING"],[120,62,2,52,2,"CONDITIONING"],[121,62,3,52,3,"LATENT"],[123,49,0,62,5,"CONDITIONING"],[124,50,0,62,6,"CONDITIONING"],[125,45,0,62,4,"BRMODEL"],[126,58,0,62,2,"IMAGE"],[129,58,1,62,3,"MASK"],[130,63,2,49,0,"CLIP"],[131,63,2,50,0,"CLIP"],[132,63,1,62,0,"MODEL"],[133,63,3,54,1,"VAE"],[134,63,3,62,1,"VAE"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.7972024500000005,"offset":[195.02571742847988,273.94518383169316]},"ue_links":[]},"version":0.4}
Additional Context
(Please add any additional context or steps to reproduce the error here)