gameltb / ComfyUI_stable_fast

Experimental usage of stable-fast and TensorRT.

RuntimeError: Failed to execute cutlass gemm: Error Internal

Zxilly opened this issue

Error occurred when executing KSampler:

The following operation failed in the TorchScript interpreter.
Traceback of TorchScript (most recent call last):

graph(%input, %weight, %bias, %chunks, %dim, %approximate):
    %output = sfast::cutlass_linear_geglu_unified(%input, %weight, %bias)
              ~~~~~ <--- HERE
    return (%output)
RuntimeError: Failed to execute cutlass gemm: Error Internal
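
For context, the op that fails here, `sfast::cutlass_linear_geglu_unified`, fuses a linear projection with a GEGLU activation: project, split the result in two, and gate one half with GELU of the other. A rough unfused PyTorch sketch of what the fused kernel is expected to compute, assuming the standard GEGLU used in diffusion UNet feed-forward blocks (the actual CUTLASS kernel may differ in detail):

```python
import torch
import torch.nn.functional as F

def linear_geglu_reference(x, weight, bias, approximate="none"):
    # The GEMM that the cutlass kernel reports as failing.
    hidden = F.linear(x, weight, bias)
    # GEGLU: split the projection and gate one half with GELU of the other.
    # chunks=2 and dim=-1 are assumed values for %chunks and %dim above.
    value, gate = hidden.chunk(2, dim=-1)
    return value * F.gelu(gate, approximate=approximate)
```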


File "/content/drive/MyDrive/SDUI/execution.py", line 152, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
File "/content/drive/MyDrive/SDUI/execution.py", line 82, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "/content/drive/MyDrive/SDUI/execution.py", line 75, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
File "/content/drive/MyDrive/SDUI/nodes.py", line 1368, in sample
return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise)
File "/content/drive/MyDrive/SDUI/nodes.py", line 1338, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
File "/content/drive/MyDrive/SDUI/comfy/sample.py", line 100, in sample
samples = sampler.sample(noise, positive_copy, negative_copy, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 703, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 608, in sample
samples = sampler.sample(model_wrap, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 547, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
File "/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/content/drive/MyDrive/SDUI/comfy/k_diffusion/sampling.py", line 137, in sample_euler
denoised = model(x, sigma_hat * s_in, **extra_args)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 285, in forward
out = self.inner_model(x, sigma, cond=cond, uncond=uncond, cond_scale=cond_scale, model_options=model_options, seed=seed)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 272, in forward
return self.apply_model(*args, **kwargs)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 269, in apply_model
out = sampling_function(self.inner_model, x, timestep, uncond, cond, cond_scale, model_options=model_options, seed=seed)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 249, in sampling_function
cond_pred, uncond_pred = calc_cond_uncond_batch(model, cond, uncond_, x, timestep, model_options)
File "/content/drive/MyDrive/SDUI/comfy/samplers.py", line 221, in calc_cond_uncond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
File "/content/drive/MyDrive/SDUI/custom_nodes/ComfyUI_stable_fast/node.py", line 65, in __call__
return self.stable_fast_model(
File "/content/drive/MyDrive/SDUI/custom_nodes/ComfyUI_stable_fast/module/sfast_pipeline_compiler.py", line 104, in __call__
return traced_module(**kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/sfast/jit/trace_helper.py", line 133, in forward
outputs = self.module(*self.convert_inputs(args, kwargs))
File "/usr/local/lib/python3.10/dist-packages/sfast/cuda/graphs.py", line 40, in dynamic_graphed_callable
cached_callable = simple_make_graphed_callable(
File "/usr/local/lib/python3.10/dist-packages/sfast/cuda/graphs.py", line 61, in simple_make_graphed_callable
return make_graphed_callable(func,
File "/usr/local/lib/python3.10/dist-packages/sfast/cuda/graphs.py", line 90, in make_graphed_callable
func(*tree_copy(example_inputs, detach=True),
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1520, in _call_impl
return forward_call(*args, **kwargs)
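
The last frames above go through stable-fast's CUDA graph wrapper (sfast/cuda/graphs.py) before reaching the fused CUTLASS kernel. As a way to narrow this down outside ComfyUI, here is a minimal repro/workaround sketch based on stable-fast's documented compile API; `enable_fused_linear_geglu` is an assumed CompilationConfig flag, and the import path can differ between stable-fast versions:

```python
# Sketch only: flag names and the module path are assumptions and may vary
# across stable-fast versions.
import torch
from diffusers import StableDiffusionPipeline
from sfast.compilers.diffusion_pipeline_compiler import CompilationConfig, compile

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

config = CompilationConfig.Default()
config.enable_cuda_graph = False           # bypass sfast.cuda.graphs (last frames above)
config.enable_fused_linear_geglu = False   # assumed flag; avoid the cutlass GEGLU path
pipe = compile(pipe, config)

image = pipe("an astronaut riding a horse", num_inference_steps=20).images[0]
```

If the same prompt succeeds with these paths disabled, the failure is likely isolated to the fused GEGLU kernel rather than to the ComfyUI integration.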