Setting `conv_dim`, `conv_alpha` with `algo=lokr` gives the following error (traceback below).
NOTE: removing `conv_dim`, `conv_alpha` works perfectly. Using `algo=lora` also works.
sample_image_inference(
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/train_util.py", line 5294, in sample_image_inference
latents = pipeline(
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_lpw_stable_diffusion.py", line 1012, in __call__
noise_pred = self.unet(latent_model_input, t, text_embedding, vector_embedding)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/accelerate/utils/operations.py", line 680, in forward
return model_forward(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/accelerate/utils/operations.py", line 668, in __call__
return convert_to_fp32(self.model_forward(*args, **kwargs))
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/amp/autocast_mode.py", line 43, in decorate_autocast
return func(*args, **kwargs)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_original_unet.py", line 1104, in forward
h = call_module(module, h, emb, context)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_original_unet.py", line 1093, in call_module
x = layer(x, emb)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_original_unet.py", line 348, in forward
x = torch.utils.checkpoint.checkpoint(create_custom_forward(self.forward_body), x, emb, use_reentrant=USE_REENTRANT)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/_compile.py", line 31, in inner
return disable_fn(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py", line 600, in _fn
return fn(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 481, in checkpoint
return CheckpointFunction.apply(function, preserve, *args)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/autograd/function.py", line 574, in apply
return super().apply(*args, **kwargs) # type: ignore[misc]
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 255, in forward
outputs = run_function(*args)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_original_unet.py", line 344, in custom_forward
return func(*inputs)
File "/mnt/dashtoon_data/ayushman/repos/sd-scripts/library/sdxl_original_unet.py", line 331, in forward_body
h = self.in_layers(x)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/container.py", line 219, in forward
input = module(input)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/lycoris/modules/lokr.py", line 530, in forward
return self.bypass_forward(x, self.multiplier)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/lycoris/modules/lokr.py", line 523, in bypass_forward
return self.org_forward(x) + self.bypass_forward_diff(x, scale=scale)
File "/mnt/data/ayushman/miniforge3/envs/kohya_sdxl/lib/python3.10/site-packages/lycoris/modules/lokr.py", line 463, in bypass_forward_diff
a = a.view(*a.shape, *self.shape[2:])
RuntimeError: shape '[16, 288, 3, 3]' is invalid for input of size 4608
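The numbers in the error are consistent with a missing spatial-dimension expansion: 16 × 288 = 4608, so the tensor being reshaped holds only the batch/channel elements, while the requested view also asks for the 3×3 kernel dims (16 × 288 × 3 × 3 = 41472). A minimal sketch of the same failure, independent of LyCORIS (the tensor contents and the `(3, 3)` kernel shape here are assumptions for illustration):

```python
import torch

# The failing call in lokr.py does `a.view(*a.shape, *self.shape[2:])`,
# i.e. it appends the conv kernel dims to the existing shape even though
# the tensor does not contain those extra elements.
a = torch.randn(16, 288)      # 16 * 288 = 4608 elements, matching the error
kernel_dims = (3, 3)          # assumed: self.shape[2:] of a 3x3 conv weight

try:
    a.view(*a.shape, *kernel_dims)   # asks for 16*288*3*3 = 41472 elements
except RuntimeError as e:
    print(e)   # shape '[16, 288, 3, 3]' is invalid for input of size 4608
```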
Relevant parts of config:
Library versions:
Using this kohya commit: kohya-ss/sd-scripts@b755ebd
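For reference, a hedged sketch of the kind of invocation that exercises this code path (LyCORIS LoKr with conv dims, gradient checkpointing, and periodic sampling); the argument names follow the documented sd-scripts + LyCORIS interface, but the concrete values are placeholders, not the config from this report:

```python
# Hypothetical reproduction command assembled in Python; values are
# placeholders and required model/dataset/prompt arguments are omitted.
import subprocess

cmd = [
    "accelerate", "launch", "sdxl_train_network.py",
    "--network_module=lycoris.kohya",
    "--network_args", "algo=lokr", "conv_dim=8", "conv_alpha=4",
    "--gradient_checkpointing",       # the traceback passes through torch.utils.checkpoint
    "--sample_every_n_steps=100",     # the failure occurs during sample image generation
    # ... model, dataset and sample prompt arguments omitted ...
]
subprocess.run(cmd, check=True)
```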