DDColor does not work with FP16 enabled; here is the vsviewer error:
2024-03-28 06:38:46.829
Qt warning: QPixmap::scaled: Pixmap is a null pixmap
2024-03-28 06:38:47.533
Error on frame 0 request:
Traceback (most recent call last):
File "src\cython\vapoursynth.pyx", line 2941, in vapoursynth.publicFunction
File "src\cython\vapoursynth.pyx", line 2943, in vapoursynth.publicFunction
File "src\cython\vapoursynth.pyx", line 683, in vapoursynth.FuncData.__call__
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\__init__.py", line 94, in inference
output_ab = module(img_gray_rgb)
^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 59, in forward
out_feat = self.decoder()
^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 113, in forward
out = self.color_decoder([out0, out1, out2], out3)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 305, in forward
output = self.transformer_cross_attention_layers[i](
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch_utils\transformer_utils.py", line 124, in forward
return self.forward_post(tgt, memory, memory_mask,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch_utils\transformer_utils.py", line 93, in forward_post
tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt, query_pos),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\activation.py", line 1241, in forward
attn_output, attn_output_weights = F.multi_head_attention_forward(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\functional.py", line 5440, in multi_head_attention_forward
attn_output = scaled_dot_product_attention(q, k, v, attn_mask, dropout_p, is_causal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
RuntimeError: cutlassF: no kernel found to launch!
It also doesn't work with Render factor set to 10 and Combine set to Simple; here is the vsviewer error:
2024-03-28 06:45:51.129
Qt warning: QPixmap::scaled: Pixmap is a null pixmap
2024-03-28 06:45:51.715
Error on frame 0 request:
Traceback (most recent call last):
File "src\cython\vapoursynth.pyx", line 2941, in vapoursynth.publicFunction
File "src\cython\vapoursynth.pyx", line 2943, in vapoursynth.publicFunction
File "src\cython\vapoursynth.pyx", line 683, in vapoursynth.FuncData.__call__
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\__init__.py", line 94, in inference
output_ab = module(img_gray_rgb)
^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 59, in forward
out_feat = self.decoder()
^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 113, in forward
out = self.color_decoder([out0, out1, out2], out3)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch.py", line 305, in forward
output = self.transformer_cross_attention_layers[i](
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch_utils\transformer_utils.py", line 124, in forward
return self.forward_post(tgt, memory, memory_mask,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\vsddcolor\ddcolor_arch_utils\transformer_utils.py", line 93, in forward_post
tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt, query_pos),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\modules\activation.py", line 1241, in forward
attn_output, attn_output_weights = F.multi_head_attention_forward(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Hybrid\64bit\Vapoursynth\Lib\site-packages\torch\nn\functional.py", line 5440, in multi_head_attention_forward
attn_output = scaled_dot_product_attention(q, k, v, attn_mask, dropout_p, is_causal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
RuntimeError: cutlassF: no kernel found to launch!
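For context (my reading of the error text, not something verified in Hybrid): "cutlassF: no kernel found to launch!" is raised by PyTorch's fused scaled_dot_product_attention backends when they cannot find a suitable kernel for the FP16 inputs on the GPU in use. A minimal workaround sketch, assuming the VapourSynth script can be edited before the DDColor call, is to force the plain math SDPA backend (or simply leave FP16 off):

import torch

# Workaround sketch (assumption: the fused cutlass/flash kernels are what fail here).
# Disabling them makes scaled_dot_product_attention fall back to the math kernel,
# which runs regardless of the GPU's FP16 kernel support, at some speed cost.
torch.backends.cuda.enable_flash_sdp(False)
torch.backends.cuda.enable_mem_efficient_sdp(False)
torch.backends.cuda.enable_math_sdp(True)

# ... call vsddcolor as usual after this point ...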