NVEnc Preview
#21
The following script is working

import vapoursynth as vs
from vapoursynth import core
import subprocess
import ctypes

ffmpeg = r'E:\VideoTest\TestSubs\ffmpeg.exe'
NVEnc = r'E:\VideoTest\TestSubs\NVEncC64.exe'
source_path=r'E:\VideoTest\TestSubs\TestVideo.mp4'
# Loading Plugins
core.std.LoadPlugin(path="E:/VideoTest/TestSubs/BestSource.dll") #from https://forum.doom9.org/showthread.php?t=184255
# source: 'TestVideo.mp4'
# current color space: YUV420P8, bit depth: 8, resolution: 1280x536, fps: 25, color matrix: 470bg, yuv luminance scale: limited, scanorder: progressive
# this clip is only needed to get width and height
clip = core.bs.VideoSource(source="E:/VideoTest/TestSubs/TestVideo.mp4")

# Setting detected color matrix (470bg).
clip = core.std.SetFrameProps(clip, _Matrix=5)
# Setting color transfer info (470bg), when it is not set
clip = clip if not core.text.FrameProps(clip,'_Transfer') else core.std.SetFrameProps(clip, _Transfer=5)
# Setting color primaries info (BT.709), when it is not set
clip = clip if not core.text.FrameProps(clip,'_Primaries') else core.std.SetFrameProps(clip, _Primaries=1)
# Setting color range to TV (limited) range.
clip = core.std.SetFrameProp(clip=clip, prop="_ColorRange", intval=1)
# making sure frame rate is set to 25
clip = core.std.AssumeFPS(clip=clip, fpsnum=25, fpsden=1)
clip = core.std.SetFrameProp(clip=clip, prop="_FieldBased", intval=0) # progressive
# set output frame rate to 25fps (progressive)
clip = core.std.AssumeFPS(clip=clip, fpsnum=25, fpsden=1)

#clip = core.std.BlankClip(clip) 

w = clip.width
h = clip.height
Ysize  = w * h
UVsize = w * h//4
frame_len = Ysize + 2*UVsize  #YUV420

nvenc_filters = [
'--vpp-pmd apply_count=3,strength=100,threshold=100',
'--vpp-unsharp radius=4,weight=0.5,threshold=10',                 
'--vpp-smooth quality=4,qp=60,prec=fp32',
'--vpp-tweak brightness=0.01,contrast=1.01,gamma=0.98,saturation=1.35,hue=-2,swapuv=false',
'--vpp-deband range=15,sample=1,thre=15,dither=15,seed=1234,blurfirst=off,rand_each_frame=off',
'--vpp-resize spline36',
'--output-res 720x304',
]

#command = [NVEnc, '--avhw','--input', source_path, '-c raw', '--lossless', '-output-csp yuv420', '--output-depth 8', '--output-format raw', '--output -']                 

command = [ ffmpeg, '-i', source_path, '-vcodec', 'rawvideo', '-pix_fmt', 'yuv420p', '-f', 'rawvideo', '-']

#core.log_message(2,' '.join(NVEnc_cmd))

pipe = subprocess.Popen(command, stdout = subprocess.PIPE, bufsize=frame_len)

def load_frame(n,f):
    vs_frame = f.copy()
    try:
        #for i, size in enumerate([Ysize, UVsize, UVsize]):
        #    ctypes.memmove(vs_frame.get_write_ptr(i), pipe.stdout.read(size),  size)
        pipe.stdout.flush()
    except Exception as e:
        raise ValueError(repr(e))   
    return vs_frame

try:
    clip = core.std.ModifyFrame(clip, clip, load_frame)
except ValueError as e:
    pipe.terminate()
    print(e)

clip.set_output()

but it only "works" because I commented out "ctypes.memmove", so the pipe is not actually used. I think the problems are:

1) the buffer size should be increased to allow the encoder to encode at least a few seconds of movie
2) I'm not sure the plane copy is correct (see the stride-aware sketch below)

I'm working on these issues...
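
For the plane copy, a stride-aware variant might look like this (just a sketch, assuming 8-bit YUV420 and a blocking pipe; copy_plane_from_pipe is a hypothetical helper, not part of the script above). VapourSynth may pad each row, so get_stride(plane) is not necessarily equal to the plane width, and a flat memmove of width*height bytes is only safe when the two match:

import ctypes

def copy_plane_from_pipe(vs_frame, plane, plane_w, plane_h, stream):
    # The destination plane may be padded: copy row by row, honouring the stride.
    stride = vs_frame.get_stride(plane)           # bytes per destination row
    dst = vs_frame.get_write_ptr(plane).value     # integer address of the plane start
    for y in range(plane_h):
        row = stream.read(plane_w)                # one packed row from the pipe (1 byte/sample)
        ctypes.memmove(dst + y * stride, row, plane_w)

# usage inside load_frame, replacing the flat memmove (w and h as in the script above):
# copy_plane_from_pipe(vs_frame, 0, w, h, pipe.stdout)            # Y
# copy_plane_from_pipe(vs_frame, 1, w // 2, h // 2, pipe.stdout)  # U
# copy_plane_from_pipe(vs_frame, 2, w // 2, h // 2, pipe.stdout)  # V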
#22
# source: 'TestVideo.mp4'
# current color space: YUV420P8, bit depth: 8, resolution: 1280x536, fps: 25, color matrix: 470bg, yuv luminance scale: limited, scanorder: progressive
...
'--output-res 720x304',
that seems wrong,...

Quote:but it only "works" because I commented out "ctypes.memmove", so the pipe is not actually used.
so it is not working at all,...

Quote:1) the buffer size should be increased to allow the encoder to encode at least a few seconds of movie
What buffer size? There is no buffer in this at all... ah, you mean the one from Popen (that should change the result)
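
A side note that is not from the thread: bufsize only sizes the Python-level BufferedReader that wraps the pipe; the OS pipe buffer is separate, and on a blocking pipe read(n) waits for the full count (or end of stream) either way. Matching it to one frame is a convenience, not a requirement. A minimal sketch:

import subprocess

command = ['ffmpeg', '-i', 'input.mp4', '-vcodec', 'rawvideo',
           '-pix_fmt', 'yuv420p', '-f', 'rawvideo', '-']   # placeholder command
frame_len = 1280 * 536 * 3 // 2                            # one YUV420P8 frame of the test clip

# bufsize here only sets the buffer of the reader around the pipe; it does not
# have to match the frame size, it merely keeps frame-sized reads efficient.
pipe = subprocess.Popen(command, stdout=subprocess.PIPE, bufsize=frame_len)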
#23
This script is working

import vapoursynth as vs
from vapoursynth import core
import subprocess
import ctypes

ffmpeg = r'E:\VideoTest\TestSubs\ffmpeg.exe'
source_path=r'E:\VideoTest\TestSubs\TestVideo.mp4'
# Loading Plugins
core.std.LoadPlugin(path="E:/VideoTest/TestSubs/BestSource.dll") #from https://forum.doom9.org/showthread.php?t=184255
#current color space: YUV420P8, bit depth: 8
#resolution: 1280x536, fps: 25, color matrix: 470bg, yuv luminance scale: limited, scanorder: progressive
clip = core.bs.VideoSource(source=source_path) # this clip is only needed to get width and height
# Setting detected color matrix (470bg).
clip = core.std.SetFrameProps(clip, _Matrix=5)
# Setting color transfer info (470bg), when it is not set
clip = clip if not core.text.FrameProps(clip,'_Transfer') else core.std.SetFrameProps(clip, _Transfer=5)
# Setting color primaries info (BT.709), when it is not set
clip = clip if not core.text.FrameProps(clip,'_Primaries') else core.std.SetFrameProps(clip, _Primaries=1)
# Setting color range to TV (limited) range.
clip = core.std.SetFrameProp(clip=clip, prop="_ColorRange", intval=1)
clip = core.std.SetFrameProp(clip=clip, prop="_FieldBased", intval=0) # progressive
# set output frame rate to 25fps (progressive)
clip = core.std.AssumeFPS(clip=clip, fpsnum=25, fpsden=1)

clip = core.std.BlankClip(clip)

w = clip.width
h = clip.height
Ysize  = w * h
UVsize = w * h//4
frame_len = w * h * 3 // 2 #YUV420

command = [ ffmpeg, '-i', source_path,'-vcodec', 'rawvideo', '-pix_fmt', 'yuv420p',  '-f', 'rawvideo', '-']

pipe = subprocess.Popen(command, stdout = subprocess.PIPE, bufsize=frame_len)

def load_frame(n,f):
    try:
        vs_frame = f.copy()
        for plane, size in enumerate([Ysize, UVsize, UVsize]):
            ctypes.memmove(vs_frame.get_write_ptr(plane), pipe.stdout.read(size),  size)
        pipe.stdout.flush()
    except Exception as e:
        raise ValueError(repr(e))   
    return vs_frame

try:
    clip = core.std.ModifyFrame(clip, clip, load_frame)
except ValueError as e:
    pipe.terminate()
    print(e)

clip.set_output()

I used for testing the following file: https://filebin.net/trb7yof9h0g335e0

It seems that the problem was related to mod 8 and to the conversion to YUV420P10.

But at least now we have a version working with ffmpeg, I'm testing the NVEnc version...
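
One robustness tweak that might still be worth adding (my suggestion, not something from the thread): read(size) on the buffered pipe normally blocks until the full count arrives, but at the end of the stream it can return fewer bytes, and memmove would then read past the end of the returned buffer. A small hypothetical helper makes that failure explicit:

def read_exact(stream, size):
    # Read exactly `size` bytes, or raise once the pipe closes early.
    buf = bytearray()
    while len(buf) < size:
        chunk = stream.read(size - len(buf))
        if not chunk:
            raise EOFError('pipe closed after %d of %d bytes' % (len(buf), size))
        buf.extend(chunk)
    return bytes(buf)

# in load_frame, pipe.stdout.read(size) would then become read_exact(pipe.stdout, size)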
#24
If you stay with yuv420p (8-bit) and your source is mod-8, everything works fine here.

Quote:I used for testing the following file: ...
then
'--output-res 720x304',
should be wrong

Also if you use:
'--vpp-smooth quality=4,qp=60,prec=fp32',
you need to adjust the frame count of your clip in VapourSynth, since you iterate over the frames of the clip loaded there,...
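
If the piped stream ends up with a different frame count than the source (that is how I read the remark above), the dummy clip that drives ModifyFrame can be given the expected length explicitly. A sketch, with a made-up frame count, on top of the script from post #23:

expected_frames = 250  # hypothetical: however many frames the pipe will actually deliver
clip = core.std.BlankClip(clip, length=expected_frames)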

Cu Selur
#25
I was unable to get the NVEnc version working. I opened another issue with rigaya: https://github.com/rigaya/NVEnc/issues/5...1931769047
#26
Works fine here,...
import vapoursynth as vs
from vapoursynth import core
import subprocess
import ctypes

NVEnc = r'F:\Hybrid\64bit\NVEncC.exe'
ffmpeg= r'F:\Hybrid\64bit\ffmpeg.exe'
# Loading Plugins
core.std.LoadPlugin(path="F:/Hybrid/64bit/vsfilters/SourceFilter/LSmashSource/vslsmashsource.dll") # L-SMASH Source is only usable for mp4 and mov files
source_path=r"C:/Users/Selur/Desktop/TestSubs-1.mp4" # current color space: YUV420P8, bit depth: 8, resolution: 720x300, fps: 25, color matrix: 470bg, yuv luminance scale: limited, scanorder: progressive, Note storage resolution is 720x304
source_path=r'G:\TestClips&Co\test.avi' # current color space: YUV420P8, bit depth: 8, resolution: 640x352, fps: 25, color matrix: 470bg, yuv luminance scale: limited, scanorder: progressive
source_path="C:/Users/Selur/Desktop/TestVideo.mp4"

clip = core.lsmas.LWLibavSource(source=source_path, format="YUV420P8", stream_index=0, cache=0, prefer_hw=0)
clip = clip.resize.Bicubic(format=vs.YUV420P16)
w = clip.width
h = clip.height
bitdepth=16
Ysize  = w * h * bitdepth // 8
Usize = w * h//4 * bitdepth // 8
Vsize = w * h//4 * bitdepth // 8
frame_len = Ysize + Usize + Vsize #YUV420

command = [NVEnc,
                 '--avhw',
                 '--input "'+source_path+'" '
                 '-c raw',
                 '--output-res '+str(w)+'x'+str(h),
                 '--output-csp yuv420p',
                 '--output-depth '+str(bitdepth),
                 '--output-format raw',                
                 '--output -',          
             ]
command = [ ffmpeg, '-i', source_path,'-vcodec', 'rawvideo', '-pix_fmt', 'yuv420p16',  '-f', 'rawvideo', '-']            
command = ' '.join(command)

core.log_message(2,command)

pipe = subprocess.Popen(command, stdout = subprocess.PIPE, bufsize=frame_len)

def load_frame(n,f):
    try:
        vs_frame = f.copy()
        for i, size in enumerate([Ysize, Usize, Vsize]):
            ctypes.memmove(vs_frame.get_write_ptr(i), pipe.stdout.read(size),  size)
        pipe.stdout.flush()
    except Exception as e:
        raise ValueError(repr(e))    
    return vs_frame

try:
    clip = core.std.ModifyFrame(clip, clip, load_frame)
except ValueError as e:
    pipe.terminate()
    print(e)

clip.set_output()
(used yuv420p16 in the example)
#27
In your script the command is assigned twice; the second assignment overrides the first, so in practice you are still using ffmpeg, this time with YUV420P16.
The script using NVenc instead of ffmpeg is still not working.
#28
import vapoursynth as vs
from vapoursynth import core
import subprocess
import ctypes

NVEnc = r'F:\Hybrid\64bit\NVEncC.exe'
ffmpeg= r'F:\Hybrid\64bit\ffmpeg.exe'
# Loading Plugins
core.std.LoadPlugin(path="F:/Hybrid/64bit/vsfilters/SourceFilter/LSmashSource/vslsmashsource.dll") # L-SMASH Source is only usable for mp4 and mov files
source_path="C:/Users/Selur/Desktop/TestVideo.mp4"
clip = core.lsmas.LWLibavSource(source=source_path, format="YUV420P8", stream_index=0, cache=0, prefer_hw=0)
clip = clip.resize.Bicubic(format=vs.YUV420P16)
w = clip.width
h = clip.height
bitdepth=16
Ysize  = w * h * bitdepth // 8
Usize = w * h//4 * bitdepth // 8
Vsize = w * h//4 * bitdepth // 8
frame_len = Ysize + Usize + Vsize #YUV420

command = [NVEnc,
                 '--avhw',
                 '--input "'+source_path+'" '
                 '-c raw',
                 '--output-res '+str(w)+'x'+str(h),
                 '--output-csp yuv420',
                 '--output-depth '+str(bitdepth),
                 '--output-format raw',                
                 '--output -',          
             ]
command = ' '.join(command)

core.log_message(2,command)

pipe = subprocess.Popen(command, stdout = subprocess.PIPE, bufsize=frame_len)

def load_frame(n,f):
    try:
        vs_frame = f.copy()
        for i, size in enumerate([Ysize, Usize, Vsize]):
            ctypes.memmove(vs_frame.get_write_ptr(i), pipe.stdout.read(size),  size)
        pipe.stdout.flush()
    except Exception as e:
        raise ValueError(repr(e))    
    return vs_frame

try:
    clip = core.std.ModifyFrame(clip, clip, load_frame)
except ValueError as e:
    pipe.terminate()
    print(e)

clip.set_output()
works here
#29
That's interesting. I compared your version with mine, and the only significant difference is that you build a command string with ' '.join(command), while I used the join only for log_message and passed the list form of command to Popen. In theory both approaches should behave the same, but in practice only your approach works (despite what the documentation says).
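
My guess at why the list form misbehaves (not confirmed anywhere in the thread): in the NVEnc command list there is no comma after the '--input "..." ' element, so Python's implicit string concatenation fuses it with '-c raw' into a single list entry. Passed as a list, Popen quotes that fused entry as one argument for NVEncC; joined into a single string, it is re-tokenized on spaces by the launched process, which is why only the joined form behaves. A small illustration:

source_path = r'C:\clip.mp4'   # placeholder path, only for the demonstration

args = ['NVEncC64.exe',
        '--input "' + source_path + '" '   # <- no comma here, so this line and
        '-c raw',                          #    the next one fuse into ONE string
        '--output -',
       ]
print(len(args))    # 3, not 4
print(args[1])      # --input "C:\clip.mp4" -c raw

# ' '.join(args) flattens everything into one string, so the fused entry no longer matters:
print(' '.join(args))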
Great! Now there is the possibility to introduce an NVEnc preview in Hybrid. What do you think?
#30
Quote: Now there is the possibility to introduce an NVEnc preview in Hybrid. What do you think?
No, like I wrote before: there is no way to navigate this.
A pipe goes only one way: forward
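
A tiny illustration of the "one way" point (a sketch of mine, using ffmpeg only as a stand-in for any piped encoder):

import subprocess

p = subprocess.Popen(['ffmpeg', '-i', 'input.mp4', '-vcodec', 'rawvideo',
                      '-pix_fmt', 'yuv420p', '-f', 'rawvideo', '-'],
                     stdout=subprocess.PIPE)
print(p.stdout.seekable())   # False: there is no jumping back to an earlier frame
# The only options are reading forward frame by frame, or restarting the process.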

Cu Selur

