The source isn't interlaced; if it were, bobbing would not create a duplicate frame every other frame.
You really might want to check the capture settings; all those chroma artifacts and the deinterlacing issues look like something went wrong during capture.
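If you want to double-check that yourself, here is a minimal sketch (source filter and path are just placeholders): crudely bob the clip and look at the frame-to-frame differences; a near-zero diff on every other frame means both fields belong to the same progressive frame.
Code:
import vapoursynth as vs
core = vs.core

# placeholder source; any source filter works here
clip = core.lsmas.LWLibavSource("capture.avi")

# crude bob: split the fields and stretch each one back to full height
fields = core.std.SeparateFields(clip, tff=True)
bobbed = core.resize.Bicubic(fields, width=clip.width, height=clip.height)

# per-frame difference against the previous frame (first frame compared to itself)
diff = core.std.PlaneStats(bobbed, bobbed[0] + bobbed[:-1])
diff.set_output()
# check PlaneStatsDiff in a previewer: ~0 on every other frame -> progressive content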
About filtering the given source:
Stuff that should be done on a per-scene level:
- Fixing chroma deviations at the borders using BalanceBorders or Bore (see the sketch after this list)
- Luma adjustment (Retinex with tv scale looks fine to me, but that's just me)
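The per-scene part is just the usual pattern of cutting the clip at the scene changes, fixing each piece with its own settings and splicing everything back together. A rough sketch of that pattern, assuming awsmfunc's bbmod as the BalanceBorders implementation and made-up frame ranges/strengths:
Code:
import vapoursynth as vs
import awsmfunc  # assumed to provide bbmod, a BalanceBorders port
core = vs.core

clip = core.lsmas.LWLibavSource("capture.avi")  # placeholder source

def fix_scene_a(c):
    return awsmfunc.bbmod(c, left=2)  # made-up strength: clean two border columns on the left

def fix_scene_b(c):
    return awsmfunc.bbmod(c, left=4, right=2)  # different settings for a different scene

# made-up scene boundaries; splice the individually fixed pieces back together
clip = fix_scene_a(clip[0:1200]) + fix_scene_b(clip[1200:2400]) + clip[2400:]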
Avoiding machine learning stuff, I would try something like:
Code:
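# assumes the usual setup (import vapoursynth as vs; core = vs.core) plus whatever script modules provide the chromashift, qtgmc, smdegrain, denoise, dehalo and linedarken wrappers used below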
# Chroma adjustment using ChromaShiftSP
clip = chromashift.ChromaShiftSP(clip=clip, X=2.00, Y=2.00)
# contrast sharpening using CAS
clip = core.cas.CAS(clip=clip, sharpness=0.700)
# Denoising using QTGMC
clip = qtgmc.QTGMC(Input=clip, Preset="Fast", InputType=3, TR2=1, TFF=False, SourceMatch=0, Lossless=0, opencl=True, Denoiser="KNLMeansCL", TR0=2, TR1=2, Rep0=1, Rep1=0, Rep2=4, DCT=5, ThSCD1=200, ThSCD2=110, Sbb=0, NoiseProcess=2, GrainRestore=0.0, NoiseRestore=1, StabilizeNoise=True, NoiseTR=0, NoiseDeint="bob") # probably overkill with all thse options, especiall try adjusting ThSCD1=200, ThSCD2=110
clip = core.std.Crop(clip=clip, left=12, right=4, top=4, bottom=4) # cropping to 704x472
# denoising using HQDN3D
clip = core.hqdn3d.Hqdn3d(clip=clip, lum_spac=0.00, chrom_spac=10.00) # try tweaking the chroma denoising
# removing grain using SMDegrain
clip = smdegrain.SMDegrain(input=clip, RefineMotion=True, interlaced=False, opencl=True, device=-1)
# adjusting color space from YUV422P8 to YUV444P10 for vsRetinex
clip = core.resize.Bicubic(clip=clip, format=vs.YUV444P10, range_s="limited")
# color adjustment using Retinex
clip = core.retinex.MSRCP(input=clip, sigma=[25,80,250], fulls=False, fulld=False)
# denoising using KNLMeansCL
clip = denoise.KNLMeansCL(clip=clip, d=0)
# applying dehalo using YAHR
clip = dehalo.YAHR(clip, depth=24)
# Using FastLineDarkenMOD for line darkening
clip = linedarken.FastLineDarkenMOD(c=clip)
If machine learning is fine, I would use BasicVSR++ for the chroma cleanup instead of HQDN3D:
Code:
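# same setup assumptions as above, plus the vsbasicvsrpp package for BasicVSR++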
# Chroma adjustment using ChromaShiftSP
clip = chromashift.ChromaShiftSP(clip=clip, X=2.00, Y=2.00)
# contrast sharpening using CAS
clip = core.cas.CAS(clip=clip, sharpness=0.700)
# Denoising using QTGMC
clip = qtgmc.QTGMC(Input=clip, Preset="Fast", InputType=3, TR2=1, TFF=False, SourceMatch=0, Lossless=0, opencl=True, Denoiser="KNLMeansCL", TR0=2, TR1=2, Rep0=1, Rep1=0, Rep2=4, DCT=5, ThSCD1=200, ThSCD2=110, Sbb=0, NoiseProcess=2, GrainRestore=0.0, NoiseRestore=1, StabilizeNoise=True, NoiseTR=0, NoiseDeint="bob")
clip = core.std.Crop(clip=clip, left=12, right=4, top=4, bottom=4) # cropping to 704x472
clipMerge = clip
# adjusting color space from YUV422P8 to RGBH for vsBasicVSRPPFilter
clipMerge = core.resize.Bicubic(clip=clipMerge, format=vs.RGBH, matrix_in_s="470bg", range_s="limited")
# Quality enhancement using BasicVSR++
from vsbasicvsrpp import basicvsrpp as BasicVSRPP
clipMerge = BasicVSRPP(clip=clipMerge, model=4)
clipMerge = core.resize.Bicubic(clip=clipMerge, format=vs.YUV444P16, matrix_s="470bg", range_s="limited") # back from RGBH to YUV444P16
clip = core.resize.Bicubic(clip=clip, format=vs.YUV444P16, range_s="limited") # bring the unfiltered clip to the same format for the merge
clip = core.std.Merge(clip, clipMerge, [0, 1, 1]) # per-plane merge: keep the original luma, take the BasicVSR++ chroma
# removing grain using SMDegrain
clip = smdegrain.SMDegrain(input=clip, RefineMotion=True, interlaced=False, opencl=True, device=-1)
# adjusting color space from YUV444P16 to YUV444P10 for vsRetinex
clip = core.resize.Bicubic(clip=clip, format=vs.YUV444P10, range_s="limited", dither_type="error_diffusion")
# color adjustment using Retinex
clip = core.retinex.MSRCP(input=clip, sigma=[25,80,250], fulls=False, fulld=False)
# denoising using KNLMeansCL
clip = denoise.KNLMeansCL(clip=clip, d=0)
# applying dehalo using YAHR
clip = dehalo.YAHR(clip, depth=24)
# Using FastLineDarkenMOD for line darkening
clip = linedarken.FastLineDarkenMOD(c=clip)
Cu Selur