
import math
import os
import subprocess
import tempfile
from typing import Any, Dict, Optional, Union

import cv2
import numpy as np
import torch
from torchvision.utils import make_grid

from modelscope.metainfo import Pipelines
from modelscope.models.cv.video_deinterlace.UNet_for_video_deinterlace import \
    UNetForVideoDeinterlace
from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Input, Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors.cv import VideoReader
from modelscope.utils.constant import Tasks
from modelscope.utils.logger import get_logger

VIDEO_EXTENSIONS = ('.mp4', '.mov')

logger = get_logger()


def tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):
    """Convert torch Tensors into image numpy arrays.

    After clamping to (min, max), image values will be normalized to [0, 1].

    For different tensor shapes, this function will have different behaviors:

        1. 4D mini-batch Tensor of shape (N x 3/1 x H x W):
            Use `make_grid` to stitch images in the batch dimension, and then
            convert it to numpy array.
        2. 3D Tensor of shape (3/1 x H x W) and 2D Tensor of shape (H x W):
            Directly change to numpy array.

    Note that the image channel in input tensors should be RGB order. This
    function will convert it to cv2 convention, i.e., (H x W x C) with BGR
    order.

    Args:
        tensor (Tensor | list[Tensor]): Input tensors.
        out_type (numpy type): Output types. If ``np.uint8``, transform outputs
            to uint8 type with range [0, 255]; otherwise, float type with
            range [0, 1]. Default: ``np.uint8``.
        min_max (tuple): min and max values for clamp.

    Returns:
        (Tensor | list[Tensor]): 3D ndarray of shape (H x W x C) or 2D ndarray
        of shape (H x W).
    """
    condition = torch.is_tensor(tensor) or (isinstance(tensor, list) and all(
        torch.is_tensor(t) for t in tensor))
    if not condition:
        raise TypeError(
            f'tensor or list of tensors expected, got {type(tensor)}')

    if torch.is_tensor(tensor):
        tensor = [tensor]
    result = []
    for _tensor in tensor:
        _tensor = _tensor.squeeze(0).float().detach().cpu().clamp_(*min_max)
        _tensor = (_tensor - min_max[0]) / (min_max[1] - min_max[0])

        n_dim = _tensor.dim()
        if n_dim == 4:
            img_np = make_grid(
                _tensor, nrow=int(math.sqrt(_tensor.size(0))),
                normalize=False).numpy()
            img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0))
        elif n_dim == 3:
            img_np = _tensor.numpy()
            img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0))
        elif n_dim == 2:
            img_np = _tensor.numpy()
        else:
            raise ValueError('Only support 4D, 3D or 2D tensor. '
                             f'But received with dimension: {n_dim}')
        if out_type == np.uint8:
            img_np = (img_np * 255.0).round()
        img_np = img_np.astype(out_type)
        result.append(img_np)
    result = result[0] if len(result) == 1 else result
    return result


@PIPELINES.register_module(
    Tasks.video_deinterlace, module_name=Pipelines.video_deinterlace)
class VideoDeinterlacePipeline(Pipeline):

    def __init__(self,
                 model: Union[UNetForVideoDeinterlace, str],
                 preprocessor=None,
                 **kwargs):
        """The inference pipeline for all the video deinterlace sub-tasks.

        Args:
            model (`str` or `Model` or module instance): A model instance or a model local dir
                or a model id in the model hub.
            preprocessor (`Preprocessor`, `optional`): A Preprocessor instance.
            kwargs (dict, `optional`):
                Extra kwargs passed into the preprocessor's constructor.

        Example:
            >>> from modelscope.pipelines import pipeline
            >>> pipeline_ins = pipeline('video-deinterlace',
                model='damo/cv_unet_video-deinterlace')
            >>> input = 'https://modelscope.oss-cn-beijing.aliyuncs.com/test/videos/video_deinterlace_test.mp4'
            >>> print(pipeline_ins(input)[OutputKeys.OUTPUT_VIDEO])
        """
        super().__init__(model=model, preprocessor=preprocessor, **kwargs)
        if torch.cuda.is_available():
            self._device = torch.device('cuda')
        else:
            self._device = torch.device('cpu')
        self.net = self.model.net
        self.net.to(self._device)
        self.net.eval()
        logger.info('load video deinterlace model done')

    def preprocess(self, input: Input) -> Dict[str, Any]:
        # Decode the video and convert each BGR frame to a normalized RGB
        # tensor of shape (1, C, H, W).
        video_reader = VideoReader(input)
        inputs = []
        for frame in video_reader:
            inputs.append(np.flip(frame, axis=2))
        fps = video_reader.fps
        for i, img in enumerate(inputs):
            img = torch.from_numpy(img / 255.).permute(2, 0, 1).float()
            inputs[i] = img.unsqueeze(0)
        inputs = torch.stack(inputs, dim=1)
        return {'video': inputs, 'fps': fps}

    def forward(self, input: Dict[str, Any]) -> Dict[str, Any]:
        frames = input['video'][0]
        frenet = self.net.frenet
        enhnet = self.net.enhnet
        with torch.no_grad():
            outputs = []
            fields = []
            for i in range(0, frames.size(0)):
                # Deinterlace the current frame, then enhance it together
                # with a short window of neighbouring deinterlaced frames.
                fields.append(frenet(frames[i:i + 1, ...].to(self._device)))
                if i == 0:
                    fields = [fields[-1]] * 3
                else:
                    outputs.append(enhnet(fields).cpu().unsqueeze(0))
                    fields = fields[1:]
            fields.append(fields[-1])
            outputs.append(enhnet(fields).cpu().unsqueeze(0))
            output = torch.cat(outputs, dim=1)
        return {'output': output, 'fps': input['fps']}

    def postprocess(self, inputs: Dict[str, Any], **kwargs) -> Dict[str, Any]:
        output_video_path = kwargs.get('output_video', None)
        demo_service = kwargs.get('demo_service', False)
        if output_video_path is None:
            output_video_path = tempfile.NamedTemporaryFile(suffix='.mp4').name
        h, w = inputs['output'].shape[-2:]
        fourcc = cv2.VideoWriter_fourcc(*'mp4v')
        video_writer = cv2.VideoWriter(output_video_path, fourcc,
                                       inputs['fps'], (w, h))
        for i in range(0, inputs['output'].size(1)):
            img = tensor2img(inputs['output'][:, i, :, :, :])
            video_writer.write(img.astype(np.uint8))
        video_writer.release()

        if demo_service:
            assert os.system('ffmpeg -version') == 0, (
                'ffmpeg is not installed correctly, please refer to '
                'https://trac.ffmpeg.org/wiki/CompilationGuide.')
            output_video_path_for_web = output_video_path[:-4] + '_web.mp4'
            convert_cmd = f'ffmpeg -i {output_video_path} -vcodec h264 ' \
                f'-crf 5 {output_video_path_for_web}'
            subprocess.call(convert_cmd, shell=True)
            return {OutputKeys.OUTPUT_VIDEO: output_video_path_for_web}

        return {OutputKeys.OUTPUT_VIDEO: output_video_path}