
import os.path as osp
from typing import Any, Dict

import cv2
import numpy as np
import PIL
import torch
import torch.nn.functional as F
from torch import nn
from torchvision import transforms

from modelscope.metainfo import Pipelines
from modelscope.outputs import OutputKeys
from modelscope.pipelines import pipeline
from modelscope.pipelines.base import Input, Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import LoadImage
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.logger import get_logger

logger = get_logger()


@PIPELINES.register_module(
    Tasks.image_classification, module_name=Pipelines.content_check)
class ContentCheckPipeline(Pipeline):

    def __init__(self, model: str, **kwargs):
        """
        Use `model` to create a content check pipeline for prediction.

        Args:
            model: model id on modelscope hub.

        Example:
            ContentCheckPipeline can judge whether a picture is pornographic.

            ```python
            >>> from modelscope.pipelines import pipeline
            >>> cc_func = pipeline('image_classification', 'damo/cv_resnet50_image-classification_cc')
            >>> cc_func("https://modelscope.oss-cn-beijing.aliyuncs.com/test/images/content_check.jpg")
            {'scores': [0.2789826989173889], 'labels': 'pornographic'}
            ```
        """
        super().__init__(model=model, **kwargs)
        # Standard ImageNet-style evaluation transforms for the ResNet-50 backbone.
        self.test_transforms = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize(
                mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
        ])
        logger.info('content check model loaded!')

    def preprocess(self, input: Input) -> Dict[str, Any]:
        # Load the input (file path, URL or array) as an image and apply the
        # evaluation transforms.
        img = LoadImage.convert_to_img(input)
        img = self.test_transforms(img).float()
        result = {}
        result['img'] = img
        return result

    def forward(self, input: Dict[str, Any]) -> Dict[str, Any]:
        img = input['img'].unsqueeze(0)
        result = self.model(img)
        # Softmax over the first two logits; the score is one minus the
        # probability of the first class, thresholded at 0.5 to pick the label.
        scores = [1.0 - F.softmax(result[:, :2], dim=1)[0][0].tolist()]
        if scores[0] > 0.5:
            label = 'pornographic'
        else:
            label = 'normal'
        return {OutputKeys.SCORES: scores, OutputKeys.LABELS: label}

    def postprocess(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        return inputs