
from typing import Any, Dict, Optional, Union

import numpy as np
import torch

from modelscope.metainfo import Pipelines
from modelscope.models import Model
from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['SentenceEmbeddingPipeline']


@PIPELINES.register_module(
    Tasks.sentence_embedding, module_name=Pipelines.sentence_embedding)
class SentenceEmbeddingPipeline(Pipeline):

    def __init__(self,
                 model: Union[Model, str],
                 preprocessor: Optional[Preprocessor] = None,
                 config_file: str = None,
                 device: str = 'gpu',
                 auto_collate: bool = True,
                 sequence_length: int = 128,
                 **kwargs):
        """Use `model` and `preprocessor` to create an NLP text dual encoder that
        generates the text representation.

        Args:
            model (str or Model): Supply either a local model dir which supports the
                sentence-embedding task, a model id from the model hub, or a torch
                model instance.
            preprocessor (Preprocessor): An optional preprocessor instance; please
                make sure the preprocessor fits the model if supplied.
            kwargs (dict, `optional`):
                Extra kwargs passed into the preprocessor's constructor.
        """
        super().__init__(
            model=model,
            preprocessor=preprocessor,
            config_file=config_file,
            device=device,
            auto_collate=auto_collate,
            compile=kwargs.pop('compile', False),
            compile_options=kwargs.pop('compile_options', {}))

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,
                sequence_length=sequence_length,
                **kwargs)

    def forward(self, inputs: Dict[str, Any],
                **forward_params) -> Dict[str, Any]:
        return self.model(**inputs, **forward_params)

    def postprocess(self, inputs: Dict[str, Any],
                    **postprocess_params) -> Dict[str, Any]:
        """Process the prediction results.

        Args:
            inputs (Dict[str, Any]): the model outputs, containing
                'query_embeddings' and (optionally) 'doc_embeddings'.

        Returns:
            Dict[str, Any]: the predicted text representation and, when document
                embeddings are present, the query-document dot-product scores.
        """
        embeddings = inputs['query_embeddings']
        doc_embeddings = inputs['doc_embeddings']
        if doc_embeddings is not None:
            embeddings = torch.cat((embeddings, doc_embeddings), dim=0)
        embeddings = embeddings.detach().cpu().numpy()
        if doc_embeddings is not None:
            scores = np.dot(embeddings[0:1],
                            np.transpose(embeddings[1:], (1, 0))).tolist()[0]
        else:
            scores = []
        return {OutputKeys.TEXT_EMBEDDING: embeddings, OutputKeys.SCORES: scores}
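

# --- Usage sketch (not part of the original module) --------------------------
# A minimal example of how this pipeline is typically obtained through the
# ModelScope `pipeline` factory. The model id below is a placeholder, and the
# input keys ('source_sentence', 'sentences_to_compare') are the ones the
# sentence-embedding preprocessor is assumed to consume; adjust both to your
# model. Kept under a __main__ guard so importing this module stays
# side-effect free.
if __name__ == '__main__':
    from modelscope.pipelines import pipeline

    embedder = pipeline(
        Tasks.sentence_embedding,
        model='<sentence-embedding-model-id-or-local-dir>')  # placeholder

    result = embedder({
        'source_sentence': ['how long does it take to finish a master degree'],
        'sentences_to_compare': [
            'On average it takes 18 to 24 months to complete a master degree.',
            'The weather in Hangzhou is mild in spring.',
        ],
    })

    # OutputKeys.TEXT_EMBEDDING: query + document embeddings as an ndarray.
    # OutputKeys.SCORES: dot-product scores of the query against each document.
    print(result[OutputKeys.SCORES])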