
"""PLUG model configuration"""

from transformers.configuration_utils import PretrainedConfig


class PlugConfig(PretrainedConfig):
    r"""
Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used to control the model
outputs. Read the documentation from :class:`~transformers.PretrainedConfig` for more information.


Args:
    vocab_size (:obj:`int`, `optional`, defaults to 30522):
        Vocabulary size of the BERT model. Defines the number of different tokens that can be represented by the
        :obj:`inputs_ids` passed when calling :class:`~transformers.BertModel` or
        :class:`~transformers.TFBertModel`.
    hidden_size (:obj:`int`, `optional`, defaults to 768):
        Dimensionality of the encoder layers and the pooler layer.
    num_hidden_layers (:obj:`int`, `optional`, defaults to 12):
        Number of hidden layers in the Transformer encoder.
    num_attention_heads (:obj:`int`, `optional`, defaults to 12):
        Number of attention heads for each attention layer in the Transformer encoder.
    intermediate_size (:obj:`int`, `optional`, defaults to 3072):
        Dimensionality of the "intermediate" (often named feed-forward) layer in the Transformer encoder.
    hidden_act (:obj:`str` or :obj:`Callable`, `optional`, defaults to :obj:`"gelu"`):
        The non-linear activation function (function or string) in the encoder and pooler. If string,
        :obj:`"gelu"`, :obj:`"relu"`, :obj:`"silu"` and :obj:`"gelu_new"` are supported.
    hidden_dropout_prob (:obj:`float`, `optional`, defaults to 0.1):
        The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
    attention_probs_dropout_prob (:obj:`float`, `optional`, defaults to 0.1):
        The dropout ratio for the attention probabilities.
    max_position_embeddings (:obj:`int`, `optional`, defaults to 512):
        The maximum sequence length that this model might ever be used with. Typically set this to something large
        just in case (e.g., 512 or 1024 or 2048).
    type_vocab_size (:obj:`int`, `optional`, defaults to 2):
        The vocabulary size of the :obj:`token_type_ids` passed when calling :class:`~transformers.BertModel` or
        :class:`~transformers.TFBertModel`.
    initializer_range (:obj:`float`, `optional`, defaults to 0.02):
        The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
    layernorm_epsilon (:obj:`float`, `optional`, defaults to 1e-12):
        The epsilon used by the layer normalization layers.
    dec_hidden_layers (:obj:`int`, `optional`, defaults to 12):
        Number of hidden layers in the Transformer decoder.
    attn_separate (:obj:`bool`, `optional`, defaults to :obj:`False`):
        Whether or not to use separate projection weights for the query, key and value in attention.

Examples::

    >>> from modelscope.models.nlp.fid_plug import PlugModel, PlugConfig

    >>> # Initializing a PLUG configuration
    >>> configuration = PlugConfig()

    >>> # Initializing a model from the configuration
    >>> model = PlugModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
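
    >>> # A brief sketch of the serialization helpers inherited from PretrainedConfig;
    >>> # the directory name './plug_config' is only illustrative
    >>> configuration.save_pretrained('./plug_config')
    >>> configuration = PlugConfig.from_pretrained('./plug_config')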
    """

    model_type = 'plug'

    def __init__(self,
                 encoder='roberta',
                 encoder_pth='roberta-base',
                 max_pos=512,
                 share_emb=False,
                 dec_layers=6,
                 dec_hidden_size=768,
                 dec_heads=8,
                 dec_ff_size=2048,
                 dec_dropout=0.2,
                 use_bert_emb=True,
                 label_smoothing=0.1,
                 block_trigram=False,
                 **kwargs):
        super().__init__(**kwargs)
        # Encoder settings
        self.encoder = encoder
        self.encoder_pth = encoder_pth
        self.max_pos = max_pos
        self.share_emb = share_emb
        # Decoder settings
        self.dec_layers = dec_layers
        self.dec_hidden_size = dec_hidden_size
        self.dec_heads = dec_heads
        self.dec_ff_size = dec_ff_size
        self.dec_dropout = dec_dropout
        # Embedding, training and decoding options
        self.use_bert_emb = use_bert_emb
        self.label_smoothing = label_smoothing
        self.block_trigram = block_trigram