""" PALM model configuration """

from transformers.configuration_utils import PretrainedConfig

from modelscope.utils import logger as logging

logger = logging.get_logger()


class PalmConfig(PretrainedConfig):
    """
    Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used to control the model
    outputs. Read the documentation from :class:`~transformers.PretrainedConfig` for more information.


    Args:
        vocab_size (:obj:`int`, `optional`, defaults to 30522):
            Vocabulary size of the PALM model. Defines the number of different tokens that can be represented by the
            :obj:`inputs_ids` passed when calling
            :class:`~modelscope.models.nlp.palm_v2.PalmForConditionalGeneration`.
        hidden_size (:obj:`int`, `optional`, defaults to 768):
            Dimensionality of the encoder layers and the pooler layer.
        num_hidden_layers (:obj:`int`, `optional`, defaults to 12):
            Number of hidden layers in the Transformer encoder.
        num_attention_heads (:obj:`int`, `optional`, defaults to 12):
            Number of attention heads for each attention layer in the Transformer encoder.
        intermediate_size (:obj:`int`, `optional`, defaults to 3072):
            Dimensionality of the "intermediate" (often named feed-forward) layer in the Transformer encoder.
        hidden_act (:obj:`str` or :obj:`Callable`, `optional`, defaults to :obj:`"gelu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string,
            :obj:`"gelu"`, :obj:`"relu"`, :obj:`"silu"` and :obj:`"gelu_new"` are supported.
        hidden_dropout_prob (:obj:`float`, `optional`, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        attention_probs_dropout_prob (:obj:`float`, `optional`, defaults to 0.1):
            The dropout ratio for the attention probabilities.
        max_position_embeddings (:obj:`int`, `optional`, defaults to 512):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        type_vocab_size (:obj:`int`, `optional`, defaults to 2):
            The vocabulary size of the :obj:`token_type_ids` passed when calling
            :class:`~modelscope.models.nlp.palm_v2.PalmForConditionalGeneration`.
        initializer_range (:obj:`float`, `optional`, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layernorm_epsilon (:obj:`float`, `optional`, defaults to 1e-12):
            The epsilon used by the layer normalization layers.
        dec_layers (:obj:`int`, `optional`, defaults to 12):
            Number of hidden layers in the Transformer decoder.
        attn_separate (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to use separate projection matrices for the query, key and value in the attention layers.

    Examples:

        >>> from modelscope.models.nlp.palm_v2 import PalmForConditionalGeneration, PalmConfig
        >>> configuration = PalmConfig()

        >>> # Initializing a model from the configuration
        >>> model = PalmForConditionalGeneration(configuration)

        >>> # Accessing the model configuration
        >>> configuration = model.config
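
        Defaults can be overridden with keyword arguments; the values below are
        purely illustrative:

        >>> # Creating a configuration with a smaller decoder (illustrative values)
        >>> configuration = PalmConfig(dec_layers=6, beam_size=3)
        >>> configuration.dec_layers
        6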
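
        Because :class:`PalmConfig` inherits from :class:`~transformers.PretrainedConfig`, it can be
        serialized to and restored from a JSON file (the directory name below is illustrative):

        >>> # Saving and reloading the configuration
        >>> configuration.save_pretrained('./palm_config')
        >>> configuration = PalmConfig.from_pretrained('./palm_config')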
    ÚpalmÚrobertaúroberta-baseé   Fé   é   é   é   çš™™™™™É?Tçš™™™™™¹?çffffffî?é   é(   é‚   c                    s|   t ƒ jdi |¤Ž || _|| _|| _|| _|| _|| _|| _|| _	|	| _
|
| _|| _|| _|| _|| _|| _|| _|| _d S )N© )ÚsuperÚ__init__ÚencoderÚencoder_pthÚmax_posÚ	share_embÚ
dec_layersÚdec_hidden_sizeÚ	dec_headsÚdec_ff_sizeÚdec_dropoutÚuse_bert_embÚlabel_smoothingÚalphaÚ	beam_sizeÚ
min_lengthÚ
max_lengthÚsample_topkÚblock_trigram)Úselfr   r   r   r   r   r   r   r   r   r   r    r!   r"   r#   r$   r%   r&   Úkwargs©Ú	__class__r   ú_/home/ubuntu/.local/lib/python3.10/site-packages/modelscope/models/nlp/palm_v2/configuration.pyr   O   s$   
zPalmConfig.__init__)r   r   r   Fr	   r
   r   r   r   Tr   r   r   r   r   FF)Ú__name__Ú
__module__Ú__qualname__Ú__doc__Ú
model_typer   Ú__classcell__r   r   r)   r+   r      s*    3ïr   N)r/   Ú transformers.configuration_utilsr   Úmodelscope.utilsr   ÚloggingÚ
get_loggerr   r   r   r   r+   Ú<module>   s
   