# Source code for transformers.models.roberta.configuration_roberta

# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION.  All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# See the License for the specific language governing permissions and
# limitations under the License.
""" RoBERTa configuration """
from collections import OrderedDict
from typing import Mapping

from ...onnx import OnnxConfig
from ...utils import logging
from ..bert.configuration_bert import BertConfig

# Module-level logger namespaced to this module (transformers' standard pattern).
logger = logging.get_logger(__name__)

    "roberta-base": "",
    "roberta-large": "",
    "roberta-large-mnli": "",
    "distilroberta-base": "",
    "roberta-base-openai-detector": "",
    "roberta-large-openai-detector": "",

class RobertaConfig(BertConfig):
    r"""
    This is the configuration class to store the configuration of a :class:`~transformers.RobertaModel` or a
    :class:`~transformers.TFRobertaModel`. It is used to instantiate a RoBERTa model according to the specified
    arguments, defining the model architecture.

    Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used to control the model
    outputs. Read the documentation from :class:`~transformers.PretrainedConfig` for more information.

    The :class:`~transformers.RobertaConfig` class directly inherits :class:`~transformers.BertConfig`. It reuses the
    same defaults. Please check the parent class for more information.

    Examples::

        >>> from transformers import RobertaConfig, RobertaModel

        >>> # Initializing a RoBERTa configuration
        >>> configuration = RobertaConfig()

        >>> # Initializing a model from the configuration
        >>> model = RobertaModel(configuration)

        >>> # Accessing the model configuration
        >>> configuration = model.config
    """

    # Key used by AutoConfig to dispatch to this class.
    model_type = "roberta"

    def __init__(self, pad_token_id=1, bos_token_id=0, eos_token_id=2, **kwargs):
        """Constructs RobertaConfig.

        RoBERTa uses different special-token ids than BERT (pad=1, bos=0, eos=2);
        everything else is delegated to :class:`~transformers.BertConfig`.
        """
        super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)
class RobertaOnnxConfig(OnnxConfig):
    """ONNX export configuration for RoBERTa.

    Declares the model's input and output tensors together with their dynamic
    axes (axis 0 is the batch dimension, axis 1 the sequence dimension), in the
    order expected by the ONNX exporter.
    """

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Ordered mapping: the exporter relies on insertion order of the inputs.
        return OrderedDict(
            [
                ("input_ids", {0: "batch", 1: "sequence"}),
                ("attention_mask", {0: "batch", 1: "sequence"}),
            ]
        )

    @property
    def outputs(self) -> Mapping[str, Mapping[int, str]]:
        # pooler_output is 2-D, so only its batch axis is dynamic.
        return OrderedDict([("last_hidden_state", {0: "batch", 1: "sequence"}), ("pooler_output", {0: "batch"})])