# Translations of the PaddleNLP documentation (ELECTRA model summary).
# Copyright (C) 2021, PaddleNLP
# This file is distributed under the same license as the PaddleNLP package.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2022.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PaddleNLP \n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2022-05-19 14:17+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.10.1\n"

#: ../model_zoo/transformers/ELECTRA/contents.rst:5
msgid "ELECTRA模型汇总"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:9
msgid "下表汇总介绍了目前PaddleNLP支持的ELECTRA模型对应预训练权重。 关于模型的具体细节可以参考对应链接。"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:13
msgid "Pretrained Weight"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:13
msgid "Language"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:13
msgid "Details of the model"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:15
msgid "``electra-small``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:15
#: ../model_zoo/transformers/ELECTRA/contents.rst:19
#: ../model_zoo/transformers/ELECTRA/contents.rst:23
msgid "English"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:15
msgid ""
"12-layer, 768-hidden, 4-heads, 14M parameters. Trained on lower-cased "
"English text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:19
msgid "``electra-base``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:19
msgid ""
"12-layer, 768-hidden, 12-heads, 109M parameters. Trained on lower-cased "
"English text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:23
msgid "``electra-large``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:23
msgid ""
"24-layer, 1024-hidden, 16-heads, 334M parameters. Trained on lower-cased "
"English text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:27
msgid "``chinese-electra-small``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:27
#: ../model_zoo/transformers/ELECTRA/contents.rst:31
#: ../model_zoo/transformers/ELECTRA/contents.rst:35
#: ../model_zoo/transformers/ELECTRA/contents.rst:39
#: ../model_zoo/transformers/ELECTRA/contents.rst:43
#: ../model_zoo/transformers/ELECTRA/contents.rst:47
msgid "Chinese"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:27
msgid "12-layer, 768-hidden, 4-heads, 12M parameters. Trained on Chinese text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:31
msgid "``chinese-electra-base``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:31
msgid "12-layer, 768-hidden, 12-heads, 102M parameters. Trained on Chinese text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:35
msgid "``ernie-health-chinese``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:35
msgid ""
"12-layer, 768-hidden, 12-heads, 102M parameters. Trained on Chinese "
"medical corpus."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:39
msgid "``junnyu/hfl-chinese-electra-180g-base-discriminator``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:39
msgid ""
"Discriminator, 12-layer, 768-hidden, 12-heads, 102M parameters. Trained "
"on 180g Chinese text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:43
msgid "``junnyu/hfl-chinese-electra-180g-small-ex-discriminator``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:43
msgid ""
"Discriminator, 24-layer, 256-hidden, 4-heads, 24M parameters. Trained on "
"180g Chinese text."
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:47
msgid "``junnyu/hfl-chinese-legal-electra-small-generator``"
msgstr ""

#: ../model_zoo/transformers/ELECTRA/contents.rst:47
msgid ""
"Generator, 12-layer, 64-hidden, 1-heads, 3M parameters. Trained on "
"Chinese legal corpus."
msgstr ""

