Update nsmc_small.tflite (commit 1dd8033)

345 Bytes · initial commit
1.33 kB · Update config.json
nsmc_small.pt
Detected Pickle imports (224)
- "__torch__.transformers.modeling_bert.___torch_mangle_164.BertOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_56.BertAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_128.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_134.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_165.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_184.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_45.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_159.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_25.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_110.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_166.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_50.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_17.BertSelfAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_104.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_154.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_194.BertIntermediate",
- "__torch__.transformers.modeling_electra.ElectraModel",
- "__torch__.transformers.modeling_bert.___torch_mangle_92.BertIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_60.LayerNorm",
- "__torch__.torch.nn.modules.normalization.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_157.BertSelfOutput",
- "__torch__.torch.nn.modules.activation.Softmax",
- "__torch__.transformers.modeling_bert.___torch_mangle_147.BertOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_136.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_81.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_148.BertLayer",
- "__torch__.transformers.modeling_bert.___torch_mangle_85.BertSelfAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_189.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_44.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_97.BertLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_20.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_123.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_83.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_88.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_129.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_29.BertLayer",
- "__torch__.transformers.modeling_electra.ElectraEmbeddings",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_162.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_180.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_201.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_69.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_140.BertSelfOutput",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_0.Embedding",
- "__torch__.torch.nn.modules.linear.___torch_mangle_65.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_143.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_48.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_126.BertIntermediate",
- "__torch__.transformers.modeling_bert.___torch_mangle_198.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_100.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_197.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_137.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_68.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_15.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_111.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_193.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_161.Linear",
- "__torch__.transformers.modeling_bert.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_116.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_121.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_76.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_177.BertIntermediate",
- "__torch__.transformers.modeling_bert.___torch_mangle_22.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_132.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_127.Linear",
- "__torch__.torch.nn.modules.linear.Linear",
- "__torch__.transformers.modeling_bert.BertEncoder",
- "__torch__.torch.nn.modules.linear.___torch_mangle_103.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_96.BertOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_46.BertLayer",
- "__torch__.transformers.modeling_bert.___torch_mangle_187.BertSelfAttention",
- "__torch__.transformers.modeling_bert.BertSelfOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_131.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_66.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_10.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_72.BertSelfOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_170.BertSelfAttention",
- "__torch__.transformers.modeling_bert.___torch_mangle_181.BertOutput",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_70.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_151.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_49.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_9.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_21.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_42.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_59.Linear",
- "__torch__.model.ElectraForSequenceClassification",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_11.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_191.BertSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_200.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_61.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_82.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_90.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_118.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_86.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_73.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_112.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_115.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_138.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_175.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_6.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_47.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_31.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_183.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_43.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_173.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_95.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_67.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_64.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_108.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_163.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_40.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_87.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_19.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_24.BertIntermediate",
- "__torch__.transformers.modeling_bert.___torch_mangle_130.BertOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_28.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_185.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_3.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_54.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_186.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_146.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_107.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_167.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_34.BertSelfAttention",
- "__torch__.transformers.modeling_bert.___torch_mangle_63.BertLayer",
- "__torch__.transformers.modeling_bert.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_133.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_53.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_139.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_155.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_13.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_152.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_122.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_153.BertSelfAttention",
- "__torch__.transformers.modeling_bert.___torch_mangle_158.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_105.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_93.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_144.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_113.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_32.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_149.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_5.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_102.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_117.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_141.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_135.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_38.BertSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_37.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_16.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_79.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_91.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_27.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_101.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_36.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_2.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_18.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_84.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_57.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_71.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_114.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_99.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_58.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_178.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_35.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_8.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_196.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_78.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_188.Linear",
- "__torch__.transformers.modeling_bert.BertAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_7.LayerNorm",
- "__torch__.torch.nn.modules.container.ModuleList",
- "__torch__.transformers.modeling_bert.___torch_mangle_80.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_195.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_55.BertSelfOutput",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.transformers.modeling_bert.___torch_mangle_160.BertIntermediate",
- "__torch__.transformers.modeling_bert.___torch_mangle_174.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_142.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_109.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_176.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_192.BertAttention",
- "__torch__.transformers.modeling_bert.___torch_mangle_39.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_74.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_171.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_120.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_14.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_168.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_190.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_51.BertSelfAttention",
- "__torch__.transformers.modeling_bert.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_52.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_94.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_106.BertSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_169.Dropout",
- "torch.FloatStorage",
- "__torch__.torch.nn.modules.linear.___torch_mangle_150.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_26.LayerNorm",
- "__torch__.transformers.modeling_bert.___torch_mangle_62.BertOutput",
- "__torch__.transformers.modeling_bert.___torch_mangle_199.BertLayer",
- "__torch__.transformers.modeling_bert.___torch_mangle_89.BertSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_156.Dropout",
- "__torch__.torch.nn.modules.dropout.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_179.LayerNorm",
- "collections.OrderedDict",
- "__torch__.transformers.modeling_bert.___torch_mangle_182.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_30.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_145.LayerNorm",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.torch.nn.modules.linear.___torch_mangle_98.Linear",
- "__torch__.transformers.modeling_bert.___torch_mangle_124.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_23.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_12.Dropout",
- "__torch__.transformers.modeling_bert.___torch_mangle_75.BertIntermediate",
- "__torch__.transformers.modeling_bert.___torch_mangle_41.BertIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_77.LayerNorm",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_1.Embedding",
- "__torch__.transformers.modeling_bert.___torch_mangle_119.BertSelfAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_172.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_33.Dropout",
- "__torch__.transformers.modeling_bert.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_125.Linear",
- "torch._utils._rebuild_tensor_v2",
- "torch.DoubleStorage",
- "collections.OrderedDict"
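The `__torch__.*` mangled class names above are what a TorchScript archive (saved with `torch.jit.save`) contains, so nsmc_small.pt is presumably a traced ELECTRA sequence classifier rather than a plain pickled state dict. Below is a minimal loading sketch, assuming the traced forward takes `input_ids` and `attention_mask` tensors at a fixed sequence length; the tokenizer setup and `max_length` are illustrative assumptions, not values taken from the repo.

```python
# Sketch only: assumes nsmc_small.pt is a TorchScript trace of an ELECTRA
# sequence-classification model whose forward takes (input_ids, attention_mask)
# at a fixed sequence length. Adjust max_length to whatever the trace used.
import torch
from transformers import AutoTokenizer

model = torch.jit.load("nsmc_small.pt", map_location="cpu")
model.eval()

# Tokenizer built from the config.json / tokenizer_config.json / vocab.txt in this repo.
tokenizer = AutoTokenizer.from_pretrained(".")

inputs = tokenizer(
    "이 영화 정말 재미있어요",   # NSMC-style Korean movie review (sample text)
    return_tensors="pt",
    padding="max_length",
    max_length=128,              # assumption: trace-time sequence length
    truncation=True,
)

with torch.no_grad():
    outputs = model(inputs["input_ids"], inputs["attention_mask"])

# A traced Hugging Face model usually returns a tuple; the first element holds the logits.
logits = outputs[0] if isinstance(outputs, tuple) else outputs
print(logits.argmax(dim=-1))     # assumed label order: 0 = negative, 1 = positive
```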
55.1 MB · Update nsmc_small.pt
55.7 MB · Update nsmc_small.tflite
14.1 MB · Update nsmc_small_8bits.tflite
27.6 MB · Update nsmc_small_fp16.tflite
54.8 MB · Update pytorch_model.bin
51 Bytes · Update tokenizer_config.json
279 kB · Update vocab.txt
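The repository also ships TensorFlow Lite exports in three sizes (full precision, fp16, and 8-bit). Below is a minimal sketch of running one of them with the TFLite interpreter; it assumes fixed-shape token-ID inputs and a single logits output, and reads the actual input/output layout from `get_input_details()` / `get_output_details()` rather than hard-coding it.

```python
# Sketch only: assumes nsmc_small_8bits.tflite takes fixed-shape integer token-ID
# inputs and produces a single logits tensor; inspect the printed details before
# feeding real tokenized text.
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path="nsmc_small_8bits.tflite")
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
print(input_details)   # expected shapes and dtypes of the model inputs

# Dummy input matching the first input tensor's declared shape and dtype.
shape = input_details[0]["shape"]
dummy = np.zeros(shape, dtype=input_details[0]["dtype"])
interpreter.set_tensor(input_details[0]["index"], dummy)

interpreter.invoke()
logits = interpreter.get_tensor(output_details[0]["index"])
print(logits)
```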