id (string, 1-4 characters)
tokens (sequence of strings)
ner_tags (sequence of integers, one tag per token)
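Each row below is an (id, tokens, ner_tags) triple. A minimal sketch (plain Python; the in-memory `rows` layout is an assumption for illustration) of checking that every row keeps its tokens and tags aligned:

```python
# Minimal sketch: check that every row keeps tokens and ner_tags aligned.
# The in-memory layout (a list of dicts) is an assumption for illustration;
# the example row is truncated from row 2700 below.

rows = [
    {"id": "2700", "tokens": ["Ram", "et", "al.,", "2021),"], "ner_tags": [0, 0, 0, 0]},
]

for row in rows:
    assert len(row["tokens"]) == len(row["ner_tags"]), (
        f"row {row['id']}: {len(row['tokens'])} tokens, "
        f"{len(row['ner_tags'])} tags"
    )
```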
2700
[ "Ram", "et", "al.,", "2021),", "and", "entity", "recognition", "(de", "Lichy", "et", "al.,", "2021;", "Tong", "et", "al.,", "2021;", "Ding", "et", "al.,", "2021),", "by", "meta", "learning", "(Li", "and", "Zhang,", "2021;", "Bansal", "et", "al.,", "2020;", "Sharaf", "et", "al.,", "2020),", "data", "augmentation", "(Wei", "et", "al.,", "2021;", "Wei", "and", "Zou,", "2019;", "Karimi", "et", "al.,", "2021;", "Jian", "et", "al.,", "2022),", "and", "prompts", "(Gao", "et", "al.,", "2021;", "Tam", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2701
[ "In", "NLP,", "few-shot", "learning", "has", "been", "successfully", "applied", "to", "machine", "translation", "(Arthaud", "et", "al.,", "2021),", "abstract", "summarizing", "(Fabbri", "et", "al.,", "2021),", "question", "and", "answering", "(Hua", "et", "al.,", "2020;" ]
[ 0, 0, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2702
[ "Learning", "from", "limited", "labeled", "data", "(few-shot", "learning)", "in", "Computer", "Vision", "is", "usually", "achieved", "by", "meta-learning", "(Ren", "et", "al.,", "2018a,b;", "Jian", "et", "al.,", "2020;", "Jian", "and", "Gao,", "2021)", "or", "transfer", "learning", "(Tian", "et", "al.,", "2020)." ]
[ 0, 0, 0, 0, 0, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2703
[ "Label", "Hallucination", "(Jian", "and", "Torresani,", "2022)", "assigns", "soft", "pseudo-labels", "for", "unlabelled", "images", "to", "extend", "the", "fine-tuning", "few-shot", "dataset." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
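The row above describes Label Hallucination, which assigns soft pseudo-labels to unlabeled examples. A minimal sketch of that idea; the PyTorch `teacher` classifier, the batch interface, and the temperature value are assumptions for illustration:

```python
import torch
import torch.nn.functional as F

# Minimal sketch of soft pseudo-labeling in the spirit of Label Hallucination
# (Jian and Torresani, 2022): a model fine-tuned on the few-shot classes
# assigns a softened class distribution to each unlabeled example.
# `teacher`, `unlabeled_batch`, and the temperature are assumptions.

@torch.no_grad()
def soft_pseudo_labels(teacher, unlabeled_batch, temperature=4.0):
    logits = teacher(unlabeled_batch)               # (batch, num_classes)
    return F.softmax(logits / temperature, dim=-1)  # soft targets, not argmax

# The (example, soft label) pairs can then be appended to the few-shot
# dataset and trained on with a KL-divergence or soft cross-entropy loss.
```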
2704
[ "Tjio", "et", "al.,", "2022)." ]
[ 0, 0, 0, 0 ]
2705
[ "Feature", "Hallucination", "of", "examples", "is", "first", "introduced", "for", "visual", "recognition", "(Hariharan", "and", "Girshick,", "2017)", "by", "metalearning", "(Wang", "et", "al.,", "2018),", "variational", "inference", "(Luo", "et", "al.,", "2021;", "Lazarou", "et", "al.,", "2022),", "and", "adversarial", "learning", "(Li", "et", "al.,", "2020;" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2706
[ "Hallucination", "Methods." ]
[ 0, 0 ]
2707
[ "Guo", "et", "al.,", "2021)", "or", "a", "few", "additional", "parameters", "(Houlsby", "et", "al.,", "2019)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2708
[ "Instead", "of", "fine-tuning", "all", "parameters", "in", "a", "model,", "other", "work", "explore", "only", "learning", "a", "few", "vectors", "(Lester", "et", "al.,", "2021;", "Li", "and", "Liang,", "2021;" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
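The row above refers to learning only a few vectors while the model itself stays frozen (prompt tuning). A minimal sketch in that spirit; the `lm` interface (accepting input embeddings), the dimensions, and the initialization scale are assumptions for illustration:

```python
import torch
import torch.nn as nn

# Minimal sketch of prompt tuning (Lester et al., 2021): the language model
# is frozen and only a few prompt vectors, prepended to the input embeddings,
# are learned. Dimensions and the `lm` interface are assumptions.

class PromptTuning(nn.Module):
    def __init__(self, lm, num_prompts=20, hidden=768):
        super().__init__()
        self.lm = lm
        for p in self.lm.parameters():   # freeze every LM parameter
            p.requires_grad_(False)
        self.prompts = nn.Parameter(torch.randn(num_prompts, hidden) * 0.02)

    def forward(self, input_embeds):     # (batch, seq, hidden)
        batch = input_embeds.size(0)
        prefix = self.prompts.unsqueeze(0).expand(batch, -1, -1)
        return self.lm(torch.cat([prefix, input_embeds], dim=1))
```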
2709
[ "Other", "tricks", "include", "bias", "correction", "in", "optimizer", "and", "re-initialization", "of", "top", "layers", "in", "Transformer", "(Zhang", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
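The row above mentions re-initializing the top Transformer layers before fine-tuning (Zhang et al., 2021). A minimal sketch, assuming the HuggingFace BERT-style `model.encoder.layer` layout; the depth `k` and init scale are illustrative:

```python
import torch.nn as nn

# Minimal sketch of Re-Init (Zhang et al., 2021): re-initialize the top-k
# Transformer layers before fine-tuning. Treating the model as having a
# HuggingFace-style `encoder.layer` list is an assumption.

def reinit_top_layers(model, k=2, std=0.02):
    for layer in model.encoder.layer[-k:]:
        for module in layer.modules():
            if isinstance(module, nn.Linear):
                module.weight.data.normal_(mean=0.0, std=std)
                if module.bias is not None:
                    module.bias.data.zero_()
            elif isinstance(module, nn.LayerNorm):
                module.weight.data.fill_(1.0)
                module.bias.data.zero_()
```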
2710
[ "Better", "finetuning", "of", "language", "models", "can", "be", "achieved", "by", "proper", "initialization", "(Dodge", "et", "al.,", "2020),", "regularization", "(Lee", "et", "al.,", "2019)", "or", "prompts", "(Schick", "and", "Schütze,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2711
[ "Fine-tuning", "of", "Language", "Models." ]
[ 0, 0, 0, 0 ]
2712
[ "2", "Related", "Work" ]
[ 0, 0, 0 ]
2713
[ "Finally,", "since", "our", "method", "is", "a", "form", "of", "data", "augmentation,", "we", "also", "compare", "EmbedHalluc", "to", "a", "common", "data", "augmentation", "technique", "EDA,", "and", "semi-supervised", "learning", "where", "unlabeled", "data", "is", "already", "available." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 2, 0, 0, 0, 0, 0, 0 ]
2714
[ "We", "further", "experimentally", "show", "the", "overall", "superiority", "of", "EmbedHalluc", "when", "comparing", "to", "regularization", "methods", "proposed", "to", "address", "the", "problem", "of", "over-fitting", "during", "fine-tuning", "of", "LMs,", "such", "as", "Mixout", "(Lee", "et", "al.,", "2019)", "and", "Re-Init", "(Zhang", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ]
2715
[ "We", "evaluate", "our", "method,", "called", "Embedding", "Hallucination", "(Embedhalluc),", "on", "15", "tasks", "and", "show", "that", "it", "generally", "improves", "over", "recent", "fine-tuning" ]
[ 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2716
[ "This", "effectively", "extends", "the", "fine-tuning", "dataset", "with", "diverse", "embedding-label", "pairs", "which", "carry", "intra-class", "variation", "that", "can", "be", "a", "useful", "learning", "signal", "for", "the", "language", "learner." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2717
[ "Once", "the", "halluciantor", "is", "trained,", "we", "condition", "it", "on", "labels", "to", "generate", "diverse", "embeddings", "at", "each", "fine-tuning", "step." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2718
[ "By", "observing", "the", "real", "embeddings", "of", "examples", "from", "the", "fine-tuning", "dataset,", "the", "cWGAN", "plays", "an", "adversarial", "game", "to", "hallucinate", "embeddings", "that", "can", "fool", "the", "discriminator,", "while", "the", "discriminator", "is", "trying", "to", "classify", "the", "fake", "embeddings", "from", "the", "real", "ones." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2719
[ "To", "be", "specific,", "we", "adapt", "a", "conditional", "Wasserstein", "Generative", "Adversarial", "Network", "(cWGAN)", "(Arjovsky", "et", "al.,", "2017)", "as", "our", "hallucinator", "to", "hallucinate", "embeddings", "of", "sentences." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
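Rows 2717-2719 describe a label-conditioned WGAN hallucinator. A minimal sketch of such a generator/critic pair; the noise dimension, embedding size (768), class count, and layer widths are assumptions for illustration:

```python
import torch
import torch.nn as nn

# Minimal sketch of a label-conditioned WGAN in the spirit of the cWGAN
# described in rows 2717-2719. All dimensions are illustrative assumptions.

class Hallucinator(nn.Module):          # generator: (noise, label) -> embedding
    def __init__(self, noise_dim=100, num_classes=2, embed_dim=768):
        super().__init__()
        self.label_embed = nn.Embedding(num_classes, noise_dim)
        self.net = nn.Sequential(
            nn.Linear(2 * noise_dim, 512), nn.ReLU(),
            nn.Linear(512, embed_dim),
        )

    def forward(self, noise, labels):
        cond = torch.cat([noise, self.label_embed(labels)], dim=-1)
        return self.net(cond)

class Critic(nn.Module):                # scores real vs. hallucinated embeddings
    def __init__(self, num_classes=2, embed_dim=768):
        super().__init__()
        self.label_embed = nn.Embedding(num_classes, embed_dim)
        self.net = nn.Sequential(
            nn.Linear(2 * embed_dim, 512), nn.ReLU(),
            nn.Linear(512, 1),
        )

    def forward(self, embeds, labels):
        return self.net(torch.cat([embeds, self.label_embed(labels)], dim=-1))

# WGAN objective: the critic maximizes score(real) - score(fake); the
# hallucinator maximizes score(fake). Weight clipping or a gradient penalty
# (Arjovsky et al., 2017) keeps the critic approximately 1-Lipschitz.
```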
2720
[ "The", "underlying", "hypothesis", "is", "that", "the", "intra-class", "relation", "of", "the", "observed", "examples", "can", "be", "modeled", "and", "that", "this", "can", "be", "learned", "from", "a", "few-samples", "to", "hallucinate", "diverse", "unseen", "examples." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2721
[ "In", "this", "work,", "we", "propose", "to", "use", "a", "generative", "augmentation", "method", "at", "the", "embedding", "space", "for", "few-shot", "learning." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 12 ]
2722
[ "Current", "common", "text", "data", "augmentation", "methods,", "such", "as", "EDA", "(Wei", "and", "Zou,", "2019)", "(which", "have", "been", "used", "in", "recent", "few-shot", "learning", "papers", "(Wei", "et", "al.,", "2021;", "Basu", "et", "al.,", "2021))", "and", "AEDA", "(Karimi", "et", "al.,", "2021)", "operate", "at", "the", "lexical", "level,", "which", "while", "resulting", "in", "human", "readable", "texts,", "lead", "to", "limited", "diversity", "due", "to", "the", "discrete", "nature", "of", "the", "lexical", "space." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
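The row above discusses lexical-level augmentation such as EDA. A simplified sketch of two of EDA's four operations (random swap and random deletion); full EDA also performs synonym replacement and random insertion via WordNet, omitted here for brevity:

```python
import random

# Simplified sketch of two of EDA's four lexical operations
# (Wei and Zou, 2019): random swap and random deletion.

def random_swap(words, n=1):
    words = words[:]
    for _ in range(n):
        i, j = random.sample(range(len(words)), 2)
        words[i], words[j] = words[j], words[i]
    return words

def random_deletion(words, p=0.1):
    kept = [w for w in words if random.random() > p]
    return kept if kept else [random.choice(words)]  # never return empty

print(random_swap("the movie was surprisingly good".split()))
```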
2723
[ "We", "speculate", "that", "the", "key", "to", "solve", "this", "issue", "is", "by", "data", "augmentation." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2724
[ "However,", "we", "show", "in", "our", "experiments", "that", "these", "methods", "fail", "in", "extreme", "data", "scarce", "setting." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2725
[ "Prior", "work", "have", "proposed", "regularization", "methods", "to", "overcome", "this", "problem", "(Lee", "et", "al.,", "2019;", "Zhang", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2726
[ "Indeed,", "tuning", "a", "language", "model", "with", "hundreds", "of", "millions", "of", "parameters", "(e.g.,", "BERT-large", "has", "300M", "parameters)", "with", "only", "a", "few", "examples", "inevitably", "faces", "the", "over-fitting", "problem." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2727
[ "Experiments", "from", "recent", "work", "(Gao", "et", "al.,", "2021)", "have", "shown", "that", "fine-tuning", "performs", "poorly", "in", "the", "setting", "where", "only", "16", "examples", "per", "class", "are" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2728
[ "the", "performance", "drops", "drastically", "when", "the", "number", "of", "examples", "falls", "to", "only", "a", "few", "dozens." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2729
[ "Data", "augmentation", "(Wei", "and", "Zou,", "2019),", "regularization", "(Lee", "et", "al.,", "2019)", "and", "re-initialization", "(Zhang", "et", "al.,", "2021)", "further", "improve", "the", "results." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2730
[ "Conventional", "finetuning", "has", "been", "shown", "to", "be", "effective", "when", "a", "few", "thousands", "of", "labeled", "examples", "are", "available." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2731
[ "Fine-tuning", "a", "pre-trained", "language", "model", "(LM)", "on", "a", "downstream", "task", "with", "the", "labeled", "data", "has", "been", "the", "de", "facto", "approach", "in", "many", "NLP", "tasks", "(Wang", "et", "al.,", "2019;" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2732
[ "Introduction" ]
[ 0 ]
2733
[ "The", "code", "will", "be", "made", "available", "at:", "https://github.com/yiren-jian/EmbedHalluc." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2734
[ "Further,", "we", "show", "that", "EmbedHalluc", "outperforms", "other", "methods", "that", "address", "this", "over-fitting", "problem,", "such", "as", "common", "data", "augmentation,", "semi-supervised", "pseudo-labeling,", "and", "regularization." ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2735
[ "Experiments", "demonstrate", "that", "our", "proposed", "method", "is", "effective", "in", "a", "wide", "range", "of", "language", "tasks,", "outperforming", "current", "fine-tuning", "methods." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2736
[ "By", "training", "with", "the", "extended", "dataset,", "the", "language", "learner", "effectively", "learns", "from", "the", "diverse", "hallucinated", "embeddings", "to", "overcome", "the", "over-fitting", "issue." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2737
[ "The", "hallucinator", "is", "trained", "by", "playing", "an", "adversarial", "game", "with", "the", "discriminator,", "such", "that", "the", "hallucinated", "embedding", "is", "indiscriminative", "to", "the", "real", "ones", "in", "the", "finetuning", "dataset." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2738
[ "In", "this", "paper,", "we", "propose", "an", "Embedding", "Hallucination", "(EmbedHalluc)", "method,", "which", "generates", "auxiliary", "embedding-label", "pairs", "to", "expand", "the", "finetuning", "dataset." ]
[ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
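The row above summarizes EmbedHalluc: auxiliary embedding-label pairs expand the fine-tuning dataset. A minimal sketch of one training step under that scheme; `learner`, `hallucinator`, the noise dimension, and the weighting `lam` are assumptions for illustration:

```python
import torch
import torch.nn.functional as F

# Minimal sketch of one fine-tuning step on the extended dataset: the loss on
# real examples is combined with a loss on hallucinated embedding-label
# pairs. The learner is assumed to consume embeddings and return logits.

def training_step(learner, hallucinator, real_embeds, real_labels, lam=1.0):
    loss_real = F.cross_entropy(learner(real_embeds), real_labels)

    with torch.no_grad():                 # hallucinator is already trained
        noise = torch.randn(real_labels.size(0), 100)
        fake_embeds = hallucinator(noise, real_labels)
    loss_fake = F.cross_entropy(learner(fake_embeds), real_labels)

    return loss_real + lam * loss_fake
```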
2739
[ "In", "such", "settings,", "fine-tuning", "a", "pre-trained", "language", "model", "can", "cause", "severe", "over-fitting." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2740
[ "Few-shot", "language", "learners", "adapt", "knowledge", "from", "a", "pre-trained", "model", "to", "recognize", "novel", "classes", "from", "a", "few-labeled", "sentences." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2741
[ "Abstract" ]
[ 0 ]
2742
[ "Embedding", "Hallucination", "for", "Few-Shot", "Language", "Fine-tuning" ]
[ 1, 2, 0, 11, 12, 12 ]
2743
[ "References" ]
[ 0 ]
2744
[ "Therefore,", "we", "show", "that", "it", "can", "be", "easily", "extended", "to", "Korean", "as", "well", "as", "English,", "and", "it", "is", "expected", "to", "be", "effective", "in", "other", "countries." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2745
[ "However,", "we", "find", "that", "context", "modeling", "is", "more", "important", "than", "pre-trained", "memory", "for", "emotion", "recognition", "in", "conversation,", "and", "future", "research", "will", "focus", "on", "context", "modeling." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0 ]
2746
[ "Since", "we", "believe", "that", "pre-trained", "memory", "is", "proportional", "to", "the", "performance", "of", "a", "language", "model,", "a", "language", "model", "with", "a", "large", "training", "corpus", "and", "many", "parameters", "is", "considered", "to", "be", "more", "effective." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2747
[ "By", "combining", "other", "pre-trained", "memories,", "we", "find", "that", "the", "pre-trained", "memory", "extracted", "with", "RoBERTa", "is", "richer", "and", "more", "effective", "than", "the", "pre-trained", "memory", "extracted", "with", "BERT", "or", "GPT2." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2748
[ "In", "addition,", "CoMPM", "achieves", "performance", "comparable", "to", "cutting-edge", "systems", "that", "leverage", "structured", "external", "knowledge,", "which", "is", "the", "effect", "of", "pre-trained", "memory", "of", "the", "language", "model." ]
[ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2749
[ "CoMPM", "outperforms", "baselines", "on", "both", "dyadic-party", "and", "multi-party", "datasets", "and", "achieves", "state-of-the-art", "among", "systems", "that", "do", "not", "use", "external", "knowledge." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2750
[ "CoMPM", "consists", "of", "a", "context", "embedding", "module", "(CoM)", "and", "a", "pre-trained", "memory", "module", "(PM),", "and", "the", "experimental", "results", "show", "that", "each", "module", "is", "effective", "in", "improving", "the", "performance." ]
[ 1, 0, 0, 0, 1, 2, 2, 1, 0, 0, 1, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
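The row above describes CoMPM's two modules. A minimal sketch of how CoM and PM outputs might be combined, with a projection Wp (mentioned in row 2776) handling mismatched backbone dimensions; all encoder interfaces and dimensions are assumptions for illustration:

```python
import torch.nn as nn

# Minimal sketch of the CoM + PM composition: a context encoder (CoM) scores
# the conversation, a pre-trained-memory module (PM) encodes the speaker's
# previous utterances, and Wp maps the memory into the context dimension.

class CoMPM(nn.Module):
    def __init__(self, com_encoder, pm_encoder, com_dim, pm_dim, num_emotions):
        super().__init__()
        self.com = com_encoder
        self.pm = pm_encoder
        self.Wp = nn.Linear(pm_dim, com_dim)    # projection across backbones
        self.classifier = nn.Linear(com_dim, num_emotions)

    def forward(self, context_inputs, speaker_utterances):
        c = self.com(context_inputs)            # (batch, com_dim)
        m = self.pm(speaker_utterances)         # (batch, pm_dim)
        return self.classifier(c + self.Wp(m))  # combine context and memory
```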
2751
[ "We", "propose", "CoMPM", "that", "leverages", "pre-trained", "memory", "using", "a", "pre-trained", "language", "model." ]
[ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2752
[ "5", "Conclusion" ]
[ 0, 0 ]
2753
[ "Models" ]
[ 0 ]
2754
[ "Our", "approach" ]
[ 0, 0 ]
2755
[ "In", "the", "Korean", "dataset,", "like", "the", "English", "dataset,", "the", "performance", "is", "good", "in", "the", "order", "of", "CoMPM,", "CoM,", "and", "PM." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1 ]
2756
[ "The", "backbone", "of", "CoM", "and", "PM", "is", "Korean-BERT", "owned", "by", "the", "company,", "respectively." ]
[ 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0 ]
2757
[ "4.7.2", "Results", "in", "the", "Korean", "Dataset", "In", "Korean,", "our", "results", "are", "shown", "in", "Table", "4." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2758
[ "This", "dataset", "is", "for", "actual", "service", "and", "is", "not", "released", "to", "the", "public." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2759
[ "We", "use", "the", "data", "randomly", "divided", "into", "train:dev:test", "in", "a", "ratio", "of", "8:1:1." ]
[ 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 5 ]
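The row above gives a random 8:1:1 train:dev:test split. A minimal sketch, assuming the split is done at the session level (the 1000 sessions match row 2760 below):

```python
import random

# Minimal sketch of a random 8:1:1 train:dev:test split over sessions.
# Splitting at the session level is an assumption for illustration.

def split_sessions(sessions, seed=42):
    sessions = sessions[:]
    random.Random(seed).shuffle(sessions)
    n = len(sessions)
    n_train, n_dev = int(0.8 * n), int(0.1 * n)
    return (sessions[:n_train],
            sessions[n_train:n_train + n_dev],
            sessions[n_train + n_dev:])

train, dev, test = split_sessions(list(range(1000)))  # 800 / 100 / 100
```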
2760
[ "The", "total", "number", "of", "sessions", "is", "1000,", "and", "the", "average", "number", "of", "utterance", "turns", "is", "13.4." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2761
[ "4.7.1", "Korean", "Dataset", "We", "constructed", "data", "composed", "of", "two", "speakers", "in", "Korean,", "and", "emotion-inventory", "is", "given", "as", "\"surprise,", "fear,", "ambiguous,", "sad,", "disgust,", "joy,", "bored,", "embarrassed,", "neutral\"." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2762
[ "Our", "approach", "can", "be", "extended", "to", "other", "languages", "without", "building", "additional", "external", "knowledge", "and", "achieves", "better", "performance", "than", "simply", "using", "a", "pre-trained", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2763
[ "Indeed,", "structured", "knowledge", "and", "ERC", "data", "are", "lacking", "in", "other", "languages." ]
[ 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0 ]
2764
[ "Previous", "studies", "mostly", "utilize", "external", "knowledge", "to", "improve", "performance,", "but", "these", "approaches", "require", "additional", "publicly", "available", "data,", "which", "are", "mainly", "available", "for", "English." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2765
[ "4.7", "ERC", "in", "other", "languages" ]
[ 0, 11, 0, 0, 0 ]
2766
[ "Therefore,", "if", "there", "is", "a", "lot", "of", "training", "data", "in", "the", "real-world", "application,", "CoMPM", "is", "expected", "to", "achieve", "good", "performance,", "otherwise", "it", "is", "CoMPM(f)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ]
2767
[ "Figure", "3", "shows", "that", "as", "the", "number", "of", "data", "decreases,", "CoMPM(f)", "shows", "better", "results", "than", "CoMPM,", "which", "indicates", "that", "it", "is", "better", "to", "freeze", "the", "parameters", "of", "PM", "when", "the", "number", "of", "training", "data", "is", "insufficient." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2768
[ "On", "the", "other", "hand,", "if", "there", "is", "a", "lot", "of", "training", "data,", "CoMPM", "shows", "better", "performance." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ]
2769
[ "Table", "2", "shows", "that", "CoMPM(f)", "achieves", "better", "performance", "than", "CoMPM", "in", "the", "emotion", "classification", "of", "IMEOCAP", "and", "EmoryNLP,", "which", "has", "fewer", "training", "data", "than", "other", "settings." ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 11, 12, 0, 13, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0 ]
2770
[ "In", "MELD", "and", "EmoryNLP,", "even", "if", "only", "60%", "and", "80%", "are", "used,", "respectively,", "the", "performance", "decreases", "by", "only", "3", "points." ]
[ 0, 13, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0 ]
2771
[ "Figure", "3", "shows", "the", "performance", "of", "the", "model", "according", "to", "the", "ratio", "of", "the", "training", "data." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2772
[ "Therefore,", "we", "conduct", "additional", "experiments", "according", "to", "the", "use", "ratio", "of", "training", "data", "in", "MELD", "and", "EmoryNLP,", "where", "there", "is", "neither", "too", "much", "nor", "too", "little", "data." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2773
[ "However,", "the", "insufficient", "number", "of", "emotional", "data", "available", "in", "other", "countries", "(or", "actual", "service)", "remains", "a", "problem." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2774
[ "CoMPM", "is", "an", "approach", "that", "eliminates", "dependence", "on", "external", "sources", "and", "is", "easily", "extensible", "to", "any", "language." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2775
[ "RoBERTa+BERT", "and", "RoBERTa+GPT2", "(combination", "of", "CoM", "and", "PM(f))", "have", "lower", "performance", "than", "RoBERTa+RoBERTa,", "which", "is", "inferred", "because", "pre-trained", "memory", "of", "RoBERTa", "contains" ]
[ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2776
[ "If", "PM", "and", "CoM", "are", "based", "on", "different", "backbones,", "the", "pre-trained", "memory", "is", "projected", "through", "Wp", "as", "the", "dimension", "of", "the", "context", "output." ]
[ 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2777
[ "Table", "3", "shows", "the", "performance", "of", "the", "pretrained", "memory", "extracted", "by", "the", "different", "language", "models." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2778
[ "To", "eliminate", "the", "influence", "of", "the", "PM", "structure,", "we", "freeze", "the", "parameters", "of", "PM", "and", "use", "it", "as", "a", "feature", "extractor." ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ]
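The row above freezes PM and uses it as a feature extractor. A minimal sketch of that setup in PyTorch; the `pm` module interface is an assumption for illustration:

```python
import torch

# Minimal sketch of freezing PM so it acts purely as a feature extractor.

def freeze(pm):
    pm.eval()                      # disable dropout etc.
    for p in pm.parameters():
        p.requires_grad_(False)    # no gradient updates during fine-tuning
    return pm

@torch.no_grad()
def extract_memory(pm, utterances):
    return pm(utterances)          # fixed pre-trained-memory features
```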
2779
[ "We", "experiment", "with", "the", "effect", "of", "pre-trained", "memory", "of", "different", "language", "models." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2780
[ "4.6", "Training", "with", "Less", "Data" ]
[ 0, 0, 0, 0, 0 ]
2781
[ "4.5", "Combinations", "of", "CoM", "and", "PM" ]
[ 0, 0, 0, 0, 0, 0 ]
2782
[ "In", "other", "words,", "we", "confirm", "that", "CoM", "is", "more", "important", "than", "PM", "in", "our", "system", "for", "performance,", "and", "it", "is", "effective", "to", "focus", "on", "context", "modeling", "rather", "than", "external", "knowledge", "in", "the", "study", "of", "emotion", "recognition", "in", "conversation." ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 12, 12, 12 ]
2783
[ "In", "addition,", "the", "performance", "of", "BERT+RoBERTa", "is", "lower", "than", "CoM", "(RoBERTa),", "which", "supports", "that", "the", "performance", "of", "CoM", "is", "a", "more", "important", "factor", "than", "the", "use", "of", "pre-trained", "memory." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2784
[ "In", "particular,", "in", "IEMOCAP", "data", "with", "a", "long", "average", "number", "of", "turns", "in", "the", "context,", "the", "performance", "deteriorates", "significantly." ]
[ 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2785
[ "BERT+RoBERTa", "has", "a", "larger", "performance", "decrease", "than", "RoBERTa+BERT." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2786
[ "However,", "even", "if", "other", "PMs", "are", "used,", "the", "performance", "is", "improved", "compared", "to", "using", "only", "CoM,", "so", "the", "pre-trained", "memory", "of", "other", "language", "models", "is", "also", "effective", "for", "emotion", "recognition." ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 12 ]
2787
[ "Therefore,", "we", "infer", "that", "performance", "does", "not", "decrease", "even", "if", "the", "PM", "changes", "from", "the", "dailydialog." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ]
2788
[ "Since", "there", "is", "a", "lot", "of", "training", "data", "in", "the", "diallydialog", "and", "Wp", "is", "fine-tuned", "to", "the", "data", "to", "mutually", "understand", "the", "pre-trained", "memory", "and", "context", "representation." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2789
[ "Figure", "3:", "Performance", "according", "to", "the", "size", "of", "training", "data", "of", "MELD", "and", "EmoryNLP" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2790
[ "Therefore,", "our", "approach", "can", "be", "extended", "to", "other", "languages", "without", "structured", "external", "data", "as", "well,", "which", "is", "described", "in", "Section", "4.7." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2791
[ "We", "achieve", "state-of-the-art", "performance", "among", "all", "systems", "that", "do", "not", "leverage", "structured", "external", "data", "and", "achieve", "the", "first", "or", "second", "performance", "even", "including", "systems", "that", "leverage", "external", "data." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2792
[ "The", "best", "performance", "of", "our", "approaches", "is", "CoMPM", "or", "CoMPM(f),", "both", "of", "which", "combine", "pre-trained", "memory." ]
[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0 ]
2793
[ "As", "a", "result,", "we", "regard", "pre-trained", "memory", "as", "compressed", "knowledge,", "which", "can", "play", "a", "role", "similar", "to", "external", "knowledge", "used", "in", "cuttingedge", "systems." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2794
[ "In", "other", "words,", "ConceptNet", "improves", "the", "performance", "of", "CoMPM,", "but", "is", "not", "as", "effective", "as", "pretrained", "memory." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2795
[ "In", "IEMOCAP,", "CoMPM(k)", "has", "lower", "performance", "than", "CoM.", "For", "all", "datasets,", "CoMPM(k)", "performs", "slightly", "worse", "than", "CoMPM." ]
[ 0, 13, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1 ]
2796
[ "In", "addition,", "CoMPM(k)", "shows", "better", "performance", "than", "CoM,", "PM,", "and", "CoMPM(s)", "except", "for", "IEMOCAP." ]
[ 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 13 ]
2797
[ "The", "comparison", "between", "PM", "and", "PM(f)", "will", "be", "further", "described", "in", "Section", "4.6." ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2798
[ "PM(f)", "is", "not", "fine-tuned", "on", "the", "data,", "but", "it", "extracts", "general", "pre-trained", "memory", "from", "a", "pretrained", "language", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2799
[ "CoMPM(f)", "shows", "similar", "performance", "to", "CoMPM", "and", "achieves", "better", "performance", "depending", "on", "the", "data." ]
[ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]