id: string (1–4 characters)
tokens: sequence of strings
ner_tags: sequence of integer labels
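
A minimal sketch of one record under this schema (Python; the dict framing and abridged values are illustrative assumptions, not the dump's literal storage format):

    # One record: an id, a token sequence, and an aligned tag sequence.
    record = {
        "id": "2500",
        "tokens": ["Prompt", "tuning", "(Lester", "et", "al.,", "2021)"],
        "ner_tags": [1, 2, 0, 0, 0, 0],
    }
    # Invariant every record below should satisfy: one tag per token.
    assert len(record["tokens"]) == len(record["ner_tags"])
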
2500
[ "Prompt", "tuning", "(Lester", "et", "al.,", "2021)", "and", "Ptuning", "(Liu", "et", "al.,", "2021b)", "both", "insert", "continuous", "prompts", "into", "the", "first", "transformer", "layer", "(cf." ]
[ 1, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2501
[ "3.2.2", "Multi-layer", "Prompt", "Tuning" ]
[ 0, 0, 0, 0 ]
2502
[ "By", "substituting", "the", "W1", "and", "W2", "by", "two", "PHM", "layers", "and", "letting", "Ai", "shared", "by", "both", "layers,", "we", "can", "reduce", "the", "number", "of", "parameters", "from", "1.5M", "to", "105K." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2503
[ "For", "example,", "we", "use", "RoBERTa-Large", "with", "hidden", "size", "d", "=", "1024,", "generator", "hidden", "size", "m", "=", "256,", "n", "=", "16,", "prompt", "length", "t", "=", "5." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 5, 0, 3, 4, 3, 0, 5, 3, 0, 5, 3, 4, 3, 0, 5 ]
2504
[ "Suppose", "that", "we", "have", "a", "two", "layer", "perceptron", "d", "and", "upwith", "down-sample", "projection", "W1", "∈", "×", "m,", "where", "d", "is", "the", "sample", "projection", "W2", "∈", "×", "input", "embedding", "dimension,", "m", "is", "the", "hidden", "layer", "dimension,", "and", "t", "is", "the", "number", "of", "tokens", "we", "generate." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 3, 4, 4, 0, 3, 0, 0, 0, 0, 0, 0, 0 ]
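
Under the dimensions in record 2503, the parameter counts claimed in record 2502 can be checked with quick arithmetic (a sketch; bias terms are ignored, so the totals land slightly below the quoted figures):

    # Plain bottleneck: W1 is d x m, W2 is m x (t*d).
    d, m, t, n = 1024, 256, 5, 16
    plain = d * m + m * (t * d)                        # 1,572,864, i.e. ~1.5M
    # PHM factorization with the A_i shared across both layers:
    # n shared n x n matrices, plus per-layer factors totaling (rows*cols)/n.
    phm = n * n * n + (d * m) // n + (m * t * d) // n  # 102,400, i.e. ~105K
    print(plain, phm)
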
2505
[ "As", "n", "is", "usually", "much", "smaller", "than", "m", "and", "d,", "PHM", "reduces", "the", "amount", "of", "parameters", "by", "a", "factor", "of", "n." ]
[ 0, 3, 0, 0, 0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ]
2506
[ "PHM", "replaces", "the", "matrix", "W", "by", "a", "sum", "of", "Kronecker", "products", "of", "several", "small", "matrices." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
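
Record 2506's Kronecker-sum construction, sketched in PyTorch (random initialization and these exact factor shapes are assumptions):

    import torch

    n, rows, cols = 16, 1024, 256              # e.g. reconstructing W1 from 2503
    A = torch.randn(n, n, n)                   # n small "rule" matrices, n x n each
    B = torch.randn(n, rows // n, cols // n)   # n layer-specific factors

    # W = sum_i A_i (kron) B_i: a full rows x cols matrix from small factors.
    W = sum(torch.kron(A[i], B[i]) for i in range(n))
    assert W.shape == (rows, cols)
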
2507
[ "When", "m", "and", "d", "are", "large,", "the", "cost", "of", "learning", "W", "becomes", "the", "main", "bottleneck." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2508
[ "Inspired", "by", "the", "recent", "application", "of", "parameterized", "hypercomplex", "multiplication", "(PHM)", "layers", "(Zhang", "et", "al.,", "2021)", "in", "Compacter", "(Mahabadi", "et", "al.,", "2021),", "we", "leverage", "PHM", "layers", "to", "optimize", "our", "prompt", "generator,", "G.", "Generally,", "the", "PHM", "layer", "is", "a", "fullyconnected", "layer", "with", "form" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2509
[ "3.2.1", "Parameterized", "Hypercomplex", "Multiplication", "(PHM)", "Layers" ]
[ 0, 0, 0, 0, 0, 0 ]
2510
[ "We", "propose", "two", "optimization", "techniques", "to", "further", "improve", "our", "proposed", "method." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2511
[ "3.2", "Optimization" ]
[ 0, 0 ]
2512
[ "In", "practice,", "we", "can", "cache", "the", "prediction", "M(xi)", "and", "use", "it", "in", "various", "downstream", "tasks", "or", "rely", "on", "a", "lightweight", "sentence", "representation", "such", "as", "GloVe", "(Pennington", "et", "al.,", "2014)", "(Cf.", "Section", "4.5.1)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
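
The caching idea in record 2512, as a minimal sketch (the encode callable standing in for M is hypothetical):

    _rep_cache = {}

    def sentence_rep(encode, sentence):
        # Cache M(x) so that reusing a sentence across downstream tasks
        # costs a single extra forward pass in total.
        if sentence not in _rep_cache:
            _rep_cache[sentence] = encode(sentence)
        return _rep_cache[sentence]
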
2513
[ "However,", "the", "sentence", "representation", "M(xi)", "used", "in", "our", "method", "is", "task-agnostic." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2514
[ "One", "caveat", "is", "that", "this", "method", "will", "have", "two", "forward", "passes", "of", "the", "pre-trained", "LM", "during", "inference", "time", "–", "first", "to", "generate", "M(xi)", "and", "then", "to", "generate", "classification", "results." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2515
[ "Note", "that", "our", "proposed", "method", "relies", "on", "the", "input", "sentence", "representation", "M(xi)", "to", "generate", "prompts." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2516
[ "In", "the", "sequel,", "we", "will", "introduce", "a", "parameter", "squeezing", "method", "to", "further", "reduce", "trainable", "parameters", "without", "sacrificing", "performance." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2517
[ "We", "can", "control", "the", "added", "number", "of", "trainable", "parameters", "by", "setting", "m", "d,", "but", "it", "is", "still", "expensive", "since", "hidden", "dimension", "d", "is", "usually", "large", "(1024", "in", "BERT/RoBERTa-Large)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 3, 4, 1, 0, 0, 0, 5, 0, 0 ]
2518
[ "An", "optimization", "of", "multi-layer", "prompt", "generation", "will", "be", "introduced", "in", "Section", "3.2.2." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2519
[ "In", "short,", "what", "we", "discussed", "here", "is", "to", "generate", "a", "t-length", "prompt", "for", "one", "Transformer", "layer." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2520
[ "The", "final", "prompt", "our", "method", "generated", "is", "a", "combination", "of", "both." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2521
[ "This", "model", "can", "be", "regarded", "as", "the", "general", "version", "of", "prompt", "tuning:", "in", "the", "second", "layer", "of", "G,", "the", "bias", "term", "td", "is", "a", "task-specific", "prompt,", "with", "preceding", "parts", "td", "m", "generating", "an", "instance-dependent", "prompt." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2523
[ "After", "passing", "through", "a", "nonlinear", "function,", "generator", "G", "projects", "the", "hidden", "representation", "back", "to", "a", "d", "dimensions" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2524
[ "As", "illustrated", "in", "Figure", "2", "(c),", "the", "generator", "G", "first", "projects", "the", "original", "d-dimensional", "sentence", "representation", "hi", "into", "m", "dimensions." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2525
[ "To", "reduce", "the", "number", "of", "trainable", "parameters", "in", "G,", "we", "apply", "a", "lightweight", "bottleneck", "architecture", "(i.e.,", "a", "two-layer", "perceptron)", "for", "generation." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
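
Records 2523 through 2525 describe the bottleneck generator G; a minimal PyTorch sketch follows (the records only say "a nonlinear function", so the tanh choice here is an assumption):

    import torch
    import torch.nn as nn

    d, m, t = 1024, 256, 5
    G = nn.Sequential(
        nn.Linear(d, m),      # down-sample: h_i from d to m dimensions (2524)
        nn.Tanh(),            # nonlinear function (2523); exact choice assumed
        nn.Linear(m, t * d),  # up-sample: back to t x d dimensions (2523)
    )
    h_i = torch.randn(1, d)       # stand-in sentence representation M(x_i)
    prompt = G(h_i).view(t, d)    # a t-length prompt of d-dimensional vectors
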
2526
[ "If", "M(xi)" ]
[ 0, 0 ]
2527
[ "Specifically,", "we", "suppose", "that", "the", "generation", "of", "prompt", "should", "not", "only", "depend", "on", "the", "task", "T", ",", "but", "also", "be", "affected", "by", "input", "sequence", "Rd", "is", "a", "representation", "of", "the", "input", "sexi." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2528
[ "Different", "from", "all", "previous", "works", "that", "only", "define", "a", "task-specific", "prompt", "t,", "where", "t", "is", "the", "number", "of", "tokens", "Wp(T", ")", "×", "in", "prompt", "representation", "and", "d", "is", "the", "hidden", "dimension,", "we", "propose", "a", "instance-dependent", "prompt", "generation", "method." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2529
[ "Following", "prompt", "tuning,", "we", "define", "{", "}", "the", "input", "xi", "=", "E([SEP]S1[SEP]S2[EOS])", "for", "sentence-pair", "task", "or", "xi", "=", "E([SEP]S1[EOS])", "for", "single-sentence", "task,", "where", "E(", ")", "is", "the", "token", "em·", "bedding", "for", "input", "sentences." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
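
Record 2529's template, sketched at the string level (E(·), the embedding step, is elided; build_input is a hypothetical helper name):

    def build_input(s1, s2=None):
        # Mirrors x_i = E([SEP]S1[SEP]S2[EOS]) vs. E([SEP]S1[EOS]).
        if s2 is None:
            return f"[SEP]{s1}[EOS]"           # single-sentence task
        return f"[SEP]{s1}[SEP]{s2}[EOS]"      # sentence-pair task
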
2530
[ "Let", "us", "assume", "a", "task", "T", "with", "training", "data", "Dtrain", "=" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2531
[ "In", "this", "way,", "we", "have", "a", "unified", "template" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2532
[ "Then,", "we", "insert", "a", "prompt", "Wp(T", ")", "together", "with", "input", "sequence", "xi", "to", "infer", "yi", "during", "fine-tuning." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2533
[ "Instance-Dependent", "Generation" ]
[ 0, 0 ]
2534
[ "3.1" ]
[ 0 ]
2535
[ "The", "main", "procedure", "is", "illustrated", "in", "Figure", "2." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2536
[ "We", "now", "introduce", "our", "proposed", "method,", "IDPG,", "along", "with", "various", "model", "optimizations." ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ]
2537
[ "Apart", "from", "LM-BFF", "and", "EFL,", "there", "is", "no", "corresponding", "real", "text", "for", "the", "prompt", "as", "Wp", "is", "a", "set", "of", "random-initialized", "tensors", "to", "represent", "the", "soft", "prompt." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2538
[ "Specifically,", "they", "reformulate", "the", "input", "for", "single", "sentence", "tasks", "as" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2539
[ "Prompt", "tuning", "(Lester", "et", "al.,", "2021),", "prefix", "tuning", "(Li", "and", "Liang,", "2021),", "and", "P-tuning", "(Liu", "et", "al.,", "2021a,b)", "methods", "propose", "to", "insert", "a", "trainable", "prefix", "in", "front", "of", "the", "input", "sequence." ]
[ 1, 2, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2540
[ "2.2", "Prompt", "Tuning" ]
[ 0, 0, 0 ]
2541
[ "EFL", "(Wang", "et", "al.,", "2021)", "further", "suggests", "that", "reformulating", "the", "task", "as", "entailment", "can", "further", "improve", "the", "performance", "in", "both", "lowresource", "and", "high-resource", "scenarios." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2542
[ "LM-BFF", "(Gao", "et", "al.,", "2021a)", "shows", "that", "adding", "a", "specifically", "designed", "prompt", "during", "fine-tuning", "can", "benefit", "the", "few-shot", "scenario." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2543
[ "Using", "the", "pre-trained", "language", "model", "M,", "we", "can", "obtain", "the", "sentence", "representation", "h[CLS]", "=", "M(xin),", "and", "train", "a", "task-specific", "head", "softmax(Wh[CLS])", "to", "maximize", "the", "logprobability", "of", "the", "correct", "label." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
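
Record 2543's task-specific head, as a minimal sketch (tensor shapes and the stand-in for h_[CLS] are assumptions):

    import torch
    import torch.nn as nn

    d, num_labels = 1024, 2
    head = nn.Linear(d, num_labels)           # task-specific head W
    h_cls = torch.randn(1, d)                 # stand-in for h_[CLS] = M(x_in)
    log_probs = torch.log_softmax(head(h_cls), dim=-1)
    loss = -log_probs[0, 1]                   # maximize log-prob of gold label 1
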
2544
[ "xin", "=" ]
[ 0, 0 ]
2545
[ "For", "example,", "it", "reformulates", "a", "sentence", "sentiment", "classification", "task", "with", "an", "input", "sentence", "S1", "as" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2546
[ "Manual", "prompt", "learning", "(Brown", "et", "al.,", "2020;", "Schick", "and", "Schütze,", "2021)", "inserts", "a", "pre-defined", "label", "words", "in", "each", "input", "sentence." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2547
[ "2.1", "Manual", "Prompt" ]
[ 0, 0, 0 ]
2548
[ "2", "Preliminary" ]
[ 0, 0 ]
2549
[ "We", "conduct", "substantial", "intrinsic", "studies,", "revealing", "how", "and", "why", "each", "component", "of", "the", "proposed", "model", "and", "the", "generated", "prompts", "could", "help", "the", "downstream", "tasks." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2550
[ "Additionally,", "it", "offers", "comparable", "performance", "to", "Adapter-based", "methods", "while", "using", "fewer", "parameters." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2551
[ "consistently", "outperforms", "task-specific", "prompt", "tuning", "methods", "by", "1.6–3.1", "points." ]
[ 0, 0, 0, 0, 0, 0, 0, 9, 0 ]
2552
[ "•", "Extensive", "evaluations", "on", "ten", "natural", "language", "understanding", "(NLU)", "tasks", "show", "that", "IDPG" ]
[ 0, 0, 0, 0, 0, 11, 12, 12, 11, 0, 0, 0, 1 ]
2553
[ "We", "introduce", "an", "input-dependent", "prompt", "generation", "method—IDPG—that", "only", "requires", "training", "134K", "parameters", "per", "task,", "corresponding", "to", "0.04%", "of", "a", "pre-trained", "LM", "such", "as", "RoBERTa-Large" ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2554
[ "To", "summarize,", "this", "work", "makes", "the", "following", "contributions:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2555
[ "To", "further", "reduce", "the", "number", "of", "parameters", "in", "the", "generator", "f", "(x;", "W),", "we", "propose", "to", "apply", "a", "lightweight", "bottleneck", "architecture", "(i.e.,", "a", "two-layer", "perceptron)", "and", "then", "decompose", "it", "by", "a", "parameterized", "hypercomplex", "multiplication", "(PHM)", "layer", "(Zhang", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2556
[ "Note", "that", "by", "setting", "W", "to", "a", "zero", "matrix", "and", "only", "training", "the", "bias,", "IDPG", "would", "degenerate", "into", "the", "traditional", "prompt", "tuning", "process", "(Lester", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
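
Record 2556's degenerate case, sketched for a single linear layer of the generator (a simplification of the full two-layer G):

    import torch

    d, t = 1024, 5
    W = torch.zeros(d, t * d)                    # zero weights, per record 2556
    b = torch.randn(t * d, requires_grad=True)   # only the bias is trained
    f = lambda x: x @ W + b                      # constant in x: a fixed prompt
    # Any two inputs yield the same prompt, recovering vanilla prompt tuning.
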
2557
[ "Formally,", "the", "IDPG", "generator", "can", "be", "denoted", "as", "f", "(x;", "W),", "where", "x", "is", "the", "instance", "representation", "and", "W", "represents", "the", "trainable", "parameters." ]
[ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2558
[ "Unlike", "traditional", "prompttuning", "methods", "that", "rely", "on", "a", "fixed", "prompt", "for", "each", "task,", "IDPG", "instead", "develops", "a", "conditional", "prompt", "generation", "model", "to", "generate", "prompts", "for", "each", "instance." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2559
[ "This", "paper", "presents", "the", "instance-dependent", "prompt", "generation", "(IDPG)", "strategy", "for", "efficiently", "tuning", "large-scale", "LMs." ]
[ 0, 0, 0, 0, 1, 2, 2, 1, 0, 0, 0, 0, 0, 0 ]
2560
[ "In", "light", "of", "these", "limitations,", "we", "instead", "ask", "the", "following", "question:", "Can", "we", "generate", "input-dependent", "prompts", "to", "smooth", "the", "domain", "difference?" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2561
[ "Thus,", "a", "unified", "prompt", "may", "disturb", "the", "prediction", "and", "lead", "to", "a", "performance", "drop." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2562
[ "Specifically,", "it", "is", "unlikely", "to", "see", "many", "different", "sentences", "with", "the", "same", "prefix", "in", "the", "pretraining", "corpus." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2563
[ "However,", "all", "existing", "prompt-tuning", "methods", "have", "thus", "far", "focused", "on", "task-specific", "prompts,", "which", "are", "inadequate", "to", "address", "the", "gap", "between", "pre-training", "and", "fine-tuning", "objectives." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2564
[ "This", "significantly", "reduced", "the", "number", "of", "trainable", "parameters", "to", "just", "a", "few", "thousand." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2565
[ "In", "doing", "so,", "the", "problem", "of", "searching", "discrete", "prompts", "is", "converted", "to", "a", "continuous", "optimization", "task,", "which", "can", "be", "solved", "by", "a", "variety", "of", "optimization", "techniques", "such", "as", "SGD." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2566
[ "To", "tackle", "this", "issue,", "prompt", "tuning", "(Lester", "et", "al.,", "2021),", "prefix", "tuning", "(Li", "and", "Liang,", "2021),", "and", "Ptuning", "(Liu", "et", "al.,", "2021a,b)", "approaches", "propose", "to", "prepend", "trainable", "prefix", "tokens", "to", "the", "input", "layer", "and", "train", "these", "soft", "prompts", "only", "during", "the", "finetuning", "stage." ]
[ 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2567
[ "However,", "these", "methods", "rely", "on", "grid-search", "for", "a", "natural", "language-based", "prompt", "from", "an", "ample", "search", "space,", "leading", "to", "optimization", "challenges." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2568
[ "LMBFF", "(Gao", "et", "al.,", "2021a),", "EFL", "(Wang", "et", "al.,", "2021),", "and", "AutoPrompt", "(Shin", "et", "al.,", "2020)", "extend", "this", "direction", "by", "inserting", "prompts", "in", "the", "input", "embedding" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2569
[ "The", "GPT-3", "models", "(Brown", "et", "al.,", "2020;", "Schick", "and", "Schütze,", "2021)", "find", "that,", "with", "proper", "manual", "prompts,", "a", "pre-trained", "LM", "can", "successfully", "match", "the", "fine-tuning", "performance", "of", "BERT", "models." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2570
[ "Another", "line", "of", "work", "focuses", "on", "prompting." ]
[ 0, 0, 0, 0, 0, 0, 0 ]
2571
[ "Compacter", "(Mahabadi", "et", "al.,", "2021)", "optimizes", "the", "training", "parameters", "further", "by", "designing", "a", "lightweight", "module", "to", "replace", "the", "bottleneck", "architecture", "in", "Adapters." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2572
[ "Only", "these", "additional", "and", "task-specific", "modules", "are", "trained", "during", "fine-tuning,", "reducing", "the", "number", "of", "trainable", "parameters", "to", "1–3%", "of", "the", "original", "transformer", "model", "per", "task." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2573
[ "One", "line", "of", "research", "(Li", "and", "Liang,", "2021)", "suggests", "augmenting", "the", "model", "with", "smaller,", "trainable", "modules", "and", "freezing", "the", "original", "transformer", "weights." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2574
[ "Previous", "studies", "have", "attempted", "to", "address", "this", "question", "from", "different", "perspectives." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2575
[ "Our", "method", "approaches", "RoBERTaFT’s", "performance", "and", "uses", "fewer", "parameters", "than", "Adapter-based", "methods." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2576
[ "Thus,", "it", "is", "natural", "to", "ask", "whether", "we", "can", "transfer", "the", "knowledge", "of", "a", "pre-trained", "LM", "to", "downstream", "tasks", "by", "keeping", "most", "of", "the", "parameters", "fixed", "and", "tuning", "only", "a", "small", "fraction", "of", "them." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2577
[ "As", "the", "model", "size", "proliferates", "(e.g.,", "330M", "parameters", "for", "BERT", "(Devlin", "et", "al.,", "2019)", "and", "175B", "for", "GPT-3", "(Brown", "et", "al.,", "2020)),", "it", "becomes", "computationally", "expensive", "and", "challenging", "to", "fine-tune", "the", "entire", "pre-trained", "language", "model", "(LM)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2578
[ "Notably,", "this", "paradigm", "requires", "updating", "and", "storing", "all", "the", "model", "parameters", "for", "each", "downstream", "task." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2579
[ "In", "recent", "years,", "pre-training", "a", "transformer", "model", "on", "a", "large", "corpus", "with", "language", "modeling", "tasks", "and", "fine-tuning", "it", "on", "different", "downstream", "tasks", "has", "become", "the", "primary", "transfer", "learning", "paradigm", "in", "natural", "language", "processing", "(Devlin", "et", "al.,", "2019)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2580
[ "Introduction" ]
[ 0 ]
2581
[ "Extensive", "experiments", "on", "ten", "natural", "language", "understanding", "(NLU)", "tasks", "show", "that", "the", "proposed", "strategy", "consistently", "outperforms", "various", "prompt", "tuning", "baselines", "and", "is", "on", "par", "with", "other", "efficient", "transfer", "learning", "methods", "such", "as", "Compacter", "while", "tuning", "far", "fewer", "model", "parameters.1" ]
[ 0, 0, 0, 0, 11, 12, 12, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
2582
[ "Unlike", "traditional", "prompt", "tuning", "methods", "that", "use", "a", "fixed", "prompt,", "IDPG", "introduces", "a", "lightweight", "and", "trainable", "component", "to", "generate", "prompts", "based", "on", "each", "input", "sentence." ]
[ 0, 0, 11, 12, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2583
[ "In", "this", "paper,", "we", "propose", "a", "conditional", "prompt", "generation", "method", "to", "generate", "prompts", "for", "each", "input", "instance,", "referred", "to", "as", "the", "Instance-Dependent", "Prompt", "Generation", "(IDPG)." ]
[ 0, 0, 0, 0, 0, 0, 11, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 1 ]
2584
[ "It", "freezes", "the", "pre-trained", "language", "model", "and", "only", "optimizes", "a", "few", "taskspecific", "prompts." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2585
[ "Prompt", "tuning", "is", "a", "new,", "efficient", "NLP", "transfer", "learning", "paradigm", "that", "adds", "a", "task-specific", "prompt", "in", "each", "input", "instance", "during", "the", "model", "training", "stage." ]
[ 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2586
[ "Abstract" ]
[ 0 ]
2587
[ "An", "Instance-Dependent", "Prompt", "Generation", "Method" ]
[ 0, 0, 11, 12, 0 ]
2588
[ "IDPG:" ]
[ 1 ]
2589
[ "The", "results", "in", "Table", "D.1", "are", "generated", "by", "running", "the", "baselines", "with", "a", "batch", "size", "of", "2", "and", "different", "learning", "rates", "5", "suggested", "by", "Gao", "et", "al.", "(2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 0, 0, 3, 4, 0, 0, 0, 0, 0, 0, 0 ]
2590
[ "As", "we", "show", "in", "Table", "D.1,", "this", "learning", "rate", "produces", "reasonably", "good", "results", "for", "the", "baselines,", "being", "the", "best", "for", "13", "tasks", "and", "only", "marginally", "under-performing", "in", "the", "other", "2", "tasks." ]
[ 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2591
[ "We", "choose", "1e−", "common", "learning", "rate", "to", "finetune", "BERT/RoBERTa." ]
[ 0, 0, 0, 0, 3, 4, 0, 0, 0 ]
2592
[ "for", "Note", "that", "we", "do", "not", "search", "for", "this", "learning", "rate", "for", "5,", "which", "is", "the", "most", "our", "method." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0 ]
2593
[ "The", "learning", "rate", "Lreal", "in", "the", "baselines", "and", "ours", "are", "kept", "the", "same." ]
[ 0, 3, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2594
[ "The", "baseline", "has", "only", "one", "loss", "Lreal,", "whereas", "we", "are", "learning", "with", "an", "additional", "loss", "Lhalluc,", "making", "the", "total", "loss", "to", "be", "Lhalluc." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2595
[ "D", "Learning", "Rate", "for", "Baselines" ]
[ 0, 3, 4, 0, 0 ]
2596
[ "Qualitatively", "similar", "to", "what", "we", "observe", "with", "experiments", "using", "RoBERTa-large", "in", "the", "main", "paper,", "Re-Init", "and", "Mixout", "fail", "to", "outperform", "EmbedHalluc", "in", "most", "tasks,", "with", "the", "exceptions", "of", "SNLI", "and", "QNLI." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 13, 0, 13 ]
2597
[ "The", "results", "are", "shown", "in", "Table", "C.1." ]
[ 0, 0, 0, 0, 0, 0, 0 ]
2598
[ "Besides", "the", "experiments", "with", "RoBERTa-large", "shown", "in", "the", "main", "paper,", "we", "present", "Re-Init", "and", "Mixout", "using", "BERT-large-cased", "in", "this", "section." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0 ]
2599
[ "C", "Regularization", "Methods", "with", "BERT" ]
[ 0, 0, 0, 0, 0 ]