Since our model has a complete Transformer encoder-decoder as a sub-module, this makes it possible to pre-train on large-scale external MT parallel data. Previous work has shown that multi-task learning on ST, MT and ASR improves translation performance (Indurthi et al., 2020; Tang et al., 2021b; Ye et al., 2021). Our training loss consists of the following elements:

L(s, x, y) = L_ST(s, y) + L_ASR(s, x) + L_MT(x, y) + λ · L_CTR(s, x).   (1)

The first three elements are cross-entropy losses on the <speech, target text>, <speech, source text> and <source text, target text> pairs; these pairs are built from the triplet ST data. The last term, the contrastive loss L_CTR, aims to bring the representations of the speech and textual transcription modalities closer together (its effect is analyzed in detail in Section 6), and λ is a tuned hyper-parameter that weights this term.

3.2 Cross-modal Contrastive Learning

As mentioned in the beginning, since we need to produce similar representations for speech and transcripts that share the same semantic meaning, we propose a cross-modal contrastive learning method to bring their representations closer together. The main idea is to introduce a loss that pulls speech and its corresponding transcript (the positive example) close together while pushing irrelevant transcripts (the negative examples) far apart.

For speech s and its transcript x, we first average their features along the time dimension, obtaining the averaged audio representation u for s (Eq. (2)) and the averaged lexical embedding v for x (Eq. (3)). Given a positive example of such a speech-transcript pair (s, x), we randomly pick a set of N − 1 transcripts {x_i^-}_{i=1}^{N−1} from the same batch as negative examples, and compute

L_CTR = − log [ exp(sim(u, v(x)) / τ) / Σ_{j=1}^{N} exp(sim(u, v(x_j)) / τ) ],   (4)

where the sum in the denominator runs over the positive transcript and the N − 1 negatives, τ is the temperature hyper-parameter, and sim is the cosine similarity function sim(a, b) = a · b / (‖a‖ ‖b‖).

3.3 Mining Hard Examples for Contrastive Learning

To further enhance the contrastive learning, we introduce three strategies to mine additional hard examples. These strategies operate at the input and representation levels (the gray shaded modules in Figure 2(a)). Schematic illustrations of each operation are shown in Figure 3; in the cut-off strategy, the gray shaded grid represents the zeroed-out element.

We mask consecutive segments of the original audio waveform sequence s to obtain a new, modified speech s′. We take s′ as an input to the model and compute the contrastive loss against its original corresponding transcript. Concretely, we randomly sample starting indices, without replacement, over all time steps of the original waveform with probability p, and then set the M successive time steps that follow each index to blank. In the experiments we tried multiple configurations and found p = 0.25 and M = 3600 to work best, resulting in masked spans of 0.225 seconds. Since each masked speech fragment is very short, we consider the masked speech and the original transcript to be positive pairs, and the remaining transcripts in the same batch to be negative pairs.

The word repetition strategy randomly replicates some words (or sub-words) in the original sentence, which has two advantages for improving representation robustness. First, since a sentence is much shorter than its audio representation, randomly repeating words is a simple yet useful technique to increase its length. Second, repeating words does not change the semantics, so the result is suitable as an extra positive example for the corresponding speech. Specifically, given a sentence x, each sub-word token x_i is duplicated k more times, where k = 0, 1, 2, ... and k ∼ Poisson(1), resulting in the duplicated sentence x′. We regard x′ as an additional positive example for the speech s, and the samples produced by the same operation in the same batch as negative examples.

Cut-off strategy  Recent studies on natural language understanding and generation have proved the cut-off strategy to be successful (Shen et al., 2020; Yan et al., 2021). We apply a similar idea to the speech representation. Here, we present two variants: sequence cut-off, which erases part of the sequence dimension, and feature cut-off, which erases part of the feature dimension. Note that cut-off differs from dropout: dropout randomly sets individual elements to 0, while cut-off is a dimension-wise "block" dropout. As before, we treat the cut-off audio representation and the original transcribed sentence as positive pairs, and the remaining sentences in the same batch as negative pairs.

4 Experiments

4.1 Experimental Setups

ST datasets  We conduct experiments on all translation directions in the MuST-C dataset[1] (Di Gangi et al., 2019): English (En) to German (De), Spanish (Es), French (Fr), Italian (It), Dutch (Nl), Portuguese (Pt), Romanian (Ro) and Russian (Ru). As one of the largest ST benchmarks, MuST-C contains more than 385 hours of TED talks for each direction.

MT datasets  As the expanded setup, we also introduce the external WMT datasets (Bojar et al., 2016) for En-De/Es/Fr/Ro/Ru and the OPUS100 dataset (Zhang et al., 2020) for En-It/Nl/Pt.

Model Configurations  The Wav2vec2.0 module in the speech encoder is pre-trained only on LibriSpeech (Panayotov et al., 2015) speech, without any downstream fine-tuning[2]. The two CNN layers after Wav2vec2.0 use kernel size 5, stride 2 and hidden size 512. The Transformer follows the base configuration, with 6 encoder and 6 decoder layers, hidden size d = 512, 8 attention heads, and 2048 FFN hidden states. We use pre-layer normalization for stable training. The model with the above configuration has a total of about 150M parameters.

Experiment Details  We use raw 16-bit 16kHz mono-channel speech input. We jointly tokenize the bilingual text using SentencePiece (Kudo and Richardson, 2018) with a vocabulary size of 10k, the same as Ye et al. (2021)'s setup. For the training loss, we set the contrastive temperature τ = 0.02 and the weight of the contrastive term to λ = 1.5 for German and Dutch and λ = 1.0 for the other languages. We evaluate case-sensitive detokenized BLEU using sacreBLEU[3] (Post, 2018) on the MuST-C tst-COMMON set. In the analysis, we also report the ChrF++ score (Popović, 2017) and the learning-based BLEURT score[5]. Appendix B contains more detailed settings and explanations for the baseline models in Table 1, and Appendix C shows the experiments on the choice of the hyper-parameters.

[1] We use v1.0: https://ict.fbk.eu/must-c/
[2] https://dl.fbaipublicfiles.com/fairseq/wav2vec/wav2vec_small.pt
[3] https://github.com/mjpost/sacrebleu, BLEU Signature:
[5] https://github.com/google-research/bleurt (Sellam et al., 2020); as recommended, the checkpoint we use is BLEURT-20.

4.2 Main Results

Comparison with end-to-end ST models  Table 1 shows the main results. Since many existing works regard "leveraging external data" as one of their model's features, their strong performance is largely predicated on the use of auxiliary data, especially large-scale MT data. For a relatively fair comparison, we therefore investigate two cases: (1) without external MT data and (2) with external MT data. Without external MT data, our method already gains an average improvement of 0.5 BLEU over the previous best models. When speech data is introduced for pre-training, our method also works better than the other approaches (Self-training, W-Transf. and XSTNet). When extra MT data are introduced, our method outperforms the state of the art by an average of 0.6 BLEU. Among the benchmark models, Chimera (Han et al., 2021), which shares our goal of closing the gap between the two modalities, constructs an extra fixed-length shared semantic space. However, the fixed-size shared memory actually compromises the MT performance, while our contrastive learning approach is more straightforward and effective.

Comparison with cascaded ST systems  We also compare our method with several cascade baselines, where Ye et al. (2021) and Xu et al. (2021) provide two strong cascade systems trained on MuST-C and external ASR and MT data (LibriSpeech, WMT, and OpenSubtitles). From Table 2, we find that, as an end-to-end model, ConST outperforms these strong cascade models. In Appendix 7, we provide a case study to show this improvement.

5 Analysis

5.1 Is contrastive loss effective?

With the same model architecture and the same pre-training + fine-tuning procedure, the main difference between ConST and XSTNet (Ye et al., 2021) is whether the contrastive loss term is used during fine-tuning. Comparing the BLEU results in Table 1 for the settings without and with external MT data, we find that ConST further improves over XSTNet by 0.5 and 0.6 BLEU respectively, averaged over the eight translation directions, which demonstrates the effectiveness of the cross-modal contrastive learning.

By gradually removing each loss in Eq. (1), Table 3 shows the improvements brought by the multi-task learning and the contrastive learning. We test the results under settings with and without the introduction of external MT data. The contrastive loss alone brings an improvement of 0.9 BLEU over the baseline model that only optimizes L_ST (corresponding to the last row of Table 3), and multi-task learning leads to a further improvement of about 1.2 BLEU on top of that.

5.2 Which layer to contrast on?

An intriguing question is which representations should be considered in the contrastive loss function. In the method part (Section 3.2), we use the averaged audio representation u for the speech s (Eq. (2)) and the averaged lexical embedding v for the transcript x (Eq. (3)), denoted as low-level repr. Inspired by a recent study in multilingual MT (Pan et al., 2021), we also provide an alternative contrastive loss as a comparison, whose speech and text features are average-pooled semantic representations taken from a higher layer of the model, denoted as high-level repr. Table 4 shows that contrastive learning using the low-level representations (Line 1) is better than using the high-level ones (Line 2). On the other hand, although the performance of Line 2 is relatively inferior, it still outperforms the multi-task model without the contrastive loss (Line 3). A detailed analysis of possible explanations is given in Section 6.2.

5.3 Is contrastive loss better than other losses?

Our goal in introducing the contrastive loss term (denoted as the CTR loss) is to close the distance between speech and text representations. There are, however, other options for achieving this goal, such as an L2 loss or a CTC loss. Without introducing any negative samples, the L2 loss directly reduces the Euclidean distance between the representations of the two modalities by minimizing ‖u − v‖₂. It can be viewed as an implementation based on the idea of knowledge distillation (Heo et al., 2019; Dong et al., 2021b).
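To make the overall objective in Eq. (1) concrete, the following is a minimal sketch of how the three cross-entropy terms and the weighted contrastive term could be combined. It is an illustration only, not the authors' implementation; the tensor names, the padding index, and the assumption that each task's decoder output is already available as logits are ours.

```python
import torch.nn.functional as F


def multitask_loss(st_logits, tgt_ids, asr_logits, src_ids, mt_logits, mt_tgt_ids,
                   ctr_loss, lam=1.0, pad_id=1):
    """L = L_ST + L_ASR + L_MT + lambda * L_CTR, as in Eq. (1)."""
    def ce(logits, ids):
        # logits: (batch, time, vocab), ids: (batch, time); padding positions are ignored
        return F.cross_entropy(logits.transpose(1, 2), ids, ignore_index=pad_id)

    return (ce(st_logits, tgt_ids) + ce(asr_logits, src_ids)
            + ce(mt_logits, mt_tgt_ids) + lam * ctr_loss)
```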
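The contrastive term itself (Eq. (4)) can be sketched as an InfoNCE loss over in-batch negatives: mean-pool the padded speech and text features, compare them with cosine similarity scaled by the temperature, and treat the other transcripts in the batch as negatives, as described in Section 3.2. Function and argument names are illustrative; the default τ = 0.02 follows Section 4.1.

```python
import torch
import torch.nn.functional as F


def mean_pool(feat, mask):
    """Average (batch, time, dim) features over time, ignoring padded positions."""
    mask = mask.unsqueeze(-1).type_as(feat)              # (batch, time, 1)
    return (feat * mask).sum(1) / mask.sum(1).clamp(min=1.0)


def contrastive_loss(speech_feat, speech_mask, text_feat, text_mask, tau=0.02):
    """InfoNCE: u_i should be closest to v_i among all transcripts in the batch."""
    u = F.normalize(mean_pool(speech_feat, speech_mask), dim=-1)   # averaged audio repr.
    v = F.normalize(mean_pool(text_feat, text_mask), dim=-1)       # averaged lexical repr.
    logits = u @ v.t() / tau                                       # cosine similarity / temperature
    labels = torch.arange(u.size(0), device=u.device)              # positives on the diagonal
    return F.cross_entropy(logits, labels)
```

The cross-entropy over the similarity matrix is exactly the negative log-softmax of the positive pair in Eq. (4), with the remaining rows of the batch acting as the negative transcripts.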
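For the span-masking strategy of Section 3.3, a simple way to prototype it is to zero out 0.225 s windows of the raw 16 kHz waveform. The sketch below interprets p as the approximate fraction of the waveform covered by masks, which is our assumption; the excerpt only states p = 0.25 and M = 3600.

```python
import torch


def mask_waveform_spans(wav, p=0.25, span=3600):
    """Return a copy of a 1-D waveform with random `span`-sample windows set to zero."""
    wav = wav.clone()
    n = wav.numel()
    num_spans = max(1, int(p * n / span))                   # cover roughly a fraction p
    starts = torch.randperm(max(1, n - span))[:num_spans]   # start indices without replacement
    for s in starts.tolist():
        wav[s:s + span] = 0.0
    return wav
```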
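The word-repetition augmentation follows directly from its description: each sub-word token is duplicated k extra times with k drawn from Poisson(1). The function below operates on a list of token ids; it is a sketch, not the released code.

```python
import numpy as np


def word_repetition(token_ids, lam=1.0, seed=None):
    """Duplicate each sub-word token k extra times, k ~ Poisson(lam), as in Section 3.3."""
    rng = np.random.default_rng(seed)
    out = []
    for tok in token_ids:
        k = int(rng.poisson(lam))       # k = 0, 1, 2, ...
        out.extend([tok] * (1 + k))     # keep the original token plus k copies
    return out
```

The duplicated sentence x′ is then embedded as usual and treated as an extra positive example for the paired speech.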
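The two cut-off variants can likewise be sketched as block-wise zeroing of an encoded feature matrix, which is what distinguishes them from element-wise dropout. The cut-off ratio below is an assumed hyper-parameter; the excerpt does not report the value used.

```python
import torch


def sequence_cutoff(feat, ratio=0.1):
    """Zero out a random subset of time steps (rows of a (time, dim) matrix)."""
    feat = feat.clone()
    idx = torch.randperm(feat.size(0))[: int(feat.size(0) * ratio)]
    feat[idx, :] = 0.0
    return feat


def feature_cutoff(feat, ratio=0.1):
    """Zero out a random subset of feature dimensions (columns of a (time, dim) matrix)."""
    feat = feat.clone()
    idx = torch.randperm(feat.size(1))[: int(feat.size(1) * ratio)]
    feat[:, idx] = 0.0
    return feat
```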
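The front-end dimensions reported in Section 4.1 translate into a small convolutional subsampler between wav2vec 2.0 and the shared Transformer. The sketch below assumes 768-dimensional wav2vec 2.0 base features and a GELU activation, neither of which is stated in the excerpt; loading the actual checkpoint from footnote [2] is omitted.

```python
import torch
import torch.nn as nn


class ConvSubsampler(nn.Module):
    """Two Conv1d layers (kernel 5, stride 2, hidden 512) that 4x-downsample acoustic features."""

    def __init__(self, in_dim=768, hidden=512):
        super().__init__()
        self.convs = nn.Sequential(
            nn.Conv1d(in_dim, hidden, kernel_size=5, stride=2, padding=2),
            nn.GELU(),
            nn.Conv1d(hidden, hidden, kernel_size=5, stride=2, padding=2),
            nn.GELU(),
        )

    def forward(self, x):
        # x: (batch, time, in_dim) wav2vec 2.0 features -> (batch, ~time/4, hidden)
        return self.convs(x.transpose(1, 2)).transpose(1, 2)
```

The downsampled features then feed the Transformer base encoder (6 layers, d = 512, 8 heads, FFN 2048) described above.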
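Finally, the evaluation protocol of Section 4.1 can be reproduced with the standard tools it cites. The snippet below assumes the pip-installable sacrebleu package (footnote [3]) and, optionally, the bleurt package from footnote [5] with the BLEURT-20 checkpoint downloaded locally; the example sentences are placeholders.

```python
from sacrebleu.metrics import BLEU, CHRF

hyps = ["The quick brown fox jumps over the lazy dog."]     # detokenized system outputs
refs = [["The quick brown fox jumped over the lazy dog."]]  # one reference stream

print(BLEU().corpus_score(hyps, refs).score)                # case-sensitive detokenized BLEU
print(CHRF(word_order=2).corpus_score(hyps, refs).score)    # chrF++ (word n-gram order 2)

# Learning-based BLEURT score (heavier dependency, shown for completeness):
# from bleurt import score
# scorer = score.BleurtScorer("BLEURT-20")
# print(scorer.score(references=refs[0], candidates=hyps))
```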