diff --git "a/data/tempo_wic/train.jsonl" "b/data/tempo_wic/train.jsonl" --- "a/data/tempo_wic/train.jsonl" +++ "b/data/tempo_wic/train.jsonl" @@ -1,1427 +1,3 @@ -{"id": "3349-frisk", "word": "frisk", "label_binary": 0, "text_1": "imagine seeing qoute from cave story making it into smash as a dlc character instead of frisk or sans lmao", "token_idx_1": 16, "text_start_1": 88, "text_end_1": 93, "date_1": "2019-02", "text_2": "Bloomberg? Are you people for real?16 cases of sexual harrassment, stop and frisk, redlining mortgages, and he is a conservative. If the dems think the only way you can beat trump is with a republican, then you deserve trump, and you show just how weak the dems are. #NotMeUs", "token_idx_2": 16, "text_start_2": 76, "text_end_2": 81, "date_2": "2020-02", "text_1_tokenized": ["imagine", "seeing", "qoute", "from", "cave", "story", "making", "it", "into", "smash", "as", "a", "dlc", "character", "instead", "of", "frisk", "or", "sans", "lmao"], "text_2_tokenized": ["Bloomberg", "?", "Are", "you", "people", "for", "real", "?", "16", "cases", "of", "sexual", "harrassment", ",", "stop", "and", "frisk", ",", "redlining", "mortgages", ",", "and", "he", "is", "a", "conservative", ".", "If", "the", "dems", "think", "the", "only", "way", "you", "can", "beat", "trump", "is", "with", "a", "republican", ",", "then", "you", "deserve", "trump", ",", "and", "you", "show", "just", "how", "weak", "the", "dems", "are", ".", "#NotMeUs"]} -{"id": "3350-frisk", "word": "frisk", "label_binary": 0, "text_1": "frisk and sans are my two favorite undertale characters actually jknJKNDJKNKDN", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-02", "text_2": "Today, in my wrongful convictions class we listened to the audio of Bloomberg talking about stop and frisk, and then discussed how this relates to the Exonerated Five and the criminalization of young Black men and boys.", "token_idx_2": 18, "text_start_2": 101, "text_end_2": 106, "date_2": "2020-02", "text_1_tokenized": ["frisk", "and", "sans", "are", "my", "two", "favorite", "undertale", "characters", "actually", "jknJKNDJKNKDN"], "text_2_tokenized": ["Today", ",", "in", "my", "wrongful", "convictions", "class", "we", "listened", "to", "the", "audio", "of", "Bloomberg", "talking", "about", "stop", "and", "frisk", ",", "and", "then", "discussed", "how", "this", "relates", "to", "the", "Exonerated", "Five", "and", "the", "criminalization", "of", "young", "Black", "men", "and", "boys", "."]} -{"id": "3351-frisk", "word": "frisk", "label_binary": 1, "text_1": "We don't like the search and frisk so this bitch in neutral", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 34, "date_1": "2019-02", "text_2": "who the fuck is listening to mike bloomberg railing in \"bernie bros\"? mr stop and frisk, literally turned the police on occupy wall street, rnc protestors, and new york muslims. 
get the fuck out", "token_idx_2": 18, "text_start_2": 82, "text_end_2": 87, "date_2": "2020-02", "text_1_tokenized": ["We", "don't", "like", "the", "search", "and", "frisk", "so", "this", "bitch", "in", "neutral"], "text_2_tokenized": ["who", "the", "fuck", "is", "listening", "to", "mike", "bloomberg", "railing", "in", "\"", "bernie", "bros", "\"", "?", "mr", "stop", "and", "frisk", ",", "literally", "turned", "the", "police", "on", "occupy", "wall", "street", ",", "rnc", "protestors", ",", "and", "new", "york", "muslims", ".", "get", "the", "fuck", "out"]} -{"id": "3352-frisk", "word": "frisk", "label_binary": 0, "text_1": "Hey guys I'm wondering if anybody would draw me with frisk or my brother liu in my au homicidal liu is my brother and he's nice once you get to know him better I hope to hear from you soon bye", "token_idx_1": 10, "text_start_1": 53, "text_end_1": 58, "date_1": "2019-02", "text_2": "How about stop in frisk happened in mostly minority communities because that's statistically where the majority of crime happens?? These candidates need to have a back bone and stop acting dumb when asked common sense questions in order to not hurt feelings. #Debatenight", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-02", "text_1_tokenized": ["Hey", "guys", "I'm", "wondering", "if", "anybody", "would", "draw", "me", "with", "frisk", "or", "my", "brother", "liu", "in", "my", "au", "homicidal", "liu", "is", "my", "brother", "and", "he's", "nice", "once", "you", "get", "to", "know", "him", "better", "I", "hope", "to", "hear", "from", "you", "soon", "bye"], "text_2_tokenized": ["How", "about", "stop", "in", "frisk", "happened", "in", "mostly", "minority", "communities", "because", "that's", "statistically", "where", "the", "majority", "of", "crime", "happens", "?", "?", "These", "candidates", "need", "to", "have", "a", "back", "bone", "and", "stop", "acting", "dumb", "when", "asked", "common", "sense", "questions", "in", "order", "to", "not", "hurt", "feelings", ".", "#Debatenight"]} -{"id": "3353-frisk", "word": "frisk", "label_binary": 1, "text_1": "Tampa's low turnout non-partisan election for mayor featured a bunch of unexciting Dems, a buffoonish billionaire, and a notorious cop known for her racist stop and frisk program. The runoff will feature the cop and the billionaire. 2 Republicans in a very blue city.", "token_idx_1": 28, "text_start_1": 165, "text_end_1": 170, "date_1": "2019-02", "text_2": "It feels like \"stop & frisk\" = Hillary's emails. I'm listening to regular black voters for direction on this, not media personalities.", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-02", "text_1_tokenized": ["Tampa's", "low", "turnout", "non-partisan", "election", "for", "mayor", "featured", "a", "bunch", "of", "unexciting", "Dems", ",", "a", "buffoonish", "billionaire", ",", "and", "a", "notorious", "cop", "known", "for", "her", "racist", "stop", "and", "frisk", "program", ".", "The", "runoff", "will", "feature", "the", "cop", "and", "the", "billionaire", ".", "2", "Republicans", "in", "a", "very", "blue", "city", "."], "text_2_tokenized": ["It", "feels", "like", "\"", "stop", "&", "frisk", "\"", "=", "Hillary's", "emails", ".", "I'm", "listening", "to", "regular", "black", "voters", "for", "direction", "on", "this", ",", "not", "media", "personalities", "."]} -{"id": "3354-frisk", "word": "frisk", "label_binary": 0, "text_1": "Sending fanart frisk on frans amino. People in there: i hate to be rude but can we see the proof? 
Me: Me: you're telling me my art is a fraud? \ud83d\ude05 #frans P. S will send video process hahah", "token_idx_1": 2, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-02", "text_2": "When I heard the Bloomberg for black people add it made me verbally laugh out loud. \ud83d\ude02\ud83d\ude02\ud83d\ude02 Dudes just trying to add-buy his way away from decades of racist policy. Stop and frisk for black people!!!", "token_idx_2": 36, "text_start_2": 170, "text_end_2": 175, "date_2": "2020-02", "text_1_tokenized": ["Sending", "fanart", "frisk", "on", "frans", "amino", ".", "People", "in", "there", ":", "i", "hate", "to", "be", "rude", "but", "can", "we", "see", "the", "proof", "?", "Me", ":", "Me", ":", "you're", "telling", "me", "my", "art", "is", "a", "fraud", "?", "\ud83d\ude05", "#frans", "P", ".", "S", "will", "send", "video", "process", "hahah"], "text_2_tokenized": ["When", "I", "heard", "the", "Bloomberg", "for", "black", "people", "add", "it", "made", "me", "verbally", "laugh", "out", "loud", ".", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02", "Dudes", "just", "trying", "to", "add-buy", "his", "way", "away", "from", "decades", "of", "racist", "policy", ".", "Stop", "and", "frisk", "for", "black", "people", "!", "!", "!"]} -{"id": "3355-frisk", "word": "frisk", "label_binary": 0, "text_1": "Joshua jumping off stage to frisk with Miller's crew. The roadman is free!!!", "token_idx_1": 5, "text_start_1": 28, "text_end_1": 33, "date_1": "2019-02", "text_2": "I don't see how anyone can support @MikeBloomberg, especially Black politicians. Stop and frisk is wrong and discriminatory against Black people.", "token_idx_2": 15, "text_start_2": 90, "text_end_2": 95, "date_2": "2020-02", "text_1_tokenized": ["Joshua", "jumping", "off", "stage", "to", "frisk", "with", "Miller's", "crew", ".", "The", "roadman", "is", "free", "!", "!", "!"], "text_2_tokenized": ["I", "don't", "see", "how", "anyone", "can", "support", "@MikeBloomberg", ",", "especially", "Black", "politicians", ".", "Stop", "and", "frisk", "is", "wrong", "and", "discriminatory", "against", "Black", "people", "."]} -{"id": "3356-frisk", "word": "frisk", "label_binary": 1, "text_1": "hmm a businessman president presented a unique opportunity to accelerate the fight against climate change imo but a stop and frisk advocate has no place in the 2020 dem primary so probably for the best", "token_idx_1": 20, "text_start_1": 125, "text_end_1": 130, "date_1": "2019-02", "text_2": "Why are people critical of @PeteButtigieg perceived low support from POC so ready to forgive Bloomberg's awful record with stop and frisk policy here in NYC? 
\ud83e\udd14", "token_idx_2": 21, "text_start_2": 132, "text_end_2": 137, "date_2": "2020-02", "text_1_tokenized": ["hmm", "a", "businessman", "president", "presented", "a", "unique", "opportunity", "to", "accelerate", "the", "fight", "against", "climate", "change", "imo", "but", "a", "stop", "and", "frisk", "advocate", "has", "no", "place", "in", "the", "2020", "dem", "primary", "so", "probably", "for", "the", "best"], "text_2_tokenized": ["Why", "are", "people", "critical", "of", "@PeteButtigieg", "perceived", "low", "support", "from", "POC", "so", "ready", "to", "forgive", "Bloomberg's", "awful", "record", "with", "stop", "and", "frisk", "policy", "here", "in", "NYC", "?", "\ud83e\udd14"]} -{"id": "3357-frisk", "word": "frisk", "label_binary": 1, "text_1": "That man who was given a free pass being on the 'No frisk list' wasnt answerable to any statutory officer is now questioned for days and not hours Smartest of the crooks have been known to make mistakes Karma never looses an address.. #RobertVadra", "token_idx_1": 13, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-02", "text_2": "Seeing a lot of spin on why Mike Bloomberg failed candidacy never took off. It was a doom campaign to begin with, can't market bad product to constituencies that felt aggrieved by his stop & frisk policy while he was Mayor of NYC.", "token_idx_2": 37, "text_start_2": 195, "text_end_2": 200, "date_2": "2020-02", "text_1_tokenized": ["That", "man", "who", "was", "given", "a", "free", "pass", "being", "on", "the", "'", "No", "frisk", "list", "'", "wasnt", "answerable", "to", "any", "statutory", "officer", "is", "now", "questioned", "for", "days", "and", "not", "hours", "Smartest", "of", "the", "crooks", "have", "been", "known", "to", "make", "mistakes", "Karma", "never", "looses", "an", "address", "..", "#RobertVadra"], "text_2_tokenized": ["Seeing", "a", "lot", "of", "spin", "on", "why", "Mike", "Bloomberg", "failed", "candidacy", "never", "took", "off", ".", "It", "was", "a", "doom", "campaign", "to", "begin", "with", ",", "can't", "market", "bad", "product", "to", "constituencies", "that", "felt", "aggrieved", "by", "his", "stop", "&", "frisk", "policy", "while", "he", "was", "Mayor", "of", "NYC", "."]} -{"id": "3358-frisk", "word": "frisk", "label_binary": 1, "text_1": "Stella is shaking, seems nervous..Maybe they should frisk her #LivePD", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-02", "text_2": "Bloomberg hates Black kids. Instead of constructively keeping kids off the streets, he'd rather \u201cthrow them against the wall and frisk them.\u201d", "token_idx_2": 23, "text_start_2": 129, "text_end_2": 134, "date_2": "2020-02", "text_1_tokenized": ["Stella", "is", "shaking", ",", "seems", "nervous", "..", "Maybe", "they", "should", "frisk", "her", "#LivePD"], "text_2_tokenized": ["Bloomberg", "hates", "Black", "kids", ".", "Instead", "of", "constructively", "keeping", "kids", "off", "the", "streets", ",", "he'd", "rather", "\u201c", "throw", "them", "against", "the", "wall", "and", "frisk", "them", ".", "\u201d"]} -{"id": "3359-frisk", "word": "frisk", "label_binary": 0, "text_1": "Little Mary donned her skates Upon the ice to frisk; Wasn't she a silly girl Her little *? 
How do you pronounce asterisk?", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 51, "date_1": "2019-02", "text_2": "Y'all made all these GOOD candidates seem like out of touch weirdos and now we've gotta deal with a guy who thinks \u201cweirdo\u201d and \u201ctouched\u201d are descriptors for folks we should \u201cstop\u201d and \u201cfrisk\u201d", "token_idx_2": 40, "text_start_2": 186, "text_end_2": 191, "date_2": "2020-02", "text_1_tokenized": ["Little", "Mary", "donned", "her", "skates", "Upon", "the", "ice", "to", "frisk", ";", "Wasn't", "she", "a", "silly", "girl", "Her", "little", "*", "?", "How", "do", "you", "pronounce", "asterisk", "?"], "text_2_tokenized": ["Y'all", "made", "all", "these", "GOOD", "candidates", "seem", "like", "out", "of", "touch", "weirdos", "and", "now", "we've", "gotta", "deal", "with", "a", "guy", "who", "thinks", "\u201c", "weirdo", "\u201d", "and", "\u201c", "touched", "\u201d", "are", "descriptors", "for", "folks", "we", "should", "\u201c", "stop", "\u201d", "and", "\u201c", "frisk", "\u201d"]} -{"id": "3360-frisk", "word": "frisk", "label_binary": 0, "text_1": "you know I never really think about it but the fact that a frisk Undertale shirt exists and I own and can wear it makes me really happy they must be working on a Kris deltarune version, right? it seemed like an obvious version of the shirt even back when it was just undertale", "token_idx_1": 13, "text_start_1": 59, "text_end_1": 64, "date_1": "2019-02", "text_2": "Bloomberg is racist. Stop and frisk is/was racist.", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 35, "date_2": "2020-02", "text_1_tokenized": ["you", "know", "I", "never", "really", "think", "about", "it", "but", "the", "fact", "that", "a", "frisk", "Undertale", "shirt", "exists", "and", "I", "own", "and", "can", "wear", "it", "makes", "me", "really", "happy", "they", "must", "be", "working", "on", "a", "Kris", "deltarune", "version", ",", "right", "?", "it", "seemed", "like", "an", "obvious", "version", "of", "the", "shirt", "even", "back", "when", "it", "was", "just", "undertale"], "text_2_tokenized": ["Bloomberg", "is", "racist", ".", "Stop", "and", "frisk", "is", "/", "was", "racist", "."]} -{"id": "3361-frisk", "word": "frisk", "label_binary": 0, "text_1": "I really wish to rp with a pervy frisk or Chara.....im kinda into kinky woman >//<", "token_idx_1": 8, "text_start_1": 33, "text_end_1": 38, "date_1": "2019-02", "text_2": "Bloomberg like all Democrat nominee's start out apologizing for suppport for stop and frisk, How dumb are the voters to listen to these scumbags and their blatant bullshit", "token_idx_2": 13, "text_start_2": 86, "text_end_2": 91, "date_2": "2020-02", "text_1_tokenized": ["I", "really", "wish", "to", "rp", "with", "a", "pervy", "frisk", "or", "Chara", "...", "im", "kinda", "into", "kinky", "woman", ">", "/", "/", "<"], "text_2_tokenized": ["Bloomberg", "like", "all", "Democrat", "nominee's", "start", "out", "apologizing", "for", "suppport", "for", "stop", "and", "frisk", ",", "How", "dumb", "are", "the", "voters", "to", "listen", "to", "these", "scumbags", "and", "their", "blatant", "bullshit"]} -{"id": "3362-frisk", "word": "frisk", "label_binary": 1, "text_1": "NY City instituted a highly controversial program called \"stop and frisk\" through 2016. The notion was to intervene to search for guns/drugs/low level crimes so bigger ones didn't occur. 
It devolved into \"stop and frisk black guys\" and therefore imploded THREAD", "token_idx_1": 11, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-02", "text_2": "For those of you having a fit over @MikeBloomberg stop and frisk He Apologized EXCEPT IT...When we have Trump getting away with all his crimes and never Apologized for all the wrong he did thinking of himself only..REALLY this country is crying for someone like MB", "token_idx_2": 11, "text_start_2": 59, "text_end_2": 64, "date_2": "2020-02", "text_1_tokenized": ["NY", "City", "instituted", "a", "highly", "controversial", "program", "called", "\"", "stop", "and", "frisk", "\"", "through", "2016", ".", "The", "notion", "was", "to", "intervene", "to", "search", "for", "guns", "/", "drugs", "/", "low", "level", "crimes", "so", "bigger", "ones", "didn't", "occur", ".", "It", "devolved", "into", "\"", "stop", "and", "frisk", "black", "guys", "\"", "and", "therefore", "imploded", "THREAD"], "text_2_tokenized": ["For", "those", "of", "you", "having", "a", "fit", "over", "@MikeBloomberg", "stop", "and", "frisk", "He", "Apologized", "EXCEPT", "IT", "...", "When", "we", "have", "Trump", "getting", "away", "with", "all", "his", "crimes", "and", "never", "Apologized", "for", "all", "the", "wrong", "he", "did", "thinking", "of", "himself", "only", "..", "REALLY", "this", "country", "is", "crying", "for", "someone", "like", "MB"]} -{"id": "3363-frisk", "word": "frisk", "label_binary": 0, "text_1": "I love frisk so fucking much", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-02", "text_2": "police in my own town just did a whole stop n frisk on my ass. she gotta be new or sum. bc the pigs don't fw me", "token_idx_2": 11, "text_start_2": 46, "text_end_2": 51, "date_2": "2020-02", "text_1_tokenized": ["I", "love", "frisk", "so", "fucking", "much"], "text_2_tokenized": ["police", "in", "my", "own", "town", "just", "did", "a", "whole", "stop", "n", "frisk", "on", "my", "ass", ".", "she", "gotta", "be", "new", "or", "sum", ".", "bc", "the", "pigs", "don't", "fw", "me"]} -{"id": "3365-frisk", "word": "frisk", "label_binary": 0, "text_1": "i remember in freshman year i spent months making my alphys cosplay and wore it to school to match my friend who was frisk and tons of people put pics and videos of me on sc making fun of me gfdjgf when i moved schools 2 years later ppl recognized me as the cringey alphys dfgshfg", "token_idx_1": 23, "text_start_1": 117, "text_end_1": 122, "date_1": "2019-02", "text_2": "Bloomberg said he didnt really want to talk about stop and frisk! Of course he doesn't! As a New Yorker, his attitude at time and just 18 months ago, shows his true self! 
Believe him!", "token_idx_2": 11, "text_start_2": 59, "text_end_2": 64, "date_2": "2020-02", "text_1_tokenized": ["i", "remember", "in", "freshman", "year", "i", "spent", "months", "making", "my", "alphys", "cosplay", "and", "wore", "it", "to", "school", "to", "match", "my", "friend", "who", "was", "frisk", "and", "tons", "of", "people", "put", "pics", "and", "videos", "of", "me", "on", "sc", "making", "fun", "of", "me", "gfdjgf", "when", "i", "moved", "schools", "2", "years", "later", "ppl", "recognized", "me", "as", "the", "cringey", "alphys", "dfgshfg"], "text_2_tokenized": ["Bloomberg", "said", "he", "didnt", "really", "want", "to", "talk", "about", "stop", "and", "frisk", "!", "Of", "course", "he", "doesn't", "!", "As", "a", "New", "Yorker", ",", "his", "attitude", "at", "time", "and", "just", "18", "months", "ago", ",", "shows", "his", "true", "self", "!", "Believe", "him", "!"]} -{"id": "3366-frisk", "word": "frisk", "label_binary": 0, "text_1": "Toons just won 2 nil and today i had a one wiper, what a frisk man", "token_idx_1": 15, "text_start_1": 57, "text_end_1": 62, "date_1": "2019-02", "text_2": "Most *amusing* stop & frisk incident, was when I was walking home & got stopped. One of the officers, a woman, saw my wallet, noticed my State ID & Cal Poly Pomona ID & said she attended too. After I was cleared & let go, I started talking about CPP, practically flirting. \ud83d\ude1d", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-02", "text_1_tokenized": ["Toons", "just", "won", "2", "nil", "and", "today", "i", "had", "a", "one", "wiper", ",", "what", "a", "frisk", "man"], "text_2_tokenized": ["Most", "*", "amusing", "*", "stop", "&", "frisk", "incident", ",", "was", "when", "I", "was", "walking", "home", "&", "got", "stopped", ".", "One", "of", "the", "officers", ",", "a", "woman", ",", "saw", "my", "wallet", ",", "noticed", "my", "State", "ID", "&", "Cal", "Poly", "Pomona", "ID", "&", "said", "she", "attended", "too", ".", "After", "I", "was", "cleared", "&", "let", "go", ",", "I", "started", "talking", "about", "CPP", ",", "practically", "flirting", ".", "\ud83d\ude1d"]} -{"id": "3367-frisk", "word": "frisk", "label_binary": 1, "text_1": "I've noticed over many years on the job and by watching #LivePD that nobody ever has ID when it comes to a field contact. But when it comes to a frisk, low & behold!! A wallet with ID!! Now was that so difficult?", "token_idx_1": 31, "text_start_1": 145, "text_end_1": 150, "date_1": "2019-02", "text_2": "STOP giving me fucking ads for bloomberg after warren ate his ass yesterday. i'm tired of seeing stop and frisk man.", "token_idx_2": 20, "text_start_2": 106, "text_end_2": 111, "date_2": "2020-02", "text_1_tokenized": ["I've", "noticed", "over", "many", "years", "on", "the", "job", "and", "by", "watching", "#LivePD", "that", "nobody", "ever", "has", "ID", "when", "it", "comes", "to", "a", "field", "contact", ".", "But", "when", "it", "comes", "to", "a", "frisk", ",", "low", "&", "behold", "!", "!", "A", "wallet", "with", "ID", "!", "!", "Now", "was", "that", "so", "difficult", "?"], "text_2_tokenized": ["STOP", "giving", "me", "fucking", "ads", "for", "bloomberg", "after", "warren", "ate", "his", "ass", "yesterday", ".", "i'm", "tired", "of", "seeing", "stop", "and", "frisk", "man", "."]} -{"id": "3368-frisk", "word": "frisk", "label_binary": 1, "text_1": "As cross as I was with some behaviour last night, am pleased to report that the pre-match frisk was far less grabby this time around, and actually quite sensual. 
Thank you for the tender caress of my body for safety purposes.", "token_idx_1": 18, "text_start_1": 90, "text_end_1": 95, "date_1": "2019-02", "text_2": "Stop and frisk was some straight draconian Jim Crow type shit that nigga will never win the presidency", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-02", "text_1_tokenized": ["As", "cross", "as", "I", "was", "with", "some", "behaviour", "last", "night", ",", "am", "pleased", "to", "report", "that", "the", "pre-match", "frisk", "was", "far", "less", "grabby", "this", "time", "around", ",", "and", "actually", "quite", "sensual", ".", "Thank", "you", "for", "the", "tender", "caress", "of", "my", "body", "for", "safety", "purposes", "."], "text_2_tokenized": ["Stop", "and", "frisk", "was", "some", "straight", "draconian", "Jim", "Crow", "type", "shit", "that", "nigga", "will", "never", "win", "the", "presidency"]} -{"id": "3370-frisk", "word": "frisk", "label_binary": 0, "text_1": "here's my new rm take, Vermillion is gap bird and Pyrolution is frisk", "token_idx_1": 13, "text_start_1": 64, "text_end_1": 69, "date_1": "2019-02", "text_2": "Stop and frisk isn't a hypothetical issue. I remember being in high school while Ghouliani was mayor. We were coming from a party to celebrate our friend who was leaving for Syracuse U...", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-02", "text_1_tokenized": ["here's", "my", "new", "rm", "take", ",", "Vermillion", "is", "gap", "bird", "and", "Pyrolution", "is", "frisk"], "text_2_tokenized": ["Stop", "and", "frisk", "isn't", "a", "hypothetical", "issue", ".", "I", "remember", "being", "in", "high", "school", "while", "Ghouliani", "was", "mayor", ".", "We", "were", "coming", "from", "a", "party", "to", "celebrate", "our", "friend", "who", "was", "leaving", "for", "Syracuse", "U", "..."]} -{"id": "3371-frisk", "word": "frisk", "label_binary": 1, "text_1": "Also, just for the record, LabCorp in Frederick all but does a frisk search before your piss test.", "token_idx_1": 14, "text_start_1": 63, "text_end_1": 68, "date_1": "2019-02", "text_2": "why did my AFRICAN dad just say that Bloomberg's stop and frisk was good for New York?", "token_idx_2": 11, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-02", "text_1_tokenized": ["Also", ",", "just", "for", "the", "record", ",", "LabCorp", "in", "Frederick", "all", "but", "does", "a", "frisk", "search", "before", "your", "piss", "test", "."], "text_2_tokenized": ["why", "did", "my", "AFRICAN", "dad", "just", "say", "that", "Bloomberg's", "stop", "and", "frisk", "was", "good", "for", "New", "York", "?"]} -{"id": "3372-frisk", "word": "frisk", "label_binary": 1, "text_1": "\u201cYou might have to frisk me.\u201d Fucking gross go away. 
\ud83d\ude37", "token_idx_1": 5, "text_start_1": 19, "text_end_1": 24, "date_1": "2019-02", "text_2": "You might say Bloomberg is using his money to \u201cstop and frisk\u201d our democracy - @jonlovett #lovettorleaveit #DemocraticPrimary #Elections2020", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-02", "text_1_tokenized": ["\u201c", "You", "might", "have", "to", "frisk", "me", ".", "\u201d", "Fucking", "gross", "go", "away", ".", "\ud83d\ude37"], "text_2_tokenized": ["You", "might", "say", "Bloomberg", "is", "using", "his", "money", "to", "\u201c", "stop", "and", "frisk", "\u201d", "our", "democracy", "-", "@jonlovett", "#lovettorleaveit", "#DemocraticPrimary", "#Elections2020"]} -{"id": "3373-frisk", "word": "frisk", "label_binary": 0, "text_1": "i love watching frisk unfold on here\ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 21, "date_1": "2019-02", "text_2": "Now that I think about it Trump was also in favour of stop and frisk. #TYTLive @FunnyAida @bretterlich", "token_idx_2": 14, "text_start_2": 63, "text_end_2": 68, "date_2": "2020-02", "text_1_tokenized": ["i", "love", "watching", "frisk", "unfold", "on", "here", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["Now", "that", "I", "think", "about", "it", "Trump", "was", "also", "in", "favour", "of", "stop", "and", "frisk", ".", "#TYTLive", "@FunnyAida", "@bretterlich"]} -{"id": "3374-frisk", "word": "frisk", "label_binary": 1, "text_1": "NYC murders up 56% so far this year. Could it b we r now seeing the consequences of ending the stop and frisk policy and the chilling effect the War on Cops has produced?", "token_idx_1": 24, "text_start_1": 104, "text_end_1": 109, "date_1": "2019-02", "text_2": "Bloomberg on stop and frisk: \u201cit's just not something I think about anymore.\u201d Really? I bet the people affected by it still do #TownHall", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-02", "text_1_tokenized": ["NYC", "murders", "up", "56", "%", "so", "far", "this", "year", ".", "Could", "it", "b", "we", "r", "now", "seeing", "the", "consequences", "of", "ending", "the", "stop", "and", "frisk", "policy", "and", "the", "chilling", "effect", "the", "War", "on", "Cops", "has", "produced", "?"], "text_2_tokenized": ["Bloomberg", "on", "stop", "and", "frisk", ":", "\u201c", "it's", "just", "not", "something", "I", "think", "about", "anymore", ".", "\u201d", "Really", "?", "I", "bet", "the", "people", "affected", "by", "it", "still", "do", "#TownHall"]} -{"id": "3375-frisk", "word": "frisk", "label_binary": 1, "text_1": "Boy: I'm really open-minded. I love intelligent debate. I don't get offended easily. Me: Everyone has implicit biases. The immigration system is broken and a wall on our southern border won't fix illegal immigration. Stop and frisk is racist. 
Boy: I'm offended.", "token_idx_1": 43, "text_start_1": 226, "text_end_1": 231, "date_1": "2019-02", "text_2": "Stop and frisk man looks like the dude from the hobbit.", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-02", "text_1_tokenized": ["Boy", ":", "I'm", "really", "open-minded", ".", "I", "love", "intelligent", "debate", ".", "I", "don't", "get", "offended", "easily", ".", "Me", ":", "Everyone", "has", "implicit", "biases", ".", "The", "immigration", "system", "is", "broken", "and", "a", "wall", "on", "our", "southern", "border", "won't", "fix", "illegal", "immigration", ".", "Stop", "and", "frisk", "is", "racist", ".", "Boy", ":", "I'm", "offended", "."], "text_2_tokenized": ["Stop", "and", "frisk", "man", "looks", "like", "the", "dude", "from", "the", "hobbit", "."]} -{"id": "3376-frisk", "word": "frisk", "label_binary": 0, "text_1": "Me, with their team with all over the hosting coach told frisk \"I'll show he's", "token_idx_1": 12, "text_start_1": 57, "text_end_1": 62, "date_1": "2019-02", "text_2": "Michael Bloomberg needs to promise to WIPE all the black and brown who now has record based on his policies of stop and frisk.", "token_idx_2": 23, "text_start_2": 120, "text_end_2": 125, "date_2": "2020-02", "text_1_tokenized": ["Me", ",", "with", "their", "team", "with", "all", "over", "the", "hosting", "coach", "told", "frisk", "\"", "I'll", "show", "he's"], "text_2_tokenized": ["Michael", "Bloomberg", "needs", "to", "promise", "to", "WIPE", "all", "the", "black", "and", "brown", "who", "now", "has", "record", "based", "on", "his", "policies", "of", "stop", "and", "frisk", "."]} -{"id": "3377-frisk", "word": "frisk", "label_binary": 1, "text_1": "Grimes incentivizes stop and frisk and declares the victory of women", "token_idx_1": 4, "text_start_1": 29, "text_end_1": 34, "date_1": "2019-02", "text_2": "A party that gets behind the stop and frisk guy should be destroyed", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 43, "date_2": "2020-02", "text_1_tokenized": ["Grimes", "incentivizes", "stop", "and", "frisk", "and", "declares", "the", "victory", "of", "women"], "text_2_tokenized": ["A", "party", "that", "gets", "behind", "the", "stop", "and", "frisk", "guy", "should", "be", "destroyed"]} -{"id": "3378-frisk", "word": "frisk", "label_binary": 0, "text_1": "#SB10 is extremely borderline stop and frisk, we have seen these type of laws in IL and GA and it causes more unrest . #utpol", "token_idx_1": 6, "text_start_1": 39, "text_end_1": 44, "date_1": "2019-02", "text_2": "himiko kin's frisk undertale", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-02", "text_1_tokenized": ["#SB10", "is", "extremely", "borderline", "stop", "and", "frisk", ",", "we", "have", "seen", "these", "type", "of", "laws", "in", "IL", "and", "GA", "and", "it", "causes", "more", "unrest", ".", "#utpol"], "text_2_tokenized": ["himiko", "kin's", "frisk", "undertale"]} -{"id": "3379-frisk", "word": "frisk", "label_binary": 0, "text_1": "i dont ship gaster w/frisk but boy howdy some of the art i have seen? 
god i wish that were me", "token_idx_1": 6, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-02", "text_2": "Trump calling Bloomberg a racist when he himself called for Nationwide stop and frisk is truly the mayo calling the snow, white.", "token_idx_2": 13, "text_start_2": 80, "text_end_2": 85, "date_2": "2020-02", "text_1_tokenized": ["i", "dont", "ship", "gaster", "w", "/", "frisk", "but", "boy", "howdy", "some", "of", "the", "art", "i", "have", "seen", "?", "god", "i", "wish", "that", "were", "me"], "text_2_tokenized": ["Trump", "calling", "Bloomberg", "a", "racist", "when", "he", "himself", "called", "for", "Nationwide", "stop", "and", "frisk", "is", "truly", "the", "mayo", "calling", "the", "snow", ",", "white", "."]} -{"id": "3380-frisk", "word": "frisk", "label_binary": 0, "text_1": "is fisk bot even a bot anymore or is it just frisk posting", "token_idx_1": 11, "text_start_1": 45, "text_end_1": 50, "date_1": "2019-02", "text_2": "turning on the news and seeing people praise the stop & frisk man<<<<<", "token_idx_2": 11, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-02", "text_1_tokenized": ["is", "fisk", "bot", "even", "a", "bot", "anymore", "or", "is", "it", "just", "frisk", "posting"], "text_2_tokenized": ["turning", "on", "the", "news", "and", "seeing", "people", "praise", "the", "stop", "&", "frisk", "man", "<", "<", "<"]} -{"id": "3381-frisk", "word": "frisk", "label_binary": 1, "text_1": "Holy Shit, Holy Fuck, They used to be a *HIC* a small deal with China. How do you feel about stop and frisk?", "token_idx_1": 27, "text_start_1": 102, "text_end_1": 107, "date_1": "2019-02", "text_2": "\u201cYou should expunge the records of those that have been caught up [by my fascist stop and frisk policy that only ended because the Supreme Court struck it down] before\u201d - @MikeBloomberg #DemDebate2020 #DemDebate", "token_idx_2": 19, "text_start_2": 90, "text_end_2": 95, "date_2": "2020-02", "text_1_tokenized": ["Holy", "Shit", ",", "Holy", "Fuck", ",", "They", "used", "to", "be", "a", "*", "HIC", "*", "a", "small", "deal", "with", "China", ".", "How", "do", "you", "feel", "about", "stop", "and", "frisk", "?"], "text_2_tokenized": ["\u201c", "You", "should", "expunge", "the", "records", "of", "those", "that", "have", "been", "caught", "up", "[", "by", "my", "fascist", "stop", "and", "frisk", "policy", "that", "only", "ended", "because", "the", "Supreme", "Court", "struck", "it", "down", "]", "before", "\u201d", "-", "@MikeBloomberg", "#DemDebate2020", "#DemDebate"]} -{"id": "3382-frisk", "word": "frisk", "label_binary": 0, "text_1": "i want to work on this cute fic about chara asriel and frisk seeing snow and being a cute qp polycule but i'm so sleepy i can barely keep my eyes open", "token_idx_1": 12, "text_start_1": 55, "text_end_1": 60, "date_1": "2019-02", "text_2": "When will people learn that once a person shows you who they are believe them? You think that allowing someone who felt like it was good to stop/frisk based on ZERO facts of criminal activity should run our country? 
Hell no.", "token_idx_2": 30, "text_start_2": 145, "text_end_2": 150, "date_2": "2020-02", "text_1_tokenized": ["i", "want", "to", "work", "on", "this", "cute", "fic", "about", "chara", "asriel", "and", "frisk", "seeing", "snow", "and", "being", "a", "cute", "qp", "polycule", "but", "i'm", "so", "sleepy", "i", "can", "barely", "keep", "my", "eyes", "open"], "text_2_tokenized": ["When", "will", "people", "learn", "that", "once", "a", "person", "shows", "you", "who", "they", "are", "believe", "them", "?", "You", "think", "that", "allowing", "someone", "who", "felt", "like", "it", "was", "good", "to", "stop", "/", "frisk", "based", "on", "ZERO", "facts", "of", "criminal", "activity", "should", "run", "our", "country", "?", "Hell", "no", "."]} -{"id": "3383-frisk", "word": "frisk", "label_binary": 1, "text_1": "This night, I'm a sweet knight Let me, an Angel of light Lead you on a sensual trip I want to hold you in my arms And frisk you down for arms We would get lust in worship And make love in warships", "token_idx_1": 29, "text_start_1": 118, "text_end_1": 123, "date_1": "2019-02", "text_2": "Why would you want stop and frisk in baltimore..majority black ppl", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 33, "date_2": "2020-02", "text_1_tokenized": ["This", "night", ",", "I'm", "a", "sweet", "knight", "Let", "me", ",", "an", "Angel", "of", "light", "Lead", "you", "on", "a", "sensual", "trip", "I", "want", "to", "hold", "you", "in", "my", "arms", "And", "frisk", "you", "down", "for", "arms", "We", "would", "get", "lust", "in", "worship", "And", "make", "love", "in", "warships"], "text_2_tokenized": ["Why", "would", "you", "want", "stop", "and", "frisk", "in", "baltimore", "..", "majority", "black", "ppl"]} -{"id": "3384-frisk", "word": "frisk", "label_binary": 0, "text_1": "me: sees a comic where frisk and flowey maintain a friendship after the game ends me: fuck fuck fuck i love these sad kids so much", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-02", "text_2": "ON @CNNSotu @DanaBashCNN @marcshort45 Just watching Marc Short the chief of staff for VP Pence on SOTU program with Dana Bash discuss Bloomberg's stop and frisk policy and Trump calling him racist.Since we all know Trump's full and total agreement with this policy and would have", "token_idx_2": 25, "text_start_2": 155, "text_end_2": 160, "date_2": "2020-02", "text_1_tokenized": ["me", ":", "sees", "a", "comic", "where", "frisk", "and", "flowey", "maintain", "a", "friendship", "after", "the", "game", "ends", "me", ":", "fuck", "fuck", "fuck", "i", "love", "these", "sad", "kids", "so", "much"], "text_2_tokenized": ["ON", "@CNNSotu", "@DanaBashCNN", "@marcshort45", "Just", "watching", "Marc", "Short", "the", "chief", "of", "staff", "for", "VP", "Pence", "on", "SOTU", "program", "with", "Dana", "Bash", "discuss", "Bloomberg's", "stop", "and", "frisk", "policy", "and", "Trump", "calling", "him", "racist.Since", "we", "all", "know", "Trump's", "full", "and", "total", "agreement", "with", "this", "policy", "and", "would", "have"]} -{"id": "3386-frisk", "word": "frisk", "label_binary": 1, "text_1": "Williams says \u201cwe got to many guns in this country\u201d MEANING? But yet something as sensible as stop and frisk is not allowed! And gun crimes penalties are a disgrace! So what? You think ALL GUN OWNERS SHOULD TURN THEM IN? 
MORON!!!!", "token_idx_1": 22, "text_start_1": 103, "text_end_1": 108, "date_1": "2019-02", "text_2": "If throw black males against the wall, stop & frisk, mini Mike gloomberg is so sure he can beat the Deplorables, why didn't he Stay in the Republican party to do so?!\ud83e\udd14 Asking for a friend!\ud83d\ude33\ud83c\uddfa\ud83c\uddf8", "token_idx_2": 10, "text_start_2": 50, "text_end_2": 55, "date_2": "2020-02", "text_1_tokenized": ["Williams", "says", "\u201c", "we", "got", "to", "many", "guns", "in", "this", "country", "\u201d", "MEANING", "?", "But", "yet", "something", "as", "sensible", "as", "stop", "and", "frisk", "is", "not", "allowed", "!", "And", "gun", "crimes", "penalties", "are", "a", "disgrace", "!", "So", "what", "?", "You", "think", "ALL", "GUN", "OWNERS", "SHOULD", "TURN", "THEM", "IN", "?", "MORON", "!", "!", "!"], "text_2_tokenized": ["If", "throw", "black", "males", "against", "the", "wall", ",", "stop", "&", "frisk", ",", "mini", "Mike", "gloomberg", "is", "so", "sure", "he", "can", "beat", "the", "Deplorables", ",", "why", "didn't", "he", "Stay", "in", "the", "Republican", "party", "to", "do", "so", "?", "!", "\ud83e\udd14", "Asking", "for", "a", "friend", "!", "\ud83d\ude33", "\ud83c\uddfa", "\ud83c\uddf8"]} -{"id": "3387-frisk", "word": "frisk", "label_binary": 1, "text_1": "6) Mainstream America is very dismissive about the idea of paying reparations owned to #ADOS and repairing the damage stemming from slave codes, black codes, jim crow, redlining, stop 'n frisk, profiling, unarmed killing of #ADOS & the wealth gap to name a few #Tangibles2020", "token_idx_1": 36, "text_start_1": 187, "text_end_1": 192, "date_1": "2019-02", "text_2": "If you're at all sympathetic to Warren at this point you're on the side of Bloomberg and all the stop and frisk, sexual harassment lawsuits, etc baggage that comes with him.", "token_idx_2": 21, "text_start_2": 106, "text_end_2": 111, "date_2": "2020-02", "text_1_tokenized": ["6", ")", "Mainstream", "America", "is", "very", "dismissive", "about", "the", "idea", "of", "paying", "reparations", "owned", "to", "#ADOS", "and", "repairing", "the", "damage", "stemming", "from", "slave", "codes", ",", "black", "codes", ",", "jim", "crow", ",", "redlining", ",", "stop", "'", "n", "frisk", ",", "profiling", ",", "unarmed", "killing", "of", "#ADOS", "&", "the", "wealth", "gap", "to", "name", "a", "few", "#Tangibles2020"], "text_2_tokenized": ["If", "you're", "at", "all", "sympathetic", "to", "Warren", "at", "this", "point", "you're", "on", "the", "side", "of", "Bloomberg", "and", "all", "the", "stop", "and", "frisk", ",", "sexual", "harassment", "lawsuits", ",", "etc", "baggage", "that", "comes", "with", "him", "."]} -{"id": "3388-frisk", "word": "frisk", "label_binary": 0, "text_1": "my wishlist: 1. persona 5 on switch 2. kingdom hearts on switch 3. frisk in smash 4. mario super sluggers remake or sequel ik these are v unlikely but im ready for not getting what i want and being excited anyways", "token_idx_1": 17, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-02", "text_2": "Ok @DNC The big guns need to come out now. Bloomberg's racist stop & frisk stuff is pouring out. Bernie is divisive and will lose moderates & independents. We can't lose in 2020. Period. Throw your weight behind Pete. He's our only hope against Trump. Unity & Hope. 
Unity & Hope.", "token_idx_2": 15, "text_start_2": 73, "text_end_2": 78, "date_2": "2020-02", "text_1_tokenized": ["my", "wishlist", ":", "1", ".", "persona", "5", "on", "switch", "2", ".", "kingdom", "hearts", "on", "switch", "3", ".", "frisk", "in", "smash", "4", ".", "mario", "super", "sluggers", "remake", "or", "sequel", "ik", "these", "are", "v", "unlikely", "but", "im", "ready", "for", "not", "getting", "what", "i", "want", "and", "being", "excited", "anyways"], "text_2_tokenized": ["Ok", "@DNC", "The", "big", "guns", "need", "to", "come", "out", "now", ".", "Bloomberg's", "racist", "stop", "&", "frisk", "stuff", "is", "pouring", "out", ".", "Bernie", "is", "divisive", "and", "will", "lose", "moderates", "&", "independents", ".", "We", "can't", "lose", "in", "2020", ".", "Period", ".", "Throw", "your", "weight", "behind", "Pete", ".", "He's", "our", "only", "hope", "against", "Trump", ".", "Unity", "&", "Hope", ".", "Unity", "&", "Hope", "."]} -{"id": "3389-frisk", "word": "frisk", "label_binary": 0, "text_1": "I have a theory idea what if undertales story is about frisk in a time lope @MatPatGT", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 60, "date_1": "2019-02", "text_2": "I for one think it's great that whenever Bloomberg surrogates are asked about stop and frisk or his comments about women they look like hostages reading a script", "token_idx_2": 15, "text_start_2": 87, "text_end_2": 92, "date_2": "2020-02", "text_1_tokenized": ["I", "have", "a", "theory", "idea", "what", "if", "undertales", "story", "is", "about", "frisk", "in", "a", "time", "lope", "@MatPatGT"], "text_2_tokenized": ["I", "for", "one", "think", "it's", "great", "that", "whenever", "Bloomberg", "surrogates", "are", "asked", "about", "stop", "and", "frisk", "or", "his", "comments", "about", "women", "they", "look", "like", "hostages", "reading", "a", "script"]} -{"id": "3390-frisk", "word": "frisk", "label_binary": 1, "text_1": "Remembering the time we had stop-and-searches at primary school in which teachers would frisk us for Garbage Pail Kids trading cards.", "token_idx_1": 13, "text_start_1": 88, "text_end_1": 93, "date_1": "2019-02", "text_2": "\"You all are going to start focusing on [Bloomberg] like you have on me. ... His position on issues relating to the African-American community, from stop and frisk to the way he talked about Obama.\" -- @JoeBiden #MTP #IfItsSunday", "token_idx_2": 31, "text_start_2": 158, "text_end_2": 163, "date_2": "2020-02", "text_1_tokenized": ["Remembering", "the", "time", "we", "had", "stop-and-searches", "at", "primary", "school", "in", "which", "teachers", "would", "frisk", "us", "for", "Garbage", "Pail", "Kids", "trading", "cards", "."], "text_2_tokenized": ["\"", "You", "all", "are", "going", "to", "start", "focusing", "on", "[", "Bloomberg", "]", "like", "you", "have", "on", "me", ". ...", "His", "position", "on", "issues", "relating", "to", "the", "African-American", "community", ",", "from", "stop", "and", "frisk", "to", "the", "way", "he", "talked", "about", "Obama", ".", "\"", "-", "-", "@JoeBiden", "#MTP", "#IfItsSunday"]} -{"id": "3392-frisk", "word": "frisk", "label_binary": 1, "text_1": "I don't want to have to csi background check non-black-looking people to see if they have black in them everytime they say the nword. Tired of it. 
Non-black-looking mixed people must LOVE the constant frisk search.", "token_idx_1": 36, "text_start_1": 201, "text_end_1": 206, "date_1": "2019-02", "text_2": "Anybody else seeing a shitload of bot like comments about how stop and frisk was fine actually so let's talk about Bernie Bro's and how mean they are?", "token_idx_2": 13, "text_start_2": 71, "text_end_2": 76, "date_2": "2020-02", "text_1_tokenized": ["I", "don't", "want", "to", "have", "to", "csi", "background", "check", "non-black-looking", "people", "to", "see", "if", "they", "have", "black", "in", "them", "everytime", "they", "say", "the", "nword", ".", "Tired", "of", "it", ".", "Non-black-looking", "mixed", "people", "must", "LOVE", "the", "constant", "frisk", "search", "."], "text_2_tokenized": ["Anybody", "else", "seeing", "a", "shitload", "of", "bot", "like", "comments", "about", "how", "stop", "and", "frisk", "was", "fine", "actually", "so", "let's", "talk", "about", "Bernie", "Bro's", "and", "how", "mean", "they", "are", "?"]} -{"id": "3393-frisk", "word": "frisk", "label_binary": 1, "text_1": "wonder if the tory party will arrange to frisk grayling on a daily basis to ensure he has no pens on his person just in case he is tempted to go out and sign another costly contract or will they tell him to hand them to Liam fox so he can actually sign some?", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 46, "date_1": "2019-02", "text_2": "\"We put all the cops in the minority neighborhoods. Yes, that is true. Why did we do it? Because that's where all the crime is. And the way you get the guns out of the kid's hands is to throw them up against the walls and frisk them.\" -Michael Bloomberg #BloombergIsARacist", "token_idx_2": 52, "text_start_2": 222, "text_end_2": 227, "date_2": "2020-02", "text_1_tokenized": ["wonder", "if", "the", "tory", "party", "will", "arrange", "to", "frisk", "grayling", "on", "a", "daily", "basis", "to", "ensure", "he", "has", "no", "pens", "on", "his", "person", "just", "in", "case", "he", "is", "tempted", "to", "go", "out", "and", "sign", "another", "costly", "contract", "or", "will", "they", "tell", "him", "to", "hand", "them", "to", "Liam", "fox", "so", "he", "can", "actually", "sign", "some", "?"], "text_2_tokenized": ["\"", "We", "put", "all", "the", "cops", "in", "the", "minority", "neighborhoods", ".", "Yes", ",", "that", "is", "true", ".", "Why", "did", "we", "do", "it", "?", "Because", "that's", "where", "all", "the", "crime", "is", ".", "And", "the", "way", "you", "get", "the", "guns", "out", "of", "the", "kid's", "hands", "is", "to", "throw", "them", "up", "against", "the", "walls", "and", "frisk", "them", ".", "\"", "-", "Michael", "Bloomberg", "#BloombergIsARacist"]} -{"id": "3394-frisk", "word": "frisk", "label_binary": 0, "text_1": "frisk. if you have my lilac heels with the flowers. you are Obliged to tell me, i have a date later. \ud83d\ude24", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-02", "text_2": "remember in the debate when Biden took credit for appointing a \"federal monitor\" to oversee Bloomberg's stop and frisk policy? yeah that never happened. 
how much lying are you'all willing to swallow?", "token_idx_2": 20, "text_start_2": 113, "text_end_2": 118, "date_2": "2020-02", "text_1_tokenized": ["frisk", ".", "if", "you", "have", "my", "lilac", "heels", "with", "the", "flowers", ".", "you", "are", "Obliged", "to", "tell", "me", ",", "i", "have", "a", "date", "later", ".", "\ud83d\ude24"], "text_2_tokenized": ["remember", "in", "the", "debate", "when", "Biden", "took", "credit", "for", "appointing", "a", "\"", "federal", "monitor", "\"", "to", "oversee", "Bloomberg's", "stop", "and", "frisk", "policy", "?", "yeah", "that", "never", "happened", ".", "how", "much", "lying", "are", "you'all", "willing", "to", "swallow", "?"]} -{"id": "3395-frisk", "word": "frisk", "label_binary": 1, "text_1": "Harder pat down: TSA frisk on Ahmed or me checking for my wallet and keys after class", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-02", "text_2": "had coworkers tell me today it's ok Bloomberg is trying to buy the nomination because \u201cat least it's his own money\u201d, stop and frisk was good because \u201cthat's who had the guns\u201d and that \u201cblack people actually liked stop and frisk\u201d! I love talking politics at work!!! The best!!!!!", "token_idx_2": 26, "text_start_2": 126, "text_end_2": 131, "date_2": "2020-02", "text_1_tokenized": ["Harder", "pat", "down", ":", "TSA", "frisk", "on", "Ahmed", "or", "me", "checking", "for", "my", "wallet", "and", "keys", "after", "class"], "text_2_tokenized": ["had", "coworkers", "tell", "me", "today", "it's", "ok", "Bloomberg", "is", "trying", "to", "buy", "the", "nomination", "because", "\u201c", "at", "least", "it's", "his", "own", "money", "\u201d", ",", "stop", "and", "frisk", "was", "good", "because", "\u201c", "that's", "who", "had", "the", "guns", "\u201d", "and", "that", "\u201c", "black", "people", "actually", "liked", "stop", "and", "frisk", "\u201d", "!", "I", "love", "talking", "politics", "at", "work", "!", "!", "!", "The", "best", "!", "!", "!"]} -{"id": "3397-frisk", "word": "frisk", "label_binary": 1, "text_1": "This is stop in frisk!", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 21, "date_1": "2019-02", "text_2": "I always thought stop and frisk would be the biggest obstacle @MikeBloomberg would face in 2020. Yet somehow @realDonaldTrump and Bernie Sanders mouthpiece @BenjaminPDixon ineptly victimized Mike via spite", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-02", "text_1_tokenized": ["This", "is", "stop", "in", "frisk", "!"], "text_2_tokenized": ["I", "always", "thought", "stop", "and", "frisk", "would", "be", "the", "biggest", "obstacle", "@MikeBloomberg", "would", "face", "in", "2020", ".", "Yet", "somehow", "@realDonaldTrump", "and", "Bernie", "Sanders", "mouthpiece", "@BenjaminPDixon", "ineptly", "victimized", "Mike", "via", "spite"]} -{"id": "3398-frisk", "word": "frisk", "label_binary": 0, "text_1": "I'd like to reiterate that bouncers in Newcastle are cunts but it's all good in the hood cause el boys still made me night out a frisk. Fuck the bouncers and duck Geordie shore", "token_idx_1": 26, "text_start_1": 129, "text_end_1": 134, "date_1": "2019-02", "text_2": "To sum up last night: go frisk yourself, Mike. 
#DemDebate", "token_idx_2": 7, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-02", "text_1_tokenized": ["I'd", "like", "to", "reiterate", "that", "bouncers", "in", "Newcastle", "are", "cunts", "but", "it's", "all", "good", "in", "the", "hood", "cause", "el", "boys", "still", "made", "me", "night", "out", "a", "frisk", ".", "Fuck", "the", "bouncers", "and", "duck", "Geordie", "shore"], "text_2_tokenized": ["To", "sum", "up", "last", "night", ":", "go", "frisk", "yourself", ",", "Mike", ".", "#DemDebate"]} -{"id": "3399-frisk", "word": "frisk", "label_binary": 1, "text_1": "ain't a bouncer but the way you bounce it I might jus frisk", "token_idx_1": 12, "text_start_1": 54, "text_end_1": 59, "date_1": "2019-02", "text_2": "A plurality of Democratic voters are, I think, dumber than republican voters. Y'all really edging for Bloomberg, a stop and frisk candidate, to win? Or Buttigieg, a candidate that targeted Black citizens? Or Klobuchar, a candidate that put an innocent Black man in jail?", "token_idx_2": 24, "text_start_2": 124, "text_end_2": 129, "date_2": "2020-02", "text_1_tokenized": ["ain't", "a", "bouncer", "but", "the", "way", "you", "bounce", "it", "I", "might", "jus", "frisk"], "text_2_tokenized": ["A", "plurality", "of", "Democratic", "voters", "are", ",", "I", "think", ",", "dumber", "than", "republican", "voters", ".", "Y'all", "really", "edging", "for", "Bloomberg", ",", "a", "stop", "and", "frisk", "candidate", ",", "to", "win", "?", "Or", "Buttigieg", ",", "a", "candidate", "that", "targeted", "Black", "citizens", "?", "Or", "Klobuchar", ",", "a", "candidate", "that", "put", "an", "innocent", "Black", "man", "in", "jail", "?"]} -{"id": "3400-frisk", "word": "frisk", "label_binary": 1, "text_1": "Made it through security with only two students treated to the frisk.", "token_idx_1": 11, "text_start_1": 63, "text_end_1": 68, "date_1": "2019-02", "text_2": "Can we stop calling Bloomberg by his name and just call him \"that stop and frisk nigga\"? If you're not comfortable with that, let's go with \"that stop and frisk fucker\".", "token_idx_2": 16, "text_start_2": 75, "text_end_2": 80, "date_2": "2020-02", "text_1_tokenized": ["Made", "it", "through", "security", "with", "only", "two", "students", "treated", "to", "the", "frisk", "."], "text_2_tokenized": ["Can", "we", "stop", "calling", "Bloomberg", "by", "his", "name", "and", "just", "call", "him", "\"", "that", "stop", "and", "frisk", "nigga", "\"", "?", "If", "you're", "not", "comfortable", "with", "that", ",", "let's", "go", "with", "\"", "that", "stop", "and", "frisk", "fucker", "\"", "."]} -{"id": "3401-frisk", "word": "frisk", "label_binary": 0, "text_1": "frisk for smash so i can listen to rude buster on my switch", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-02", "text_2": "I won't vote 4 #Bloomberg2020 period. I don't trust him. He's back tracked on his record/past statements almost as much as Trump-social security min. wage stop frisk housing. 
He's a republican Trump's friend & an Oligarch out 2 save the Staus Quo 4 self & friends #NotMeUs", "token_idx_2": 31, "text_start_2": 160, "text_end_2": 165, "date_2": "2020-02", "text_1_tokenized": ["frisk", "for", "smash", "so", "i", "can", "listen", "to", "rude", "buster", "on", "my", "switch"], "text_2_tokenized": ["I", "won't", "vote", "4", "#Bloomberg2020", "period", ".", "I", "don't", "trust", "him", ".", "He's", "back", "tracked", "on", "his", "record", "/", "past", "statements", "almost", "as", "much", "as", "Trump-social", "security", "min", ".", "wage", "stop", "frisk", "housing", ".", "He's", "a", "republican", "Trump's", "friend", "&", "an", "Oligarch", "out", "2", "save", "the", "Staus", "Quo", "4", "self", "&", "friends", "#NotMeUs"]} -{"id": "3402-frisk", "word": "frisk", "label_binary": 1, "text_1": "Who actually wants @NYCMayor to run? Ending stop & frisk was the one good thing I can think of from his whole tenure. Meanwhile I watch subway crumble, bike lanes get blocked without tickets, and more.", "token_idx_1": 10, "text_start_1": 55, "text_end_1": 60, "date_1": "2019-02", "text_2": "My landlord was saying \"Bloomberg...they didn't like his stop & frisk laws but it was the Blacks that were doing the crime to be honest with you. And crime did reduce. So it worked...they felt targeted but it wasn't...\" Then when he heard me coming out, he changed lanes", "token_idx_2": 13, "text_start_2": 68, "text_end_2": 73, "date_2": "2020-02", "text_1_tokenized": ["Who", "actually", "wants", "@NYCMayor", "to", "run", "?", "Ending", "stop", "&", "frisk", "was", "the", "one", "good", "thing", "I", "can", "think", "of", "from", "his", "whole", "tenure", ".", "Meanwhile", "I", "watch", "subway", "crumble", ",", "bike", "lanes", "get", "blocked", "without", "tickets", ",", "and", "more", "."], "text_2_tokenized": ["My", "landlord", "was", "saying", "\"", "Bloomberg", "...", "they", "didn't", "like", "his", "stop", "&", "frisk", "laws", "but", "it", "was", "the", "Blacks", "that", "were", "doing", "the", "crime", "to", "be", "honest", "with", "you", ".", "And", "crime", "did", "reduce", ".", "So", "it", "worked", "...", "they", "felt", "targeted", "but", "it", "wasn't", "...", "\"", "Then", "when", "he", "heard", "me", "coming", "out", ",", "he", "changed", "lanes"]} -{"id": "3403-frisk", "word": "frisk", "label_binary": 0, "text_1": "Haha who's frisk leave a message after the beep [PLAYS ELECTRIC VIOLIN, Leans on their blaster.] BEEEP LMAO", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 16, "date_1": "2019-02", "text_2": "Blir aldrig frisk, man I'm sick and tired of this shit", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 17, "date_2": "2020-02", "text_1_tokenized": ["Haha", "who's", "frisk", "leave", "a", "message", "after", "the", "beep", "[", "PLAYS", "ELECTRIC", "VIOLIN", ",", "Leans", "on", "their", "blaster", ".", "]", "BEEEP", "LMAO"], "text_2_tokenized": ["Blir", "aldrig", "frisk", ",", "man", "I'm", "sick", "and", "tired", "of", "this", "shit"]} -{"id": "3404-frisk", "word": "frisk", "label_binary": 0, "text_1": "Having very little hair is a frisk", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 34, "date_1": "2019-02", "text_2": "Hypocrisy at ABC news and Martha Radtiz. Doesn't even mention Bloomberg's comments on women where he said a pregnant woman in my company should \u2018kill it'. 
Barely mentions the stop and frisk and doesn't mention his comments on Blaming blacks causing mortgage crisis.", "token_idx_2": 35, "text_start_2": 184, "text_end_2": 189, "date_2": "2020-02", "text_1_tokenized": ["Having", "very", "little", "hair", "is", "a", "frisk"], "text_2_tokenized": ["Hypocrisy", "at", "ABC", "news", "and", "Martha", "Radtiz", ".", "Doesn't", "even", "mention", "Bloomberg's", "comments", "on", "women", "where", "he", "said", "a", "pregnant", "woman", "in", "my", "company", "should", "\u2018", "kill", "it", "'", ".", "Barely", "mentions", "the", "stop", "and", "frisk", "and", "doesn't", "mention", "his", "comments", "on", "Blaming", "blacks", "causing", "mortgage", "crisis", "."]} -{"id": "3405-frisk", "word": "frisk", "label_binary": 0, "text_1": "played undertale on switch (before it was on sale bleh) but just reaffirming my fave characters: monster kid, papyrus, mettaton, asriel. special mentions are undyne, toriel, uhh blook i guess, frisk???", "token_idx_1": 40, "text_start_1": 193, "text_end_1": 198, "date_1": "2019-02", "text_2": "With all my friends getting texts from the Bloomberg campaign, I feel a bit left out! Where's my tone deaf appeal to join up with the billionaire who created stop and frisk??", "token_idx_2": 33, "text_start_2": 167, "text_end_2": 172, "date_2": "2020-02", "text_1_tokenized": ["played", "undertale", "on", "switch", "(", "before", "it", "was", "on", "sale", "bleh", ")", "but", "just", "reaffirming", "my", "fave", "characters", ":", "monster", "kid", ",", "papyrus", ",", "mettaton", ",", "asriel", ".", "special", "mentions", "are", "undyne", ",", "toriel", ",", "uhh", "blook", "i", "guess", ",", "frisk", "?", "?", "?"], "text_2_tokenized": ["With", "all", "my", "friends", "getting", "texts", "from", "the", "Bloomberg", "campaign", ",", "I", "feel", "a", "bit", "left", "out", "!", "Where's", "my", "tone", "deaf", "appeal", "to", "join", "up", "with", "the", "billionaire", "who", "created", "stop", "and", "frisk", "?", "?"]} -{"id": "3406-frisk", "word": "frisk", "label_binary": 0, "text_1": "ok message for all the crazy sans fan girls ITS CANNON THAT SANS DOESN'T LIKE MURDERS @pinkiscupcake05 pls retweet this becuase he litterally gets mad at frisk aka chara for killing all the monsters so why would he like you if you murder", "token_idx_1": 26, "text_start_1": 154, "text_end_1": 159, "date_1": "2019-02", "text_2": "Please set up an educational or trade school fund for those affected by stop and frisk when you were mayor. 
You have my vote already @MikeBloomberg", "token_idx_2": 15, "text_start_2": 81, "text_end_2": 86, "date_2": "2020-02", "text_1_tokenized": ["ok", "message", "for", "all", "the", "crazy", "sans", "fan", "girls", "ITS", "CANNON", "THAT", "SANS", "DOESN'T", "LIKE", "MURDERS", "@pinkiscupcake05", "pls", "retweet", "this", "becuase", "he", "litterally", "gets", "mad", "at", "frisk", "aka", "chara", "for", "killing", "all", "the", "monsters", "so", "why", "would", "he", "like", "you", "if", "you", "murder"], "text_2_tokenized": ["Please", "set", "up", "an", "educational", "or", "trade", "school", "fund", "for", "those", "affected", "by", "stop", "and", "frisk", "when", "you", "were", "mayor", ".", "You", "have", "my", "vote", "already", "@MikeBloomberg"]}
-{"id": "3407-frisk", "word": "frisk", "label_binary": 0, "text_1": "my teacher: frisk stop drawing giraffes all the time and do ur work me: chase a bag don't worry about what im doing -_-", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 17, "date_1": "2019-02", "text_2": "My 5yo watched the Dem debate. She legit cannot tell Bloomberg and Biden apart, which is pretty funny. She now knows about stop and frisk. She now refers to Biden as \u201cthe one who can talk dumb, but in a good way.\u201d", "token_idx_2": 27, "text_start_2": 132, "text_end_2": 137, "date_2": "2020-02", "text_1_tokenized": ["my", "teacher", ":", "frisk", "stop", "drawing", "giraffes", "all", "the", "time", "and", "do", "ur", "work", "me", ":", "chase", "a", "bag", "don't", "worry", "about", "what", "im", "doing", "-", "_", "-"], "text_2_tokenized": ["My", "5yo", "watched", "the", "Dem", "debate", ".", "She", "legit", "cannot", "tell", "Bloomberg", "and", "Biden", "apart", ",", "which", "is", "pretty", "funny", ".", "She", "now", "knows", "about", "stop", "and", "frisk", ".", "She", "now", "refers", "to", "Biden", "as", "\u201c", "the", "one", "who", "can", "talk", "dumb", ",", "but", "in", "a", "good", "way", ".", "\u201d"]}
-{"id": "3408-frisk", "word": "frisk", "label_binary": 1, "text_1": "I ain't gone let the ppl frisk me if I'm dirty homie", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-02", "text_2": "Set up a stop and frisk outside a white club and catch coke heads", "token_idx_2": 5, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-02", "text_1_tokenized": ["I", "ain't", "gone", "let", "the", "ppl", "frisk", "me", "if", "I'm", "dirty", "homie"], "text_2_tokenized": ["Set", "up", "a", "stop", "and", "frisk", "outside", "a", "white", "club", "and", "catch", "coke", "heads"]}
-{"id": "3409-frisk", "word": "frisk", "label_binary": 0, "text_1": "i rly wanna draw frisk soon bc i finally!!!!!! 
finished undertale properly for the first time since it's been out (plus my hair is about the same length as their's \ud83d\ude2d", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 22, "date_1": "2019-02", "text_2": "Bloomberg would like to implement stop and frisk in the middle east", "token_idx_2": 7, "text_start_2": 43, "text_end_2": 48, "date_2": "2020-02", "text_1_tokenized": ["i", "rly", "wanna", "draw", "frisk", "soon", "bc", "i", "finally", "!", "!", "!", "finished", "undertale", "properly", "for", "the", "first", "time", "since", "it's", "been", "out", "(", "plus", "my", "hair", "is", "about", "the", "same", "length", "as", "their's", "\ud83d\ude2d"], "text_2_tokenized": ["Bloomberg", "would", "like", "to", "implement", "stop", "and", "frisk", "in", "the", "middle", "east"]}
-{"id": "3411-frisk", "word": "frisk", "label_binary": 1, "text_1": "I ain't a bouncer, but the way you bouncin I might just frisk", "token_idx_1": 13, "text_start_1": 56, "text_end_1": 61, "date_1": "2019-02", "text_2": "im like arguing about stop and frisk with my dad and my head physically hurts", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 36, "date_2": "2020-02", "text_1_tokenized": ["I", "ain't", "a", "bouncer", ",", "but", "the", "way", "you", "bouncin", "I", "might", "just", "frisk"], "text_2_tokenized": ["im", "like", "arguing", "about", "stop", "and", "frisk", "with", "my", "dad", "and", "my", "head", "physically", "hurts"]}
-{"id": "3412-frisk", "word": "frisk", "label_binary": 1, "text_1": "And we don't wait for search and frisk leave this bitch in neutral ....\ud83d\ude97 \ud83d\udca8", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 38, "date_1": "2019-02", "text_2": "Why aren't more black entertainers, black leaders,black pastors, the Black caucus speaking on their own people's plight of stop and frisk ?? They sure are quick to want our support,vote and dollars", "token_idx_2": 24, "text_start_2": 132, "text_end_2": 137, "date_2": "2020-02", "text_1_tokenized": ["And", "we", "don't", "wait", "for", "search", "and", "frisk", "leave", "this", "bitch", "in", "neutral", "...", "\ud83d\ude97", "\ud83d\udca8"], "text_2_tokenized": ["Why", "aren't", "more", "black", "entertainers", ",", "black", "leaders", ",", "black", "pastors", ",", "the", "Black", "caucus", "speaking", "on", "their", "own", "people's", "plight", "of", "stop", "and", "frisk", "?", "?", "They", "sure", "are", "quick", "to", "want", "our", "support", ",", "vote", "and", "dollars"]}
-{"id": "3413-frisk", "word": "frisk", "label_binary": 0, "text_1": "broke: frisk for smash joke: sans for smash woke: undyne for smash bespoke: jevil for smash", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-02", "text_2": "Michael Bloomberg's stop and frisk is a KKK policy, Pete Buttireig gentrification out South Bend is no different than the KKK burning down Black cities. 
White (European) behavior is consistent.", "token_idx_2": 4, "text_start_2": 29, "text_end_2": 34, "date_2": "2020-02", "text_1_tokenized": ["broke", ":", "frisk", "for", "smash", "joke", ":", "sans", "for", "smash", "woke", ":", "undyne", "for", "smash", "bespoke", ":", "jevil", "for", "smash"], "text_2_tokenized": ["Michael", "Bloomberg's", "stop", "and", "frisk", "is", "a", "KKK", "policy", ",", "Pete", "Buttireig", "gentrification", "out", "South", "Bend", "is", "no", "different", "than", "the", "KKK", "burning", "down", "Black", "cities", ".", "White", "(", "European", ")", "behavior", "is", "consistent", "."]}
-{"id": "3414-frisk", "word": "frisk", "label_binary": 0, "text_1": "Welp. It's over.. It's done. I won't do genocide, I won't reset. I'll let frisk live their life and let everyone happy. I don't regret buying this game. I loved it. A true masterpiece. And I have also played Deltarune's first chapter. So I guess I may buy the next one.", "token_idx_1": 19, "text_start_1": 74, "text_end_1": 79, "date_1": "2019-02", "text_2": "Unpopular opinion: When done right, stop and frisk is a good thing.", "token_idx_2": 9, "text_start_2": 45, "text_end_2": 50, "date_2": "2020-02", "text_1_tokenized": ["Welp", ".", "It's", "over", "..", "It's", "done", ".", "I", "won't", "do", "genocide", ",", "I", "won't", "reset", ".", "I'll", "let", "frisk", "live", "their", "life", "and", "let", "everyone", "happy", ".", "I", "don't", "regret", "buying", "this", "game", ".", "I", "loved", "it", ".", "A", "true", "masterpiece", ".", "And", "I", "have", "also", "played", "Deltarune's", "first", "chapter", ".", "So", "I", "guess", "I", "may", "buy", "the", "next", "one", "."], "text_2_tokenized": ["Unpopular", "opinion", ":", "When", "done", "right", ",", "stop", "and", "frisk", "is", "a", "good", "thing", "."]}
-{"id": "3415-frisk", "word": "frisk", "label_binary": 0, "text_1": "Is it poetic that I have an underfell sans fc fondling an adult underfell frisk?", "token_idx_1": 14, "text_start_1": 74, "text_end_1": 79, "date_1": "2019-02", "text_2": "Also, catch me on MSNBC at 2:20 talking about stop &amp; frisk.", "token_idx_2": 12, "text_start_2": 57, "text_end_2": 62, "date_2": "2020-02", "text_1_tokenized": ["Is", "it", "poetic", "that", "I", "have", "an", "underfell", "sans", "fc", "fondling", "an", "adult", "underfell", "frisk", "?"], "text_2_tokenized": ["Also", ",", "catch", "me", "on", "MSNBC", "at", "2:20", "talking", "about", "stop", "&", "frisk", "."]}
-{"id": "3416-frisk", "word": "frisk", "label_binary": 1, "text_1": "How u call a nigga \u201dBro \u201d then turn around and give him sum pussy ctfuu .. 
females is 2 frisk bob", "token_idx_1": 21, "text_start_1": 88, "text_end_1": 93, "date_1": "2019-02", "text_2": "I wonder how much of the outrage about Bloomberg's stop and frisk record is related to other candidates' supporters desire to distract from their own candidates' lack of commitment to left-wing criminal justice policies.", "token_idx_2": 11, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-02", "text_1_tokenized": ["How", "u", "call", "a", "nigga", "\u201d", "Bro", "\u201d", "then", "turn", "around", "and", "give", "him", "sum", "pussy", "ctfuu", "..", "females", "is", "2", "frisk", "bob"], "text_2_tokenized": ["I", "wonder", "how", "much", "of", "the", "outrage", "about", "Bloomberg's", "stop", "and", "frisk", "record", "is", "related", "to", "other", "candidates", "'", "supporters", "desire", "to", "distract", "from", "their", "own", "candidates", "'", "lack", "of", "commitment", "to", "left-wing", "criminal", "justice", "policies", "."]}
-{"id": "3417-frisk", "word": "frisk", "label_binary": 0, "text_1": "frisk in baldi's schoolhouse??????????", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-02", "text_2": "Don't, not for one second, think that @MikeBloomberg is not a racist and a person who ONLY gives a fuck about himself and his money! He created stop and frisk and stood by it until just recently. Don't be fooled by this bozo! #NotForPresident #Liar \ud83d\udc4e\ud83c\udffc\ud83d\udc4e\ud83c\udffc", "token_idx_2": 32, "text_start_2": 153, "text_end_2": 158, "date_2": "2020-02", "text_1_tokenized": ["frisk", "in", "baldi's", "schoolhouse", "?", "?", "?"], "text_2_tokenized": ["Don't", ",", "not", "for", "one", "second", ",", "think", "that", "@MikeBloomberg", "is", "not", "a", "racist", "and", "a", "person", "who", "ONLY", "gives", "a", "fuck", "about", "himself", "and", "his", "money", "!", "He", "created", "stop", "and", "frisk", "and", "stood", "by", "it", "until", "just", "recently", ".", "Don't", "be", "fooled", "by", "this", "bozo", "!", "#NotForPresident", "#Liar", "\ud83d\udc4e\ud83c\udffc", "\ud83d\udc4e\ud83c\udffc"]}
-{"id": "3418-frisk", "word": "frisk", "label_binary": 1, "text_1": "I ain't a bouncer but the way you bouncin' I might just frisk ya.", "token_idx_1": 13, "text_start_1": 56, "text_end_1": 61, "date_1": "2019-02", "text_2": "Remember when our family values said never tear a person down regardless of their failures always build a person up. Example even though Bloomberg had stop and frisk he made a m7stake even though Sanders dont have all the answers he still believes in America even though bidens", "token_idx_2": 28, "text_start_2": 160, "text_end_2": 165, "date_2": "2020-02", "text_1_tokenized": ["I", "ain't", "a", "bouncer", "but", "the", "way", "you", "bouncin", "'", "I", "might", "just", "frisk", "ya", "."], "text_2_tokenized": ["Remember", "when", "our", "family", "values", "said", "never", "tear", "a", "person", "down", "regardless", "of", "their", "failures", "always", "build", "a", "person", "up", ".", "Example", "even", "though", "Bloomberg", "had", "stop", "and", "frisk", "he", "made", "a", "m7stake", "even", "though", "Sanders", "dont", "have", "all", "the", "answers", "he", "still", "believes", "in", "America", "even", "though", "bidens"]}
-{"id": "3419-frisk", "word": "frisk", "label_binary": 0, "text_1": "(( i WILL draw frisk as a clown", "token_idx_1": 5, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-02", "text_2": "Why would anyone support #Bloomberg? 
His stop &amp; frisk policy was so racist it was ruled unconstitutional. Donald Trump loves it. He should not be running as a Democrat.", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-02", "text_1_tokenized": ["(", "(", "i", "WILL", "draw", "frisk", "as", "a", "clown"], "text_2_tokenized": ["Why", "would", "anyone", "support", "#Bloomberg", "?", "His", "stop", "&", "frisk", "policy", "was", "so", "racist", "it", "was", "ruled", "unconstitutional", ".", "Donald", "Trump", "loves", "it", ".", "He", "should", "not", "be", "running", "as", "a", "Democrat", "."]}
-{"id": "3421-frisk", "word": "frisk", "label_binary": 0, "text_1": "new one wut is frisk", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-02", "text_2": "The stop and frisk guy is now endorsing the crime bill guy in the most shocking turn of events", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-02", "text_1_tokenized": ["new", "one", "wut", "is", "frisk"], "text_2_tokenized": ["The", "stop", "and", "frisk", "guy", "is", "now", "endorsing", "the", "crime", "bill", "guy", "in", "the", "most", "shocking", "turn", "of", "events"]}
-{"id": "3422-frisk", "word": "frisk", "label_binary": 1, "text_1": "Keep me away from police tryna frisk me and snakey ass chicks tryna tell me they miss me - YT, Summer Waving", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-02", "text_2": "I imagine there are quite a few suburban white people appalled by Trump's xenophobia but willing to look past Bloomberg's stop and frisk because Bernie's taxes scare them more.", "token_idx_2": 22, "text_start_2": 131, "text_end_2": 136, "date_2": "2020-02", "text_1_tokenized": ["Keep", "me", "away", "from", "police", "tryna", "frisk", "me", "and", "snakey", "ass", "chicks", "tryna", "tell", "me", "they", "miss", "me", "-", "YT", ",", "Summer", "Waving"], "text_2_tokenized": ["I", "imagine", "there", "are", "quite", "a", "few", "suburban", "white", "people", "appalled", "by", "Trump's", "xenophobia", "but", "willing", "to", "look", "past", "Bloomberg's", "stop", "and", "frisk", "because", "Bernie's", "taxes", "scare", "them", "more", "."]}
-{"id": "3423-frisk", "word": "frisk", "label_binary": 1, "text_1": "The early morning frisk... from the TSA.", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-02", "text_2": "Biden was asked about Bloomberg's audio on stop and frisk and he decided to talk about gun control?", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-02", "text_1_tokenized": ["The", "early", "morning", "frisk", "...", "from", "the", "TSA", "."], "text_2_tokenized": ["Biden", "was", "asked", "about", "Bloomberg's", "audio", "on", "stop", "and", "frisk", "and", "he", "decided", "to", "talk", "about", "gun", "control", "?"]}
-{"id": "3424-frisk", "word": "frisk", "label_binary": 0, "text_1": "Can imagine @LewisCapaldi would be pure frisk. Would enjoy to sit in a kitchen with him", "token_idx_1": 6, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-02", "text_2": "Stop and frisk is a great example of systematic racism. 
Blame Bloomberg", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-02", "text_1_tokenized": ["Can", "imagine", "@LewisCapaldi", "would", "be", "pure", "frisk", ".", "Would", "enjoy", "to", "sit", "in", "a", "kitchen", "with", "him"], "text_2_tokenized": ["Stop", "and", "frisk", "is", "a", "great", "example", "of", "systematic", "racism", ".", "Blame", "Bloomberg"]}
-{"id": "3425-frisk", "word": "frisk", "label_binary": 0, "text_1": "I officially hace a au The name of it is Ar\u00edstale Au were frisk was able to change one thing in the past. The change led to many events changing. Like for example gaster will be around. Its basically a au were everyone has a second chance at life.", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 63, "date_1": "2019-02", "text_2": "I was personally stopped at least there's times under Bloomberg's stop and frisk policy. They never found anything but I was still a target. #BloombergIsRacist", "token_idx_2": 12, "text_start_2": 75, "text_end_2": 80, "date_2": "2020-02", "text_1_tokenized": ["I", "officially", "hace", "a", "au", "The", "name", "of", "it", "is", "Ar\u00edstale", "Au", "were", "frisk", "was", "able", "to", "change", "one", "thing", "in", "the", "past", ".", "The", "change", "led", "to", "many", "events", "changing", ".", "Like", "for", "example", "gaster", "will", "be", "around", ".", "Its", "basically", "a", "au", "were", "everyone", "has", "a", "second", "chance", "at", "life", "."], "text_2_tokenized": ["I", "was", "personally", "stopped", "at", "least", "there's", "times", "under", "Bloomberg's", "stop", "and", "frisk", "policy", ".", "They", "never", "found", "anything", "but", "I", "was", "still", "a", "target", ".", "#BloombergIsRacist"]}
-{"id": "3426-frisk", "word": "frisk", "label_binary": 1, "text_1": "ralsei accompanies kris through the door, while toriel leaves frisk behind at the door", "token_idx_1": 10, "text_start_1": 62, "text_end_1": 67, "date_1": "2019-02", "text_2": "People have to just let it go... Bloomberg is not sorry for stop and frisk, Period! When someone shows you who they are, Believe them! #CNN", "token_idx_2": 15, "text_start_2": 69, "text_end_2": 74, "date_2": "2020-02", "text_1_tokenized": ["ralsei", "accompanies", "kris", "through", "the", "door", ",", "while", "toriel", "leaves", "frisk", "behind", "at", "the", "door"], "text_2_tokenized": ["People", "have", "to", "just", "let", "it", "go", "...", "Bloomberg", "is", "not", "sorry", "for", "stop", "and", "frisk", ",", "Period", "!", "When", "someone", "shows", "you", "who", "they", "are", ",", "Believe", "them", "!", "#CNN"]}
-{"id": "3428-frisk", "word": "frisk", "label_binary": 0, "text_1": "heh... so this was what frisk was talking about, twitter huh seems fun", "token_idx_1": 6, "text_start_1": 24, "text_end_1": 29, "date_1": "2019-02", "text_2": "Trump and Mike Bloomberg really do have a lot in common. 
Both are billionaires, and both think \"stop and frisk\" is the best way to deal with minority women.", "token_idx_2": 22, "text_start_2": 105, "text_end_2": 110, "date_2": "2020-02", "text_1_tokenized": ["heh", "...", "so", "this", "was", "what", "frisk", "was", "talking", "about", ",", "twitter", "huh", "seems", "fun"], "text_2_tokenized": ["Trump", "and", "Mike", "Bloomberg", "really", "do", "have", "a", "lot", "in", "common", ".", "Both", "are", "billionaires", ",", "and", "both", "think", "\"", "stop", "and", "frisk", "\"", "is", "the", "best", "way", "to", "deal", "with", "minority", "women", "."]}
-{"id": "3429-frisk", "word": "frisk", "label_binary": 0, "text_1": "A4 one way is to elect more politicians like my amazing DA Larry Krasner who are willing to not prosecute stop and frisk, and to personally not call the cops, but what else? #CleartheAir", "token_idx_1": 22, "text_start_1": 115, "text_end_1": 120, "date_1": "2019-02", "text_2": "Looking for a shiny Orbeetle (with frisk, sticky web, and recover). Can offer a number of shinies or mythicals.", "token_idx_2": 7, "text_start_2": 35, "text_end_2": 40, "date_2": "2020-02", "text_1_tokenized": ["A4", "one", "way", "is", "to", "elect", "more", "politicians", "like", "my", "amazing", "DA", "Larry", "Krasner", "who", "are", "willing", "to", "not", "prosecute", "stop", "and", "frisk", ",", "and", "to", "personally", "not", "call", "the", "cops", ",", "but", "what", "else", "?", "#CleartheAir"], "text_2_tokenized": ["Looking", "for", "a", "shiny", "Orbeetle", "(", "with", "frisk", ",", "sticky", "web", ",", "and", "recover", ")", ".", "Can", "offer", "a", "number", "of", "shinies", "or", "mythicals", "."]}
-{"id": "3430-frisk", "word": "frisk", "label_binary": 0, "text_1": "That video of the wife getting pushed off her peddler into the Bush is a fucking frisk.", "token_idx_1": 16, "text_start_1": 81, "text_end_1": 86, "date_1": "2019-02", "text_2": "I will never vote for \u201cstop &amp; frisk\u201d Bloomberg.", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 39, "date_2": "2020-02", "text_1_tokenized": ["That", "video", "of", "the", "wife", "getting", "pushed", "off", "her", "peddler", "into", "the", "Bush", "is", "a", "fucking", "frisk", "."], "text_2_tokenized": ["I", "will", "never", "vote", "for", "\u201c", "stop", "&", "frisk", "\u201d", "Bloomberg", "."]}
-{"id": "3431-frisk", "word": "frisk", "label_binary": 1, "text_1": "Me: *gets pulled over* Hello officer, are you gonna frisk me ;3c", "token_idx_1": 13, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-02", "text_2": "Today on Twitter: -People defending eugenics -People defending stop and frisk -Lis Smith has a burner account where she's pretending to be a Nigerian Pete Buttigieg supporter I hate it here", "token_idx_2": 13, "text_start_2": 72, "text_end_2": 77, "date_2": "2020-02", "text_1_tokenized": ["Me", ":", "*", "gets", "pulled", "over", "*", "Hello", "officer", ",", "are", "you", "gonna", "frisk", "me", ";", "3c"], "text_2_tokenized": ["Today", "on", "Twitter", ":", "-", "People", "defending", "eugenics", "-", "People", "defending", "stop", "and", "frisk", "-", "Lis", "Smith", "has", "a", "burner", "account", "where", "she's", "pretending", "to", "be", "a", "Nigerian", "Pete", "Buttigieg", "supporter", "I", "hate", "it", "here"]}
-{"id": "3432-frisk", "word": "frisk", "label_binary": 0, "text_1": "I changed my icon into frisk from undertale wwww", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-02", "text_2": "anybody else have this 
strange memory of some standardized test question from like middle school that had you evaluate the morality of stop and frisk? maybe it was just a red state thing\u263a\ufe0f", "token_idx_2": 24, "text_start_2": 144, "text_end_2": 149, "date_2": "2020-02", "text_1_tokenized": ["I", "changed", "my", "icon", "into", "frisk", "from", "undertale", "wwww"], "text_2_tokenized": ["anybody", "else", "have", "this", "strange", "memory", "of", "some", "standardized", "test", "question", "from", "like", "middle", "school", "that", "had", "you", "evaluate", "the", "morality", "of", "stop", "and", "frisk", "?", "maybe", "it", "was", "just", "a", "red", "state", "thing", "\u263a", "\ufe0f"]}
-{"id": "3433-frisk", "word": "frisk", "label_binary": 1, "text_1": "started the morning with easily the most intimate airport frisk i've ever had and now im watching @GameShowNetwork in my own row. it's not even 8am and today has been wild.", "token_idx_1": 9, "text_start_1": 58, "text_end_1": 63, "date_1": "2019-02", "text_2": "America Black people is a joke , it's not funny.. Maybe black people think Bloomberg will give them some of his money. All he will do is to frisk or whatever them even more this time. Never give a racist a second chance.", "token_idx_2": 30, "text_start_2": 140, "text_end_2": 145, "date_2": "2020-02", "text_1_tokenized": ["started", "the", "morning", "with", "easily", "the", "most", "intimate", "airport", "frisk", "i've", "ever", "had", "and", "now", "im", "watching", "@GameShowNetwork", "in", "my", "own", "row", ".", "it's", "not", "even", "8am", "and", "today", "has", "been", "wild", "."], "text_2_tokenized": ["America", "Black", "people", "is", "a", "joke", ",", "it's", "not", "funny", "..", "Maybe", "black", "people", "think", "Bloomberg", "will", "give", "them", "some", "of", "his", "money", ".", "All", "he", "will", "do", "is", "to", "frisk", "or", "whatever", "them", "even", "more", "this", "time", ".", "Never", "give", "a", "racist", "a", "second", "chance", "."]}
-{"id": "3434-frisk", "word": "frisk", "label_binary": 1, "text_1": "Keep me away from police tryna frisk me and snakey arse people tryna tell me that they miss me.", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-02", "text_2": "There's no way you're for \u201cBlack Americans\u201d with a stop and frisk policy. Absolutely no way.", "token_idx_2": 13, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-02", "text_1_tokenized": ["Keep", "me", "away", "from", "police", "tryna", "frisk", "me", "and", "snakey", "arse", "people", "tryna", "tell", "me", "that", "they", "miss", "me", "."], "text_2_tokenized": ["There's", "no", "way", "you're", "for", "\u201c", "Black", "Americans", "\u201d", "with", "a", "stop", "and", "frisk", "policy", ".", "Absolutely", "no", "way", "."]}
-{"id": "3435-frisk", "word": "frisk", "label_binary": 0, "text_1": "Not cashing out always fucks me over, some frisk", "token_idx_1": 9, "text_start_1": 43, "text_end_1": 48, "date_1": "2019-02", "text_2": "It's so hypocritical of Bloomberg to have a commercial mentioning basic decency yet he defended stop &amp; frisk policy until DOJ ruled it was unconstitutional. 
#StopAndFrisk", "token_idx_2": 17, "text_start_2": 107, "text_end_2": 112, "date_2": "2020-02", "text_1_tokenized": ["Not", "cashing", "out", "always", "fucks", "me", "over", ",", "some", "frisk"], "text_2_tokenized": ["It's", "so", "hypocritical", "of", "Bloomberg", "to", "have", "a", "commercial", "mentioning", "basic", "decency", "yet", "he", "defended", "stop", "&", "frisk", "policy", "until", "DOJ", "ruled", "it", "was", "unconstitutional", ".", "#StopAndFrisk"]}
-{"id": "3436-frisk", "word": "frisk", "label_binary": 1, "text_1": "ayy lemme frisk you up", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-02", "text_2": "Re: Bloomberg brazenly lying about his record on stop &amp; frisk: Where's the snakes??", "token_idx_2": 11, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-02", "text_1_tokenized": ["ayy", "lemme", "frisk", "you", "up"], "text_2_tokenized": ["Re", ":", "Bloomberg", "brazenly", "lying", "about", "his", "record", "on", "stop", "&", "frisk", ":", "Where's", "the", "snakes", "?", "?"]}
-{"id": "3437-frisk", "word": "frisk", "label_binary": 1, "text_1": "I can just picture it if the Markles ever got to visit the Palace. A frisk search would find Vonnie to have a camera built into her handbag, Old dad with a microphone built into his umbrella handle and Mark with mirrors on his shoes. All clutching media contracts. #samanthamarkle", "token_idx_1": 16, "text_start_1": 69, "text_end_1": 74, "date_1": "2019-02", "text_2": "Real time face surveillance is algorithmic stop and frisk. Don't let your school, university, workplace, or city start using it. #shutitdown #blacklivesmatteratschool", "token_idx_2": 8, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-02", "text_1_tokenized": ["I", "can", "just", "picture", "it", "if", "the", "Markles", "ever", "got", "to", "visit", "the", "Palace", ".", "A", "frisk", "search", "would", "find", "Vonnie", "to", "have", "a", "camera", "built", "into", "her", "handbag", ",", "Old", "dad", "with", "a", "microphone", "built", "into", "his", "umbrella", "handle", "and", "Mark", "with", "mirrors", "on", "his", "shoes", ".", "All", "clutching", "media", "contracts", ".", "#samanthamarkle"], "text_2_tokenized": ["Real", "time", "face", "surveillance", "is", "algorithmic", "stop", "and", "frisk", ".", "Don't", "let", "your", "school", ",", "university", ",", "workplace", ",", "or", "city", "start", "using", "it", ".", "#shutitdown", "#blacklivesmatteratschool"]}
-{"id": "3438-frisk", "word": "frisk", "label_binary": 0, "text_1": "// frisk collared me again", "token_idx_1": 2, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-02", "text_2": "I'm seeing all these tweets about stop and frisk and I just wanna say parents, please don't force your kids to speak to cops without a lawyer. Police manipulate parents as much as kids. 
No one needs to say anything until an attorney is present.", "token_idx_2": 8, "text_start_2": 43, "text_end_2": 48, "date_2": "2020-02", "text_1_tokenized": ["/", "/", "frisk", "collared", "me", "again"], "text_2_tokenized": ["I'm", "seeing", "all", "these", "tweets", "about", "stop", "and", "frisk", "and", "I", "just", "wanna", "say", "parents", ",", "please", "don't", "force", "your", "kids", "to", "speak", "to", "cops", "without", "a", "lawyer", ".", "Police", "manipulate", "parents", "as", "much", "as", "kids", ".", "No", "one", "needs", "to", "say", "anything", "until", "an", "attorney", "is", "present", "."]}
-{"id": "3439-frisk", "word": "frisk", "label_binary": 0, "text_1": "Hey Camila, In the end scene with Jessie and Gaster why was Gaster unable to shortcut, I was not fully explained, I know that before (in the fight between frisk and betty it was due to the fact he was weak but is that the same reason this time??@Camilacuevaszu", "token_idx_1": 33, "text_start_1": 155, "text_end_1": 160, "date_1": "2019-02", "text_2": "Bravo Mike !!!! keep confronting this \"stop and frisk debate\" and be sincere and keep asking for forgiveness. What u trying to achieve here is bigger . As a black we r smarter than that. We know what u can do what u promise and u can promise what u will do. All u speech should", "token_idx_2": 11, "text_start_2": 48, "text_end_2": 53, "date_2": "2020-02", "text_1_tokenized": ["Hey", "Camila", ",", "In", "the", "end", "scene", "with", "Jessie", "and", "Gaster", "why", "was", "Gaster", "unable", "to", "shortcut", ",", "I", "was", "not", "fully", "explained", ",", "I", "know", "that", "before", "(", "in", "the", "fight", "between", "frisk", "and", "betty", "it", "was", "due", "to", "the", "fact", "he", "was", "weak", "but", "is", "that", "the", "same", "reason", "this", "time", "?", "?", "@Camilacuevaszu"], "text_2_tokenized": ["Bravo", "Mike", "!", "!", "!", "keep", "confronting", "this", "\"", "stop", "and", "frisk", "debate", "\"", "and", "be", "sincere", "and", "keep", "asking", "for", "forgiveness", ".", "What", "u", "trying", "to", "achieve", "here", "is", "bigger", ".", "As", "a", "black", "we", "r", "smarter", "than", "that", ".", "We", "know", "what", "u", "can", "do", "what", "u", "promise", "and", "u", "can", "promise", "what", "u", "will", "do", ".", "All", "u", "speech", "should"]}
-{"id": "3440-frisk", "word": "frisk", "label_binary": 0, "text_1": "(undertale spoilers) asgore: ready to die frisk: *dumps like 5 cheeseburgers on the ground* sup", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-02", "text_2": "I have no issues with stop, question, and frisk. And no, I don't know what you mean by \u201ccriminal justice reform.\u201d What, specifically, needs reforming? 
Get a grip, conservatives.", "token_idx_2": 10, "text_start_2": 42, "text_end_2": 47, "date_2": "2020-02", "text_1_tokenized": ["(", "undertale", "spoilers", ")", "asgore", ":", "ready", "to", "die", "frisk", ":", "*", "dumps", "like", "5", "cheeseburgers", "on", "the", "ground", "*", "sup"], "text_2_tokenized": ["I", "have", "no", "issues", "with", "stop", ",", "question", ",", "and", "frisk", ".", "And", "no", ",", "I", "don't", "know", "what", "you", "mean", "by", "\u201c", "criminal", "justice", "reform", ".", "\u201d", "What", ",", "specifically", ",", "needs", "reforming", "?", "Get", "a", "grip", ",", "conservatives", "."]}
-{"id": "3441-frisk", "word": "frisk", "label_binary": 1, "text_1": "I know ya freaky baby I know you miss me baby You cannot reach me baby She want to frisk me baby She want to tease me baby I know ya freaky baby", "token_idx_1": 19, "text_start_1": 83, "text_end_1": 88, "date_1": "2019-02", "text_2": "If I have to see this female pandering ad from @MikeBloomberg one more fucking time... Those ladies sure are excited over that PAYCHECK they got to do it. No. Just. Stop. And don't frisk either, ya racist.", "token_idx_2": 38, "text_start_2": 181, "text_end_2": 186, "date_2": "2020-02", "text_1_tokenized": ["I", "know", "ya", "freaky", "baby", "I", "know", "you", "miss", "me", "baby", "You", "cannot", "reach", "me", "baby", "She", "want", "to", "frisk", "me", "baby", "She", "want", "to", "tease", "me", "baby", "I", "know", "ya", "freaky", "baby"], "text_2_tokenized": ["If", "I", "have", "to", "see", "this", "female", "pandering", "ad", "from", "@MikeBloomberg", "one", "more", "fucking", "time", "...", "Those", "ladies", "sure", "are", "excited", "over", "that", "PAYCHECK", "they", "got", "to", "do", "it", ".", "No", ".", "Just", ".", "Stop", ".", "And", "don't", "frisk", "either", ",", "ya", "racist", "."]}
-{"id": "3442-frisk", "word": "frisk", "label_binary": 0, "text_1": "Waiting for frisk to realize that the water is filled with leaches and also spikes", "token_idx_1": 2, "text_start_1": 12, "text_end_1": 17, "date_1": "2019-02", "text_2": "Just imagining an almost dead undyne yeeting herself into alphys' lab after she kills frisk in the genocide run and was like \"alphys I won\" and alphys is like \"UNDYNE WTF YOU'RE PRACTICALLY DEAD\" and undyne's like \"no I'm fine\" while like her arm is fucking melting", "token_idx_2": 15, "text_start_2": 86, "text_end_2": 91, "date_2": "2020-02", "text_1_tokenized": ["Waiting", "for", "frisk", "to", "realize", "that", "the", "water", "is", "filled", "with", "leaches", "and", "also", "spikes"], "text_2_tokenized": ["Just", "imagining", "an", "almost", "dead", "undyne", "yeeting", "herself", "into", "alphys", "'", "lab", "after", "she", "kills", "frisk", "in", "the", "genocide", "run", "and", "was", "like", "\"", "alphys", "I", "won", "\"", "and", "alphys", "is", "like", "\"", "UNDYNE", "WTF", "YOU'RE", "PRACTICALLY", "DEAD", "\"", "and", "undyne's", "like", "\"", "no", "I'm", "fine", "\"", "while", "like", "her", "arm", "is", "fucking", "melting"]}
-{"id": "3443-frisk", "word": "frisk", "label_binary": 0, "text_1": "It's me frisk, and I hacked Cardila account, HAHAHAHA", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 13, "date_1": "2019-02", "text_2": "LRT I'm making this a separate tweet bc I have A Lot: def booting up undertale after the true/pacifist ending where flowey says to \u201cleave frisk alone\u201d and all that. 
I had seen 2 people play this game with all 3 endings and still this fucked me up.", "token_idx_2": 29, "text_start_2": 138, "text_end_2": 143, "date_2": "2020-02", "text_1_tokenized": ["It's", "me", "frisk", ",", "and", "I", "hacked", "Cardila", "account", ",", "HAHAHAHA"], "text_2_tokenized": ["LRT", "I'm", "making", "this", "a", "separate", "tweet", "bc", "I", "have", "A", "Lot", ":", "def", "booting", "up", "undertale", "after", "the", "true", "/", "pacifist", "ending", "where", "flowey", "says", "to", "\u201c", "leave", "frisk", "alone", "\u201d", "and", "all", "that", ".", "I", "had", "seen", "2", "people", "play", "this", "game", "with", "all", "3", "endings", "and", "still", "this", "fucked", "me", "up", "."]}
-{"id": "3444-frisk", "word": "frisk", "label_binary": 1, "text_1": "\u201cNot one white student had a stop and frisk story\u201d \u201cWhy is it because of my white privilege I am more ignorant than I should be?\u201d ~@KielyBrendan on his experience of teaching and acknowledging his privilege. What a powerful insight about honoring our students' stories!", "token_idx_1": 9, "text_start_1": 38, "text_end_1": 43, "date_1": "2019-02", "text_2": "Dad said that maybe Bloomberg is popular among blacks because some of them actually liked \u201cstop and frisk\u201d because it made their neighborhoods safer. I was, like, \u201cNo, I'm fairly certain they do not like racist policies and the very idea of that is preposterous.\u201d", "token_idx_2": 18, "text_start_2": 100, "text_end_2": 105, "date_2": "2020-02", "text_1_tokenized": ["\u201c", "Not", "one", "white", "student", "had", "a", "stop", "and", "frisk", "story", "\u201d", "\u201c", "Why", "is", "it", "because", "of", "my", "white", "privilege", "I", "am", "more", "ignorant", "than", "I", "should", "be", "?", "\u201d", "~", "@KielyBrendan", "on", "his", "experience", "of", "teaching", "and", "acknowledging", "his", "privilege", ".", "What", "a", "powerful", "insight", "about", "honoring", "our", "students", "'", "stories", "!"], "text_2_tokenized": ["Dad", "said", "that", "maybe", "Bloomberg", "is", "popular", "among", "blacks", "because", "some", "of", "them", "actually", "liked", "\u201c", "stop", "and", "frisk", "\u201d", "because", "it", "made", "their", "neighborhoods", "safer", ".", "I", "was", ",", "like", ",", "\u201c", "No", ",", "I'm", "fairly", "certain", "they", "do", "not", "like", "racist", "policies", "and", "the", "very", "idea", "of", "that", "is", "preposterous", ".", "\u201d"]}
-{"id": "3445-frisk", "word": "frisk", "label_binary": 1, "text_1": "make sure ya frisk me good check my panties and my bra", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 18, "date_1": "2019-02", "text_2": "After court ruled stop and frisk unconstitutional, Bloomberg had the balls to try to appeal that ruling! 
Buyer beware.", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 32, "date_2": "2020-02", "text_1_tokenized": ["make", "sure", "ya", "frisk", "me", "good", "check", "my", "panties", "and", "my", "bra"], "text_2_tokenized": ["After", "court", "ruled", "stop", "and", "frisk", "unconstitutional", ",", "Bloomberg", "had", "the", "balls", "to", "try", "to", "appeal", "that", "ruling", "!", "Buyer", "beware", "."]}
-{"id": "2068-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Has anybody been keeping count of how many times Ilhan Omar has been mentioned at AIPAC compared to how many times the Tree of Life pogrom has been mentioned?", "token_idx_1": 25, "text_start_1": 132, "text_end_1": 138, "date_1": "2019-02", "text_2": "Remember how sanghis were enjoying delhi pogrom .. I thank coronavirus 4 scarring the \ud83d\udca9 out of sanghis.. Kaha road clear karne chale they aur ab kaha ghar mei dubuk ke baithe hai\ud83d\ude02", "token_idx_2": 6, "text_start_2": 41, "text_end_2": 47, "date_2": "2020-02", "text_1_tokenized": ["Has", "anybody", "been", "keeping", "count", "of", "how", "many", "times", "Ilhan", "Omar", "has", "been", "mentioned", "at", "AIPAC", "compared", "to", "how", "many", "times", "the", "Tree", "of", "Life", "pogrom", "has", "been", "mentioned", "?"], "text_2_tokenized": ["Remember", "how", "sanghis", "were", "enjoying", "delhi", "pogrom", "..", "I", "thank", "coronavirus", "4", "scarring", "the", "\ud83d\udca9", "out", "of", "sanghis", "..", "Kaha", "road", "clear", "karne", "chale", "they", "aur", "ab", "kaha", "ghar", "mei", "dubuk", "ke", "baithe", "hai", "\ud83d\ude02"]}
-{"id": "2069-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "One night a gang of vigilantes gathered for a pogrom against San Francisco's Chinatown #tor", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 52, "date_1": "2019-02", "text_2": "First time I'm hearing the pogrom. Where did it all start?", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 33, "date_2": "2020-02", "text_1_tokenized": ["One", "night", "a", "gang", "of", "vigilantes", "gathered", "for", "a", "pogrom", "against", "San", "Francisco's", "Chinatown", "#tor"], "text_2_tokenized": ["First", "time", "I'm", "hearing", "the", "pogrom", ".", "Where", "did", "it", "all", "start", "?"]}
-{"id": "2070-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Twitter's shadowbanning is essentially the same thing as the Nazi book-burning pogrom.", "token_idx_1": 11, "text_start_1": 79, "text_end_1": 85, "date_1": "2019-02", "text_2": "Why the f**k is Kapil Mishra still on the loose. He declared an ultimatum and has set stage to a pogrom, that genocidal maniac. #Arrest_Kapil_Mishra #AmitShahMustResign #DelhiViolence", "token_idx_2": 24, "text_start_2": 97, "text_end_2": 103, "date_2": "2020-02", "text_1_tokenized": ["Twitter's", "shadowbanning", "is", "essentially", "the", "same", "thing", "as", "the", "Nazi", "book-burning", "pogrom", "."], "text_2_tokenized": ["Why", "the", "f", "*", "*", "k", "is", "Kapil", "Mishra", "still", "on", "the", "loose", ".", "He", "declared", "an", "ultimatum", "and", "has", "set", "stage", "to", "a", "pogrom", ",", "that", "genocidal", "maniac", ".", "#Arrest_Kapil_Mishra", "#AmitShahMustResign", "#DelhiViolence"]}
-{"id": "2071-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Killers of 50 Muslims at Friday prayers in #ChristchurchMassacre are predators who don't understand condemnation. 
It should be showered on the Senator &amp; others who justified the pogrom of innocent humans and offered their shoulders of Parliament, media &amp; internet to the assassins", "token_idx_1": 28, "text_start_1": 182, "text_end_1": 188, "date_1": "2019-02", "text_2": "The devil must be so happy today. Fresh batch identified since yesterday from India for his after death recruitment drive. Everyone will automatically chant the names of their favourite Gods when the devil starts with his pogrom.", "token_idx_2": 38, "text_start_2": 222, "text_end_2": 228, "date_2": "2020-02", "text_1_tokenized": ["Killers", "of", "50", "Muslims", "at", "Friday", "prayers", "in", "#ChristchurchMassacre", "are", "predators", "who", "don't", "understand", "condemnation", ".", "It", "should", "be", "showered", "on", "the", "Senator", "&", "others", "who", "justified", "the", "pogrom", "of", "innocent", "humans", "and", "offered", "their", "shoulders", "of", "Parliament", ",", "media", "&", "internet", "to", "the", "assassins"], "text_2_tokenized": ["The", "devil", "must", "be", "so", "happy", "today", ".", "Fresh", "batch", "identified", "since", "yesterday", "from", "India", "for", "his", "after", "death", "recruitment", "drive", ".", "Everyone", "will", "automatically", "chant", "the", "names", "of", "their", "favourite", "Gods", "when", "the", "devil", "starts", "with", "his", "pogrom", "."]}
-{"id": "2072-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I can't believe what I'm hearing in the media. Even Some saying dems should apologize to trump. Let's move on, talk about anything else. Speaker Pelosi looks shell shocked. trump plans a pogrom of revenge. I've wanted to see the arctic. And only watch reruns of firefly.", "token_idx_1": 37, "text_start_1": 187, "text_end_1": 193, "date_1": "2019-02", "text_2": "Kejriwal had said in 2012 that if Police wants, every riot can be stopped in just 2 hours. What you are witnessing is MoSha sponsored Gujarat 2.0 pogrom!", "token_idx_2": 29, "text_start_2": 146, "text_end_2": 152, "date_2": "2020-02", "text_1_tokenized": ["I", "can't", "believe", "what", "I'm", "hearing", "in", "the", "media", ".", "Even", "Some", "saying", "dems", "should", "apologize", "to", "trump", ".", "Let's", "move", "on", ",", "talk", "about", "anything", "else", ".", "Speaker", "Pelosi", "looks", "shell", "shocked", ".", "trump", "plans", "a", "pogrom", "of", "revenge", ".", "I've", "wanted", "to", "see", "the", "arctic", ".", "And", "only", "watch", "reruns", "of", "firefly", "."], "text_2_tokenized": ["Kejriwal", "had", "said", "in", "2012", "that", "if", "Police", "wants", ",", "every", "riot", "can", "be", "stopped", "in", "just", "2", "hours", ".", "What", "you", "are", "witnessing", "is", "MoSha", "sponsored", "Gujarat", "2.0", "pogrom", "!"]}
-{"id": "2073-pogrom", "word": "pogrom", "label_binary": 0, "text_1": "I sure hope Alan Partridge doesn't put a foot wrong tonight or it's the chuckle pogrom for you old son.", "token_idx_1": 15, "text_start_1": 80, "text_end_1": 86, "date_1": "2019-02", "text_2": "Does anybody know how many Muslims and how many Hindus were killed in the recently conducted pogrom. How many mosques and how many temples were burnt down. 
No sarcastic reply's, if anybody knows anything please share.", "token_idx_2": 16, "text_start_2": 93, "text_end_2": 99, "date_2": "2020-02", "text_1_tokenized": ["I", "sure", "hope", "Alan", "Partridge", "doesn't", "put", "a", "foot", "wrong", "tonight", "or", "it's", "the", "chuckle", "pogrom", "for", "you", "old", "son", "."], "text_2_tokenized": ["Does", "anybody", "know", "how", "many", "Muslims", "and", "how", "many", "Hindus", "were", "killed", "in", "the", "recently", "conducted", "pogrom", ".", "How", "many", "mosques", "and", "how", "many", "temples", "were", "burnt", "down", ".", "No", "sarcastic", "reply's", ",", "if", "anybody", "knows", "anything", "please", "share", "."]}
-{"id": "2074-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "With \u2018Ganga' plane hijack in Jan 71,stopping over flights btw East and West Pakistan, for creation of BD, there is long list of false flag operations by India. \u2018Pulwama' is latest.Ahmadabad pogrom of Muslims under Modi as CM was due to similar mischief", "token_idx_1": 40, "text_start_1": 190, "text_end_1": 196, "date_1": "2019-02", "text_2": "So Libbus are trying their best to show this pogrom as clashes and riots because seeing small hindu kids on streets with weapons is hurting their delusional idea of secular India. Chutiye!", "token_idx_2": 9, "text_start_2": 45, "text_end_2": 51, "date_2": "2020-02", "text_1_tokenized": ["With", "\u2018", "Ganga", "'", "plane", "hijack", "in", "Jan", "71", ",", "stopping", "over", "flights", "btw", "East", "and", "West", "Pakistan", ",", "for", "creation", "of", "BD", ",", "there", "is", "long", "list", "of", "false", "flag", "operations", "by", "India", ".", "\u2018", "Pulwama", "'", "is", "latest.Ahmadabad", "pogrom", "of", "Muslims", "under", "Modi", "as", "CM", "was", "due", "to", "similar", "mischief"], "text_2_tokenized": ["So", "Libbus", "are", "trying", "their", "best", "to", "show", "this", "pogrom", "as", "clashes", "and", "riots", "because", "seeing", "small", "hindu", "kids", "on", "streets", "with", "weapons", "is", "hurting", "their", "delusional", "idea", "of", "secular", "India", ".", "Chutiye", "!"]}
-{"id": "2075-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Today has marked something so horrendous and painful that I'm scared of staying in this world any more. I'm afraid of what may happen if I walk down the street, I'm afraid that humanity has now turned against me and a pogrom is on the horizon. I'm afraid, ya Rabb.", "token_idx_1": 43, "text_start_1": 218, "text_end_1": 224, "date_1": "2019-02", "text_2": "Implication of Tahir Hussain and Shahrukh in anti-Muslim pogrom is the Sangh/BJP way of telling Muslims: we will legislate against you. Sponsor genocide. Break your bones and tear you flesh. But you'll NOT do anything in SELF DEFENSE. 
You'll only submit!", "token_idx_2": 8, "text_start_2": 57, "text_end_2": 63, "date_2": "2020-02", "text_1_tokenized": ["Today", "has", "marked", "something", "so", "horrendous", "and", "painful", "that", "I'm", "scared", "of", "staying", "in", "this", "world", "any", "more", ".", "I'm", "afraid", "of", "what", "may", "happen", "if", "I", "walk", "down", "the", "street", ",", "I'm", "afraid", "that", "humanity", "has", "now", "turned", "against", "me", "and", "a", "pogrom", "is", "on", "the", "horizon", ".", "I'm", "afraid", ",", "ya", "Rabb", "."], "text_2_tokenized": ["Implication", "of", "Tahir", "Hussain", "and", "Shahrukh", "in", "anti-Muslim", "pogrom", "is", "the", "Sangh", "/", "BJP", "way", "of", "telling", "Muslims", ":", "we", "will", "legislate", "against", "you", ".", "Sponsor", "genocide", ".", "Break", "your", "bones", "and", "tear", "you", "flesh", ".", "But", "you'll", "NOT", "do", "anything", "in", "SELF", "DEFENSE", ".", "You'll", "only", "submit", "!"]}
-{"id": "2076-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "ppl are into dependency more than personal development. swhy they coon and snitch. coz theyre econ-incentivized to serve the pogrom of ma$$a bratz.", "token_idx_1": 21, "text_start_1": 125, "text_end_1": 131, "date_1": "2019-02", "text_2": "What happened in #delhi is called #pogrom . an organized massacre of a particular ethnic/religion . Eg: \u201cthe Nazis planned a pogrom against Jewish people in Germany\"", "token_idx_2": 25, "text_start_2": 35, "text_end_2": 41, "date_2": "2020-02", "text_1_tokenized": ["ppl", "are", "into", "dependency", "more", "than", "personal", "development", ".", "swhy", "they", "coon", "and", "snitch", ".", "coz", "theyre", "econ-incentivized", "to", "serve", "the", "pogrom", "of", "ma", "$", "$", "a", "bratz", "."], "text_2_tokenized": ["What", "happened", "in", "#delhi", "is", "called", "#pogrom", ".", "an", "organized", "massacre", "of", "a", "particular", "ethnic", "/", "religion", ".", "Eg", ":", "\u201c", "the", "Nazis", "planned", "a", "pogrom", "against", "Jewish", "people", "in", "Germany", "\""]}
-{"id": "2077-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Terry Gross had J. M. Berger on today talking about the Australian who committed a pogrom in NZ. Manifesto verbiage is trollish. Said he was \u201csteeped in troll culture.\u201d Also talked about how much \u2018gun rights' culture connected many types of extremists. Sigh.", "token_idx_1": 17, "text_start_1": 83, "text_end_1": 89, "date_1": "2019-02", "text_2": "Delhi now is the result of Babri Masjid, Gujarat killing, Musazzarbad etc where men / group are rewarded and those who planned manufactured executed such pogrom are once again at the helm of affairs operating without impunity. 
While humanlife constitution and peace don't matter.", "token_idx_2": 27, "text_start_2": 154, "text_end_2": 160, "date_2": "2020-02", "text_1_tokenized": ["Terry", "Gross", "had", "J", ".", "M", ".", "Berger", "on", "today", "talking", "about", "the", "Australian", "who", "committed", "a", "pogrom", "in", "NZ", ".", "Manifesto", "verbiage", "is", "trollish", ".", "Said", "he", "was", "\u201c", "steeped", "in", "troll", "culture", ".", "\u201d", "Also", "talked", "about", "how", "much", "\u2018", "gun", "rights", "'", "culture", "connected", "many", "types", "of", "extremists", ".", "Sigh", "."], "text_2_tokenized": ["Delhi", "now", "is", "the", "result", "of", "Babri", "Masjid", ",", "Gujarat", "killing", ",", "Musazzarbad", "etc", "where", "men", "/", "group", "are", "rewarded", "and", "those", "who", "planned", "manufactured", "executed", "such", "pogrom", "are", "once", "again", "at", "the", "helm", "of", "affairs", "operating", "without", "impunity", ".", "While", "humanlife", "constitution", "and", "peace", "don't", "matter", "."]}
-{"id": "2078-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "feel like the 'HUMANITY FIRST' posters are taunting me with the inevitability of the anti-cyborg pogrom that will snuff me out in ~30 years", "token_idx_1": 17, "text_start_1": 97, "text_end_1": 103, "date_1": "2019-02", "text_2": "Be very wary \u2022 15-Dec Jamia Sunday \u2022 05-Jan JNU Sunday \u2022 23-Feb Delhi pogrom Sunday \u2022 08-Mar Social media Sunday @hatefreeworldX", "token_idx_2": 20, "text_start_2": 70, "text_end_2": 76, "date_2": "2020-02", "text_1_tokenized": ["feel", "like", "the", "'", "HUMANITY", "FIRST", "'", "posters", "are", "taunting", "me", "with", "the", "inevitability", "of", "the", "anti-cyborg", "pogrom", "that", "will", "snuff", "me", "out", "in", "~", "30", "years"], "text_2_tokenized": ["Be", "very", "wary", "\u2022", "15", "-", "Dec", "Jamia", "Sunday", "\u2022", "05", "-", "Jan", "JNU", "Sunday", "\u2022", "23", "-", "Feb", "Delhi", "pogrom", "Sunday", "\u2022", "08", "-", "Mar", "Social", "media", "Sunday", "@hatefreeworldX"]}
-{"id": "2079-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "For the apologist who deleted her post before I had a chance 2 respond: you're right anti-Semitism knows no geographical bounds in particular in Palestine where Zionists began their pogrom against Semites more than 70 years ago. Decades later they still slaughter Palestinians.", "token_idx_1": 30, "text_start_1": 182, "text_end_1": 188, "date_1": "2019-02", "text_2": "Question: Extremists are leading a deadly anti-Muslim pogrom in the Indian capital of New Delhi. Will you condemn this? 
#DemDebate", "token_idx_2": 8, "text_start_2": 54, "text_end_2": 60, "date_2": "2020-02", "text_1_tokenized": ["For", "the", "apologist", "who", "deleted", "her", "post", "before", "I", "had", "a", "chance", "2", "respond", ":", "you're", "right", "anti-Semitism", "knows", "no", "geographical", "bounds", "in", "particular", "in", "Palestine", "where", "Zionists", "began", "their", "pogrom", "against", "Semites", "more", "than", "70", "years", "ago", ".", "Decades", "later", "they", "still", "slaughter", "Palestinians", "."], "text_2_tokenized": ["Question", ":", "Extremists", "are", "leading", "a", "deadly", "anti-Muslim", "pogrom", "in", "the", "Indian", "capital", "of", "New", "Delhi", ".", "Will", "you", "condemn", "this", "?", "#DemDebate"]}
-{"id": "2080-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "To @JewishVoice: A picture of Jews murdered in German camp #BergenBelsen to depict Polish pogrom at #Jedwabne. Unacceptable. \"Nazi Germany chose to locate its death camps in Poland, ostensibly because of rabid anti-Semitism in pre-war Poland\" - utter ignorance. @mmagierowski", "token_idx_1": 15, "text_start_1": 90, "text_end_1": 96, "date_1": "2019-02", "text_2": "I wonder people did what @narendramodi said them to do. So the pogrom of Delhi muslims ..... Rest you know.", "token_idx_2": 13, "text_start_2": 63, "text_end_2": 69, "date_2": "2020-02", "text_1_tokenized": ["To", "@JewishVoice", ":", "A", "picture", "of", "Jews", "murdered", "in", "German", "camp", "#BergenBelsen", "to", "depict", "Polish", "pogrom", "at", "#Jedwabne", ".", "Unacceptable", ".", "\"", "Nazi", "Germany", "chose", "to", "locate", "its", "death", "camps", "in", "Poland", ",", "ostensibly", "because", "of", "rabid", "anti-Semitism", "in", "pre-war", "Poland", "\"", "-", "utter", "ignorance", ".", "@mmagierowski"], "text_2_tokenized": ["I", "wonder", "people", "did", "what", "@narendramodi", "said", "them", "to", "do", ".", "So", "the", "pogrom", "of", "Delhi", "muslims", "...", "Rest", "you", "know", "."]}
-{"id": "2081-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "RT @kunfaaya He may wash the feet of Safaai Karamcharis but will never manage to wash off his bloodstained hands. He is responsible for killing 2000+ Muslims during the Gujarat pogrom. Remember that when you vote in May. 
Never forgive, never forget.", "token_idx_1": 32, "text_start_1": 177, "text_end_1": 183, "date_1": "2019-02", "text_2": "Media and Bollywood are complicit in the anti-Hindu pogrom of #DelhiRiots.", "token_idx_2": 8, "text_start_2": 52, "text_end_2": 58, "date_2": "2020-02", "text_1_tokenized": ["RT", "@kunfaaya", "He", "may", "wash", "the", "feet", "of", "Safaai", "Karamcharis", "but", "will", "never", "manage", "to", "wash", "off", "his", "bloodstained", "hands", ".", "He", "is", "responsible", "for", "killing", "2000", "+", "Muslims", "during", "the", "Gujarat", "pogrom", ".", "Remember", "that", "when", "you", "vote", "in", "May", ".", "Never", "forgive", ",", "never", "forget", "."], "text_2_tokenized": ["Media", "and", "Bollywood", "are", "complicit", "in", "the", "anti-Hindu", "pogrom", "of", "#DelhiRiots", "."]}
-{"id": "2082-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It really feels like the news cycle has dwelled for longer on the words of a Black Muslim Congresswoman as a threat to American Jews than it did on an actual far-right pogrom at a synagogue just a few months ago.", "token_idx_1": 32, "text_start_1": 168, "text_end_1": 174, "date_1": "2019-02", "text_2": "Delhi terror attacks were a pogrom.", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 34, "date_2": "2020-02", "text_1_tokenized": ["It", "really", "feels", "like", "the", "news", "cycle", "has", "dwelled", "for", "longer", "on", "the", "words", "of", "a", "Black", "Muslim", "Congresswoman", "as", "a", "threat", "to", "American", "Jews", "than", "it", "did", "on", "an", "actual", "far-right", "pogrom", "at", "a", "synagogue", "just", "a", "few", "months", "ago", "."], "text_2_tokenized": ["Delhi", "terror", "attacks", "were", "a", "pogrom", "."]}
-{"id": "2083-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Every week Israel commits its own Christchurch Massacre. Each new air raid bombing campaign is a pogrom. They target apartment buildings, busy streets, elementary schools, hospitals, and mosques. The same places every other white supremacist mass murderers go to kill Muslims.", "token_idx_1": 17, "text_start_1": 97, "text_end_1": 103, "date_1": "2019-02", "text_2": "If 30 dead muslims = pogrom and a genocide. Then why are the 257 KPs dead not considered a pogrom or a genocide?", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-02", "text_1_tokenized": ["Every", "week", "Israel", "commits", "its", "own", "Christchurch", "Massacre", ".", "Each", "new", "air", "raid", "bombing", "campaign", "is", "a", "pogrom", ".", "They", "target", "apartment", "buildings", ",", "busy", "streets", ",", "elementary", "schools", ",", "hospitals", ",", "and", "mosques", ".", "The", "same", "places", "every", "other", "white", "supremacist", "mass", "murderers", "go", "to", "kill", "Muslims", "."], "text_2_tokenized": ["If", "30", "dead", "muslims", "=", "pogrom", "and", "a", "genocide", ".", "Then", "why", "are", "the", "257", "KPs", "dead", "not", "considered", "a", "pogrom", "or", "a", "genocide", "?"]}
-{"id": "2084-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The blood spilled is too much, at this rate can we just give them kano to avoid further pogrom.", "token_idx_1": 19, "text_start_1": 88, "text_end_1": 94, "date_1": "2019-02", "text_2": "Innocent people were killed, homes and shops looted &amp; then burnt, 1 Dargah and 14 Mosques destroyed in a state sponsored pogrom in Delhi, the capital of India. But many people were silent and didn't give a damn. 
Lekin ab #Corona pe sabko rona aa raha hai. #CoronavirusPandemic", "token_idx_2": 23, "text_start_2": 125, "text_end_2": 131, "date_2": "2020-02", "text_1_tokenized": ["The", "blood", "spilled", "is", "too", "much", ",", "at", "this", "rate", "can", "we", "just", "give", "them", "kano", "to", "avoid", "further", "pogrom", "."], "text_2_tokenized": ["Innocent", "people", "were", "killed", ",", "homes", "and", "shops", "looted", "&", "then", "burnt", ",", "1", "Dargah", "and", "14", "Mosques", "destroyed", "in", "a", "state", "sponsored", "pogrom", "in", "Delhi", ",", "the", "capital", "of", "India", ".", "But", "many", "people", "were", "silent", "and", "didn't", "give", "a", "damn", ".", "Lekin", "ab", "#Corona", "pe", "sabko", "rona", "aa", "raha", "hai", ".", "#CoronavirusPandemic"]} -{"id": "2085-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Fun fact: In every wedding I have ever attended (except my own), I spend a good portion of the ceremony fixated on the possibility of there being a pogrom, and how I will react when it inevitably happens. I blame it on seeing Fiddler at too young of an age.", "token_idx_1": 32, "text_start_1": 148, "text_end_1": 154, "date_1": "2019-02", "text_2": "\"Court can never prevent such things. Everyone should understand this. We act and pass appropriate orders only after things have taken place,\" the CJI said. They found the right man to further this pogrom. Quite super good at his job.", "token_idx_2": 39, "text_start_2": 198, "text_end_2": 204, "date_2": "2020-02", "text_1_tokenized": ["Fun", "fact", ":", "In", "every", "wedding", "I", "have", "ever", "attended", "(", "except", "my", "own", ")", ",", "I", "spend", "a", "good", "portion", "of", "the", "ceremony", "fixated", "on", "the", "possibility", "of", "there", "being", "a", "pogrom", ",", "and", "how", "I", "will", "react", "when", "it", "inevitably", "happens", ".", "I", "blame", "it", "on", "seeing", "Fiddler", "at", "too", "young", "of", "an", "age", "."], "text_2_tokenized": ["\"", "Court", "can", "never", "prevent", "such", "things", ".", "Everyone", "should", "understand", "this", ".", "We", "act", "and", "pass", "appropriate", "orders", "only", "after", "things", "have", "taken", "place", ",", "\"", "the", "CJI", "said", ".", "They", "found", "the", "right", "man", "to", "further", "this", "pogrom", ".", "Quite", "super", "good", "at", "his", "job", "."]} -{"id": "2086-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The BBC had a member from a fascist organisation on Newsnight the same day as a pogrom. No further evidence is required that their broadcasting is no longer in the public interest. 
#Christchurch", "token_idx_1": 16, "text_start_1": 80, "text_end_1": 86, "date_1": "2019-02", "text_2": "Waiting for the bluffers on the payroll of @SwarajyaMag to put up a piece on the Delhi pogrom.", "token_idx_2": 17, "text_start_2": 87, "text_end_2": 93, "date_2": "2020-02", "text_1_tokenized": ["The", "BBC", "had", "a", "member", "from", "a", "fascist", "organisation", "on", "Newsnight", "the", "same", "day", "as", "a", "pogrom", ".", "No", "further", "evidence", "is", "required", "that", "their", "broadcasting", "is", "no", "longer", "in", "the", "public", "interest", ".", "#Christchurch"], "text_2_tokenized": ["Waiting", "for", "the", "bluffers", "on", "the", "payroll", "of", "@SwarajyaMag", "to", "put", "up", "a", "piece", "on", "the", "Delhi", "pogrom", "."]} -{"id": "2087-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Greatly enjoying Great Neck South Middle's opening night production of FIDDLER ON THE ROOF (directed by my pal @GabeCarras). The kids are all great. Even if they were briefly outshone by one brave parent who, right before the pogrom, cried out: \u201cAw, it's the Russians!\u201d", "token_idx_1": 43, "text_start_1": 226, "text_end_1": 232, "date_1": "2019-02", "text_2": "#askBhandari What can be done to legally compel the Telangana govt to help the Hindus of #bhainsa who suffered an Islamist pogrom?", "token_idx_2": 21, "text_start_2": 123, "text_end_2": 129, "date_2": "2020-02", "text_1_tokenized": ["Greatly", "enjoying", "Great", "Neck", "South", "Middle's", "opening", "night", "production", "of", "FIDDLER", "ON", "THE", "ROOF", "(", "directed", "by", "my", "pal", "@GabeCarras", ")", ".", "The", "kids", "are", "all", "great", ".", "Even", "if", "they", "were", "briefly", "outshone", "by", "one", "brave", "parent", "who", ",", "right", "before", "the", "pogrom", ",", "cried", "out", ":", "\u201c", "Aw", ",", "it's", "the", "Russians", "!", "\u201d"], "text_2_tokenized": ["#askBhandari", "What", "can", "be", "done", "to", "legally", "compel", "the", "Telangana", "govt", "to", "help", "the", "Hindus", "of", "#bhainsa", "who", "suffered", "an", "Islamist", "pogrom", "?"]} -{"id": "2088-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "True Talk... Back in school (Jos), we were having an argument about the Biafran war and 3M pogrom, then my Berom friend now said... \"You people discovered oil and wanted to leave na. Greed\" I cried inside. I cried. Such ignorance. Days later...", "token_idx_1": 21, "text_start_1": 91, "text_end_1": 97, "date_1": "2019-02", "text_2": "A media pogrom of Indian Muslims has become a routine. Media has simply acquiesced in state of fear and greed. A frightened, prejudiced and betrayed media is the biggest threat for national integrity and harmony. 
Shame on #IslamoPhobicIndianMedia", "token_idx_2": 2, "text_start_2": 8, "text_end_2": 14, "date_2": "2020-02", "text_1_tokenized": ["True", "Talk", "...", "Back", "in", "school", "(", "Jos", ")", ",", "we", "were", "having", "an", "argument", "about", "the", "Biafran", "war", "and", "3M", "pogrom", ",", "then", "my", "Berom", "friend", "now", "said", "...", "\"", "You", "people", "discovered", "oil", "and", "wanted", "to", "leave", "na", ".", "Greed", "\"", "I", "cried", "inside", ".", "I", "cried", ".", "Such", "ignorance", ".", "Days", "later", "..."], "text_2_tokenized": ["A", "media", "pogrom", "of", "Indian", "Muslims", "has", "become", "a", "routine", ".", "Media", "has", "simply", "acquiesced", "in", "state", "of", "fear", "and", "greed", ".", "A", "frightened", ",", "prejudiced", "and", "betrayed", "media", "is", "the", "biggest", "threat", "for", "national", "integrity", "and", "harmony", ".", "Shame", "on", "#IslamoPhobicIndianMedia"]} -{"id": "2089-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The lines of code filled the screen. *Weapons systems initialized.* This wasn't right. *Proximity analysis complete, targets indentified.* The #sabotaged programme began its pogrom. A gentle hiss signified the release of the 'Red Kiss' virus. His eyes wept blood. #vss365", "token_idx_1": 32, "text_start_1": 174, "text_end_1": 180, "date_1": "2019-02", "text_2": "After watching Jojo Rabbit for the nth time, I had a dream where my one true love helped me escape a nationwide pogrom, Captain K style. I escaped hiding in a German cruise. Published my book, went to London for a BBC interview; and cried myself to sleep in a five star hotel.", "token_idx_2": 23, "text_start_2": 112, "text_end_2": 118, "date_2": "2020-02", "text_1_tokenized": ["The", "lines", "of", "code", "filled", "the", "screen", ".", "*", "Weapons", "systems", "initialized", ".", "*", "This", "wasn't", "right", ".", "*", "Proximity", "analysis", "complete", ",", "targets", "indentified", ".", "*", "The", "#sabotaged", "programme", "began", "its", "pogrom", ".", "A", "gentle", "hiss", "signified", "the", "release", "of", "the", "'", "Red", "Kiss", "'", "virus", ".", "His", "eyes", "wept", "blood", ".", "#vss365"], "text_2_tokenized": ["After", "watching", "Jojo", "Rabbit", "for", "the", "nth", "time", ",", "I", "had", "a", "dream", "where", "my", "one", "true", "love", "helped", "me", "escape", "a", "nationwide", "pogrom", ",", "Captain", "K", "style", ".", "I", "escaped", "hiding", "in", "a", "German", "cruise", ".", "Published", "my", "book", ",", "went", "to", "London", "for", "a", "BBC", "interview", ";", "and", "cried", "myself", "to", "sleep", "in", "a", "five", "star", "hotel", "."]} -{"id": "2090-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The House Oversight Committee has launched yet another series of 'investgations' into things that had nothing to do with the election, and into things already 'investigated' which are 'dead ends'--no evidence, no nothing. This Charlie Foxtrot has become a Stalinesque pogrom", "token_idx_1": 48, "text_start_1": 268, "text_end_1": 274, "date_1": "2019-02", "text_2": "Images coming out of Muslim homes & businesses being burnt, Muslims being beaten & killed, mosques & graveyards being burnt & desecrated are similar to Jews fleeing the pogrom in Nazi Germany. 
The world must accept this brutal reality of the Modi fascist racist regime & stop it.", "token_idx_2": 30, "text_start_2": 185, "text_end_2": 191, "date_2": "2020-02", "text_1_tokenized": ["The", "House", "Oversight", "Committee", "has", "launched", "yet", "another", "series", "of", "'", "investgations", "'", "into", "things", "that", "had", "nothing", "to", "do", "with", "the", "election", ",", "and", "into", "things", "already", "'", "investigated", "'", "which", "are", "'", "dead", "ends'--no", "evidence", ",", "no", "nothing", ".", "This", "Charlie", "Foxtrot", "has", "become", "a", "Stalinesque", "pogrom"], "text_2_tokenized": ["Images", "coming", "out", "of", "Muslim", "homes", "&", "businesses", "being", "burnt", ",", "Muslims", "being", "beaten", "&", "killed", ",", "mosques", "&", "graveyards", "being", "burnt", "&", "desecrated", "are", "similar", "to", "Jews", "fleeing", "the", "pogrom", "in", "Nazi", "Germany", ".", "The", "world", "must", "accept", "this", "brutal", "reality", "of", "the", "Modi", "fascist", "racist", "regime", "&", "stop", "it", "."]} -{"id": "2091-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Rapist Babu Bajrangi, who confessed to tearing out a fetus from a woman's womb during the 2002 Gujarat anti-Muslim pogrom gets bail. #HappyWomensDay2019 #yehhainayaindia #NamumkinAbMumkinHai", "token_idx_1": 20, "text_start_1": 115, "text_end_1": 121, "date_1": "2019-02", "text_2": "What's happening in Delhi is not a clash between Pro-CAA & anti-CAA protestors. It's an anti-Muslim pogrom planned,sponsored and executed by Sangh Parivar with the help of its members in all the four pillars of the great Indian democracy(legislature,judiciary,executive and media)", "token_idx_2": 17, "text_start_2": 104, "text_end_2": 110, "date_2": "2020-02", "text_1_tokenized": ["Rapist", "Babu", "Bajrangi", ",", "who", "confessed", "to", "tearing", "out", "a", "fetus", "from", "a", "woman's", "womb", "during", "the", "2002", "Gujarat", "anti-Muslim", "pogrom", "gets", "bail", ".", "#HappyWomensDay2019", "#yehhainayaindia", "#NamumkinAbMumkinHai"], "text_2_tokenized": ["What's", "happening", "in", "Delhi", "is", "not", "a", "clash", "between", "Pro-CAA", "&", "anti-CAA", "protestors", ".", "It's", "an", "anti-Muslim", "pogrom", "planned", ",", "sponsored", "and", "executed", "by", "Sangh", "Parivar", "with", "the", "help", "of", "its", "members", "in", "all", "the", "four", "pillars", "of", "the", "great", "Indian", "democracy", "(", "legislature", ",", "judiciary", ",", "executive", "and", "media", ")"]} -{"id": "2092-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "with the kind of independent research capability hasan minhaj and his team at netflix have, it's VERY easy to find out and address the fact that gujarat 2002 wasn't \"riots\" but a state-backed pogrom of muslims and it gets even sadder once you realise hasan is a muslim himself", "token_idx_1": 36, "text_start_1": 192, "text_end_1": 198, "date_1": "2019-02", "text_2": "Word of the day: \"pogrom\". I hate that I know this word. 
\ud83d\ude14 #Delhi", "token_idx_2": 6, "text_start_2": 18, "text_end_2": 24, "date_2": "2020-02", "text_1_tokenized": ["with", "the", "kind", "of", "independent", "research", "capability", "hasan", "minhaj", "and", "his", "team", "at", "netflix", "have", ",", "it's", "VERY", "easy", "to", "find", "out", "and", "address", "the", "fact", "that", "gujarat", "2002", "wasn't", "\"", "riots", "\"", "but", "a", "state-backed", "pogrom", "of", "muslims", "and", "it", "gets", "even", "sadder", "once", "you", "realise", "hasan", "is", "a", "muslim", "himself"], "text_2_tokenized": ["Word", "of", "the", "day", ":", "\"", "pogrom", "\"", ".", "I", "hate", "that", "I", "know", "this", "word", ".", "\ud83d\ude14", "#Delhi"]} -{"id": "2093-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "this is a pogrom in new zealand", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 16, "date_1": "2019-02", "text_2": "Mobs targeting Muslims were allowed to run riot while the police not just looked the other way, it also aided the rioters. This was a state sanctioned anti-Muslim pogrom. PM has still not condemned the violence and the hate speeches that preceded it. #WorldAgainstDelhiPogrom", "token_idx_2": 30, "text_start_2": 163, "text_end_2": 169, "date_2": "2020-02", "text_1_tokenized": ["this", "is", "a", "pogrom", "in", "new", "zealand"], "text_2_tokenized": ["Mobs", "targeting", "Muslims", "were", "allowed", "to", "run", "riot", "while", "the", "police", "not", "just", "looked", "the", "other", "way", ",", "it", "also", "aided", "the", "rioters", ".", "This", "was", "a", "state", "sanctioned", "anti-Muslim", "pogrom", ".", "PM", "has", "still", "not", "condemned", "the", "violence", "and", "the", "hate", "speeches", "that", "preceded", "it", ".", "#WorldAgainstDelhiPogrom"]} -{"id": "2094-pogrom", "word": "pogrom", "label_binary": 0, "text_1": "The Spying agencies are downloading all your personal photos for their facial recognition pogrom. #FacebookDown", "token_idx_1": 13, "text_start_1": 90, "text_end_1": 96, "date_1": "2019-02", "text_2": "Schadenfreude behavior of incumbent minister on Delhi riots is concerned and seems they had pogrom it. Largest Democracy failed in the Capital of India. I Condemned central govt. leadership plus those who are kowtowing them and behaving as stooges. @ArvindKejriwal", "token_idx_2": 14, "text_start_2": 92, "text_end_2": 98, "date_2": "2020-02", "text_1_tokenized": ["The", "Spying", "agencies", "are", "downloading", "all", "your", "personal", "photos", "for", "their", "facial", "recognition", "pogrom", ".", "#FacebookDown"], "text_2_tokenized": ["Schadenfreude", "behavior", "of", "incumbent", "minister", "on", "Delhi", "riots", "is", "concerned", "and", "seems", "they", "had", "pogrom", "it", ".", "Largest", "Democracy", "failed", "in", "the", "Capital", "of", "India", ".", "I", "Condemned", "central", "govt", ".", "leadership", "plus", "those", "who", "are", "kowtowing", "them", "and", "behaving", "as", "stooges", ".", "@ArvindKejriwal"]} -{"id": "2095-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Nigerians and our \"move on\" mentality. Carry out a pogrom on a people. No justice. Lord over them for 50 years. Nigerians: Move On!! Steals someone's intellectual property. Us it as a springboard to fame and wealth. No accountability. 15years later... 
Nigerians: Move on!!", "token_idx_1": 12, "text_start_1": 51, "text_end_1": 57, "date_1": "2019-02", "text_2": "I know it's been said before but we are barely 3 months into 2020 but we've seen state sponsored attacks on students, a pogrom in the national capital, and now a pandemic leading to a total lockdown. This is NOT normal. You don't have to be okay. Take a breather if you need to.", "token_idx_2": 24, "text_start_2": 120, "text_end_2": 126, "date_2": "2020-02", "text_1_tokenized": ["Nigerians", "and", "our", "\"", "move", "on", "\"", "mentality", ".", "Carry", "out", "a", "pogrom", "on", "a", "people", ".", "No", "justice", ".", "Lord", "over", "them", "for", "50", "years", ".", "Nigerians", ":", "Move", "On", "!", "!", "Steals", "someone's", "intellectual", "property", ".", "Us", "it", "as", "a", "springboard", "to", "fame", "and", "wealth", ".", "No", "accountability", ".", "15years", "later", "...", "Nigerians", ":", "Move", "on", "!", "!"], "text_2_tokenized": ["I", "know", "it's", "been", "said", "before", "but", "we", "are", "barely", "3", "months", "into", "2020", "but", "we've", "seen", "state", "sponsored", "attacks", "on", "students", ",", "a", "pogrom", "in", "the", "national", "capital", ",", "and", "now", "a", "pandemic", "leading", "to", "a", "total", "lockdown", ".", "This", "is", "NOT", "normal", ".", "You", "don't", "have", "to", "be", "okay", ".", "Take", "a", "breather", "if", "you", "need", "to", "."]} -{"id": "2096-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "If only media had accorded the status of a Kashmiri to the pandits back then, horrors of the pogrom against a population would have paled the few who are getting thrashed here and there.", "token_idx_1": 19, "text_start_1": 93, "text_end_1": 99, "date_1": "2019-02", "text_2": "People are dying and being targeted. And the biggest problem, for some people is that its being called a 'pogrom' instead of a 'riot.'", "token_idx_2": 22, "text_start_2": 106, "text_end_2": 112, "date_2": "2020-02", "text_1_tokenized": ["If", "only", "media", "had", "accorded", "the", "status", "of", "a", "Kashmiri", "to", "the", "pandits", "back", "then", ",", "horrors", "of", "the", "pogrom", "against", "a", "population", "would", "have", "paled", "the", "few", "who", "are", "getting", "thrashed", "here", "and", "there", "."], "text_2_tokenized": ["People", "are", "dying", "and", "being", "targeted", ".", "And", "the", "biggest", "problem", ",", "for", "some", "people", "is", "that", "its", "being", "called", "a", "'", "pogrom", "'", "instead", "of", "a", "'", "riot", ".", "'"]} -{"id": "2097-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "At what point will we protest against this pogrom in southern Kaduna?", "token_idx_1": 8, "text_start_1": 43, "text_end_1": 49, "date_1": "2019-02", "text_2": "It is obvious that, #WorldAgainstDelhiPogrom because its own government is biased & fuels this pogrom, cannot expect anything lower than this! 
@AdityaMenon22", "token_idx_2": 15, "text_start_2": 99, "text_end_2": 105, "date_2": "2020-02", "text_1_tokenized": ["At", "what", "point", "will", "we", "protest", "against", "this", "pogrom", "in", "southern", "Kaduna", "?"], "text_2_tokenized": ["It", "is", "obvious", "that", ",", "#WorldAgainstDelhiPogrom", "because", "its", "own", "government", "is", "biased", "&", "fuels", "this", "pogrom", ",", "cannot", "expect", "anything", "lower", "than", "this", "!", "@AdityaMenon22"]} -{"id": "2098-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#PoliticsLive #ChrisWilliamson #Antisemitism having watched the video footage what exactly has Williamson said wrong? Tom Watson seems to want to conduct a reverse pogrom of anyone daring to question this made up (or at least exaggerated) Labour 'problem' with antisemistism", "token_idx_1": 24, "text_start_1": 164, "text_end_1": 170, "date_1": "2019-02", "text_2": "by her Sikh bodyguard as an excuse for organising a pogrom in which thousands of Sikhs were murdered #GenocideOfIndianMuslims", "token_idx_2": 10, "text_start_2": 52, "text_end_2": 58, "date_2": "2020-02", "text_1_tokenized": ["#PoliticsLive", "#ChrisWilliamson", "#Antisemitism", "having", "watched", "the", "video", "footage", "what", "exactly", "has", "Williamson", "said", "wrong", "?", "Tom", "Watson", "seems", "to", "want", "to", "conduct", "a", "reverse", "pogrom", "of", "anyone", "daring", "to", "question", "this", "made", "up", "(", "or", "at", "least", "exaggerated", ")", "Labour", "'", "problem", "'", "with", "antisemistism"], "text_2_tokenized": ["by", "her", "Sikh", "bodyguard", "as", "an", "excuse", "for", "organising", "a", "pogrom", "in", "which", "thousands", "of", "Sikhs", "were", "murdered", "#GenocideOfIndianMuslims"]} -{"id": "2099-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "And something that I expect from @sardesairajdeep \"On the eve of 17th anniversary of Godhra, Modi has again led a pogrom of killing misguided youth belonging to the minorities in the Indian Subcontinent.\" #surgicalstrike2 #Balakot", "token_idx_1": 22, "text_start_1": 114, "text_end_1": 120, "date_1": "2019-02", "text_2": "I wonder when all minorities will realize that AAP believes in majority community votes only. 
They don't give a damn about the rest & will never criticize BJP-RSS-Modi-Shah right till 2024 If Delhi pogrom has not woken up people to AAP's reality, what will?", "token_idx_2": 34, "text_start_2": 202, "text_end_2": 208, "date_2": "2020-02", "text_1_tokenized": ["And", "something", "that", "I", "expect", "from", "@sardesairajdeep", "\"", "On", "the", "eve", "of", "17th", "anniversary", "of", "Godhra", ",", "Modi", "has", "again", "led", "a", "pogrom", "of", "killing", "misguided", "youth", "belonging", "to", "the", "minorities", "in", "the", "Indian", "Subcontinent", ".", "\"", "#surgicalstrike2", "#Balakot"], "text_2_tokenized": ["I", "wonder", "when", "all", "minorities", "will", "realize", "that", "AAP", "believes", "in", "majority", "community", "votes", "only", ".", "They", "don't", "give", "a", "damn", "about", "the", "rest", "&", "will", "never", "criticize", "BJP-RSS-Modi-Shah", "right", "till", "2024", "If", "Delhi", "pogrom", "has", "not", "woken", "up", "people", "to", "AAP's", "reality", ",", "what", "will", "?"]} -{"id": "2100-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "And ancient Kano is a centre of learning and civilization, how modern Kano gets to this shameless situation as a result of bad leadership and state sponsored political pogrom remains unclear #KanoRerun #SupplementaryElections", "token_idx_1": 29, "text_start_1": 168, "text_end_1": 174, "date_1": "2019-02", "text_2": "Fox News is really spinning an anti-Muslim pogrom as \"brave few Hindus resisting Muslim persecution\". Nafrat ho to aisi", "token_idx_2": 7, "text_start_2": 43, "text_end_2": 49, "date_2": "2020-02", "text_1_tokenized": ["And", "ancient", "Kano", "is", "a", "centre", "of", "learning", "and", "civilization", ",", "how", "modern", "Kano", "gets", "to", "this", "shameless", "situation", "as", "a", "result", "of", "bad", "leadership", "and", "state", "sponsored", "political", "pogrom", "remains", "unclear", "#KanoRerun", "#SupplementaryElections"], "text_2_tokenized": ["Fox", "News", "is", "really", "spinning", "an", "anti-Muslim", "pogrom", "as", "\"", "brave", "few", "Hindus", "resisting", "Muslim", "persecution", "\"", ".", "Nafrat", "ho", "to", "aisi"]} -{"id": "2101-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#shortstories the duke looked at the ruined remains of the victims of the pogrom and sucked his teeth. \u2018This would never have happened if non-humans hadn't come here' he told his followers. Those nearby edged away, disgusted, but not nearly enough.", "token_idx_1": 13, "text_start_1": 74, "text_end_1": 80, "date_1": "2019-02", "text_2": "An occasional lynching will pave way to a pogrom, eventually.", "token_idx_2": 8, "text_start_2": 42, "text_end_2": 48, "date_2": "2020-02", "text_1_tokenized": ["#shortstories", "the", "duke", "looked", "at", "the", "ruined", "remains", "of", "the", "victims", "of", "the", "pogrom", "and", "sucked", "his", "teeth", ".", "\u2018", "This", "would", "never", "have", "happened", "if", "non-humans", "hadn't", "come", "here", "'", "he", "told", "his", "followers", ".", "Those", "nearby", "edged", "away", ",", "disgusted", ",", "but", "not", "nearly", "enough", "."], "text_2_tokenized": ["An", "occasional", "lynching", "will", "pave", "way", "to", "a", "pogrom", ",", "eventually", "."]} -{"id": "2102-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It's shocking that rogue organisations like the JeI and the JKLF weren't banned till now. 
These two are no less than terrorist organisations responsible for many heinous crimes including the pogrom of the Kashmiri Pandits from the valley. Their leaders should be sent to jails.", "token_idx_1": 31, "text_start_1": 191, "text_end_1": 197, "date_1": "2019-02", "text_2": "Poor, vulnerable Muslims get pushed into ghettoes all their life only for the home they work so hard to build to burn in a state sponsored pogrom. The Gujarat model of development in a nutshell.", "token_idx_2": 27, "text_start_2": 139, "text_end_2": 145, "date_2": "2020-02", "text_1_tokenized": ["It's", "shocking", "that", "rogue", "organisations", "like", "the", "JeI", "and", "the", "JKLF", "weren't", "banned", "till", "now", ".", "These", "two", "are", "no", "less", "than", "terrorist", "organisations", "responsible", "for", "many", "heinous", "crimes", "including", "the", "pogrom", "of", "the", "Kashmiri", "Pandits", "from", "the", "valley", ".", "Their", "leaders", "should", "be", "sent", "to", "jails", "."], "text_2_tokenized": ["Poor", ",", "vulnerable", "Muslims", "get", "pushed", "into", "ghettoes", "all", "their", "life", "only", "for", "the", "home", "they", "work", "so", "hard", "to", "build", "to", "burn", "in", "a", "state", "sponsored", "pogrom", ".", "The", "Gujarat", "model", "of", "development", "in", "a", "nutshell", "."]} -{"id": "2103-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It was very nice and refreshing to play at a simchah without the necessity to survive the pogrom. The food was great, too. #differentperspective", "token_idx_1": 17, "text_start_1": 90, "text_end_1": 96, "date_1": "2019-02", "text_2": "stop calling it a clash when it's a pogrom. call it what it is: a state sponsored attack on muslims. an attempt at their extermination, an attempt to instill fear.", "token_idx_2": 8, "text_start_2": 36, "text_end_2": 42, "date_2": "2020-02", "text_1_tokenized": ["It", "was", "very", "nice", "and", "refreshing", "to", "play", "at", "a", "simchah", "without", "the", "necessity", "to", "survive", "the", "pogrom", ".", "The", "food", "was", "great", ",", "too", ".", "#differentperspective"], "text_2_tokenized": ["stop", "calling", "it", "a", "clash", "when", "it's", "a", "pogrom", ".", "call", "it", "what", "it", "is", ":", "a", "state", "sponsored", "attack", "on", "muslims", ".", "an", "attempt", "at", "their", "extermination", ",", "an", "attempt", "to", "instill", "fear", "."]} -{"id": "2104-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "My husband is playing his guitar and singing \u201cA Hard Rain's Gonna Fall\u201d as loud as he can\u2014all the verses\u2014which means that he's expecting a pogrom or a hostile takeover of our electrical grid or something.", "token_idx_1": 31, "text_start_1": 139, "text_end_1": 145, "date_1": "2019-02", "text_2": "Hey people, as #DelhiBurns and yet another state backed pogrom takes place, isn't it obvious what I keep saying: the purpose of state, contra its 247 propaganda din, is not your protection, it's just the opposite: it exists for predation?", "token_idx_2": 10, "text_start_2": 56, "text_end_2": 62, "date_2": "2020-02", "text_1_tokenized": ["My", "husband", "is", "playing", "his", "guitar", "and", "singing", "\u201c", "A", "Hard", "Rain's", "Gonna", "Fall", "\u201d", "as", "loud", "as", "he", "can", "\u2014", "all", "the", "verses", "\u2014", "which", "means", "that", "he's", "expecting", "a", "pogrom", "or", "a", "hostile", "takeover", "of", "our", "electrical", "grid", "or", "something", "."], "text_2_tokenized": ["Hey", 
"people", ",", "as", "#DelhiBurns", "and", "yet", "another", "state", "backed", "pogrom", "takes", "place", ",", "isn't", "it", "obvious", "what", "I", "keep", "saying", ":", "the", "purpose", "of", "state", ",", "contra", "its", "247", "propaganda", "din", ",", "is", "not", "your", "protection", ",", "it's", "just", "the", "opposite", ":", "it", "exists", "for", "predation", "?"]} -{"id": "2105-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "to be clear, Kirstjen Nielsen the appointed head of DHS, lied under oath about the FACT that she is running a pogrom against children of color in this country, and the press are melting down about something a legitimately elected official did NOT say.", "token_idx_1": 23, "text_start_1": 110, "text_end_1": 116, "date_1": "2019-02", "text_2": "Mayhem dawned when Trump was here. It seems planned and calculated. Blaming @KapilMishra_IND or calling it a muslim pogrom is an act of desperation and bigotry. #DelhiRiots", "token_idx_2": 20, "text_start_2": 116, "text_end_2": 122, "date_2": "2020-02", "text_1_tokenized": ["to", "be", "clear", ",", "Kirstjen", "Nielsen", "the", "appointed", "head", "of", "DHS", ",", "lied", "under", "oath", "about", "the", "FACT", "that", "she", "is", "running", "a", "pogrom", "against", "children", "of", "color", "in", "this", "country", ",", "and", "the", "press", "are", "melting", "down", "about", "something", "a", "legitimately", "elected", "official", "did", "NOT", "say", "."], "text_2_tokenized": ["Mayhem", "dawned", "when", "Trump", "was", "here", ".", "It", "seems", "planned", "and", "calculated", ".", "Blaming", "@KapilMishra_IND", "or", "calling", "it", "a", "muslim", "pogrom", "is", "an", "act", "of", "desperation", "and", "bigotry", ".", "#DelhiRiots"]} -{"id": "2106-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "People keep discussing Jedwabne, but they need to look at Szczuczyn pogrom as well. @heiligtag , have you read anything of this town before? I just stumbled upon it.", "token_idx_1": 12, "text_start_1": 68, "text_end_1": 74, "date_1": "2019-02", "text_2": "The diabolical swiftness of anti- Muslim pogrom proved this. The govt behaves with assumed impunity that no one outside India will notice. But thanks to the courageous journalism & the footage, images, videos that we could circulate, this time the world is not ignoring. 1/n", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 47, "date_2": "2020-02", "text_1_tokenized": ["People", "keep", "discussing", "Jedwabne", ",", "but", "they", "need", "to", "look", "at", "Szczuczyn", "pogrom", "as", "well", ".", "@heiligtag", ",", "have", "you", "read", "anything", "of", "this", "town", "before", "?", "I", "just", "stumbled", "upon", "it", "."], "text_2_tokenized": ["The", "diabolical", "swiftness", "of", "anti", "-", "Muslim", "pogrom", "proved", "this", ".", "The", "govt", "behaves", "with", "assumed", "impunity", "that", "no", "one", "outside", "India", "will", "notice", ".", "But", "thanks", "to", "the", "courageous", "journalism", "&", "the", "footage", ",", "images", ",", "videos", "that", "we", "could", "circulate", ",", "this", "time", "the", "world", "is", "not", "ignoring", ".", "1", "/", "n"]} -{"id": "2107-pogrom", "word": "pogrom", "label_binary": 0, "text_1": "any pogrom is a good pogrom, nawmeen", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 10, "date_1": "2019-02", "text_2": "The full impact of Delhi pogrom is not out yet. My feeling, its far more gory, horrifying, blood and guts. 
The aftermath of displacement is yet to assessed.", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 31, "date_2": "2020-02", "text_1_tokenized": ["any", "pogrom", "is", "a", "good", "pogrom", ",", "nawmeen"], "text_2_tokenized": ["The", "full", "impact", "of", "Delhi", "pogrom", "is", "not", "out", "yet", ".", "My", "feeling", ",", "its", "far", "more", "gory", ",", "horrifying", ",", "blood", "and", "guts", ".", "The", "aftermath", "of", "displacement", "is", "yet", "to", "assessed", "."]} -{"id": "2108-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "3 of our cast members are Quitting caffeine on our first five show weekend. There could really be a pogrom in this place @FiddlerNYC", "token_idx_1": 20, "text_start_1": 100, "text_end_1": 106, "date_1": "2019-02", "text_2": "A class 10 student, of shiv Vihar, at d Idgah gave her Sanskrit exam ystrday.Her maths exam is on12. She shared how impossible it is 2 study in d noise.She survived a pogrom,is living in makeshift tent n is expectd to live as if all is normal. @msisodia @AtishiAAP is this fair?", "token_idx_2": 35, "text_start_2": 167, "text_end_2": 173, "date_2": "2020-02", "text_1_tokenized": ["3", "of", "our", "cast", "members", "are", "Quitting", "caffeine", "on", "our", "first", "five", "show", "weekend", ".", "There", "could", "really", "be", "a", "pogrom", "in", "this", "place", "@FiddlerNYC"], "text_2_tokenized": ["A", "class", "10", "student", ",", "of", "shiv", "Vihar", ",", "at", "d", "Idgah", "gave", "her", "Sanskrit", "exam", "ystrday.Her", "maths", "exam", "is", "on12", ".", "She", "shared", "how", "impossible", "it", "is", "2", "study", "in", "d", "noise.She", "survived", "a", "pogrom", ",", "is", "living", "in", "makeshift", "tent", "n", "is", "expectd", "to", "live", "as", "if", "all", "is", "normal", ".", "@msisodia", "@AtishiAAP", "is", "this", "fair", "?"]} -{"id": "2109-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "March came with great divide, Ethiopian airlines, Collapse of a building killing school children in Lagos, Fire incident in onitsha, now a pogrom in New Zealand killing about 40 people ....God safe us from us!!!", "token_idx_1": 26, "text_start_1": 139, "text_end_1": 145, "date_1": "2019-02", "text_2": "Tusi Gabbard saying that Hinduphobia is behind outrage over the Hindutva pogrom riots aimed at ethnic cleansing Muslim neighborhoods in Delhi is very on brand.", "token_idx_2": 11, "text_start_2": 73, "text_end_2": 79, "date_2": "2020-02", "text_1_tokenized": ["March", "came", "with", "great", "divide", ",", "Ethiopian", "airlines", ",", "Collapse", "of", "a", "building", "killing", "school", "children", "in", "Lagos", ",", "Fire", "incident", "in", "onitsha", ",", "now", "a", "pogrom", "in", "New", "Zealand", "killing", "about", "40", "people", "...", "God", "safe", "us", "from", "us", "!", "!", "!"], "text_2_tokenized": ["Tusi", "Gabbard", "saying", "that", "Hinduphobia", "is", "behind", "outrage", "over", "the", "Hindutva", "pogrom", "riots", "aimed", "at", "ethnic", "cleansing", "Muslim", "neighborhoods", "in", "Delhi", "is", "very", "on", "brand", "."]} -{"id": "2110-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The last pogrom in France took place on 29 February 1848 in the Alsace", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 15, "date_1": "2019-02", "text_2": "A lot to think about for all of us: It wasn't a pogrom but it was 1sided Try not to defend the indefensible (from any side) Try to understand how people in line of clear & present danger feel", 
"token_idx_2": 13, "text_start_2": 48, "text_end_2": 54, "date_2": "2020-02", "text_1_tokenized": ["The", "last", "pogrom", "in", "France", "took", "place", "on", "29", "February", "1848", "in", "the", "Alsace"], "text_2_tokenized": ["A", "lot", "to", "think", "about", "for", "all", "of", "us", ":", "It", "wasn't", "a", "pogrom", "but", "it", "was", "1sided", "Try", "not", "to", "defend", "the", "indefensible", "(", "from", "any", "side", ")", "Try", "to", "understand", "how", "people", "in", "line", "of", "clear", "&", "present", "danger", "feel"]} -{"id": "2111-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Rahm Emmanuel claims @IlhanMN is \"associating herself with calamities from the Spanish Inquisition to the Russian pogroms to the Holocaust\" Alright Rahm, let's talk about the historical evil YOU associated yourself when you conspired to cover up a murder of an unarmed Black kid.", "token_idx_1": 17, "text_start_1": 114, "text_end_1": 121, "date_1": "2019-02", "text_2": "Looks like Erdogan's boys are starting pogroms against Syrian refugees in Turkey to drive them to European borders. He's going all in and wants to get 3 million refugees into Europe. #IStandWithGreece", "token_idx_2": 6, "text_start_2": 39, "text_end_2": 46, "date_2": "2020-02", "text_1_tokenized": ["Rahm", "Emmanuel", "claims", "@IlhanMN", "is", "\"", "associating", "herself", "with", "calamities", "from", "the", "Spanish", "Inquisition", "to", "the", "Russian", "pogroms", "to", "the", "Holocaust", "\"", "Alright", "Rahm", ",", "let's", "talk", "about", "the", "historical", "evil", "YOU", "associated", "yourself", "when", "you", "conspired", "to", "cover", "up", "a", "murder", "of", "an", "unarmed", "Black", "kid", "."], "text_2_tokenized": ["Looks", "like", "Erdogan's", "boys", "are", "starting", "pogroms", "against", "Syrian", "refugees", "in", "Turkey", "to", "drive", "them", "to", "European", "borders", ".", "He's", "going", "all", "in", "and", "wants", "to", "get", "3", "million", "refugees", "into", "Europe", ".", "#IStandWithGreece"]} -{"id": "2112-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Republican leaders think Jewish-Americans are Israeli, crazy antisemetic, but also how they miss terror White Nationalism, filled w/ rhetoric of pogroms past, causes us. They think we fear Muslims like Israeli but they are our friends & allies. We fear them, like Americans & Jews", "token_idx_1": 24, "text_start_1": 145, "text_end_1": 152, "date_1": "2019-02", "text_2": "Jewish population! Let this sink in a bit . 
If not for pogroms and other Jewish disasters throughout history with natural death , we would be at 180million today +-", "token_idx_2": 13, "text_start_2": 55, "text_end_2": 62, "date_2": "2020-02", "text_1_tokenized": ["Republican", "leaders", "think", "Jewish-Americans", "are", "Israeli", ",", "crazy", "antisemetic", ",", "but", "also", "how", "they", "miss", "terror", "White", "Nationalism", ",", "filled", "w", "/", "rhetoric", "of", "pogroms", "past", ",", "causes", "us", ".", "They", "think", "we", "fear", "Muslims", "like", "Israeli", "but", "they", "are", "our", "friends", "&", "allies", ".", "We", "fear", "them", ",", "like", "Americans", "&", "Jews"], "text_2_tokenized": ["Jewish", "population", "!", "Let", "this", "sink", "in", "a", "bit", ".", "If", "not", "for", "pogroms", "and", "other", "Jewish", "disasters", "throughout", "history", "with", "natural", "death", ",", "we", "would", "be", "at", "180million", "today", "+", "-"]} -{"id": "2113-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "A big thank you to @RealClaudiaGold and to everyone @JewishBookWeek for your kindness and support this week. It was a pleasure chairing events with Robert Chandler and Zachary Leader and I attended some fab events from Roth & Isaiah Berlin to Russian pogroms & Jewish refugees.", "token_idx_1": 43, "text_start_1": 255, "text_end_1": 262, "date_1": "2019-02", "text_2": "If Tahir Hussain is jailed then all the Indian Muslims should be jailed for existing and surviving in pogroms here.", "token_idx_2": 18, "text_start_2": 102, "text_end_2": 109, "date_2": "2020-02", "text_1_tokenized": ["A", "big", "thank", "you", "to", "@RealClaudiaGold", "and", "to", "everyone", "@JewishBookWeek", "for", "your", "kindness", "and", "support", "this", "week", ".", "It", "was", "a", "pleasure", "chairing", "events", "with", "Robert", "Chandler", "and", "Zachary", "Leader", "and", "I", "attended", "some", "fab", "events", "from", "Roth", "&", "Isaiah", "Berlin", "to", "Russian", "pogroms", "&", "Jewish", "refugees", "."], "text_2_tokenized": ["If", "Tahir", "Hussain", "is", "jailed", "then", "all", "the", "Indian", "Muslims", "should", "be", "jailed", "for", "existing", "and", "surviving", "in", "pogroms", "here", "."]} -{"id": "2114-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Well it's finally happening. Internet Archive is taking down it's less screeching more substantive anti-Khazar stuff. Pyotr Krasnov's historical works about the revolution the Khazar Workers Bundt, and the pogroms is now offline. It only ever had a few hundred views.", "token_idx_1": 32, "text_start_1": 206, "text_end_1": 213, "date_1": "2019-02", "text_2": "Trump visiting #India while there are anti-muslims pogroms happening there. 
You'd almost think they'd doing it to please him.", "token_idx_2": 7, "text_start_2": 51, "text_end_2": 58, "date_2": "2020-02", "text_1_tokenized": ["Well", "it's", "finally", "happening", ".", "Internet", "Archive", "is", "taking", "down", "it's", "less", "screeching", "more", "substantive", "anti-Khazar", "stuff", ".", "Pyotr", "Krasnov's", "historical", "works", "about", "the", "revolution", "the", "Khazar", "Workers", "Bundt", ",", "and", "the", "pogroms", "is", "now", "offline", ".", "It", "only", "ever", "had", "a", "few", "hundred", "views", "."], "text_2_tokenized": ["Trump", "visiting", "#India", "while", "there", "are", "anti-muslims", "pogroms", "happening", "there", ".", "You'd", "almost", "think", "they'd", "doing", "it", "to", "please", "him", "."]} -{"id": "2115-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I know there's a periodic conversation that happens in YA about Jewish books not about the Holocaust/pogroms/Jewish pain and how there aren't many and should be more and if that's something you give a damn about, I have good news", "token_idx_1": 18, "text_start_1": 101, "text_end_1": 108, "date_1": "2019-02", "text_2": "first concentration camps in china, now anti muslim pogroms in india? idek what to say", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 59, "date_2": "2020-02", "text_1_tokenized": ["I", "know", "there's", "a", "periodic", "conversation", "that", "happens", "in", "YA", "about", "Jewish", "books", "not", "about", "the", "Holocaust", "/", "pogroms", "/", "Jewish", "pain", "and", "how", "there", "aren't", "many", "and", "should", "be", "more", "and", "if", "that's", "something", "you", "give", "a", "damn", "about", ",", "I", "have", "good", "news"], "text_2_tokenized": ["first", "concentration", "camps", "in", "china", ",", "now", "anti", "muslim", "pogroms", "in", "india", "?", "idek", "what", "to", "say"]} -{"id": "2116-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#Oscars2019 one of Dame Helen's (Mirren's) parents come from old Czarist nobility... but not the ones that instigated pogroms. .. I'd have a heart attack just watching FREE SOLO... what that guy put his family through...", "token_idx_1": 21, "text_start_1": 118, "text_end_1": 125, "date_1": "2019-02", "text_2": "The #RSS is the oldest & largest paramilitary in the world. The RSS is responsible for at least a dozen major pogroms in India since 1947. The RSS is a terrorist organization. The RSS should be banned & its leaders denied international visas.", "token_idx_2": 22, "text_start_2": 114, "text_end_2": 121, "date_2": "2020-02", "text_1_tokenized": ["#Oscars2019", "one", "of", "Dame", "Helen's", "(", "Mirren's", ")", "parents", "come", "from", "old", "Czarist", "nobility", "...", "but", "not", "the", "ones", "that", "instigated", "pogroms", ". 
..", "I'd", "have", "a", "heart", "attack", "just", "watching", "FREE", "SOLO", "...", "what", "that", "guy", "put", "his", "family", "through", "..."], "text_2_tokenized": ["The", "#RSS", "is", "the", "oldest", "&", "largest", "paramilitary", "in", "the", "world", ".", "The", "RSS", "is", "responsible", "for", "at", "least", "a", "dozen", "major", "pogroms", "in", "India", "since", "1947", ".", "The", "RSS", "is", "a", "terrorist", "organization", ".", "The", "RSS", "should", "be", "banned", "&", "its", "leaders", "denied", "international", "visas", "."]} -{"id": "2117-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I swore to myself I would steel myself to the regular occurrence of pogroms after Squirrel Hill but how do you do that? How do you not descend into despair when this happens regularly? How do you live with yourself accepting this as the new normal?", "token_idx_1": 13, "text_start_1": 68, "text_end_1": 75, "date_1": "2019-02", "text_2": "If you say 'two sides' about pogroms, you're on the wrong side", "token_idx_2": 8, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-02", "text_1_tokenized": ["I", "swore", "to", "myself", "I", "would", "steel", "myself", "to", "the", "regular", "occurrence", "of", "pogroms", "after", "Squirrel", "Hill", "but", "how", "do", "you", "do", "that", "?", "How", "do", "you", "not", "descend", "into", "despair", "when", "this", "happens", "regularly", "?", "How", "do", "you", "live", "with", "yourself", "accepting", "this", "as", "the", "new", "normal", "?"], "text_2_tokenized": ["If", "you", "say", "'", "two", "sides", "'", "about", "pogroms", ",", "you're", "on", "the", "wrong", "side"]} -{"id": "2118-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Individual violence against Muslims and state violence against Muslims work together. We Jews have a useful reminder that they are connected. We call them pogroms. One word for both. So if you support surveillance of Muslims. War in Muslim countries. You support these attacks.", "token_idx_1": 26, "text_start_1": 155, "text_end_1": 162, "date_1": "2019-02", "text_2": "Here's what we're facing: Trump is embracing genocidal Modhi who's leading Nazi-style pogroms against Indian Muslims. Authoritarianism is taking over globally. Sanders is the only candidate who can beat Trump. We, Bernie's supporters, MUST prevail.", "token_idx_2": 13, "text_start_2": 86, "text_end_2": 93, "date_2": "2020-02", "text_1_tokenized": ["Individual", "violence", "against", "Muslims", "and", "state", "violence", "against", "Muslims", "work", "together", ".", "We", "Jews", "have", "a", "useful", "reminder", "that", "they", "are", "connected", ".", "We", "call", "them", "pogroms", ".", "One", "word", "for", "both", ".", "So", "if", "you", "support", "surveillance", "of", "Muslims", ".", "War", "in", "Muslim", "countries", ".", "You", "support", "these", "attacks", "."], "text_2_tokenized": ["Here's", "what", "we're", "facing", ":", "Trump", "is", "embracing", "genocidal", "Modhi", "who's", "leading", "Nazi-style", "pogroms", "against", "Indian", "Muslims", ".", "Authoritarianism", "is", "taking", "over", "globally", ".", "Sanders", "is", "the", "only", "candidate", "who", "can", "beat", "Trump", ".", "We", ",", "Bernie's", "supporters", ",", "MUST", "prevail", "."]} -{"id": "2119-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Lebensraum ... 
The mindset that caused the Germany-led pogroms of 1940 - 1944 is the same one that will cause another Yoruba/Igbo uprising in Nigeria in the not so distant future.", "token_idx_1": 8, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-02", "text_2": "Modi never claimed or pretended he was going to unify the country. He thrives on bigotry and pogroms. Kejriwal is the real divider in chief. He and his party must never be trusted ever again.", "token_idx_2": 18, "text_start_2": 93, "text_end_2": 100, "date_2": "2020-02", "text_1_tokenized": ["Lebensraum", "...", "The", "mindset", "that", "caused", "the", "Germany-led", "pogroms", "of", "1940 - 1944", "is", "the", "same", "one", "that", "will", "cause", "another", "Yoruba", "/", "Igbo", "uprising", "in", "Nigeria", "in", "the", "not", "so", "distant", "future", "."], "text_2_tokenized": ["Modi", "never", "claimed", "or", "pretended", "he", "was", "going", "to", "unify", "the", "country", ".", "He", "thrives", "on", "bigotry", "and", "pogroms", ".", "Kejriwal", "is", "the", "real", "divider", "in", "chief", ".", "He", "and", "his", "party", "must", "never", "be", "trusted", "ever", "again", "."]} -{"id": "2120-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#Apollo11: I asked my 85-yr-old grandfather that day, guy who escaped pogroms and Czar's army, \u201cJake did you ever think you'd see men walking on the moon?\u201d \u201cNeil, in my whole life, when I hear any big news, the first thing I think is, is it good for the Jews or bad for the Jews?\u201d", "token_idx_1": 15, "text_start_1": 70, "text_end_1": 77, "date_1": "2019-02", "text_2": "They're not communal riots, they're pogroms. Muslims in Delhi are being lynched, they aren't being subjected to mere riots at the words of BJP's Kapil Mishra.", "token_idx_2": 6, "text_start_2": 36, "text_end_2": 43, "date_2": "2020-02", "text_1_tokenized": ["#Apollo11", ":", "I", "asked", "my", "85", "-", "yr-old", "grandfather", "that", "day", ",", "guy", "who", "escaped", "pogroms", "and", "Czar's", "army", ",", "\u201c", "Jake", "did", "you", "ever", "think", "you'd", "see", "men", "walking", "on", "the", "moon", "?", "\u201d", "\u201c", "Neil", ",", "in", "my", "whole", "life", ",", "when", "I", "hear", "any", "big", "news", ",", "the", "first", "thing", "I", "think", "is", ",", "is", "it", "good", "for", "the", "Jews", "or", "bad", "for", "the", "Jews", "?", "\u201d"], "text_2_tokenized": ["They're", "not", "communal", "riots", ",", "they're", "pogroms", ".", "Muslims", "in", "Delhi", "are", "being", "lynched", ",", "they", "aren't", "being", "subjected", "to", "mere", "riots", "at", "the", "words", "of", "BJP's", "Kapil", "Mishra", "."]} -{"id": "2121-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "All were witnesses to the manner in which 1, the Staff and the entire army conducted ourselves with regard to anti-Semitism and the pogroms that arose from it.", "token_idx_1": 24, "text_start_1": 132, "text_end_1": 139, "date_1": "2019-02", "text_2": "Indian Sikh community stood with Muslims in the all anti-Muslim pogroms, even in Shaheen Bag. The aim of the terrorists who attacked the Gurudwara is to destroy the Muslim-Sikh Unity in India, whoever is behind it. 
@OpusOfAli @irenaakbar", "token_idx_2": 10, "text_start_2": 64, "text_end_2": 71, "date_2": "2020-02", "text_1_tokenized": ["All", "were", "witnesses", "to", "the", "manner", "in", "which", "1", ",", "the", "Staff", "and", "the", "entire", "army", "conducted", "ourselves", "with", "regard", "to", "anti-Semitism", "and", "the", "pogroms", "that", "arose", "from", "it", "."], "text_2_tokenized": ["Indian", "Sikh", "community", "stood", "with", "Muslims", "in", "the", "all", "anti-Muslim", "pogroms", ",", "even", "in", "Shaheen", "Bag", ".", "The", "aim", "of", "the", "terrorists", "who", "attacked", "the", "Gurudwara", "is", "to", "destroy", "the", "Muslim-Sikh", "Unity", "in", "India", ",", "whoever", "is", "behind", "it", ".", "@OpusOfAli", "@irenaakbar"]} -{"id": "2122-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Two more were Walter Melianovich, head of the GOP's Byelorussian unit. Melianovich worked closely with many Nazi groups. Bohdan Fedorak was a leader of \"Ukrainians for Bush.\" Fedorak headed a Nazi group involved in anti-Jewish wartime pogroms. -Carla Binion", "token_idx_1": 42, "text_start_1": 235, "text_end_1": 242, "date_1": "2019-02", "text_2": "For centuries, India has been a pluralistic, multi ethnic and multi religious country. The state pogroms against Sikhs in 1984, Muslims in 2002 and 2020 and senior govt figures advocating for an India free of Christians, Sikhs and Muslims is a far cry from what India was.", "token_idx_2": 18, "text_start_2": 97, "text_end_2": 104, "date_2": "2020-02", "text_1_tokenized": ["Two", "more", "were", "Walter", "Melianovich", ",", "head", "of", "the", "GOP's", "Byelorussian", "unit", ".", "Melianovich", "worked", "closely", "with", "many", "Nazi", "groups", ".", "Bohdan", "Fedorak", "was", "a", "leader", "of", "\"", "Ukrainians", "for", "Bush", ".", "\"", "Fedorak", "headed", "a", "Nazi", "group", "involved", "in", "anti-Jewish", "wartime", "pogroms", ".", "-", "Carla", "Binion"], "text_2_tokenized": ["For", "centuries", ",", "India", "has", "been", "a", "pluralistic", ",", "multi", "ethnic", "and", "multi", "religious", "country", ".", "The", "state", "pogroms", "against", "Sikhs", "in", "1984", ",", "Muslims", "in", "2002", "and", "2020", "and", "senior", "govt", "figures", "advocating", "for", "an", "India", "free", "of", "Christians", ",", "Sikhs", "and", "Muslims", "is", "a", "far", "cry", "from", "what", "India", "was", "."]} -{"id": "2123-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Terrible terrible news comes in from #Christchurch yet we wonder where all hate, xenophobia and bigotry, people spew off and online end up? this is where it leads to. Words aren't merely words. Speaking hate incite, provoke and legitimise genocides, pogroms and holocausts.", "token_idx_1": 47, "text_start_1": 250, "text_end_1": 257, "date_1": "2019-02", "text_2": "Amit Shah can stop these Delhi riots in 1 hour if he wants to. But he wants riots to spread across the country. He wants to provoke every Muslim in India to react against Hindus and then state controlled pogroms. BJP voters.. 
You are the real murderers.", "token_idx_2": 41, "text_start_2": 204, "text_end_2": 211, "date_2": "2020-02", "text_1_tokenized": ["Terrible", "terrible", "news", "comes", "in", "from", "#Christchurch", "yet", "we", "wonder", "where", "all", "hate", ",", "xenophobia", "and", "bigotry", ",", "people", "spew", "off", "and", "online", "end", "up", "?", "this", "is", "where", "it", "leads", "to", ".", "Words", "aren't", "merely", "words", ".", "Speaking", "hate", "incite", ",", "provoke", "and", "legitimise", "genocides", ",", "pogroms", "and", "holocausts", "."], "text_2_tokenized": ["Amit", "Shah", "can", "stop", "these", "Delhi", "riots", "in", "1", "hour", "if", "he", "wants", "to", ".", "But", "he", "wants", "riots", "to", "spread", "across", "the", "country", ".", "He", "wants", "to", "provoke", "every", "Muslim", "in", "India", "to", "react", "against", "Hindus", "and", "then", "state", "controlled", "pogroms", ".", "BJP", "voters", "..", "You", "are", "the", "real", "murderers", "."]} -{"id": "2124-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I heard about the #Sumgait pogroms when I was a schoolboy, probably 5-6th grade. I asked people at school and around me - how a single Armenian man had such a power that he led Azerbaijanis to kill Armenians, and how those Azerbaijanis could not understand what is going on?", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 34, "date_1": "2019-02", "text_2": "pogroms are not spontaneous it takes a whole generation of brain washing to do them the hatred in India will out live Modi", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 7, "date_2": "2020-02", "text_1_tokenized": ["I", "heard", "about", "the", "#Sumgait", "pogroms", "when", "I", "was", "a", "schoolboy", ",", "probably", "5-6", "th", "grade", ".", "I", "asked", "people", "at", "school", "and", "around", "me", "-", "how", "a", "single", "Armenian", "man", "had", "such", "a", "power", "that", "he", "led", "Azerbaijanis", "to", "kill", "Armenians", ",", "and", "how", "those", "Azerbaijanis", "could", "not", "understand", "what", "is", "going", "on", "?"], "text_2_tokenized": ["pogroms", "are", "not", "spontaneous", "it", "takes", "a", "whole", "generation", "of", "brain", "washing", "to", "do", "them", "the", "hatred", "in", "India", "will", "out", "live", "Modi"]} -{"id": "2125-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "When the pogroms start here, where are American Jews going to flee?", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 16, "date_1": "2019-02", "text_2": "Twitter allowing fascist far-right hashtag to trend with hate speech that led to recent pogroms #SupportHinduEconomy wheres @jack who donated to Tulsi Gabbard", "token_idx_2": 14, "text_start_2": 88, "text_end_2": 95, "date_2": "2020-02", "text_1_tokenized": ["When", "the", "pogroms", "start", "here", ",", "where", "are", "American", "Jews", "going", "to", "flee", "?"], "text_2_tokenized": ["Twitter", "allowing", "fascist", "far-right", "hashtag", "to", "trend", "with", "hate", "speech", "that", "led", "to", "recent", "pogroms", "#SupportHinduEconomy", "wheres", "@jack", "who", "donated", "to", "Tulsi", "Gabbard"]} -{"id": "2126-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "In choosing a political regime, we inadvertently also tend to choose the varieties of violence they generate, says a Sociologist. 
What else sums up the trading of political charges among BJP & Congress upon pogroms of 1984 & 2002.", "token_idx_1": 37, "text_start_1": 211, "text_end_1": 218, "date_1": "2019-02", "text_2": "There are pogroms against Muslims happening in India who don't have d\u2026.", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 17, "date_2": "2020-02", "text_1_tokenized": ["In", "choosing", "a", "political", "regime", ",", "we", "inadvertently", "also", "tend", "to", "choose", "the", "varieties", "of", "violence", "they", "generate", ",", "says", "a", "Sociologist", ".", "What", "else", "sums", "up", "the", "trading", "of", "political", "charges", "among", "BJP", "&", "Congress", "upon", "pogroms", "of", "1984", "&", "2002", "."], "text_2_tokenized": ["There", "are", "pogroms", "against", "Muslims", "happening", "in", "India", "who", "don't", "have", "d", "\u2026", "."]} -{"id": "2127-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#auspol'Fearless girl' looks like a poster frm th old Soviet Union at th height of th pogroms and gulags.", "token_idx_1": 17, "text_start_1": 86, "text_end_1": 93, "date_1": "2019-02", "text_2": "Shocking scenes of Muslim victims of violence being dragged through the streets of Delhi by Hindu lynch mobs. India has always been prone to pogroms against religious minorities, but the frequency appears to be increasing", "token_idx_2": 25, "text_start_2": 141, "text_end_2": 148, "date_2": "2020-02", "text_1_tokenized": ["#auspol'Fearless", "girl", "'", "looks", "like", "a", "poster", "frm", "th", "old", "Soviet", "Union", "at", "th", "height", "of", "th", "pogroms", "and", "gulags", "."], "text_2_tokenized": ["Shocking", "scenes", "of", "Muslim", "victims", "of", "violence", "being", "dragged", "through", "the", "streets", "of", "Delhi", "by", "Hindu", "lynch", "mobs", ".", "India", "has", "always", "been", "prone", "to", "pogroms", "against", "religious", "minorities", ",", "but", "the", "frequency", "appears", "to", "be", "increasing"]} -{"id": "2128-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Interesting to note that in most of the past French Army routinely was used to put down social unrest. Everything from labor strikes and street rebellions to pogroms. All normal. Today the Army, as conservative as much of it is, seems to want nothing to do with that.", "token_idx_1": 28, "text_start_1": 158, "text_end_1": 165, "date_1": "2019-02", "text_2": "So many of the journalists who covered the #DelhiRiots / pogroms these last two days are in their 20s and 30s. They've faced mobs, threats, bullets, risked their lives. And done their jobs. 
In a time of malignant anchors and hate-broadcasting, their courage is a shining light.", "token_idx_2": 10, "text_start_2": 57, "text_end_2": 64, "date_2": "2020-02", "text_1_tokenized": ["Interesting", "to", "note", "that", "in", "most", "of", "the", "past", "French", "Army", "routinely", "was", "used", "to", "put", "down", "social", "unrest", ".", "Everything", "from", "labor", "strikes", "and", "street", "rebellions", "to", "pogroms", ".", "All", "normal", ".", "Today", "the", "Army", ",", "as", "conservative", "as", "much", "of", "it", "is", ",", "seems", "to", "want", "nothing", "to", "do", "with", "that", "."], "text_2_tokenized": ["So", "many", "of", "the", "journalists", "who", "covered", "the", "#DelhiRiots", "/", "pogroms", "these", "last", "two", "days", "are", "in", "their", "20s", "and", "30s", ".", "They've", "faced", "mobs", ",", "threats", ",", "bullets", ",", "risked", "their", "lives", ".", "And", "done", "their", "jobs", ".", "In", "a", "time", "of", "malignant", "anchors", "and", "hate-broadcasting", ",", "their", "courage", "is", "a", "shining", "light", "."]} -{"id": "2129-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Supreme court is not willing to do justice on Babri Masjid. Mediation will not help as majority community will bulldoze the minority community. Only thing to be seen, will India again disappoint it's minorities fighting for justice? We have seen it in pogroms cases.", "token_idx_1": 46, "text_start_1": 252, "text_end_1": 259, "date_1": "2019-02", "text_2": "The current Indian government has been undoubtedly most innovative one where they try to discover a 5 trillion dollar economy by digging through the debris of riots and pogroms...#WorldAgainstDelhiPogrom", "token_idx_2": 28, "text_start_2": 169, "text_end_2": 176, "date_2": "2020-02", "text_1_tokenized": ["Supreme", "court", "is", "not", "willing", "to", "do", "justice", "on", "Babri", "Masjid", ".", "Mediation", "will", "not", "help", "as", "majority", "community", "will", "bulldoze", "the", "minority", "community", ".", "Only", "thing", "to", "be", "seen", ",", "will", "India", "again", "disappoint", "it's", "minorities", "fighting", "for", "justice", "?", "We", "have", "seen", "it", "in", "pogroms", "cases", "."], "text_2_tokenized": ["The", "current", "Indian", "government", "has", "been", "undoubtedly", "most", "innovative", "one", "where", "they", "try", "to", "discover", "a", "5", "trillion", "dollar", "economy", "by", "digging", "through", "the", "debris", "of", "riots", "and", "pogroms", "...", "#WorldAgainstDelhiPogrom"]} -{"id": "2130-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Martin Luther preached toleration of the Jews in his early sermons as a way to bash the Catholics. When he realized that they weren't going to convert en masse to Protestantism he spent his dying years calling for pogroms. 
Fuck Evangelical philosemites forever.", "token_idx_1": 39, "text_start_1": 214, "text_end_1": 221, "date_1": "2019-02", "text_2": "one of grounds on which a woman can be summed up as forced conversions, pogroms, and centuries of violence?", "token_idx_2": 15, "text_start_2": 72, "text_end_2": 79, "date_2": "2020-02", "text_1_tokenized": ["Martin", "Luther", "preached", "toleration", "of", "the", "Jews", "in", "his", "early", "sermons", "as", "a", "way", "to", "bash", "the", "Catholics", ".", "When", "he", "realized", "that", "they", "weren't", "going", "to", "convert", "en", "masse", "to", "Protestantism", "he", "spent", "his", "dying", "years", "calling", "for", "pogroms", ".", "Fuck", "Evangelical", "philosemites", "forever", "."], "text_2_tokenized": ["one", "of", "grounds", "on", "which", "a", "woman", "can", "be", "summed", "up", "as", "forced", "conversions", ",", "pogroms", ",", "and", "centuries", "of", "violence", "?"]} -{"id": "2131-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "\u201cI believe the pogroms of November '38 made less impression on the nation than cutting the bar of chocolate for Christmas.\u201d \u2015 Victor Klemperer, I Will Bear Witness: A Diary of the Nazi Years, 1933-1941", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 22, "date_1": "2019-02", "text_2": "Enjeti and Stoller jacking off to the thought of anti-Chinese pogroms in response to the virus.", "token_idx_2": 10, "text_start_2": 62, "text_end_2": 69, "date_2": "2020-02", "text_1_tokenized": ["\u201c", "I", "believe", "the", "pogroms", "of", "November", "'", "38", "made", "less", "impression", "on", "the", "nation", "than", "cutting", "the", "bar", "of", "chocolate", "for", "Christmas", ".", "\u201d", "\u2015", "Victor", "Klemperer", ",", "I", "Will", "Bear", "Witness", ":", "A", "Diary", "of", "the", "Nazi", "Years", ",", "1933-1941"], "text_2_tokenized": ["Enjeti", "and", "Stoller", "jacking", "off", "to", "the", "thought", "of", "anti-Chinese", "pogroms", "in", "response", "to", "the", "virus", "."]} -{"id": "2132-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#otd remember William of Norwich, a boy who was murdered (said to be crucified) supposedly by Jews for ritual purposes and the first manifestation in England of the ant-Jewish blood libel of folk and official prejudice which lay behind regular pogroms in the Middle Ages.", "token_idx_1": 43, "text_start_1": 244, "text_end_1": 251, "date_1": "2019-02", "text_2": "To date, in 2004, the Serb population in Kosovo and Metohija has suffered pogroms from the Albanian population! Great glory to the martyrs! 
#Kosovo", "token_idx_2": 15, "text_start_2": 74, "text_end_2": 81, "date_2": "2020-02", "text_1_tokenized": ["#otd", "remember", "William", "of", "Norwich", ",", "a", "boy", "who", "was", "murdered", "(", "said", "to", "be", "crucified", ")", "supposedly", "by", "Jews", "for", "ritual", "purposes", "and", "the", "first", "manifestation", "in", "England", "of", "the", "ant-Jewish", "blood", "libel", "of", "folk", "and", "official", "prejudice", "which", "lay", "behind", "regular", "pogroms", "in", "the", "Middle", "Ages", "."], "text_2_tokenized": ["To", "date", ",", "in", "2004", ",", "the", "Serb", "population", "in", "Kosovo", "and", "Metohija", "has", "suffered", "pogroms", "from", "the", "Albanian", "population", "!", "Great", "glory", "to", "the", "martyrs", "!", "#Kosovo"]} -{"id": "2133-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The same Nigeria army that was so inactive during the herdsmen pogroms is now everywhere aiding electoral fraud and causing mayhem", "token_idx_1": 11, "text_start_1": 63, "text_end_1": 70, "date_1": "2019-02", "text_2": "Any Hindu who doesn't boycott M & X shops or products is anti-Hindu, who voluntarily or involuntarily aids anti-Hindu pogroms and destruction of Hindus.", "token_idx_2": 20, "text_start_2": 122, "text_end_2": 129, "date_2": "2020-02", "text_1_tokenized": ["The", "same", "Nigeria", "army", "that", "was", "so", "inactive", "during", "the", "herdsmen", "pogroms", "is", "now", "everywhere", "aiding", "electoral", "fraud", "and", "causing", "mayhem"], "text_2_tokenized": ["Any", "Hindu", "who", "doesn't", "boycott", "M", "&", "X", "shops", "or", "products", "is", "anti-Hindu", ",", "who", "voluntarily", "or", "involuntarily", "aids", "anti-Hindu", "pogroms", "and", "destruction", "of", "Hindus", "."]} -{"id": "2134-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Donald \u201c fuckin \u201cTrump asks why do they hate us. Maybe starting with the Crusades the West has invaded their territory to enslave them and plunder the resources. When King Richards gang went to the Middle East to plunder and rape , they warmed up by running a few pogroms on Jews", "token_idx_1": 52, "text_start_1": 264, "text_end_1": 271, "date_1": "2019-02", "text_2": "I am gonna be involved in organizing a round table discussion on my campus regarding the Delhi pogroms. We are going to raise awareness about & induct Hindu nationalism. Very excited.", "token_idx_2": 17, "text_start_2": 95, "text_end_2": 102, "date_2": "2020-02", "text_1_tokenized": ["Donald", "\u201c", "fuckin", "\u201c", "Trump", "asks", "why", "do", "they", "hate", "us", ".", "Maybe", "starting", "with", "the", "Crusades", "the", "West", "has", "invaded", "their", "territory", "to", "enslave", "them", "and", "plunder", "the", "resources", ".", "When", "King", "Richards", "gang", "went", "to", "the", "Middle", "East", "to", "plunder", "and", "rape", ",", "they", "warmed", "up", "by", "running", "a", "few", "pogroms", "on", "Jews"], "text_2_tokenized": ["I", "am", "gonna", "be", "involved", "in", "organizing", "a", "round", "table", "discussion", "on", "my", "campus", "regarding", "the", "Delhi", "pogroms", ".", "We", "are", "going", "to", "raise", "awareness", "about", "&", "induct", "Hindu", "nationalism", ".", "Very", "excited", "."]} -{"id": "2135-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The inquisition, the pogroms, the holocaust. 
You motherfuckers will never learn.", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 28, "date_1": "2019-02", "text_2": "It seems as if just one person died in the pogroms, rest just quit their life on their own! This is how journalism in India works. Even in propaganda, they selected a middle-class person who was a government employee, for maximum impact.", "token_idx_2": 10, "text_start_2": 43, "text_end_2": 50, "date_2": "2020-02", "text_1_tokenized": ["The", "inquisition", ",", "the", "pogroms", ",", "the", "holocaust", ".", "You", "motherfuckers", "will", "never", "learn", "."], "text_2_tokenized": ["It", "seems", "as", "if", "just", "one", "person", "died", "in", "the", "pogroms", ",", "rest", "just", "quit", "their", "life", "on", "their", "own", "!", "This", "is", "how", "journalism", "in", "India", "works", ".", "Even", "in", "propaganda", ",", "they", "selected", "a", "middle-class", "person", "who", "was", "a", "government", "employee", ",", "for", "maximum", "impact", "."]} -{"id": "2136-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Martin Luther called for pogroms? Wow, they def didn't teach us that in CRK.", "token_idx_1": 4, "text_start_1": 25, "text_end_1": 32, "date_1": "2019-02", "text_2": "Let's see how tweeters control pogroms. Pogroms set free.", "token_idx_2": 5, "text_start_2": 31, "text_end_2": 38, "date_2": "2020-02", "text_1_tokenized": ["Martin", "Luther", "called", "for", "pogroms", "?", "Wow", ",", "they", "def", "didn't", "teach", "us", "that", "in", "CRK", "."], "text_2_tokenized": ["Let's", "see", "how", "tweeters", "control", "pogroms", ".", "Pogroms", "set", "free", "."]} -{"id": "2137-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "First St Patrick's Day since discovering my \"Irish\" family probably weren't Irish at all, but more likely Jews who fled 19th century pogroms and adopted an Irish identity when they reached Africa because it might bring slightly less kak their way. I still hate Guinness though.", "token_idx_1": 25, "text_start_1": 133, "text_end_1": 140, "date_1": "2019-02", "text_2": "The Hindu far right in India are carrying out pogroms against Muslims and look very likely to be ramping up to genocide. This must be opposed and condemned as strenuously as possible. 
And yet people who promote armed intervention are utterly silent on this issue.", "token_idx_2": 9, "text_start_2": 46, "text_end_2": 53, "date_2": "2020-02", "text_1_tokenized": ["First", "St", "Patrick's", "Day", "since", "discovering", "my", "\"", "Irish", "\"", "family", "probably", "weren't", "Irish", "at", "all", ",", "but", "more", "likely", "Jews", "who", "fled", "19th", "century", "pogroms", "and", "adopted", "an", "Irish", "identity", "when", "they", "reached", "Africa", "because", "it", "might", "bring", "slightly", "less", "kak", "their", "way", ".", "I", "still", "hate", "Guinness", "though", "."], "text_2_tokenized": ["The", "Hindu", "far", "right", "in", "India", "are", "carrying", "out", "pogroms", "against", "Muslims", "and", "look", "very", "likely", "to", "be", "ramping", "up", "to", "genocide", ".", "This", "must", "be", "opposed", "and", "condemned", "as", "strenuously", "as", "possible", ".", "And", "yet", "people", "who", "promote", "armed", "intervention", "are", "utterly", "silent", "on", "this", "issue", "."]} -{"id": "2138-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "\"The Holocaust began to devastate\" \"Berlin had divided into four sectors\" \"Anti-Jewish pogroms were enforced\" \"The separation between Germans and Jews grew\" Having talked to my students about the importance of human agency, I despair over grading their final papers.", "token_idx_1": 17, "text_start_1": 87, "text_end_1": 94, "date_1": "2019-02", "text_2": "What's happening in India is devastating. Whenever there have been pogroms like this in history, it has ended terribly, and in enormous suffering. I don't know what to do, but it's just so deeply disturbing that this can go on and people say they're \u201criots\u201d?", "token_idx_2": 11, "text_start_2": 67, "text_end_2": 74, "date_2": "2020-02", "text_1_tokenized": ["\"", "The", "Holocaust", "began", "to", "devastate", "\"", "\"", "Berlin", "had", "divided", "into", "four", "sectors", "\"", "\"", "Anti-Jewish", "pogroms", "were", "enforced", "\"", "\"", "The", "separation", "between", "Germans", "and", "Jews", "grew", "\"", "Having", "talked", "to", "my", "students", "about", "the", "importance", "of", "human", "agency", ",", "I", "despair", "over", "grading", "their", "final", "papers", "."], "text_2_tokenized": ["What's", "happening", "in", "India", "is", "devastating", ".", "Whenever", "there", "have", "been", "pogroms", "like", "this", "in", "history", ",", "it", "has", "ended", "terribly", ",", "and", "in", "enormous", "suffering", ".", "I", "don't", "know", "what", "to", "do", ",", "but", "it's", "just", "so", "deeply", "disturbing", "that", "this", "can", "go", "on", "and", "people", "say", "they're", "\u201c", "riots", "\u201d", "?"]} -{"id": "2139-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "HISTORY OF #ZIONISM TIMELINE: 1904-14 - 2nd Aliyah - After suffering from repeated pogroms & impoverishment, approx 35,000 Jewish immigrants, mainly from Russia, Poland & Yemen, began 2 arrive in Palestine. 
(1/2) #JewishImmigration #AmYisraelChai", "token_idx_1": 14, "text_start_1": 83, "text_end_1": 90, "date_1": "2019-02", "text_2": "In light of the need felt for public apology by Muslims for acts of terrorists, will the very prominent people of Indian descent in the Conservative Party now apologise for the racist and facist Indian government and what is now acknowledged to be their pogroms against Muslims?", "token_idx_2": 45, "text_start_2": 254, "text_end_2": 261, "date_2": "2020-02", "text_1_tokenized": ["HISTORY", "OF", "#ZIONISM", "TIMELINE", ":", "1904-14", "-", "2nd", "Aliyah", "-", "After", "suffering", "from", "repeated", "pogroms", "&", "impoverishment", ",", "approx", "35,000", "Jewish", "immigrants", ",", "mainly", "from", "Russia", ",", "Poland", "&", "Yemen", ",", "began", "2", "arrive", "in", "Palestine", ".", "(", "1/2", ")", "#JewishImmigration", "#AmYisraelChai"], "text_2_tokenized": ["In", "light", "of", "the", "need", "felt", "for", "public", "apology", "by", "Muslims", "for", "acts", "of", "terrorists", ",", "will", "the", "very", "prominent", "people", "of", "Indian", "descent", "in", "the", "Conservative", "Party", "now", "apologise", "for", "the", "racist", "and", "facist", "Indian", "government", "and", "what", "is", "now", "acknowledged", "to", "be", "their", "pogroms", "against", "Muslims", "?"]} -{"id": "2140-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "White supremacy and capitalism cannot be separated. There was only one way to stop the anti-Semitic pogroms in Russia, and there's only one way to stop white violence here, in NZ, and everywhere else. Anyone saying anything else is lying to you, themselves, or both.", "token_idx_1": 17, "text_start_1": 100, "text_end_1": 107, "date_1": "2019-02", "text_2": "I've decided not to RT any news tweets featuring actual pictures of Muslims wounded in the pogroms in India. I have mixed feelings about this but I think they're just too upsetting to subject people to without warning, and you shouldn't need pics to be horrified.", "token_idx_2": 16, "text_start_2": 91, "text_end_2": 98, "date_2": "2020-02", "text_1_tokenized": ["White", "supremacy", "and", "capitalism", "cannot", "be", "separated", ".", "There", "was", "only", "one", "way", "to", "stop", "the", "anti-Semitic", "pogroms", "in", "Russia", ",", "and", "there's", "only", "one", "way", "to", "stop", "white", "violence", "here", ",", "in", "NZ", ",", "and", "everywhere", "else", ".", "Anyone", "saying", "anything", "else", "is", "lying", "to", "you", ",", "themselves", ",", "or", "both", "."], "text_2_tokenized": ["I've", "decided", "not", "to", "RT", "any", "news", "tweets", "featuring", "actual", "pictures", "of", "Muslims", "wounded", "in", "the", "pogroms", "in", "India", ".", "I", "have", "mixed", "feelings", "about", "this", "but", "I", "think", "they're", "just", "too", "upsetting", "to", "subject", "people", "to", "without", "warning", ",", "and", "you", "shouldn't", "need", "pics", "to", "be", "horrified", "."]} -{"id": "2141-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It's just occurred to me that I am supposed to be reading a book for Library Journal on a secret plan to take down Auschwitz. They mostly have me on pogroms and genocide. 
This leaves me very capable of believing the absolute worst of most people.", "token_idx_1": 31, "text_start_1": 149, "text_end_1": 156, "date_1": "2019-02", "text_2": "GOI has money for Oppressing Kashmiris, pogroms, Toppling democratically elected governments, Paying media channels for propaganda, Paying celebrities to speak in favor of them, Helping cronies to evade loans,but poor has to pay 4500 to get tested with no hospitals to get treated", "token_idx_2": 7, "text_start_2": 40, "text_end_2": 47, "date_2": "2020-02", "text_1_tokenized": ["It's", "just", "occurred", "to", "me", "that", "I", "am", "supposed", "to", "be", "reading", "a", "book", "for", "Library", "Journal", "on", "a", "secret", "plan", "to", "take", "down", "Auschwitz", ".", "They", "mostly", "have", "me", "on", "pogroms", "and", "genocide", ".", "This", "leaves", "me", "very", "capable", "of", "believing", "the", "absolute", "worst", "of", "most", "people", "."], "text_2_tokenized": ["GOI", "has", "money", "for", "Oppressing", "Kashmiris", ",", "pogroms", ",", "Toppling", "democratically", "elected", "governments", ",", "Paying", "media", "channels", "for", "propaganda", ",", "Paying", "celebrities", "to", "speak", "in", "favor", "of", "them", ",", "Helping", "cronies", "to", "evade", "loans", ",", "but", "poor", "has", "to", "pay", "4500", "to", "get", "tested", "with", "no", "hospitals", "to", "get", "treated"]} -{"id": "2142-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I'm a grandchild of Jewish refugees from antisemitic pogroms & the Holocaust, still dealing with intergenerational trauma. I stand with @IlhanMN and I don't believe she's an anti-semite. I object to her congressional colleagues attempting to censure her in my name.", "token_idx_1": 8, "text_start_1": 53, "text_end_1": 60, "date_1": "2019-02", "text_2": "pogroms of such scale cannot be orchestrated unless the State Government is on the side of the aggressors. kejriwal and his government are equally responsible.", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 7, "date_2": "2020-02", "text_1_tokenized": ["I'm", "a", "grandchild", "of", "Jewish", "refugees", "from", "antisemitic", "pogroms", "&", "the", "Holocaust", ",", "still", "dealing", "with", "intergenerational", "trauma", ".", "I", "stand", "with", "@IlhanMN", "and", "I", "don't", "believe", "she's", "an", "anti-semite", ".", "I", "object", "to", "her", "congressional", "colleagues", "attempting", "to", "censure", "her", "in", "my", "name", "."], "text_2_tokenized": ["pogroms", "of", "such", "scale", "cannot", "be", "orchestrated", "unless", "the", "State", "Government", "is", "on", "the", "side", "of", "the", "aggressors", ".", "kejriwal", "and", "his", "government", "are", "equally", "responsible", "."]} -{"id": "2143-pogrom", "word": "pogrom", "label_binary": 0, "text_1": "#let_upload holud pogroms Mr & miss. \u263a\u263a\u263a \u2014 feeling loved", "token_idx_1": 2, "text_start_1": 18, "text_end_1": 25, "date_1": "2019-02", "text_2": "Since camera phones are common, this is all being documented. Videos coming in are heart breaking & frightening. This gives you an idea of how previous pogroms happened. How genocides happen under the guidance of State Machinery. 
How easy it is to trigger rioters to cause mayhem", "token_idx_2": 29, "text_start_2": 156, "text_end_2": 163, "date_2": "2020-02", "text_1_tokenized": ["#let_upload", "holud", "pogroms", "Mr", "&", "miss", ".", "\u263a", "\u263a", "\u263a", "\u2014", "feeling", "loved"], "text_2_tokenized": ["Since", "camera", "phones", "are", "common", ",", "this", "is", "all", "being", "documented", ".", "Videos", "coming", "in", "are", "heart", "breaking", "&", "frightening", ".", "This", "gives", "you", "an", "idea", "of", "how", "previous", "pogroms", "happened", ".", "How", "genocides", "happen", "under", "the", "guidance", "of", "State", "Machinery", ".", "How", "easy", "it", "is", "to", "trigger", "rioters", "to", "cause", "mayhem"]} -{"id": "2145-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "In the mid-19th century, Britain experienced an influx of Irish immigrants who swelled the populations of the major cities, including the East End of London. From 1882, Jewish refugees from pogroms in Tsarist Russia and other areas of Eastern Europe", "token_idx_1": 36, "text_start_1": 190, "text_end_1": 197, "date_1": "2019-02", "text_2": "Since independence pogroms have taken place whoever was involved or in power has had a very treachorous end look back in history ..perkhacha udha hai body ka so inshallah Allah ke yahan der hai andher nahi", "token_idx_2": 2, "text_start_2": 19, "text_end_2": 26, "date_2": "2020-02", "text_1_tokenized": ["In", "the", "mid", "-", "19th", "century", ",", "Britain", "experienced", "an", "influx", "of", "Irish", "immigrants", "who", "swelled", "the", "populations", "of", "the", "major", "cities", ",", "including", "the", "East", "End", "of", "London", ".", "From", "1882", ",", "Jewish", "refugees", "from", "pogroms", "in", "Tsarist", "Russia", "and", "other", "areas", "of", "Eastern", "Europe"], "text_2_tokenized": ["Since", "independence", "pogroms", "have", "taken", "place", "whoever", "was", "involved", "or", "in", "power", "has", "had", "a", "very", "treachorous", "end", "look", "back", "in", "history", "..", "perkhacha", "udha", "hai", "body", "ka", "so", "inshallah", "Allah", "ke", "yahan", "der", "hai", "andher", "nahi"]} -{"id": "2146-pogrom", "word": "pogrom", "label_binary": 0, "text_1": "George Papadopoulos bought prodigal baubles by popular boggling pogroms.", "token_idx_1": 8, "text_start_1": 64, "text_end_1": 71, "date_1": "2019-02", "text_2": "What are the best news sources/background sources on the pogroms in India? I only learned about them yesterday, so I don't have much to go on.", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 64, "date_2": "2020-02", "text_1_tokenized": ["George", "Papadopoulos", "bought", "prodigal", "baubles", "by", "popular", "boggling", "pogroms", "."], "text_2_tokenized": ["What", "are", "the", "best", "news", "sources", "/", "background", "sources", "on", "the", "pogroms", "in", "India", "?", "I", "only", "learned", "about", "them", "yesterday", ",", "so", "I", "don't", "have", "much", "to", "go", "on", "."]} -{"id": "2147-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "So anti-#Zionists are basically saying - fuck you Jews. You can't re-establish your nation. Carry on living in the countries you were exiled to/forced to flee to. & tough shit if those countries treat you as 2nd class citizens/carry out pogroms against you. 
Just think on that", "token_idx_1": 49, "text_start_1": 241, "text_end_1": 248, "date_1": "2019-02", "text_2": "#Turks have made pogroms against Alevis in Dersim, Sivas, Corum, Mara\u015f and many other cities in Turkey. There is a Sunni dominance in Turkey that does not tolerate any other direction of faith. The worst terrorists are Sunnis! The Shiites only defend themselves. #idlib", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 24, "date_2": "2020-02", "text_1_tokenized": ["So", "anti", "-", "#Zionists", "are", "basically", "saying", "-", "fuck", "you", "Jews", ".", "You", "can't", "re-establish", "your", "nation", ".", "Carry", "on", "living", "in", "the", "countries", "you", "were", "exiled", "to", "/", "forced", "to", "flee", "to", ".", "&", "tough", "shit", "if", "those", "countries", "treat", "you", "as", "2nd", "class", "citizens", "/", "carry", "out", "pogroms", "against", "you", ".", "Just", "think", "on", "that"], "text_2_tokenized": ["#Turks", "have", "made", "pogroms", "against", "Alevis", "in", "Dersim", ",", "Sivas", ",", "Corum", ",", "Mara\u015f", "and", "many", "other", "cities", "in", "Turkey", ".", "There", "is", "a", "Sunni", "dominance", "in", "Turkey", "that", "does", "not", "tolerate", "any", "other", "direction", "of", "faith", ".", "The", "worst", "terrorists", "are", "Sunnis", "!", "The", "Shiites", "only", "defend", "themselves", ".", "#idlib"]} -{"id": "2148-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "one thing we don't remember about the Black Plague is that many Europeans took it as an opportunity to torture and execute all the Jewish families in town. the pogroms often happened before the plague even arrived.", "token_idx_1": 30, "text_start_1": 160, "text_end_1": 167, "date_1": "2019-02", "text_2": "Some absolutely grim videos circulating of people murdered in pogroms in India. A half dead man in a pile of bodies being mocked by police. Modi is a fucking criminal.", "token_idx_2": 9, "text_start_2": 62, "text_end_2": 69, "date_2": "2020-02", "text_1_tokenized": ["one", "thing", "we", "don't", "remember", "about", "the", "Black", "Plague", "is", "that", "many", "Europeans", "took", "it", "as", "an", "opportunity", "to", "torture", "and", "execute", "all", "the", "Jewish", "families", "in", "town", ".", "the", "pogroms", "often", "happened", "before", "the", "plague", "even", "arrived", "."], "text_2_tokenized": ["Some", "absolutely", "grim", "videos", "circulating", "of", "people", "murdered", "in", "pogroms", "in", "India", ".", "A", "half", "dead", "man", "in", "a", "pile", "of", "bodies", "being", "mocked", "by", "police", ".", "Modi", "is", "a", "fucking", "criminal", "."]} -{"id": "2149-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Jews not living in Israel where we are safe from pogroms by antisemitic regimes, can never understand the constant threat of attacks we Israeli Jews live with on a daily basis.", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 56, "date_1": "2019-02", "text_2": "Muslims have been through lot of pogroms after independence & we have survived & v will as a community. But wht u r teaching to ur children, stealing their humanity, their innocence, teaching them to loot, to kill. If u think this is wht their future is. 
Sit down & think about it", "token_idx_2": 6, "text_start_2": 33, "text_end_2": 40, "date_2": "2020-02", "text_1_tokenized": ["Jews", "not", "living", "in", "Israel", "where", "we", "are", "safe", "from", "pogroms", "by", "antisemitic", "regimes", ",", "can", "never", "understand", "the", "constant", "threat", "of", "attacks", "we", "Israeli", "Jews", "live", "with", "on", "a", "daily", "basis", "."], "text_2_tokenized": ["Muslims", "have", "been", "through", "lot", "of", "pogroms", "after", "independence", "&", "we", "have", "survived", "&", "v", "will", "as", "a", "community", ".", "But", "wht", "u", "r", "teaching", "to", "ur", "children", ",", "stealing", "their", "humanity", ",", "their", "innocence", ",", "teaching", "them", "to", "loot", ",", "to", "kill", ".", "If", "u", "think", "this", "is", "wht", "their", "future", "is", ".", "Sit", "down", "&", "think", "about", "it"]} -{"id": "2150-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Blatant State terrorism by extremist Modi of Bengal pogroms unravelled not only in Kashmir, Pulwama but through our territorial aggression.Political mileage through Muslims n Pakistan phobia unfolded in fictional narrative of Indian media n foreign secretary. Humaneness missing", "token_idx_1": 8, "text_start_1": 52, "text_end_1": 59, "date_1": "2019-02", "text_2": "The people calling themselves neutrals even post delhi riots and are too bilnd to see these pogroms then dont expect anything from them cuz they too are right winged to the grain but they just dont know or not ready to accept what they are. #DelhiRiotTruth", "token_idx_2": 16, "text_start_2": 92, "text_end_2": 99, "date_2": "2020-02", "text_1_tokenized": ["Blatant", "State", "terrorism", "by", "extremist", "Modi", "of", "Bengal", "pogroms", "unravelled", "not", "only", "in", "Kashmir", ",", "Pulwama", "but", "through", "our", "territorial", "aggression.Political", "mileage", "through", "Muslims", "n", "Pakistan", "phobia", "unfolded", "in", "fictional", "narrative", "of", "Indian", "media", "n", "foreign", "secretary", ".", "Humaneness", "missing"], "text_2_tokenized": ["The", "people", "calling", "themselves", "neutrals", "even", "post", "delhi", "riots", "and", "are", "too", "bilnd", "to", "see", "these", "pogroms", "then", "dont", "expect", "anything", "from", "them", "cuz", "they", "too", "are", "right", "winged", "to", "the", "grain", "but", "they", "just", "dont", "know", "or", "not", "ready", "to", "accept", "what", "they", "are", ".", "#DelhiRiotTruth"]} -{"id": "2151-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "110 years since the Adana pogroms it's important to note that the history of violence towards indigenous minorities in Turkey was not limited to the events of 1915-21 but rather was and is an ongoing process of genocide and cultural erasure.", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-02", "text_2": "\u201cThe violence in which the RSS \u2014 and the Hindu nationalist movement it has cultivated \u2014 is implicated includes assassinations, bombings, and even pogroms against Christians, Muslims. 
#RSS_KillingMuslims", "token_idx_2": 26, "text_start_2": 146, "text_end_2": 153, "date_2": "2020-02", "text_1_tokenized": ["110", "years", "since", "the", "Adana", "pogroms", "it's", "important", "to", "note", "that", "the", "history", "of", "violence", "towards", "indigenous", "minorities", "in", "Turkey", "was", "not", "limited", "to", "the", "events", "of", "1915-21", "but", "rather", "was", "and", "is", "an", "ongoing", "process", "of", "genocide", "and", "cultural", "erasure", "."], "text_2_tokenized": ["\u201c", "The", "violence", "in", "which", "the", "RSS", "\u2014", "and", "the", "Hindu", "nationalist", "movement", "it", "has", "cultivated", "\u2014", "is", "implicated", "includes", "assassinations", ",", "bombings", ",", "and", "even", "pogroms", "against", "Christians", ",", "Muslims", ".", "#RSS_KillingMuslims"]} -{"id": "2152-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "why does @IlhanMN call herself, on her profile, a 'refugee'? when my grandparents, escaping pogroms and holocaust came to America, they called themselves by a different name: 'Americans'", "token_idx_1": 20, "text_start_1": 92, "text_end_1": 99, "date_1": "2019-02", "text_2": "Ugh! I am simply #antiwar people! Being antiwar & pro truth about what countries do to make war more liklely does not make me a Russian troll. My ancestors were killed in Russian pogroms. I am no fan of Russia either. Just #antiwar", "token_idx_2": 36, "text_start_2": 183, "text_end_2": 190, "date_2": "2020-02", "text_1_tokenized": ["why", "does", "@IlhanMN", "call", "herself", ",", "on", "her", "profile", ",", "a", "'", "refugee", "'", "?", "when", "my", "grandparents", ",", "escaping", "pogroms", "and", "holocaust", "came", "to", "America", ",", "they", "called", "themselves", "by", "a", "different", "name", ":", "'", "Americans", "'"], "text_2_tokenized": ["Ugh", "!", "I", "am", "simply", "#antiwar", "people", "!", "Being", "antiwar", "&", "pro", "truth", "about", "what", "countries", "do", "to", "make", "war", "more", "liklely", "does", "not", "make", "me", "a", "Russian", "troll", ".", "My", "ancestors", "were", "killed", "in", "Russian", "pogroms", ".", "I", "am", "no", "fan", "of", "Russia", "either", ".", "Just", "#antiwar"]} -{"id": "2153-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "I cried listening to @BernieSanders tell his story today. Imagining that his father and my great grandfather probably fled pogroms in Poland and sought refuge in America the same year. Imagining the millions of American families that identify with his family's story.", "token_idx_1": 20, "text_start_1": 123, "text_end_1": 130, "date_1": "2019-02", "text_2": "I'm sick and tired of the race issue coming up against white people from devious democrats. 
White democrats running for office...you are spreading pogroms of racism stop this BS.", "token_idx_2": 26, "text_start_2": 147, "text_end_2": 154, "date_2": "2020-02", "text_1_tokenized": ["I", "cried", "listening", "to", "@BernieSanders", "tell", "his", "story", "today", ".", "Imagining", "that", "his", "father", "and", "my", "great", "grandfather", "probably", "fled", "pogroms", "in", "Poland", "and", "sought", "refuge", "in", "America", "the", "same", "year", ".", "Imagining", "the", "millions", "of", "American", "families", "that", "identify", "with", "his", "family's", "story", "."], "text_2_tokenized": ["I'm", "sick", "and", "tired", "of", "the", "race", "issue", "coming", "up", "against", "white", "people", "from", "devious", "democrats", ".", "White", "democrats", "running", "for", "office", "...", "you", "are", "spreading", "pogroms", "of", "racism", "stop", "this", "BS", "."]} -{"id": "2154-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It seems that Jewish history is being erased and people are forgetting what has happened. No one non-Jewish knows what pogroms are, no one speaks about Jewish ghettos or how so many governments exploited us then expelled us.", "token_idx_1": 21, "text_start_1": 119, "text_end_1": 126, "date_1": "2019-02", "text_2": "i was in my feminism studies class and was gonna look up pogroms, i typed in the first two letters and ofc the first recommendation my address bar gives me is @deadlyraquel's pornhub page, thank you computer very cool", "token_idx_2": 12, "text_start_2": 57, "text_end_2": 64, "date_2": "2020-02", "text_1_tokenized": ["It", "seems", "that", "Jewish", "history", "is", "being", "erased", "and", "people", "are", "forgetting", "what", "has", "happened", ".", "No", "one", "non-Jewish", "knows", "what", "pogroms", "are", ",", "no", "one", "speaks", "about", "Jewish", "ghettos", "or", "how", "so", "many", "governments", "exploited", "us", "then", "expelled", "us", "."], "text_2_tokenized": ["i", "was", "in", "my", "feminism", "studies", "class", "and", "was", "gonna", "look", "up", "pogroms", ",", "i", "typed", "in", "the", "first", "two", "letters", "and", "ofc", "the", "first", "recommendation", "my", "address", "bar", "gives", "me", "is", "@deadlyraquel", "'", "s", "pornhub", "page", ",", "thank", "you", "computer", "very", "cool"]} -{"id": "2155-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#MyDearChildren is based on a long letter written 80+ years ago by Feiga Shamis, a Jewish mother of 12 driven by a little known humanitarian tragedy \u2014 the Russian Civil War pogroms \u2014 to send two of her youngest children to an orphanage a continent away, in #SouthAfrica. 1/5", "token_idx_1": 33, "text_start_1": 173, "text_end_1": 180, "date_1": "2019-02", "text_2": "A God that punishes his son or inflicts harm upon him, even to the point of death, is a God that would do that to all his children and creation. From authorizing infanticide, to pogroms, to removal of entire tribes and groups, and to gruesome enslavement. 
I reject that God.", "token_idx_2": 38, "text_start_2": 178, "text_end_2": 185, "date_2": "2020-02", "text_1_tokenized": ["#MyDearChildren", "is", "based", "on", "a", "long", "letter", "written", "80", "+", "years", "ago", "by", "Feiga", "Shamis", ",", "a", "Jewish", "mother", "of", "12", "driven", "by", "a", "little", "known", "humanitarian", "tragedy", "\u2014", "the", "Russian", "Civil", "War", "pogroms", "\u2014", "to", "send", "two", "of", "her", "youngest", "children", "to", "an", "orphanage", "a", "continent", "away", ",", "in", "#SouthAfrica", ".", "1/5"], "text_2_tokenized": ["A", "God", "that", "punishes", "his", "son", "or", "inflicts", "harm", "upon", "him", ",", "even", "to", "the", "point", "of", "death", ",", "is", "a", "God", "that", "would", "do", "that", "to", "all", "his", "children", "and", "creation", ".", "From", "authorizing", "infanticide", ",", "to", "pogroms", ",", "to", "removal", "of", "entire", "tribes", "and", "groups", ",", "and", "to", "gruesome", "enslavement", ".", "I", "reject", "that", "God", "."]} -{"id": "2156-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "stop international support Nazis anti semitic regime in Ukraine, glorifications Ukrainian Nazis killrs jewish in Holocaust and before in pogroms.", "token_idx_1": 20, "text_start_1": 137, "text_end_1": 144, "date_1": "2019-02", "text_2": "Lal Krishna Advani's rath yatra culminated in the Babri Masjid Mosque demolition. What is Modiji's end game with the CAB as pressed by the Kashmir lockdown and the Delhi pogroms?", "token_idx_2": 30, "text_start_2": 170, "text_end_2": 177, "date_2": "2020-02", "text_1_tokenized": ["stop", "international", "support", "Nazis", "anti", "semitic", "regime", "in", "Ukraine", ",", "glorifications", "Ukrainian", "Nazis", "killrs", "jewish", "in", "Holocaust", "and", "before", "in", "pogroms", "."], "text_2_tokenized": ["Lal", "Krishna", "Advani's", "rath", "yatra", "culminated", "in", "the", "Babri", "Masjid", "Mosque", "demolition", ".", "What", "is", "Modiji's", "end", "game", "with", "the", "CAB", "as", "pressed", "by", "the", "Kashmir", "lockdown", "and", "the", "Delhi", "pogroms", "?"]} -{"id": "2157-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "#maigemu...the pogroms of ethnic minorities has resumed in Southern Kaduna...again.", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 22, "date_1": "2019-02", "text_2": "Keep in mind that as Black Plague spread, pogroms followed, blaming and slaughtering (innocent) Jews. There will be scapegoating deaths with #Coronavid19 - like after 9/11 too....we need to be vigilant and dispel blame games. @BurtonJM", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 49, "date_2": "2020-02", "text_1_tokenized": ["#maigemu", "...", "the", "pogroms", "of", "ethnic", "minorities", "has", "resumed", "in", "Southern", "Kaduna", "...", "again", "."], "text_2_tokenized": ["Keep", "in", "mind", "that", "as", "Black", "Plague", "spread", ",", "pogroms", "followed", ",", "blaming", "and", "slaughtering", "(", "innocent", ")", "Jews", ".", "There", "will", "be", "scapegoating", "deaths", "with", "#Coronavid19", "-", "like", "after", "9/11", "too", "...", "we", "need", "to", "be", "vigilant", "and", "dispel", "blame", "games", ".", "@BurtonJM"]} -{"id": "2158-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "The BBC interviewed the leader of fascist Generation Identity in response to the New Zealand massacre. This is to literally enact fascist strategy - commit pogroms to draw attention to your twisted worldview. 
They just created an incentive for further attacks.", "token_idx_1": 26, "text_start_1": 156, "text_end_1": 163, "date_1": "2019-02", "text_2": "There are some things to hate in this world. I hate massacres. I hate state architected and/or state sanctioned massacres. I hate pogroms. I hate when massacres and pogroms are called riots. My father was 8 when he survived a massacre. Massacres have long lasting impacts.", "token_idx_2": 27, "text_start_2": 130, "text_end_2": 137, "date_2": "2020-02", "text_1_tokenized": ["The", "BBC", "interviewed", "the", "leader", "of", "fascist", "Generation", "Identity", "in", "response", "to", "the", "New", "Zealand", "massacre", ".", "This", "is", "to", "literally", "enact", "fascist", "strategy", "-", "commit", "pogroms", "to", "draw", "attention", "to", "your", "twisted", "worldview", ".", "They", "just", "created", "an", "incentive", "for", "further", "attacks", "."], "text_2_tokenized": ["There", "are", "some", "things", "to", "hate", "in", "this", "world", ".", "I", "hate", "massacres", ".", "I", "hate", "state", "architected", "and", "/", "or", "state", "sanctioned", "massacres", ".", "I", "hate", "pogroms", ".", "I", "hate", "when", "massacres", "and", "pogroms", "are", "called", "riots", ".", "My", "father", "was", "8", "when", "he", "survived", "a", "massacre", ".", "Massacres", "have", "long", "lasting", "impacts", "."]} -{"id": "2159-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "As a Christian, I have to admit that the vast majority of religiously motivated violence, from the Crusades through pogroms and Holocaust to the wars and terror attacks of the present, has been done by baptized Christians against Jews and Muslims.", "token_idx_1": 21, "text_start_1": 116, "text_end_1": 123, "date_1": "2019-02", "text_2": "A Prime Minister who cannot express grief and condolences to the 50+ deaths in the capital city in riots, what is he even capable of, i mean apart from designing these pogroms #DelhiViolence", "token_idx_2": 34, "text_start_2": 168, "text_end_2": 175, "date_2": "2020-02", "text_1_tokenized": ["As", "a", "Christian", ",", "I", "have", "to", "admit", "that", "the", "vast", "majority", "of", "religiously", "motivated", "violence", ",", "from", "the", "Crusades", "through", "pogroms", "and", "Holocaust", "to", "the", "wars", "and", "terror", "attacks", "of", "the", "present", ",", "has", "been", "done", "by", "baptized", "Christians", "against", "Jews", "and", "Muslims", "."], "text_2_tokenized": ["A", "Prime", "Minister", "who", "cannot", "express", "grief", "and", "condolences", "to", "the", "50", "+", "deaths", "in", "the", "capital", "city", "in", "riots", ",", "what", "is", "he", "even", "capable", "of", ",", "i", "mean", "apart", "from", "designing", "these", "pogroms", "#DelhiViolence"]} -{"id": "2160-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "We commemorate victims of anti-Armenian pogroms in city of #Sumgait directed & tacitly supported by Azerbaijan authorities. 
Violence was used in response to rightful demands of NK Armenians, which further underscored the legitimacy of their cause.", "token_idx_1": 5, "text_start_1": 40, "text_end_1": 47, "date_1": "2019-02", "text_2": "Instead of dispatching Indian Muslims to Pakistan on the flimsiest of excuses, vicious accusations of treason and orchestrating diabolical pogroms, the BJP-RSS led government should carve a new homeland within India for Muslims and minorities and call it \u201cNew Pakistan\u201d.", "token_idx_2": 20, "text_start_2": 139, "text_end_2": 146, "date_2": "2020-02", "text_1_tokenized": ["We", "commemorate", "victims", "of", "anti-Armenian", "pogroms", "in", "city", "of", "#Sumgait", "directed", "&", "tacitly", "supported", "by", "Azerbaijan", "authorities", ".", "Violence", "was", "used", "in", "response", "to", "rightful", "demands", "of", "NK", "Armenians", ",", "which", "further", "underscored", "the", "legitimacy", "of", "their", "cause", "."], "text_2_tokenized": ["Instead", "of", "dispatching", "Indian", "Muslims", "to", "Pakistan", "on", "the", "flimsiest", "of", "excuses", ",", "vicious", "accusations", "of", "treason", "and", "orchestrating", "diabolical", "pogroms", ",", "the", "BJP-RSS", "led", "government", "should", "carve", "a", "new", "homeland", "within", "India", "for", "Muslims", "and", "minorities", "and", "call", "it", "\u201c", "New", "Pakistan", "\u201d", "."]} -{"id": "2161-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "hasan minhaj's episode on india was so funny but you can tell he learned this shit last week lmaoooo. for gujurat pogroms against muslims, he said \"2000 people died\" without even referencing their religion or why they were killed.", "token_idx_1": 22, "text_start_1": 114, "text_end_1": 121, "date_1": "2019-02", "text_2": "#IslamAndPatriotism After scores of Muslims in the martyrs list for freedom and then throughout any wars the nation had fought for, Still Muslims and their patriotism is in question. The no.of Muslims killed around the nation through pogroms are yet to be counted.", "token_idx_2": 39, "text_start_2": 234, "text_end_2": 241, "date_2": "2020-02", "text_1_tokenized": ["hasan", "minhaj's", "episode", "on", "india", "was", "so", "funny", "but", "you", "can", "tell", "he", "learned", "this", "shit", "last", "week", "lmaoooo", ".", "for", "gujurat", "pogroms", "against", "muslims", ",", "he", "said", "\"", "2000", "people", "died", "\"", "without", "even", "referencing", "their", "religion", "or", "why", "they", "were", "killed", "."], "text_2_tokenized": ["#IslamAndPatriotism", "After", "scores", "of", "Muslims", "in", "the", "martyrs", "list", "for", "freedom", "and", "then", "throughout", "any", "wars", "the", "nation", "had", "fought", "for", ",", "Still", "Muslims", "and", "their", "patriotism", "is", "in", "question", ".", "The", "no.of", "Muslims", "killed", "around", "the", "nation", "through", "pogroms", "are", "yet", "to", "be", "counted", "."]} -{"id": "2162-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Imagine if people had, during germany under the jackboot of Nazism said that anti-semitism was a fiction or a non-term? Also remember we didn't come to save the day straight away, only when it was almost too late. Cant forget eastern europes pogroms either. LEARN from history ffs", "token_idx_1": 46, "text_start_1": 242, "text_end_1": 249, "date_1": "2019-02", "text_2": "he said, referring to the anti-Sikh pogroms in Delhi in 1984 where more than 3,000 Sikhs were killed. 
@TheTeamPatriots #StandUpForMuslims", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 43, "date_2": "2020-02", "text_1_tokenized": ["Imagine", "if", "people", "had", ",", "during", "germany", "under", "the", "jackboot", "of", "Nazism", "said", "that", "anti-semitism", "was", "a", "fiction", "or", "a", "non-term", "?", "Also", "remember", "we", "didn't", "come", "to", "save", "the", "day", "straight", "away", ",", "only", "when", "it", "was", "almost", "too", "late", ".", "Cant", "forget", "eastern", "europes", "pogroms", "either", ".", "LEARN", "from", "history", "ffs"], "text_2_tokenized": ["he", "said", ",", "referring", "to", "the", "anti-Sikh", "pogroms", "in", "Delhi", "in", "1984", "where", "more", "than", "3,000", "Sikhs", "were", "killed", ".", "@TheTeamPatriots", "#StandUpForMuslims"]} -{"id": "2163-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "Being critical of the Israeli Government's treatment of the Palestinian people is not anti-semetic. Myb Jewish great-grandfather had to flee the pogroms in Russia at the beginning of the last century. Any government oppressing any people deserves criticism. #IStandWithIlhan", "token_idx_1": 22, "text_start_1": 145, "text_end_1": 152, "date_1": "2019-02", "text_2": "During the 1984 anti-Sikh pogroms in Delhi, 10,000\u201317,000 were burned alive or otherwise killed, In Exodus of Kashmiri Hindus between September 1989 to 1990 - approximately 300 Kashmiri Pandits were killed. But Sikh community has no bitterness and wants only justice.", "token_idx_2": 4, "text_start_2": 26, "text_end_2": 33, "date_2": "2020-02", "text_1_tokenized": ["Being", "critical", "of", "the", "Israeli", "Government's", "treatment", "of", "the", "Palestinian", "people", "is", "not", "anti-semetic", ".", "Myb", "Jewish", "great-grandfather", "had", "to", "flee", "the", "pogroms", "in", "Russia", "at", "the", "beginning", "of", "the", "last", "century", ".", "Any", "government", "oppressing", "any", "people", "deserves", "criticism", ".", "#IStandWithIlhan"], "text_2_tokenized": ["During", "the", "1984", "anti-Sikh", "pogroms", "in", "Delhi", ",", "10,000", "\u2013", "17,000", "were", "burned", "alive", "or", "otherwise", "killed", ",", "In", "Exodus", "of", "Kashmiri", "Hindus", "between", "September", "1989", "to", "1990", "-", "approximately", "300", "Kashmiri", "Pandits", "were", "killed", ".", "But", "Sikh", "community", "has", "no", "bitterness", "and", "wants", "only", "justice", "."]} -{"id": "2164-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "It was normal and unquestioned to persecute the jews prior to the holocaust. Russia had a century of pogroms all terrible and disgusting. We had to say Never Again because it kept happening, and there are elements that would choose genocide again.", "token_idx_1": 19, "text_start_1": 101, "text_end_1": 108, "date_1": "2019-02", "text_2": "Think about it- If TV/Papers can report so selectively in today's day and time, when we can cross verify on social media, what would they have had done in 2002, Gujarat? Full blown riots shown as \"pogroms\". 
Think of all the one sided news from 2002 which people think to be true.", "token_idx_2": 44, "text_start_2": 197, "text_end_2": 204, "date_2": "2020-02", "text_1_tokenized": ["It", "was", "normal", "and", "unquestioned", "to", "persecute", "the", "jews", "prior", "to", "the", "holocaust", ".", "Russia", "had", "a", "century", "of", "pogroms", "all", "terrible", "and", "disgusting", ".", "We", "had", "to", "say", "Never", "Again", "because", "it", "kept", "happening", ",", "and", "there", "are", "elements", "that", "would", "choose", "genocide", "again", "."], "text_2_tokenized": ["Think", "about", "it", "-", "If", "TV", "/", "Papers", "can", "report", "so", "selectively", "in", "today's", "day", "and", "time", ",", "when", "we", "can", "cross", "verify", "on", "social", "media", ",", "what", "would", "they", "have", "had", "done", "in", "2002", ",", "Gujarat", "?", "Full", "blown", "riots", "shown", "as", "\"", "pogroms", "\"", ".", "Think", "of", "all", "the", "one", "sided", "news", "from", "2002", "which", "people", "think", "to", "be", "true", "."]} -{"id": "2165-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "All these Azeris demonstrating for the Khojaly \"massacre\" should protest their own government for holding the Azeris of Khojaly captive to be used as political pawns, and for allowing the despicible pogroms and rapes of Armenians in Baku, Kirovabad and Sumgait.", "token_idx_1": 34, "text_start_1": 199, "text_end_1": 206, "date_1": "2019-02", "text_2": "The \u201cThank You Jinnah\u201d comments in response to anti-Muslim pogroms in India are a test in one's patience. The unbridled arrogance bordering on glee, the complete erasure of one's own historical (and ongoing!) wrongs, the utter lack of empathy and self-awareness. Where to begin?", "token_idx_2": 11, "text_start_2": 59, "text_end_2": 66, "date_2": "2020-02", "text_1_tokenized": ["All", "these", "Azeris", "demonstrating", "for", "the", "Khojaly", "\"", "massacre", "\"", "should", "protest", "their", "own", "government", "for", "holding", "the", "Azeris", "of", "Khojaly", "captive", "to", "be", "used", "as", "political", "pawns", ",", "and", "for", "allowing", "the", "despicible", "pogroms", "and", "rapes", "of", "Armenians", "in", "Baku", ",", "Kirovabad", "and", "Sumgait", "."], "text_2_tokenized": ["The", "\u201c", "Thank", "You", "Jinnah", "\u201d", "comments", "in", "response", "to", "anti-Muslim", "pogroms", "in", "India", "are", "a", "test", "in", "one's", "patience", ".", "The", "unbridled", "arrogance", "bordering", "on", "glee", ",", "the", "complete", "erasure", "of", "one's", "own", "historical", "(", "and", "ongoing", "!", ")", "wrongs", ",", "the", "utter", "lack", "of", "empathy", "and", "self-awareness", ".", "Where", "to", "begin", "?"]} -{"id": "2166-pogrom", "word": "pogrom", "label_binary": 1, "text_1": "A decade ago, societies were unaware of the threat posed by social media. For centuries, pogroms and massacres occured via town by town spreading of fake news and in the 1930's, Hitler used the modern invention of newspapers to spread daily propoganda to the masses. 1/2", "token_idx_1": 18, "text_start_1": 89, "text_end_1": 96, "date_1": "2019-02", "text_2": "A Polish member of parliament (MP) lauded pogroms against Jews in a Feb. 27 interview on a Polish television network, asserting that the pogroms made Jews powerful through natural selection. 
Janusz Korwin-Mikke, of the right-wing Liberty Party, said that the benefit of the corona", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 49, "date_2": "2020-02", "text_1_tokenized": ["A", "decade", "ago", ",", "societies", "were", "unaware", "of", "the", "threat", "posed", "by", "social", "media", ".", "For", "centuries", ",", "pogroms", "and", "massacres", "occured", "via", "town", "by", "town", "spreading", "of", "fake", "news", "and", "in", "the", "1930", "'", "s", ",", "Hitler", "used", "the", "modern", "invention", "of", "newspapers", "to", "spread", "daily", "propoganda", "to", "the", "masses", ".", "1/2"], "text_2_tokenized": ["A", "Polish", "member", "of", "parliament", "(", "MP", ")", "lauded", "pogroms", "against", "Jews", "in", "a", "Feb", ".", "27", "interview", "on", "a", "Polish", "television", "network", ",", "asserting", "that", "the", "pogroms", "made", "Jews", "powerful", "through", "natural", "selection", ".", "Janusz", "Korwin-Mikke", ",", "of", "the", "right-wing", "Liberty", "Party", ",", "said", "that", "the", "benefit", "of", "the", "corona"]} -{"id": "0479-containment", "word": "containment", "label_binary": 1, "text_1": "Ragnar sipped at his recaf, the containment field he'd used to brew the potent elixir had barely held under its gravitational mass. \u201cThis \u2018daylight savings' is a warp spawned curse!\u201d He growled.", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 43, "date_1": "2019-03", "text_2": "Your activities due to containment: Cuddles: 1/10 Sport: -100/10 Video games: 20/10 Reading: 0/10 Series: 10/10 Work: 5/10 Kitchen: 6/10 Rest: 40/10 Twitter: 10/10", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 34, "date_2": "2020-03", "text_1_tokenized": ["Ragnar", "sipped", "at", "his", "recaf", ",", "the", "containment", "field", "he'd", "used", "to", "brew", "the", "potent", "elixir", "had", "barely", "held", "under", "its", "gravitational", "mass", ".", "\u201c", "This", "\u2018", "daylight", "savings", "'", "is", "a", "warp", "spawned", "curse", "!", "\u201d", "He", "growled", "."], "text_2_tokenized": ["Your", "activities", "due", "to", "containment", ":", "Cuddles", ":", "1/10", "Sport", ":", "-100/10", "Video", "games", ":", "20/10", "Reading", ":", "0/10", "Series", ":", "10/10", "Work", ":", "5/10", "Kitchen", ":", "6/10", "Rest", ":", "40/10", "Twitter", ":", "10/10"]} -{"id": "0481-containment", "word": "containment", "label_binary": 1, "text_1": "SABC CEO Madoda Mxakwe says despite more funds coming in and cost containment, the broadcaster is struggling to meet its payment obligations.", "token_idx_1": 12, "text_start_1": 66, "text_end_1": 77, "date_1": "2019-03", "text_2": "So the delay phase is pretty much the same as the containment phase! Wash your hands and avoid cruises. Really!!! Pathetic . 
Business before people but that's what you'd expect from a Tory Govt #coronavirusuk", "token_idx_2": 11, "text_start_2": 50, "text_end_2": 61, "date_2": "2020-03", "text_1_tokenized": ["SABC", "CEO", "Madoda", "Mxakwe", "says", "despite", "more", "funds", "coming", "in", "and", "cost", "containment", ",", "the", "broadcaster", "is", "struggling", "to", "meet", "its", "payment", "obligations", "."], "text_2_tokenized": ["So", "the", "delay", "phase", "is", "pretty", "much", "the", "same", "as", "the", "containment", "phase", "!", "Wash", "your", "hands", "and", "avoid", "cruises", ".", "Really", "!", "!", "!", "Pathetic", ".", "Business", "before", "people", "but", "that's", "what", "you'd", "expect", "from", "a", "Tory", "Govt", "#coronavirusuk"]} -{"id": "0487-containment", "word": "containment", "label_binary": 0, "text_1": "shame that many leftists do not cover the containment camps on china", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 53, "date_1": "2019-03", "text_2": "That was the plan, from the beginning! This is all being orchestrated, by the Kremlin, as was DoTard`s 'hoax'/obstruction of containment, of the \"voter suppression pandemic\"! Trump = Manchurian Candidate (Russian style) with collusion from Russia`s Republicans in Congress/SCOTUS!", "token_idx_2": 30, "text_start_2": 125, "text_end_2": 136, "date_2": "2020-03", "text_1_tokenized": ["shame", "that", "many", "leftists", "do", "not", "cover", "the", "containment", "camps", "on", "china"], "text_2_tokenized": ["That", "was", "the", "plan", ",", "from", "the", "beginning", "!", "This", "is", "all", "being", "orchestrated", ",", "by", "the", "Kremlin", ",", "as", "was", "DoTard", "`", "s", "'", "hoax", "'", "/", "obstruction", "of", "containment", ",", "of", "the", "\"", "voter", "suppression", "pandemic", "\"", "!", "Trump", "=", "Manchurian", "Candidate", "(", "Russian", "style", ")", "with", "collusion", "from", "Russia", "`", "s", "Republicans", "in", "Congress", "/", "SCOTUS", "!"]} -{"id": "0488-containment", "word": "containment", "label_binary": 1, "text_1": "How you gonna get heartburn you're supposed to keep it in a cool , dry containment area.", "token_idx_1": 15, "text_start_1": 71, "text_end_1": 82, "date_1": "2019-03", "text_2": "We are containing certain areas to protect the health of the people and prevent the spread of COVID-19. We understand it causes inconvenience to some people. It has to be appreciated that containment is necessary to prevent community transmission.", "token_idx_2": 36, "text_start_2": 188, "text_end_2": 199, "date_2": "2020-03", "text_1_tokenized": ["How", "you", "gonna", "get", "heartburn", "you're", "supposed", "to", "keep", "it", "in", "a", "cool", ",", "dry", "containment", "area", "."], "text_2_tokenized": ["We", "are", "containing", "certain", "areas", "to", "protect", "the", "health", "of", "the", "people", "and", "prevent", "the", "spread", "of", "COVID", "-", "19", ".", "We", "understand", "it", "causes", "inconvenience", "to", "some", "people", ".", "It", "has", "to", "be", "appreciated", "that", "containment", "is", "necessary", "to", "prevent", "community", "transmission", "."]} -{"id": "0489-containment", "word": "containment", "label_binary": 0, "text_1": "My ancestors came on the boats,met the boats and one was a signer of the constitution. My indigenous ancestors ended up assimilating to avoid containment or death. It was not easy. But they did it for their children's future. 
So you see I am America.", "token_idx_1": 27, "text_start_1": 142, "text_end_1": 153, "date_1": "2019-03", "text_2": "Don't join anyone in trolling the EU or USA and compare their containment or assistance measures with Russia or China. You'd be joining a political battle that isn't yours. Focus on your country. Focus on Nigeria. Don't distract that the fight. #COVID19 #staysafe", "token_idx_2": 12, "text_start_2": 62, "text_end_2": 73, "date_2": "2020-03", "text_1_tokenized": ["My", "ancestors", "came", "on", "the", "boats", ",", "met", "the", "boats", "and", "one", "was", "a", "signer", "of", "the", "constitution", ".", "My", "indigenous", "ancestors", "ended", "up", "assimilating", "to", "avoid", "containment", "or", "death", ".", "It", "was", "not", "easy", ".", "But", "they", "did", "it", "for", "their", "children's", "future", ".", "So", "you", "see", "I", "am", "America", "."], "text_2_tokenized": ["Don't", "join", "anyone", "in", "trolling", "the", "EU", "or", "USA", "and", "compare", "their", "containment", "or", "assistance", "measures", "with", "Russia", "or", "China", ".", "You'd", "be", "joining", "a", "political", "battle", "that", "isn't", "yours", ".", "Focus", "on", "your", "country", ".", "Focus", "on", "Nigeria", ".", "Don't", "distract", "that", "the", "fight", ".", "#COVID19", "#staysafe"]} -{"id": "0490-containment", "word": "containment", "label_binary": 1, "text_1": "Did Miami's Brickell \u2013 with all its towers \u2013 prevent gentrification? I say yes. It became a yuppie containment zone on the waterfront, preventing luxury aesthetic from spreading to Little Havana. Contrast this w/ SF, which doesn't have a Brickell, and is thus very gentrified.", "token_idx_1": 20, "text_start_1": 99, "text_end_1": 110, "date_1": "2019-03", "text_2": "Times of crisis and social upheaval invariably result in a spike in domestic violence. Pandemic containment measures are putting women at a heightened risk of abuse. Good Shepherd is here for you. If you feel unsafe, call our WOMEN'S SERVICES CRISIS LINE at 905.523.6277.", "token_idx_2": 16, "text_start_2": 96, "text_end_2": 107, "date_2": "2020-03", "text_1_tokenized": ["Did", "Miami's", "Brickell", "\u2013", "with", "all", "its", "towers", "\u2013", "prevent", "gentrification", "?", "I", "say", "yes", ".", "It", "became", "a", "yuppie", "containment", "zone", "on", "the", "waterfront", ",", "preventing", "luxury", "aesthetic", "from", "spreading", "to", "Little", "Havana", ".", "Contrast", "this", "w", "/", "SF", ",", "which", "doesn't", "have", "a", "Brickell", ",", "and", "is", "thus", "very", "gentrified", "."], "text_2_tokenized": ["Times", "of", "crisis", "and", "social", "upheaval", "invariably", "result", "in", "a", "spike", "in", "domestic", "violence", ".", "Pandemic", "containment", "measures", "are", "putting", "women", "at", "a", "heightened", "risk", "of", "abuse", ".", "Good", "Shepherd", "is", "here", "for", "you", ".", "If", "you", "feel", "unsafe", ",", "call", "our", "WOMEN'S", "SERVICES", "CRISIS", "LINE", "at", "905.523.6277", "."]} -{"id": "0491-containment", "word": "containment", "label_binary": 0, "text_1": "I am unbounding the contraceptions as a containment zone of socialist rumination. #4s2018", "token_idx_1": 7, "text_start_1": 40, "text_end_1": 51, "date_1": "2019-03", "text_2": "Ok so I'm watching containment.. 
have nuttin better to do let's see if this foreshadowed anything happening now", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["I", "am", "unbounding", "the", "contraceptions", "as", "a", "containment", "zone", "of", "socialist", "rumination", ".", "#4s2018"], "text_2_tokenized": ["Ok", "so", "I'm", "watching", "containment", "..", "have", "nuttin", "better", "to", "do", "let's", "see", "if", "this", "foreshadowed", "anything", "happening", "now"]} -{"id": "0492-containment", "word": "containment", "label_binary": 0, "text_1": "Well today I didn't wear my containment bracelet. I did very well. \ud83d\ude24", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 39, "date_1": "2019-03", "text_2": "I started watching containment lastnight and let's just say things thats happening now happened on this movie", "token_idx_2": 3, "text_start_2": 19, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["Well", "today", "I", "didn't", "wear", "my", "containment", "bracelet", ".", "I", "did", "very", "well", ".", "\ud83d\ude24"], "text_2_tokenized": ["I", "started", "watching", "containment", "lastnight", "and", "let's", "just", "say", "things", "thats", "happening", "now", "happened", "on", "this", "movie"]} -{"id": "0494-containment", "word": "containment", "label_binary": 0, "text_1": "Palmdale: 245(assault with a deadly weapon) 20th street east and Ave q-2 , multiple fighting with knifes And sticks , 1 suspect running from deputies , suspects are 415g(gang member) type multiple exiting location, Deputy involved in a fight containment needed", "token_idx_1": 49, "text_start_1": 242, "text_end_1": 253, "date_1": "2019-03", "text_2": "Hmmm containment is good alreadyyyy", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 16, "date_2": "2020-03", "text_1_tokenized": ["Palmdale", ":", "245", "(", "assault", "with", "a", "deadly", "weapon", ")", "20th", "street", "east", "and", "Ave", "q", "-", "2", ",", "multiple", "fighting", "with", "knifes", "And", "sticks", ",", "1", "suspect", "running", "from", "deputies", ",", "suspects", "are", "415g", "(", "gang", "member", ")", "type", "multiple", "exiting", "location", ",", "Deputy", "involved", "in", "a", "fight", "containment", "needed"], "text_2_tokenized": ["Hmmm", "containment", "is", "good", "alreadyyyy"]} -{"id": "0495-containment", "word": "containment", "label_binary": 0, "text_1": "\u2014\u2014-EMERGENCY\u2014\u2014- Character containment has been breached C-654 aka \u201cLuci\u201d as escaped containment Be on the lookout -TC", "token_idx_1": 8, "text_start_1": 26, "text_end_1": 37, "date_1": "2019-03", "text_2": "seems like investing in the pandemic containment initiatives would have been good business to begin with. 
vision short as they dicks", "token_idx_2": 6, "text_start_2": 37, "text_end_2": 48, "date_2": "2020-03", "text_1_tokenized": ["\u2014", "\u2014", "-", "EMERGENCY", "\u2014", "\u2014", "-", "Character", "containment", "has", "been", "breached", "C", "-", "654", "aka", "\u201c", "Luci", "\u201d", "as", "escaped", "containment", "Be", "on", "the", "lookout", "-", "TC"], "text_2_tokenized": ["seems", "like", "investing", "in", "the", "pandemic", "containment", "initiatives", "would", "have", "been", "good", "business", "to", "begin", "with", ".", "vision", "short", "as", "they", "dicks"]} -{"id": "0496-containment", "word": "containment", "label_binary": 1, "text_1": "There's a containment breach rn, so who wants to hide with me under my bed?", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 21, "date_1": "2019-03", "text_2": "Testing for workers related to essential goods supply , police force shopkeepers to be done for better containment of the spread. @PMOIndia @msisodia @ArvindKejriwal", "token_idx_2": 17, "text_start_2": 103, "text_end_2": 114, "date_2": "2020-03", "text_1_tokenized": ["There's", "a", "containment", "breach", "rn", ",", "so", "who", "wants", "to", "hide", "with", "me", "under", "my", "bed", "?"], "text_2_tokenized": ["Testing", "for", "workers", "related", "to", "essential", "goods", "supply", ",", "police", "force", "shopkeepers", "to", "be", "done", "for", "better", "containment", "of", "the", "spread", ".", "@PMOIndia", "@msisodia", "@ArvindKejriwal"]} -{"id": "0497-containment", "word": "containment", "label_binary": 1, "text_1": "If you need some to watch on Netflix containment is so good", "token_idx_1": 8, "text_start_1": 37, "text_end_1": 48, "date_1": "2019-03", "text_2": "I have a lot of questions about containment series \ud83e\udd74", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 43, "date_2": "2020-03", "text_1_tokenized": ["If", "you", "need", "some", "to", "watch", "on", "Netflix", "containment", "is", "so", "good"], "text_2_tokenized": ["I", "have", "a", "lot", "of", "questions", "about", "containment", "series", "\ud83e\udd74"]} -{"id": "0498-containment", "word": "containment", "label_binary": 0, "text_1": "There is no reason to accept this Horrid Violence on a daily basis ! The Crisis That Exists on the Border is all most beyond containment & may soon be! So what would it look like if it was closed yesterday ? That is what we'll hear soon! Or closed it a month ago,last year ?", "token_idx_1": 25, "text_start_1": 125, "text_end_1": 136, "date_1": "2019-03", "text_2": "Wanna know/see why it's important to be social distancing & quarantine.. 
watch containment on Netflix", "token_idx_2": 15, "text_start_2": 83, "text_end_2": 94, "date_2": "2020-03", "text_1_tokenized": ["There", "is", "no", "reason", "to", "accept", "this", "Horrid", "Violence", "on", "a", "daily", "basis", "!", "The", "Crisis", "That", "Exists", "on", "the", "Border", "is", "all", "most", "beyond", "containment", "&", "may", "soon", "be", "!", "So", "what", "would", "it", "look", "like", "if", "it", "was", "closed", "yesterday", "?", "That", "is", "what", "we'll", "hear", "soon", "!", "Or", "closed", "it", "a", "month", "ago", ",", "last", "year", "?"], "text_2_tokenized": ["Wanna", "know", "/", "see", "why", "it's", "important", "to", "be", "social", "distancing", "&", "quarantine", "..", "watch", "containment", "on", "Netflix"]} -{"id": "0499-containment", "word": "containment", "label_binary": 0, "text_1": "Okay since im close to 200 once i get it I'll stream containment breach again but actually play it", "token_idx_1": 12, "text_start_1": 53, "text_end_1": 64, "date_1": "2019-03", "text_2": "Bruh, do we think we don't know that the rest of the world is doing way better at testing and containment? How is this real life?", "token_idx_2": 21, "text_start_2": 94, "text_end_2": 105, "date_2": "2020-03", "text_1_tokenized": ["Okay", "since", "im", "close", "to", "200", "once", "i", "get", "it", "I'll", "stream", "containment", "breach", "again", "but", "actually", "play", "it"], "text_2_tokenized": ["Bruh", ",", "do", "we", "think", "we", "don't", "know", "that", "the", "rest", "of", "the", "world", "is", "doing", "way", "better", "at", "testing", "and", "containment", "?", "How", "is", "this", "real", "life", "?"]} -{"id": "0500-containment", "word": "containment", "label_binary": 0, "text_1": "#Bayverse ah yes because leaving them with the other prototypes is 'containment' very good.", "token_idx_1": 12, "text_start_1": 68, "text_end_1": 79, "date_1": "2019-03", "text_2": "Looool they apparently don't even have enough tests to test everyone who *attended* the Biogen conference\u2014let alone close contacts. That's the kind of thing I'd like to learn from @MassDPH, not from friends of friends. Really hard to imagine containment working at this point...", "token_idx_2": 46, "text_start_2": 242, "text_end_2": 253, "date_2": "2020-03", "text_1_tokenized": ["#Bayverse", "ah", "yes", "because", "leaving", "them", "with", "the", "other", "prototypes", "is", "'", "containment", "'", "very", "good", "."], "text_2_tokenized": ["Looool", "they", "apparently", "don't", "even", "have", "enough", "tests", "to", "test", "everyone", "who", "*", "attended", "*", "the", "Biogen", "conference", "\u2014", "let", "alone", "close", "contacts", ".", "That's", "the", "kind", "of", "thing", "I'd", "like", "to", "learn", "from", "@MassDPH", ",", "not", "from", "friends", "of", "friends", ".", "Really", "hard", "to", "imagine", "containment", "working", "at", "this", "point", "..."]} -{"id": "0501-containment", "word": "containment", "label_binary": 0, "text_1": "Twitter is a test tube psyops containment experiment. They get us dissenters all in one place, surround us with bots and let us burn ourselves out fighting over shit Trump and the GOP drivel into it. 
We need to find a better platform.", "token_idx_1": 6, "text_start_1": 30, "text_end_1": 41, "date_1": "2019-03", "text_2": "ayo scientists at Imperial College London said containment measures may be necessary for 18 months \ud83d\ude2d", "token_idx_2": 7, "text_start_2": 47, "text_end_2": 58, "date_2": "2020-03", "text_1_tokenized": ["Twitter", "is", "a", "test", "tube", "psyops", "containment", "experiment", ".", "They", "get", "us", "dissenters", "all", "in", "one", "place", ",", "surround", "us", "with", "bots", "and", "let", "us", "burn", "ourselves", "out", "fighting", "over", "shit", "Trump", "and", "the", "GOP", "drivel", "into", "it", ".", "We", "need", "to", "find", "a", "better", "platform", "."], "text_2_tokenized": ["ayo", "scientists", "at", "Imperial", "College", "London", "said", "containment", "measures", "may", "be", "necessary", "for", "18", "months", "\ud83d\ude2d"]} -{"id": "0502-containment", "word": "containment", "label_binary": 1, "text_1": "The police are real twats at times - \u2018armed containment area' - we don't want you to see our incompetence...\ud83d\ude2c#policeinterceptors", "token_idx_1": 10, "text_start_1": 44, "text_end_1": 55, "date_1": "2019-03", "text_2": "\u201cQuestion: Social distancing remains in effect til April 30th/adopting large containment measures. However = problem = population never reaches the critical level of immunity to avoid a recurrence of the epidemic = infections would recur as soon as containment is relaxed?\u201d", "token_idx_2": 14, "text_start_2": 77, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["The", "police", "are", "real", "twats", "at", "times", "-", "\u2018", "armed", "containment", "area", "'", "-", "we", "don't", "want", "you", "to", "see", "our", "incompetence", "...", "\ud83d\ude2c", "#policeinterceptors"], "text_2_tokenized": ["\u201c", "Question", ":", "Social", "distancing", "remains", "in", "effect", "til", "April", "30th", "/", "adopting", "large", "containment", "measures", ".", "However", "=", "problem", "=", "population", "never", "reaches", "the", "critical", "level", "of", "immunity", "to", "avoid", "a", "recurrence", "of", "the", "epidemic", "=", "infections", "would", "recur", "as", "soon", "as", "containment", "is", "relaxed", "?", "\u201d"]} -{"id": "0503-containment", "word": "containment", "label_binary": 0, "text_1": "I could have chosen to stay, & that life would have continued to give me moments of comfortable events. Instead, I chose the way out of the labyrinth of predictability & containment. I now stay in the realm of possibility & adventure. 
It all starts with a state of mind.", "token_idx_1": 34, "text_start_1": 178, "text_end_1": 189, "date_1": "2019-03", "text_2": "I need somebody to watch containment with me \ud83d\ude14", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 36, "date_2": "2020-03", "text_1_tokenized": ["I", "could", "have", "chosen", "to", "stay", ",", "&", "that", "life", "would", "have", "continued", "to", "give", "me", "moments", "of", "comfortable", "events", ".", "Instead", ",", "I", "chose", "the", "way", "out", "of", "the", "labyrinth", "of", "predictability", "&", "containment", ".", "I", "now", "stay", "in", "the", "realm", "of", "possibility", "&", "adventure", ".", "It", "all", "starts", "with", "a", "state", "of", "mind", "."], "text_2_tokenized": ["I", "need", "somebody", "to", "watch", "containment", "with", "me", "\ud83d\ude14"]} -{"id": "0504-containment", "word": "containment", "label_binary": 1, "text_1": "So this afternoon the boss (@EastHertsChief) spotted someone in breach of his CBO in our town centre. A foot chase, containment, another foot chase and he was arrested. Great team work \ud83d\udc6e\u200d\u2642\ufe0f What do you all think of Criminal Behaviour orders? I love them!", "token_idx_1": 24, "text_start_1": 116, "text_end_1": 127, "date_1": "2019-03", "text_2": "should I waste my time trying to convince an anti-vaxxer about the difference between the seriousness of spread/containment and death rates between covid-19 and the seasonal influenza", "token_idx_2": 19, "text_start_2": 112, "text_end_2": 123, "date_2": "2020-03", "text_1_tokenized": ["So", "this", "afternoon", "the", "boss", "(", "@EastHertsChief", ")", "spotted", "someone", "in", "breach", "of", "his", "CBO", "in", "our", "town", "centre", ".", "A", "foot", "chase", ",", "containment", ",", "another", "foot", "chase", "and", "he", "was", "arrested", ".", "Great", "team", "work", "\ud83d\udc6e\u200d\u2642", "\ufe0f", "What", "do", "you", "all", "think", "of", "Criminal", "Behaviour", "orders", "?", "I", "love", "them", "!"], "text_2_tokenized": ["should", "I", "waste", "my", "time", "trying", "to", "convince", "an", "anti-vaxxer", "about", "the", "difference", "between", "the", "seriousness", "of", "spread", "/", "containment", "and", "death", "rates", "between", "covid", "-", "19", "and", "the", "seasonal", "influenza"]} -{"id": "0505-containment", "word": "containment", "label_binary": 1, "text_1": "Finally some sense quote \" over the last 35 yrs basic methods of lifesaving have been abandoned in Britain by an overzealous interpretation of risk assessment \" Firefighting operations are now mainly containment/boundary cooling . Never forget June 14th Two Thousand Seventeen", "token_idx_1": 32, "text_start_1": 200, "text_end_1": 211, "date_1": "2019-03", "text_2": "Why are people so interested with the exit strategy when we haven't even got our containment strategy under control? 
#bbcqt", "token_idx_2": 15, "text_start_2": 81, "text_end_2": 92, "date_2": "2020-03", "text_1_tokenized": ["Finally", "some", "sense", "quote", "\"", "over", "the", "last", "35", "yrs", "basic", "methods", "of", "lifesaving", "have", "been", "abandoned", "in", "Britain", "by", "an", "overzealous", "interpretation", "of", "risk", "assessment", "\"", "Firefighting", "operations", "are", "now", "mainly", "containment", "/", "boundary", "cooling", ".", "Never", "forget", "June", "14th", "Two", "Thousand", "Seventeen"], "text_2_tokenized": ["Why", "are", "people", "so", "interested", "with", "the", "exit", "strategy", "when", "we", "haven't", "even", "got", "our", "containment", "strategy", "under", "control", "?", "#bbcqt"]} -{"id": "0506-containment", "word": "containment", "label_binary": 0, "text_1": "Breached containment for twitter I'm that much of chad", "token_idx_1": 1, "text_start_1": 9, "text_end_1": 20, "date_1": "2019-03", "text_2": "Isn't it amazing how many virus containment and spread experts there are on twitter tonight ?", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 43, "date_2": "2020-03", "text_1_tokenized": ["Breached", "containment", "for", "twitter", "I'm", "that", "much", "of", "chad"], "text_2_tokenized": ["Isn't", "it", "amazing", "how", "many", "virus", "containment", "and", "spread", "experts", "there", "are", "on", "twitter", "tonight", "?"]} -{"id": "0507-containment", "word": "containment", "label_binary": 1, "text_1": "The containment search at William Workman High School has ended. Deputies earlier said a suspect was detained.", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 15, "date_1": "2019-03", "text_2": "IMP- 20 containment zones identified in Delhi. Delhi Govt to start rapid testing of people in these areas which have reported more number of #COVID19 cases. Rapid testing to be scaled up to 1 lakh. An aggressive Trace-Test-Isolate is the way forward. 
#Coronavirusindia", "token_idx_2": 3, "text_start_2": 8, "text_end_2": 19, "date_2": "2020-03", "text_1_tokenized": ["The", "containment", "search", "at", "William", "Workman", "High", "School", "has", "ended", ".", "Deputies", "earlier", "said", "a", "suspect", "was", "detained", "."], "text_2_tokenized": ["IMP", "-", "20", "containment", "zones", "identified", "in", "Delhi", ".", "Delhi", "Govt", "to", "start", "rapid", "testing", "of", "people", "in", "these", "areas", "which", "have", "reported", "more", "number", "of", "#COVID19", "cases", ".", "Rapid", "testing", "to", "be", "scaled", "up", "to", "1", "lakh", ".", "An", "aggressive", "Trace-Test-Isolate", "is", "the", "way", "forward", ".", "#Coronavirusindia"]} -{"id": "0508-containment", "word": "containment", "label_binary": 1, "text_1": "i don't feel like inviting the possible containment breach of retweeting that pic on my page but lemme just say it's impossible to \u201cmistakenly\u201d depict link as a chick", "token_idx_1": 7, "text_start_1": 40, "text_end_1": 51, "date_1": "2019-03", "text_2": "Day 8 of containment: now I'm talking to the roomba Great...", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["i", "don't", "feel", "like", "inviting", "the", "possible", "containment", "breach", "of", "retweeting", "that", "pic", "on", "my", "page", "but", "lemme", "just", "say", "it's", "impossible", "to", "\u201c", "mistakenly", "\u201d", "depict", "link", "as", "a", "chick"], "text_2_tokenized": ["Day", "8", "of", "containment", ":", "now", "I'm", "talking", "to", "the", "roomba", "Great", "..."]} -{"id": "0509-containment", "word": "containment", "label_binary": 0, "text_1": "SCP RP be like site 69: containment has been breached mtf 420: alright everyone, we gottem", "token_idx_1": 7, "text_start_1": 24, "text_end_1": 35, "date_1": "2019-03", "text_2": "Disappointed with @JustinTrudeau. It is political cowardice to close to borders to everyone except US citizens. I realize angering the orange man would mean economic damage to Canada, but the US has *completely* dropped the ball on containment. Trudeau traded lives for money.", "token_idx_2": 42, "text_start_2": 232, "text_end_2": 243, "date_2": "2020-03", "text_1_tokenized": ["SCP", "RP", "be", "like", "site", "69", ":", "containment", "has", "been", "breached", "mtf", "420", ":", "alright", "everyone", ",", "we", "gottem"], "text_2_tokenized": ["Disappointed", "with", "@JustinTrudeau", ".", "It", "is", "political", "cowardice", "to", "close", "to", "borders", "to", "everyone", "except", "US", "citizens", ".", "I", "realize", "angering", "the", "orange", "man", "would", "mean", "economic", "damage", "to", "Canada", ",", "but", "the", "US", "has", "*", "completely", "*", "dropped", "the", "ball", "on", "containment", ".", "Trudeau", "traded", "lives", "for", "money", "."]} -{"id": "0510-containment", "word": "containment", "label_binary": 1, "text_1": "Had a dream that You Watanabe and some other girl \"got into a fight\" and there was this big controversy over who started the fight. Eventually, footage of that other girl being gunned down in a containment cell surfaced and sparked even more problems.", "token_idx_1": 40, "text_start_1": 194, "text_end_1": 205, "date_1": "2019-03", "text_2": "when ur hometown has the first containment zone in the country and ur going back there this weekend because you dont have a choice :^))))) i live like. 
close to it.", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 42, "date_2": "2020-03", "text_1_tokenized": ["Had", "a", "dream", "that", "You", "Watanabe", "and", "some", "other", "girl", "\"", "got", "into", "a", "fight", "\"", "and", "there", "was", "this", "big", "controversy", "over", "who", "started", "the", "fight", ".", "Eventually", ",", "footage", "of", "that", "other", "girl", "being", "gunned", "down", "in", "a", "containment", "cell", "surfaced", "and", "sparked", "even", "more", "problems", "."], "text_2_tokenized": ["when", "ur", "hometown", "has", "the", "first", "containment", "zone", "in", "the", "country", "and", "ur", "going", "back", "there", "this", "weekend", "because", "you", "dont", "have", "a", "choice", ":", "^", ")", ")", ")", "i", "live", "like", ".", "close", "to", "it", "."]} -{"id": "0511-containment", "word": "containment", "label_binary": 1, "text_1": "And now .... containment made better. Come .. #EmpirehIzMe", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 24, "date_1": "2019-03", "text_2": "Hmmm guys so if Wits does shut down then all the students will need to go back to their respective homes. What exactly does that mean in terms of containment?", "token_idx_2": 30, "text_start_2": 146, "text_end_2": 157, "date_2": "2020-03", "text_1_tokenized": ["And", "now", "...", "containment", "made", "better", ".", "Come", "..", "#EmpirehIzMe"], "text_2_tokenized": ["Hmmm", "guys", "so", "if", "Wits", "does", "shut", "down", "then", "all", "the", "students", "will", "need", "to", "go", "back", "to", "their", "respective", "homes", ".", "What", "exactly", "does", "that", "mean", "in", "terms", "of", "containment", "?"]} -{"id": "0512-containment", "word": "containment", "label_binary": 0, "text_1": "these hoes ain't loyal scp containment breach femur breaker sounds", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 38, "date_1": "2019-03", "text_2": ".@NYGovCuomo: $2T stimulus bill helps unemployed & small biz, but not local & state govt. Feds only helping with #COVID19-related expenses, not lost revenue fm containment policies. Cuomo wants the Feds to \"do your job.\" He must do his job: Cut spending & close $6T budget hole.", "token_idx_2": 31, "text_start_2": 168, "text_end_2": 179, "date_2": "2020-03", "text_1_tokenized": ["these", "hoes", "ain't", "loyal", "scp", "containment", "breach", "femur", "breaker", "sounds"], "text_2_tokenized": [".", "@NYGovCuomo", ":", "$", "2T", "stimulus", "bill", "helps", "unemployed", "&", "small", "biz", ",", "but", "not", "local", "&", "state", "govt", ".", "Feds", "only", "helping", "with", "#COVID19-related", "expenses", ",", "not", "lost", "revenue", "fm", "containment", "policies", ".", "Cuomo", "wants", "the", "Feds", "to", "\"", "do", "your", "job", ".", "\"", "He", "must", "do", "his", "job", ":", "Cut", "spending", "&", "close", "$", "6T", "budget", "hole", "."]} -{"id": "0513-containment", "word": "containment", "label_binary": 1, "text_1": "Just had a dream about an apocalypse in a parallel universe. Hundreds of people were stuck in dark, containment rooms for government experimentations. A giant wave caved through the room, and we broke out, witnessing how corrupt the world turned into.", "token_idx_1": 20, "text_start_1": 100, "text_end_1": 111, "date_1": "2019-03", "text_2": "Maybe a brief foray into testing? Couple of points. It is hard & complex. 
1) Considerable staff resource & bit of staff risk 1.1) Done in a containment lab to required level 1.2) Human quality control 1.3) Huge effort in disseminating results 2) Bad quality testing is dangerous", "token_idx_2": 32, "text_start_2": 148, "text_end_2": 159, "date_2": "2020-03", "text_1_tokenized": ["Just", "had", "a", "dream", "about", "an", "apocalypse", "in", "a", "parallel", "universe", ".", "Hundreds", "of", "people", "were", "stuck", "in", "dark", ",", "containment", "rooms", "for", "government", "experimentations", ".", "A", "giant", "wave", "caved", "through", "the", "room", ",", "and", "we", "broke", "out", ",", "witnessing", "how", "corrupt", "the", "world", "turned", "into", "."], "text_2_tokenized": ["Maybe", "a", "brief", "foray", "into", "testing", "?", "Couple", "of", "points", ".", "It", "is", "hard", "&", "complex", ".", "1", ")", "Considerable", "staff", "resource", "&", "bit", "of", "staff", "risk", "1.1", ")", "Done", "in", "a", "containment", "lab", "to", "required", "level", "1.2", ")", "Human", "quality", "control", "1.3", ")", "Huge", "effort", "in", "disseminating", "results", "2", ")", "Bad", "quality", "testing", "is", "dangerous"]}
-{"id": "0514-containment", "word": "containment", "label_binary": 1, "text_1": "Participle that build and bind confussion with self containment transfused with etymology might bind masked identity with self worth.", "token_idx_1": 8, "text_start_1": 52, "text_end_1": 63, "date_1": "2019-03", "text_2": "#NYCLockdown - the leaker mayor now blaming President Trump for nyc problems. meanwhile, many residents fleeing nyc to infect other parts of the country. i thought this was all about containment? denis", "token_idx_2": 33, "text_start_2": 183, "text_end_2": 194, "date_2": "2020-03", "text_1_tokenized": ["Participle", "that", "build", "and", "bind", "confussion", "with", "self", "containment", "transfused", "with", "etymology", "might", "bind", "masked", "identity", "with", "self", "worth", "."], "text_2_tokenized": ["#NYCLockdown", "-", "the", "leaker", "mayor", "now", "blaming", "President", "Trump", "for", "nyc", "problems", ".", "meanwhile", ",", "many", "residents", "fleeing", "nyc", "to", "infect", "other", "parts", "of", "the", "country", ".", "i", "thought", "this", "was", "all", "about", "containment", "?", "denis"]}
-{"id": "0515-containment", "word": "containment", "label_binary": 0, "text_1": "//What if Chaldea is just a simulation to prevent SCPs(Servants) containment breaching?", "token_idx_1": 15, "text_start_1": 65, "text_end_1": 76, "date_1": "2019-03", "text_2": "Also between Jared Kushner asking for pandemic containment advice on Facebook and Trump continuing to make public appearances while he's waiting for his test results, we are TRULY living in VEEP.", "token_idx_2": 7, "text_start_2": 47, "text_end_2": 58, "date_2": "2020-03", "text_1_tokenized": ["/", "/", "What", "if", "Chaldea", "is", "just", "a", "simulation", "to", "prevent", "SCPs", "(", "Servants", ")", "containment", "breaching", "?"], "text_2_tokenized": ["Also", "between", "Jared", "Kushner", "asking", "for", "pandemic", "containment", "advice", "on", "Facebook", "and", "Trump", "continuing", "to", "make", "public", "appearances", "while", "he's", "waiting", "for", "his", "test", "results", ",", "we", "are", "TRULY", "living", "in", "VEEP", "."]}
-{"id": "0516-containment", "word": "containment", "label_binary": 1, "text_1": "Mexico to set up CentAm migrant 'containment' belt in south, if you know what i mean", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 44, "date_1": "2019-03", "text_2": "Interesting how other countries are following president Trump's lead on border closings. Do you guys agree, that was a big step in the right direction for containment, @ScottWapnerCNBC and @carlquintanilla ?", "token_idx_2": 28, "text_start_2": 155, "text_end_2": 166, "date_2": "2020-03", "text_1_tokenized": ["Mexico", "to", "set", "up", "CentAm", "migrant", "'", "containment", "'", "belt", "in", "south", ",", "if", "you", "know", "what", "i", "mean"], "text_2_tokenized": ["Interesting", "how", "other", "countries", "are", "following", "president", "Trump's", "lead", "on", "border", "closings", ".", "Do", "you", "guys", "agree", ",", "that", "was", "a", "big", "step", "in", "the", "right", "direction", "for", "containment", ",", "@ScottWapnerCNBC", "and", "@carlquintanilla", "?"]}
-{"id": "0517-containment", "word": "containment", "label_binary": 1, "text_1": "SDS:030-B has escaped containment we must act accordingly to the containment procedures please escort all scientists researcher interviewer to gate A safe zone then re-contain the SDS:030-B", "token_idx_1": 7, "text_start_1": 22, "text_end_1": 33, "date_1": "2019-03", "text_2": "Request our respected Prime Minister Narendra Modi ji to please extend the lockdown till 01 May 2020 to ensure containment Corona Virus in our lovely country.", "token_idx_2": 19, "text_start_2": 111, "text_end_2": 122, "date_2": "2020-03", "text_1_tokenized": ["SDS", ":", "030", "-", "B", "has", "escaped", "containment", "we", "must", "act", "accordingly", "to", "the", "containment", "procedures", "please", "escort", "all", "scientists", "researcher", "interviewer", "to", "gate", "A", "safe", "zone", "then", "re-contain", "the", "SDS", ":", "030", "-", "B"], "text_2_tokenized": ["Request", "our", "respected", "Prime", "Minister", "Narendra", "Modi", "ji", "to", "please", "extend", "the", "lockdown", "till", "01", "May", "2020", "to", "ensure", "containment", "Corona", "Virus", "in", "our", "lovely", "country", "."]}
-{"id": "0518-containment", "word": "containment", "label_binary": 1, "text_1": "An alien alien trader flogs you a partially-disassembled containment system.", "token_idx_1": 8, "text_start_1": 57, "text_end_1": 68, "date_1": "2019-03", "text_2": "The Italian govt threatened today to ban all outdoor exercise. Frustration is growing over people defying a national lockdown order. 
A Sicilian friend told me that even after the order to stay put for containment, 31k people have travelled to Sicily from the north alone.", "token_idx_2": 36, "text_start_2": 201, "text_end_2": 212, "date_2": "2020-03", "text_1_tokenized": ["An", "alien", "alien", "trader", "flogs", "you", "a", "partially-disassembled", "containment", "system", "."], "text_2_tokenized": ["The", "Italian", "govt", "threatened", "today", "to", "ban", "all", "outdoor", "exercise", ".", "Frustration", "is", "growing", "over", "people", "defying", "a", "national", "lockdown", "order", ".", "A", "Sicilian", "friend", "told", "me", "that", "even", "after", "the", "order", "to", "stay", "put", "for", "containment", ",", "31k", "people", "have", "travelled", "to", "Sicily", "from", "the", "north", "alone", "."]} -{"id": "0519-containment", "word": "containment", "label_binary": 0, "text_1": "\"your nursery rhymes use the word cistern?\" \"how else are children supposed to learn about underground water containment systems\"", "token_idx_1": 21, "text_start_1": 109, "text_end_1": 120, "date_1": "2019-03", "text_2": "Y'all watch containment on Netflix ... I mean kinda weird considering we're going through the same thing rn.", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 23, "date_2": "2020-03", "text_1_tokenized": ["\"", "your", "nursery", "rhymes", "use", "the", "word", "cistern", "?", "\"", "\"", "how", "else", "are", "children", "supposed", "to", "learn", "about", "underground", "water", "containment", "systems", "\""], "text_2_tokenized": ["Y'all", "watch", "containment", "on", "Netflix", "...", "I", "mean", "kinda", "weird", "considering", "we're", "going", "through", "the", "same", "thing", "rn", "."]} -{"id": "0520-containment", "word": "containment", "label_binary": 0, "text_1": "It's not enough to just look at the entry point, you have to look at everything going on. You wouldn't deal with a wild fire by taking care of one fire, you want to look at containment of the whole fire. Brian Laing #ISSAWebConf", "token_idx_1": 39, "text_start_1": 173, "text_end_1": 184, "date_1": "2019-03", "text_2": "I watched containment in 2 days...", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 21, "date_2": "2020-03", "text_1_tokenized": ["It's", "not", "enough", "to", "just", "look", "at", "the", "entry", "point", ",", "you", "have", "to", "look", "at", "everything", "going", "on", ".", "You", "wouldn't", "deal", "with", "a", "wild", "fire", "by", "taking", "care", "of", "one", "fire", ",", "you", "want", "to", "look", "at", "containment", "of", "the", "whole", "fire", ".", "Brian", "Laing", "#ISSAWebConf"], "text_2_tokenized": ["I", "watched", "containment", "in", "2", "days", "..."]} -{"id": "0521-containment", "word": "containment", "label_binary": 1, "text_1": "Zimplow says its profits surged 76% to $6.010 mln in the year ended December 31, 2018 compared to $3.468 mln recorded in prior period on the back of cost containment measures.#Dailynews", "token_idx_1": 33, "text_start_1": 154, "text_end_1": 165, "date_1": "2019-03", "text_2": "Does Covid 19 exist in India. Not seen anything on that. What are the numbers. 
What is the strategy for containment.", "token_idx_2": 23, "text_start_2": 104, "text_end_2": 115, "date_2": "2020-03", "text_1_tokenized": ["Zimplow", "says", "its", "profits", "surged", "76", "%", "to", "$", "6.010", "mln", "in", "the", "year", "ended", "December", "31", ",", "2018", "compared", "to", "$", "3.468", "mln", "recorded", "in", "prior", "period", "on", "the", "back", "of", "cost", "containment", "measures", ".", "#Dailynews"], "text_2_tokenized": ["Does", "Covid", "19", "exist", "in", "India", ".", "Not", "seen", "anything", "on", "that", ".", "What", "are", "the", "numbers", ".", "What", "is", "the", "strategy", "for", "containment", "."]}
-{"id": "0522-containment", "word": "containment", "label_binary": 1, "text_1": "Well, what was meant to be a minor operation to tidy the flat ahead of friends coming round evolved into being a biohazard containment situation after we decided to empty and clean the bins Just waiting for the priest to finish the exorcism...", "token_idx_1": 24, "text_start_1": 123, "text_end_1": 134, "date_1": "2019-03", "text_2": "Ah think it's absolutely fuckin mental that Boris has completely fucked up containment, told the UK to take a global pandemic on the chin, overlooked emails from the EU ref ventilators,gave the contract to Dyson and fucked off to bed to play PlayStation while 100 people a day die", "token_idx_2": 12, "text_start_2": 75, "text_end_2": 86, "date_2": "2020-03", "text_1_tokenized": ["Well", ",", "what", "was", "meant", "to", "be", "a", "minor", "operation", "to", "tidy", "the", "flat", "ahead", "of", "friends", "coming", "round", "evolved", "into", "being", "a", "biohazard", "containment", "situation", "after", "we", "decided", "to", "empty", "and", "clean", "the", "bins", "Just", "waiting", "for", "the", "priest", "to", "finish", "the", "exorcism", "..."], "text_2_tokenized": ["Ah", "think", "it's", "absolutely", "fuckin", "mental", "that", "Boris", "has", "completely", "fucked", "up", "containment", ",", "told", "the", "UK", "to", "take", "a", "global", "pandemic", "on", "the", "chin", ",", "overlooked", "emails", "from", "the", "EU", "ref", "ventilators", ",", "gave", "the", "contract", "to", "Dyson", "and", "fucked", "off", "to", "bed", "to", "play", "PlayStation", "while", "100", "people", "a", "day", "die"]}
-{"id": "0523-containment", "word": "containment", "label_binary": 1, "text_1": "Staff's fear of incidents escalating to violence, resulted in an overestimation of the perceived threat, preventing staff from looking for alternative ways of providing more therapeutic containment (Duxbury, 2002)", "token_idx_1": 28, "text_start_1": 186, "text_end_1": 197, "date_1": "2019-03", "text_2": "If you look closely at the success of the Chinese and South Korean containment models, they acted swiftly to deny tests to the masses while only testing the ruling class.", "token_idx_2": 13, "text_start_2": 67, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Staff's", "fear", "of", "incidents", "escalating", "to", "violence", ",", "resulted", "in", "an", "overestimation", "of", "the", "perceived", "threat", ",", "preventing", "staff", "from", "looking", "for", "alternative", "ways", "of", "providing", "more", "therapeutic", "containment", "(", "Duxbury", ",", "2002", ")"], "text_2_tokenized": ["If", "you", "look", "closely", "at", "the", "success", "of", "the", "Chinese", "and", "South", "Korean", "containment", "models", ",", "they", "acted", "swiftly", "to", "deny", "tests", "to", "the", "masses", "while", "only", "testing", "the", "ruling", "class", "."]}
-{"id": "0524-containment", "word": "containment", "label_binary": 0, "text_1": "this show \u201ccontainment\u201d on netflix is bomb asf.", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 22, "date_1": "2019-03", "text_2": "Can't sleep \u201ccontainment\u201d it is \ud83d\ude0c", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 24, "date_2": "2020-03", "text_1_tokenized": ["this", "show", "\u201c", "containment", "\u201d", "on", "netflix", "is", "bomb", "asf", "."], "text_2_tokenized": ["Can't", "sleep", "\u201c", "containment", "\u201d", "it", "is", "\ud83d\ude0c"]}
-{"id": "0525-containment", "word": "containment", "label_binary": 1, "text_1": "Just assisted on a containment for @emopssdogs who have had a fail to stop. Response officers drive past a vehicle and it goes lights out, shortly after it was picked up by EMOPSS colleagues. Following a short pursuit it decamped and unfortunatly they were quicker than us \ud83d\ude29", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 30, "date_1": "2019-03", "text_2": "Movement of food and cargo will continue as normal throughout the containment period by road/rail/air - H.E. Uhuru Kenyatta #COVID19KE #KomeshaCorona #WeShallOvercomeCorona", "token_idx_2": 11, "text_start_2": 66, "text_end_2": 77, "date_2": "2020-03", "text_1_tokenized": ["Just", "assisted", "on", "a", "containment", "for", "@emopssdogs", "who", "have", "had", "a", "fail", "to", "stop", ".", "Response", "officers", "drive", "past", "a", "vehicle", "and", "it", "goes", "lights", "out", ",", "shortly", "after", "it", "was", "picked", "up", "by", "EMOPSS", "colleagues", ".", "Following", "a", "short", "pursuit", "it", "decamped", "and", "unfortunatly", "they", "were", "quicker", "than", "us", "\ud83d\ude29"], "text_2_tokenized": ["Movement", "of", "food", "and", "cargo", "will", "continue", "as", "normal", "throughout", "the", "containment", "period", "by", "road", "/", "rail", "/", "air", "-", "H", ".", "E", ".", "Uhuru", "Kenyatta", "#COVID19KE", "#KomeshaCorona", "#WeShallOvercomeCorona"]}
-{"id": "0526-containment", "word": "containment", "label_binary": 1, "text_1": "I wanna have the kinda balls and boundaries my therapists has when she cuts me off in the middle of sobbing saying that's time let's do a containment exercise and move along with our day. If that's not bad bitch energy idk what is.", "token_idx_1": 27, "text_start_1": 138, "text_end_1": 149, "date_1": "2019-03", "text_2": "usa had no problem practicing containment before covid-19 huh", "token_idx_2": 5, "text_start_2": 30, "text_end_2": 41, "date_2": "2020-03", "text_1_tokenized": ["I", "wanna", "have", "the", "kinda", "balls", "and", "boundaries", "my", "therapists", "has", "when", "she", "cuts", "me", "off", "in", "the", "middle", "of", "sobbing", "saying", "that's", "time", "let's", "do", "a", "containment", "exercise", "and", "move", "along", "with", "our", "day", ".", "If", "that's", "not", "bad", "bitch", "energy", "idk", "what", "is", "."], "text_2_tokenized": ["usa", "had", "no", "problem", "practicing", "containment", "before", "covid", "-", "19", "huh"]}
-{"id": "0527-containment", "word": "containment", "label_binary": 1, "text_1": "Later, Libya's desert city Islamic State shooting incident will be handed over to France to sever military support with Hafta and military support in the east, as a containment effect! 
\ud83e\udd85by \ud83d\udc0e", "token_idx_1": 30, "text_start_1": 165, "text_end_1": 176, "date_1": "2019-03", "text_2": "Already tired of people asking why we're \"overreacting\" to COVID-19 because they don't personally know anyone that has it. That's exactly how containment is supposed to work, you shouldn't know someone that has it. Let me explain how highly contagious viruses spread. 1/2", "token_idx_2": 27, "text_start_2": 142, "text_end_2": 153, "date_2": "2020-03", "text_1_tokenized": ["Later", ",", "Libya's", "desert", "city", "Islamic", "State", "shooting", "incident", "will", "be", "handed", "over", "to", "France", "to", "sever", "military", "support", "with", "Hafta", "and", "military", "support", "in", "the", "east", ",", "as", "a", "containment", "effect", "!", "\ud83e\udd85", "by", "\ud83d\udc0e"], "text_2_tokenized": ["Already", "tired", "of", "people", "asking", "why", "we're", "\"", "overreacting", "\"", "to", "COVID", "-", "19", "because", "they", "don't", "personally", "know", "anyone", "that", "has", "it", ".", "That's", "exactly", "how", "containment", "is", "supposed", "to", "work", ",", "you", "shouldn't", "know", "someone", "that", "has", "it", ".", "Let", "me", "explain", "how", "highly", "contagious", "viruses", "spread", ".", "1/2"]} -{"id": "0528-containment", "word": "containment", "label_binary": 1, "text_1": "The #AllantonRoadFire remains 40% contained at 500 acres. Crews hope to have containment increased to 50% tonight. The FLForestService will have 2 wildland engines patrolling and mopping up and 3 firefighting bulldozers building and improving fire lines tonight. BayCountyEM \u2026", "token_idx_1": 14, "text_start_1": 77, "text_end_1": 88, "date_1": "2019-03", "text_2": "It's imp PM call for a meeting of all CMs for making them take this issue of containment more seriously.", "token_idx_2": 17, "text_start_2": 77, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["The", "#AllantonRoadFire", "remains", "40", "%", "contained", "at", "500", "acres", ".", "Crews", "hope", "to", "have", "containment", "increased", "to", "50", "%", "tonight", ".", "The", "FLForestService", "will", "have", "2", "wildland", "engines", "patrolling", "and", "mopping", "up", "and", "3", "firefighting", "bulldozers", "building", "and", "improving", "fire", "lines", "tonight", ".", "BayCountyEM", "\u2026"], "text_2_tokenized": ["It's", "imp", "PM", "call", "for", "a", "meeting", "of", "all", "CMs", "for", "making", "them", "take", "this", "issue", "of", "containment", "more", "seriously", "."]} -{"id": "0529-containment", "word": "containment", "label_binary": 1, "text_1": "This comes the same TN Senator @BrianKelsey filed a resolution today calling for the expeditious removal of toxic coal ash from the containment ponds at the Allen Fossil Plant. In recent days @RepCohen said he wasn't happy at the timeframe of the Allen coal ash removal.", "token_idx_1": 22, "text_start_1": 132, "text_end_1": 143, "date_1": "2019-03", "text_2": "I've been self isolating as much as possible at home to put others such as my father, at risk of catching illness. 
Came down with a high fever on Thursday night, and what looks like strep, so I've been in my own containment until I get seen by a doctor on Monday.", "token_idx_2": 46, "text_start_2": 212, "text_end_2": 223, "date_2": "2020-03", "text_1_tokenized": ["This", "comes", "the", "same", "TN", "Senator", "@BrianKelsey", "filed", "a", "resolution", "today", "calling", "for", "the", "expeditious", "removal", "of", "toxic", "coal", "ash", "from", "the", "containment", "ponds", "at", "the", "Allen", "Fossil", "Plant", ".", "In", "recent", "days", "@RepCohen", "said", "he", "wasn't", "happy", "at", "the", "timeframe", "of", "the", "Allen", "coal", "ash", "removal", "."], "text_2_tokenized": ["I've", "been", "self", "isolating", "as", "much", "as", "possible", "at", "home", "to", "put", "others", "such", "as", "my", "father", ",", "at", "risk", "of", "catching", "illness", ".", "Came", "down", "with", "a", "high", "fever", "on", "Thursday", "night", ",", "and", "what", "looks", "like", "strep", ",", "so", "I've", "been", "in", "my", "own", "containment", "until", "I", "get", "seen", "by", "a", "doctor", "on", "Monday", "."]}
-{"id": "0530-containment", "word": "containment", "label_binary": 1, "text_1": "Hi fellow #UglyDogs! Urgent question from the newbie rescue dog dad here about wireless vs fence containment Together we got more than halfway to the initial goal to getting Mr. Dog set up as a happy domestic pup after his sad past of beatings & street life! But- (Short thread)", "token_idx_1": 17, "text_start_1": 97, "text_end_1": 108, "date_1": "2019-03", "text_2": "Is the National Orientation Agency still active? The information about Covid 19 does not seem to have penetrated to the grass roots yet. They may have heard about it, but not to the extent of devastation, containment and management in case they contract it.", "token_idx_2": 40, "text_start_2": 205, "text_end_2": 216, "date_2": "2020-03", "text_1_tokenized": ["Hi", "fellow", "#UglyDogs", "!", "Urgent", "question", "from", "the", "newbie", "rescue", "dog", "dad", "here", "about", "wireless", "vs", "fence", "containment", "Together", "we", "got", "more", "than", "halfway", "to", "the", "initial", "goal", "to", "getting", "Mr", ".", "Dog", "set", "up", "as", "a", "happy", "domestic", "pup", "after", "his", "sad", "past", "of", "beatings", "&", "street", "life", "!", "But", "-", "(", "Short", "thread", ")"], "text_2_tokenized": ["Is", "the", "National", "Orientation", "Agency", "still", "active", "?", "The", "information", "about", "Covid", "19", "does", "not", "seem", "to", "have", "penetrated", "to", "the", "grass", "roots", "yet", ".", "They", "may", "have", "heard", "about", "it", ",", "but", "not", "to", "the", "extent", "of", "devastation", ",", "containment", "and", "management", "in", "case", "they", "contract", "it", "."]}
-{"id": "0531-containment", "word": "containment", "label_binary": 0, "text_1": "if they keep the shifty stations, somehow, that is, the ones from each fest, and they miraculously end up in the game's updates i'm taking both the containment one from chicken and egg and the one from sweaters and socks and i'm bagsing them for porting them later.", "token_idx_1": 31, "text_start_1": 148, "text_end_1": 159, "date_1": "2019-03", "text_2": "I started ozark & containment last night & couldn't get into either", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 33, "date_2": "2020-03", "text_1_tokenized": ["if", "they", "keep", "the", "shifty", "stations", ",", "somehow", ",", "that", "is", ",", "the", "ones", "from", "each", "fest", ",", "and", "they", "miraculously", "end", "up", "in", "the", "game's", "updates", "i'm", "taking", "both", "the", "containment", "one", "from", "chicken", "and", "egg", "and", "the", "one", "from", "sweaters", "and", "socks", "and", "i'm", "bagsing", "them", "for", "porting", "them", "later", "."], "text_2_tokenized": ["I", "started", "ozark", "&", "containment", "last", "night", "&", "couldn't", "get", "into", "either"]}
-{"id": "0532-containment", "word": "containment", "label_binary": 1, "text_1": "\"Try to understand. This is a high voltage laser containment system.\" - Kurt Cobain 1967-1993", "token_idx_1": 11, "text_start_1": 49, "text_end_1": 60, "date_1": "2019-03", "text_2": "Day 27. National School Board rules out a pass/fail grading system. Committee of experts requested to address de-escalation of containment. It's been a really intense day at home with the kids today. Can't really wait for those de-escalation measures to take effect.", "token_idx_2": 23, "text_start_2": 127, "text_end_2": 138, "date_2": "2020-03", "text_1_tokenized": ["\"", "Try", "to", "understand", ".", "This", "is", "a", "high", "voltage", "laser", "containment", "system", ".", "\"", "-", "Kurt", "Cobain", "1967-1993"], "text_2_tokenized": ["Day", "27", ".", "National", "School", "Board", "rules", "out", "a", "pass", "/", "fail", "grading", "system", ".", "Committee", "of", "experts", "requested", "to", "address", "de-escalation", "of", "containment", ".", "It's", "been", "a", "really", "intense", "day", "at", "home", "with", "the", "kids", "today", ".", "Can't", "really", "wait", "for", "those", "de-escalation", "measures", "to", "take", "effect", "."]}
-{"id": "0533-containment", "word": "containment", "label_binary": 0, "text_1": "why hasnt anyone recommended \"scp containment\" to me yet like why did i only find out about it today. nevertheless, I'm a big connor kinnie now so theres that", "token_idx_1": 6, "text_start_1": 34, "text_end_1": 45, "date_1": "2019-03", "text_2": "My mom \"well something nice with this containment is more fics\" priorities", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 49, "date_2": "2020-03", "text_1_tokenized": ["why", "hasnt", "anyone", "recommended", "\"", "scp", "containment", "\"", "to", "me", "yet", "like", "why", "did", "i", "only", "find", "out", "about", "it", "today", ".", "nevertheless", ",", "I'm", "a", "big", "connor", "kinnie", "now", "so", "theres", "that"], "text_2_tokenized": ["My", "mom", "\"", "well", "something", "nice", "with", "this", "containment", "is", "more", "fics", "\"", "priorities"]}
-{"id": "0534-containment", "word": "containment", "label_binary": 1, "text_1": "Hopefully @CDCgov does better job at containment than #KateWinslet in #Contagion. Tells carrier, \"Get off that bus.\" She didnt contain the bus?", "token_idx_1": 6, "text_start_1": 37, "text_end_1": 48, "date_1": "2019-03", "text_2": "\u201cThe coronavirus test that wasn't: How federal health officials misled state scientists and derailed the best chance at containment\u201d per USA Today. Trump and his admin just keep winning. 
On a positive note we lead the world in confirmed cases!", "token_idx_2": 20, "text_start_2": 120, "text_end_2": 131, "date_2": "2020-03", "text_1_tokenized": ["Hopefully", "@CDCgov", "does", "better", "job", "at", "containment", "than", "#KateWinslet", "in", "#Contagion", ".", "Tells", "carrier", ",", "\"", "Get", "off", "that", "bus", ".", "\"", "She", "didnt", "contain", "the", "bus", "?"], "text_2_tokenized": ["\u201c", "The", "coronavirus", "test", "that", "wasn't", ":", "How", "federal", "health", "officials", "misled", "state", "scientists", "and", "derailed", "the", "best", "chance", "at", "containment", "\u201d", "per", "USA", "Today", ".", "Trump", "and", "his", "admin", "just", "keep", "winning", ".", "On", "a", "positive", "note", "we", "lead", "the", "world", "in", "confirmed", "cases", "!"]}
-{"id": "0535-containment", "word": "containment", "label_binary": 1, "text_1": "Evil containment does the job. #DragonBallSuper @MikeMcFarlandVA", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 16, "date_1": "2019-03", "text_2": "Stop this madness, how do we buy food from supermarkets, when you put a roadblock at Mpesa Academy and we are just 7 km from Thika town. We live near Mary Hill girls high school. Stop this nonsense in the name of containment and you are not feeding us. Stupid mitigations.", "token_idx_2": 46, "text_start_2": 213, "text_end_2": 224, "date_2": "2020-03", "text_1_tokenized": ["Evil", "containment", "does", "the", "job", ".", "#DragonBallSuper", "@MikeMcFarlandVA"], "text_2_tokenized": ["Stop", "this", "madness", ",", "how", "do", "we", "buy", "food", "from", "supermarkets", ",", "when", "you", "put", "a", "roadblock", "at", "Mpesa", "Academy", "and", "we", "are", "just", "7", "km", "from", "Thika", "town", ".", "We", "live", "near", "Mary", "Hill", "girls", "high", "school", ".", "Stop", "this", "nonsense", "in", "the", "name", "of", "containment", "and", "you", "are", "not", "feeding", "us", ".", "Stupid", "mitigations", "."]}
-{"id": "0536-containment", "word": "containment", "label_binary": 0, "text_1": "*hes wearing a lab coat* I'm smart now. I also got a level 5 keycard! (Time to start a containment breach! I shouldn't have said that out loud)", "token_idx_1": 24, "text_start_1": 87, "text_end_1": 98, "date_1": "2019-03", "text_2": "like sure the unemployment numbers will go down if they end the containment bc people won't be unemployed they'll be d e a d", "token_idx_2": 12, "text_start_2": 64, "text_end_2": 75, "date_2": "2020-03", "text_1_tokenized": ["*", "hes", "wearing", "a", "lab", "coat", "*", "I'm", "smart", "now", ".", "I", "also", "got", "a", "level", "5", "keycard", "!", "(", "Time", "to", "start", "a", "containment", "breach", "!", "I", "shouldn't", "have", "said", "that", "out", "loud", ")"], "text_2_tokenized": ["like", "sure", "the", "unemployment", "numbers", "will", "go", "down", "if", "they", "end", "the", "containment", "bc", "people", "won't", "be", "unemployed", "they'll", "be", "d", "e", "a", "d"]}
-{"id": "0537-containment", "word": "containment", "label_binary": 1, "text_1": "Spurs need to be positive and not try for containment. City are used to that and will get around it A win would be great but a draw wouldn't be OK. The Ethiad is by no means a fortress on European nights. 
No one expects Spurs to go through so might as well give it a lash #COYS", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 53, "date_1": "2019-03", "text_2": "Even if you live in a state with appropriate containment measures, Trump's rhetoric endangers us all. As infection becomes more widespread things that were relatively safe for #HighRiskCovid19 won't be anymore. Grocery delivery, mail carriers. We can't control our exposure.", "token_idx_2": 9, "text_start_2": 45, "text_end_2": 56, "date_2": "2020-03", "text_1_tokenized": ["Spurs", "need", "to", "be", "positive", "and", "not", "try", "for", "containment", ".", "City", "are", "used", "to", "that", "and", "will", "get", "around", "it", "A", "win", "would", "be", "great", "but", "a", "draw", "wouldn't", "be", "OK", ".", "The", "Ethiad", "is", "by", "no", "means", "a", "fortress", "on", "European", "nights", ".", "No", "one", "expects", "Spurs", "to", "go", "through", "so", "might", "as", "well", "give", "it", "a", "lash", "#COYS"], "text_2_tokenized": ["Even", "if", "you", "live", "in", "a", "state", "with", "appropriate", "containment", "measures", ",", "Trump's", "rhetoric", "endangers", "us", "all", ".", "As", "infection", "becomes", "more", "widespread", "things", "that", "were", "relatively", "safe", "for", "#HighRiskCovid19", "won't", "be", "anymore", ".", "Grocery", "delivery", ",", "mail", "carriers", ".", "We", "can't", "control", "our", "exposure", "."]}
-{"id": "0538-containment", "word": "containment", "label_binary": 1, "text_1": "#Woodgate \ud83d\udd25 \u26a0\ufe0f remains at PREPARE TO LEAVE. @QldFES says at 12.10pm the large slow-moving Fire is travelling south-west and is likely to impact Woodgate Road between rail crossing and Woppis Road. Crews continuing to strengthen containment lines \ud83d\ude92@ABCemergency @abcnews", "token_idx_1": 40, "text_start_1": 228, "text_end_1": 239, "date_1": "2019-03", "text_2": "COVID-19: Why have Pakistan abandoned their citizens in the UAE? Governments must take care of their citizens, especially those who want to go back home The economic impact of the coronavirus pandemic has been quite devastating across the world as countries enforce containment", "token_idx_2": 47, "text_start_2": 266, "text_end_2": 277, "date_2": "2020-03", "text_1_tokenized": ["#Woodgate", "\ud83d\udd25", "\u26a0", "\ufe0f", "remains", "at", "PREPARE", "TO", "LEAVE", ".", "@QldFES", "says", "at", "12.10", "pm", "the", "large", "slow-moving", "Fire", "is", "travelling", "south-west", "and", "is", "likely", "to", "impact", "Woodgate", "Road", "between", "rail", "crossing", "and", "Woppis", "Road", ".", "Crews", "continuing", "to", "strengthen", "containment", "lines", "\ud83d\ude92", "@ABCemergency", "@abcnews"], "text_2_tokenized": ["COVID", "-", "19", ":", "Why", "have", "Pakistan", "abandoned", "their", "citizens", "in", "the", "UAE", "?", "Governments", "must", "take", "care", "of", "their", "citizens", ",", "especially", "those", "who", "want", "to", "go", "back", "home", "The", "economic", "impact", "of", "the", "coronavirus", "pandemic", "has", "been", "quite", "devastating", "across", "the", "world", "as", "countries", "enforce", "containment"]}
-{"id": "0539-containment", "word": "containment", "label_binary": 0, "text_1": "SCP-106 Has broken out of the facility at gate A! Fire the HID turret immediately! Do NOT let it across the bridge! Just beat SCP containment breach on keter mode. It was hard to beat on that difficulty but hey... 
I did it.", "token_idx_1": 30, "text_start_1": 130, "text_end_1": 141, "date_1": "2019-03", "text_2": "So basically everything is a necessary gatherings. Useless for containment @ScottMorrisonMP @abcnews", "token_idx_2": 10, "text_start_2": 63, "text_end_2": 74, "date_2": "2020-03", "text_1_tokenized": ["SCP", "-", "106", "Has", "broken", "out", "of", "the", "facility", "at", "gate", "A", "!", "Fire", "the", "HID", "turret", "immediately", "!", "Do", "NOT", "let", "it", "across", "the", "bridge", "!", "Just", "beat", "SCP", "containment", "breach", "on", "keter", "mode", ".", "It", "was", "hard", "to", "beat", "on", "that", "difficulty", "but", "hey", "...", "I", "did", "it", "."], "text_2_tokenized": ["So", "basically", "everything", "is", "a", "necessary", "gatherings", ".", "Useless", "for", "containment", "@ScottMorrisonMP", "@abcnews"]}
-{"id": "0540-containment", "word": "containment", "label_binary": 1, "text_1": "$ostk shorts doing some classic containment work here, they have to, otherwise they lose their ass on those massive borrowing costs.", "token_idx_1": 6, "text_start_1": 32, "text_end_1": 43, "date_1": "2019-03", "text_2": "The administration has put in place a cluster containment action plan in these three zones to restrict the spread of Covid 19.", "token_idx_2": 8, "text_start_2": 46, "text_end_2": 57, "date_2": "2020-03", "text_1_tokenized": ["$", "ostk", "shorts", "doing", "some", "classic", "containment", "work", "here", ",", "they", "have", "to", ",", "otherwise", "they", "lose", "their", "ass", "on", "those", "massive", "borrowing", "costs", "."], "text_2_tokenized": ["The", "administration", "has", "put", "in", "place", "a", "cluster", "containment", "action", "plan", "in", "these", "three", "zones", "to", "restrict", "the", "spread", "of", "Covid", "19", "."]}
-{"id": "0541-containment", "word": "containment", "label_binary": 1, "text_1": "40 years ago at #ThreeMileIsland: At 1:51pm, hydrogen gas explodes in the containment building \u2014 allowing large amounts of radiation to escape. Operators do not realize the reactor core was uncovered and severely damaged, and don't recognize the boom as a hydrogen explosion.", "token_idx_1": 15, "text_start_1": 74, "text_end_1": 85, "date_1": "2019-03", "text_2": "Calling #COVID\u30fc19 the \u201cChinese\u201d or \u201cWuhan\u201d virus is offensive political marketing at its finest. The CCP absolutely enabled and created an environment that muted early communication and containment. 
The US Administration has done no better thus far.", "token_idx_2": 32, "text_start_2": 186, "text_end_2": 197, "date_2": "2020-03", "text_1_tokenized": ["40", "years", "ago", "at", "#ThreeMileIsland", ":", "At", "1:51", "pm", ",", "hydrogen", "gas", "explodes", "in", "the", "containment", "building", "\u2014", "allowing", "large", "amounts", "of", "radiation", "to", "escape", ".", "Operators", "do", "not", "realize", "the", "reactor", "core", "was", "uncovered", "and", "severely", "damaged", ",", "and", "don't", "recognize", "the", "boom", "as", "a", "hydrogen", "explosion", "."], "text_2_tokenized": ["Calling", "#COVID\u30fc19", "the", "\u201c", "Chinese", "\u201d", "or", "\u201c", "Wuhan", "\u201d", "virus", "is", "offensive", "political", "marketing", "at", "its", "finest", ".", "The", "CCP", "absolutely", "enabled", "and", "created", "an", "environment", "that", "muted", "early", "communication", "and", "containment", ".", "The", "US", "Administration", "has", "done", "no", "better", "thus", "far", "."]}
-{"id": "0542-containment", "word": "containment", "label_binary": 0, "text_1": "Absorb the emotions, through the skin.... Walls of containment paper thin... Thoughts of vengeance black as sin... A war of words, never to win.... Memories of glory, litter bin... Pool of blood, protruding fin.... Eyes of glass, empty within .....", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 62, "date_1": "2019-03", "text_2": "I was liking containment until they put the racist spin on it at the end of the first episode", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 24, "date_2": "2020-03", "text_1_tokenized": ["Absorb", "the", "emotions", ",", "through", "the", "skin", "...", "Walls", "of", "containment", "paper", "thin", "...", "Thoughts", "of", "vengeance", "black", "as", "sin", "...", "A", "war", "of", "words", ",", "never", "to", "win", "...", "Memories", "of", "glory", ",", "litter", "bin", "...", "Pool", "of", "blood", ",", "protruding", "fin", "...", "Eyes", "of", "glass", ",", "empty", "within", "..."], "text_2_tokenized": ["I", "was", "liking", "containment", "until", "they", "put", "the", "racist", "spin", "on", "it", "at", "the", "end", "of", "the", "first", "episode"]}
-{"id": "0543-containment", "word": "containment", "label_binary": 1, "text_1": "Scanner ~ Service Call 1111 CROFT AVE, GOLD BAR / LZ GBR GROCERY on standby for police assist, containment for subject with warrant & now in custody", "token_idx_1": 20, "text_start_1": 95, "text_end_1": 106, "date_1": "2019-03", "text_2": "Kinda think we're way past the containment stage of this virus. Only a vaccine will save Africa.", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 42, "date_2": "2020-03", "text_1_tokenized": ["Scanner", "~", "Service", "Call", "1111", "CROFT", "AVE", ",", "GOLD", "BAR", "/", "LZ", "GBR", "GROCERY", "on", "standby", "for", "police", "assist", ",", "containment", "for", "subject", "with", "warrant", "&", "now", "in", "custody"], "text_2_tokenized": ["Kinda", "think", "we're", "way", "past", "the", "containment", "stage", "of", "this", "virus", ".", "Only", "a", "vaccine", "will", "save", "Africa", "."]}
-{"id": "0544-containment", "word": "containment", "label_binary": 1, "text_1": "StuWarner AuctionsPlus Looking good Stu. Note still in containment - have you had any rain yet? #merinos", "token_idx_1": 9, "text_start_1": 55, "text_end_1": 66, "date_1": "2019-03", "text_2": "praying for the containment to finish before this summer... 
i wanna get drunk with my friends", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 27, "date_2": "2020-03", "text_1_tokenized": ["StuWarner", "AuctionsPlus", "Looking", "good", "Stu", ".", "Note", "still", "in", "containment", "-", "have", "you", "had", "any", "rain", "yet", "?", "#merinos"], "text_2_tokenized": ["praying", "for", "the", "containment", "to", "finish", "before", "this", "summer", "...", "i", "wanna", "get", "drunk", "with", "my", "friends"]}
-{"id": "0545-containment", "word": "containment", "label_binary": 0, "text_1": "Always wondered why it seems most college teams are so vanilla defensively, playing containment man? Coaches run so much great stuff these days and teams allow each other to make every pass they want to. Why not pressure, contest passing lanes, trap some, be more disruptive? \ud83c\udfc0\ud83e\udd14", "token_idx_1": 14, "text_start_1": 84, "text_end_1": 95, "date_1": "2019-03", "text_2": "Paul Kelly says other countries locking down because they have many more #COVID19Au cases and we'll wait until we do too. Wouldn't it make more sense to isolate as a prevention (rather than containment) measure? @rnbreakfast", "token_idx_2": 35, "text_start_2": 190, "text_end_2": 201, "date_2": "2020-03", "text_1_tokenized": ["Always", "wondered", "why", "it", "seems", "most", "college", "teams", "are", "so", "vanilla", "defensively", ",", "playing", "containment", "man", "?", "Coaches", "run", "so", "much", "great", "stuff", "these", "days", "and", "teams", "allow", "each", "other", "to", "make", "every", "pass", "they", "want", "to", ".", "Why", "not", "pressure", ",", "contest", "passing", "lanes", ",", "trap", "some", ",", "be", "more", "disruptive", "?", "\ud83c\udfc0", "\ud83e\udd14"], "text_2_tokenized": ["Paul", "Kelly", "says", "other", "countries", "locking", "down", "because", "they", "have", "many", "more", "#COVID19Au", "cases", "and", "we'll", "wait", "until", "we", "do", "too", ".", "Wouldn't", "it", "make", "more", "sense", "to", "isolate", "as", "a", "prevention", "(", "rather", "than", "containment", ")", "measure", "?", "@rnbreakfast"]}
-{"id": "0546-containment", "word": "containment", "label_binary": 1, "text_1": "*looks into the containment bucket, then at @mlp_Twilight* How're you holding up?", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 27, "date_1": "2019-03", "text_2": "okay so i woke up an hour late for a hbl bc i'm a piece of shit student who's sleep deprived,,, and now i'm doing history and what da hell is containment policy", "token_idx_2": 34, "text_start_2": 142, "text_end_2": 153, "date_2": "2020-03", "text_1_tokenized": ["*", "looks", "into", "the", "containment", "bucket", ",", "then", "at", "@mlp_Twilight", "*", "How're", "you", "holding", "up", "?"], "text_2_tokenized": ["okay", "so", "i", "woke", "up", "an", "hour", "late", "for", "a", "hbl", "bc", "i'm", "a", "piece", "of", "shit", "student", "who's", "sleep", "deprived", ",", ",", ",", "and", "now", "i'm", "doing", "history", "and", "what", "da", "hell", "is", "containment", "policy"]}
-{"id": "0547-containment", "word": "containment", "label_binary": 0, "text_1": "I am warned you guys who are \"monitoring\", the more you resist the more it is going to hurt. If I am to break containment then you will not have the power to stop that. You have no control of this process. You should know that by now. 
R\u00f8age", "token_idx_1": 28, "text_start_1": 110, "text_end_1": 121, "date_1": "2019-03", "text_2": "containment on netflix is literally our lives today \ud83d\ude2d\ud83d\ude2d but clearly the circumstances in the movies are worse but i'm still amazed", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 11, "date_2": "2020-03", "text_1_tokenized": ["I", "am", "warned", "you", "guys", "who", "are", "\"", "monitoring", "\"", ",", "the", "more", "you", "resist", "the", "more", "it", "is", "going", "to", "hurt", ".", "If", "I", "am", "to", "break", "containment", "then", "you", "will", "not", "have", "the", "power", "to", "stop", "that", ".", "You", "have", "no", "control", "of", "this", "process", ".", "You", "should", "know", "that", "by", "now", ".", "R\u00f8age"], "text_2_tokenized": ["containment", "on", "netflix", "is", "literally", "our", "lives", "today", "\ud83d\ude2d", "\ud83d\ude2d", "but", "clearly", "the", "circumstances", "in", "the", "movies", "are", "worse", "but", "i'm", "still", "amazed"]}
-{"id": "0548-containment", "word": "containment", "label_binary": 1, "text_1": "On the note of using potion vials as means of micro containment, they really do serve a wide array of purposes for Mynaleth. Shake n' go potions which require a tiny in the recipe, yummy treats in a bottle, and most importantly... ready-to-go potion testers~ Any of you want in?~", "token_idx_1": 11, "text_start_1": 52, "text_end_1": 63, "date_1": "2019-03", "text_2": "During one of our recent #COVID19 Medicine Grand Rounds, #StanDOM's Megan Mahoney explained the transition from a containment strategy to a mitigation strategy, including our shift to virtual care & the expansion of testing capabilities. (INSERT URL)", "token_idx_2": 18, "text_start_2": 114, "text_end_2": 125, "date_2": "2020-03", "text_1_tokenized": ["On", "the", "note", "of", "using", "potion", "vials", "as", "means", "of", "micro", "containment", ",", "they", "really", "do", "serve", "a", "wide", "array", "of", "purposes", "for", "Mynaleth", ".", "Shake", "n", "'", "go", "potions", "which", "require", "a", "tiny", "in", "the", "recipe", ",", "yummy", "treats", "in", "a", "bottle", ",", "and", "most", "importantly", "...", "ready-to-go", "potion", "testers", "~", "Any", "of", "you", "want", "in", "?", "~"], "text_2_tokenized": ["During", "one", "of", "our", "recent", "#COVID19", "Medicine", "Grand", "Rounds", ",", "#StanDOM's", "Megan", "Mahoney", "explained", "the", "transition", "from", "a", "containment", "strategy", "to", "a", "mitigation", "strategy", ",", "including", "our", "shift", "to", "virtual", "care", "&", "the", "expansion", "of", "testing", "capabilities", ".", "(", "INSERT", "URL", ")"]}
-{"id": "0549-containment", "word": "containment", "label_binary": 0, "text_1": "Increase in cat containment behaviour was found for those participating in the campaign @emilymmcl Good to see that @ZoosVictoria are using this data in their 'educational' marketing campaigns #vrf19", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 27, "date_1": "2019-03", "text_2": "This containment show low key good", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 16, "date_2": "2020-03", "text_1_tokenized": ["Increase", "in", "cat", "containment", "behaviour", "was", "found", "for", "those", "participating", "in", "the", "campaign", "@emilymmcl", "Good", "to", "see", "that", "@ZoosVictoria", "are", "using", "this", "data", "in", "their", "'", "educational", "'", "marketing", "campaigns", "#vrf19"], "text_2_tokenized": ["This", "containment", "show", "low", "key", "good"]}
-{"id": "0551-containment", "word": "containment", "label_binary": 0, "text_1": "Do any #twitterstorians have preferred video clips or documentaries on containment, the Truman Doctrine, collective security in Europe, the Korean War, or NSC-68?", "token_idx_1": 10, "text_start_1": 71, "text_end_1": 82, "date_1": "2019-03", "text_2": "i'm watching containment on netflix rn. ironically, it's based in atlanta during a pandemic called H7n2. all of midtown is quarantined, but some young ppl fucked it up and spread the disease bc they just HAD to have a party. y'all really need to STOP fuckin having kickbacks!!", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 24, "date_2": "2020-03", "text_1_tokenized": ["Do", "any", "#twitterstorians", "have", "preferred", "video", "clips", "or", "documentaries", "on", "containment", ",", "the", "Truman", "Doctrine", ",", "collective", "security", "in", "Europe", ",", "the", "Korean", "War", ",", "or", "NSC", "-", "68", "?"], "text_2_tokenized": ["i'm", "watching", "containment", "on", "netflix", "rn", ".", "ironically", ",", "it's", "based", "in", "atlanta", "during", "a", "pandemic", "called", "H7n2", ".", "all", "of", "midtown", "is", "quarantined", ",", "but", "some", "young", "ppl", "fucked", "it", "up", "and", "spread", "the", "disease", "bc", "they", "just", "HAD", "to", "have", "a", "party", ".", "y'all", "really", "need", "to", "STOP", "fuckin", "having", "kickbacks", "!", "!"]}
-{"id": "0552-containment", "word": "containment", "label_binary": 1, "text_1": "This \u201ccontainment\u201d series is something else, why do I put myself through things like this \ud83d\ude2d", "token_idx_1": 2, "text_start_1": 6, "text_end_1": 17, "date_1": "2019-03", "text_2": "No but containment on Netflix hits too close to home and it was filmed 5 years ago RT @eleven8: Have they done the coronavirus episode of Law & Order SVU yet?", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 18, "date_2": "2020-03", "text_1_tokenized": ["This", "\u201c", "containment", "\u201d", "series", "is", "something", "else", ",", "why", "do", "I", "put", "myself", "through", "things", "like", "this", "\ud83d\ude2d"], "text_2_tokenized": ["No", "but", "containment", "on", "Netflix", "hits", "too", "close", "to", "home", "and", "it", "was", "filmed", "5", "years", "ago", "RT", "@eleven8", ":", "Have", "they", "done", "the", "coronavirus", "episode", "of", "Law", "&", "Order", "SVU", "yet", "?"]}
-{"id": "0553-containment", "word": "containment", "label_binary": 1, "text_1": "\"Mexico to set up CentAm migrant 'containment' belt in south\" via FOX NEWS #timbeta #betaajudabeta #timbetalab", "token_idx_1": 8, "text_start_1": 34, "text_end_1": 45, "date_1": "2019-03", "text_2": "Jobless leads to homeless. Homeless leads to, no self quarantine, which equals zero containment.", "token_idx_2": 16, "text_start_2": 84, "text_end_2": 95, "date_2": "2020-03", "text_1_tokenized": ["\"", "Mexico", "to", "set", "up", "CentAm", "migrant", "'", "containment", "'", "belt", "in", "south", "\"", "via", "FOX", "NEWS", "#timbeta", "#betaajudabeta", "#timbetalab"], "text_2_tokenized": ["Jobless", "leads", "to", "homeless", ".", "Homeless", "leads", "to", ",", "no", "self", "quarantine", ",", "which", "equals", "zero", "containment", "."]}
-{"id": "0554-containment", "word": "containment", "label_binary": 1, "text_1": "He was the first to spot it. \u201cThe water tower has been breached. Spill containment is non-existent.\u201d The dispatcher couldn't resist. 
\u201cSir, it's potable water, not sulfuric acid. On their way.\u201d He was triggered. \u201cNot an emergency? Okay, let me just drain this with a vase.\u201d #vss365", "token_idx_1": 17, "text_start_1": 71, "text_end_1": 82, "date_1": "2019-03", "text_2": "Assam govt orders the sealing of a masjid in Athgaon in Guwahati city for 14 days and it will be a containment zone, announces health minister HB Sarma. Around 100 persons had attended a meet here on March 12 night and 3 of them have tested positive. @IndianExpress", "token_idx_2": 21, "text_start_2": 99, "text_end_2": 110, "date_2": "2020-03", "text_1_tokenized": ["He", "was", "the", "first", "to", "spot", "it", ".", "\u201c", "The", "water", "tower", "has", "been", "breached", ".", "Spill", "containment", "is", "non-existent", ".", "\u201d", "The", "dispatcher", "couldn't", "resist", ".", "\u201c", "Sir", ",", "it's", "potable", "water", ",", "not", "sulfuric", "acid", ".", "On", "their", "way", ".", "\u201d", "He", "was", "triggered", ".", "\u201c", "Not", "an", "emergency", "?", "Okay", ",", "let", "me", "just", "drain", "this", "with", "a", "vase", ".", "\u201d", "#vss365"], "text_2_tokenized": ["Assam", "govt", "orders", "the", "sealing", "of", "a", "masjid", "in", "Athgaon", "in", "Guwahati", "city", "for", "14", "days", "and", "it", "will", "be", "a", "containment", "zone", ",", "announces", "health", "minister", "HB", "Sarma", ".", "Around", "100", "persons", "had", "attended", "a", "meet", "here", "on", "March", "12", "night", "and", "3", "of", "them", "have", "tested", "positive", ".", "@IndianExpress"]}
-{"id": "0555-containment", "word": "containment", "label_binary": 0, "text_1": "i am watching an scp containment breach playthru and this dude turned his back on 173 (they forgot not to) and they screamed it was hilarious", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 32, "date_1": "2019-03", "text_2": "The worst thing I could probably do to myself is listen to the weekends album during containment, my ass gunna go down the abyss of SIMP U THOUGHT\ud83d\ude02", "token_idx_2": 16, "text_start_2": 85, "text_end_2": 96, "date_2": "2020-03", "text_1_tokenized": ["i", "am", "watching", "an", "scp", "containment", "breach", "playthru", "and", "this", "dude", "turned", "his", "back", "on", "173", "(", "they", "forgot", "not", "to", ")", "and", "they", "screamed", "it", "was", "hilarious"], "text_2_tokenized": ["The", "worst", "thing", "I", "could", "probably", "do", "to", "myself", "is", "listen", "to", "the", "weekends", "album", "during", "containment", ",", "my", "ass", "gunna", "go", "down", "the", "abyss", "of", "SIMP", "U", "THOUGHT", "\ud83d\ude02"]}
-{"id": "0556-containment", "word": "containment", "label_binary": 1, "text_1": "The site is experiencing multiple Keter and Euclid level containment breaches. Full site lock-down initiated.", "token_idx_1": 9, "text_start_1": 57, "text_end_1": 68, "date_1": "2019-03", "text_2": "Crazy #COVID\u30fc19 #COVID19 times. I keep thinking of the balance we need to strike between avoiding panic & taking strong containment measures. This is true for each of us, but also at the national policy level: are certain measures excessive or strictly necessary? 
1/2", "token_idx_2": 21, "text_start_2": 124, "text_end_2": 135, "date_2": "2020-03", "text_1_tokenized": ["The", "site", "is", "experiencing", "multiple", "Keter", "and", "Euclid", "level", "containment", "breaches", ".", "Full", "site", "lock-down", "initiated", "."], "text_2_tokenized": ["Crazy", "#COVID\u30fc19", "#COVID19", "times", ".", "I", "keep", "thinking", "of", "the", "balance", "we", "need", "to", "strike", "between", "avoiding", "panic", "&", "taking", "strong", "containment", "measures", ".", "This", "is", "true", "for", "each", "of", "us", ",", "but", "also", "at", "the", "national", "policy", "level", ":", "are", "certain", "measures", "excessive", "or", "strictly", "necessary", "?", "1/2"]}
-{"id": "0557-containment", "word": "containment", "label_binary": 1, "text_1": "When are we going to hear more about cost containment and the need to develop regional systems that eliminate duplication and other forms of fraud and waste. No resources for primary or community care unless we do this. @SenSanders @BetoORourke", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 53, "date_1": "2019-03", "text_2": "Its seeming more and more clear that we just havent tested anywhere near who we should have by now. The tests are just now easily availible for the elite class and so many of them already have it. The time for a meaningful containment was 1 week ago.", "token_idx_2": 45, "text_start_2": 223, "text_end_2": 234, "date_2": "2020-03", "text_1_tokenized": ["When", "are", "we", "going", "to", "hear", "more", "about", "cost", "containment", "and", "the", "need", "to", "develop", "regional", "systems", "that", "eliminate", "duplication", "and", "other", "forms", "of", "fraud", "and", "waste", ".", "No", "resources", "for", "primary", "or", "community", "care", "unless", "we", "do", "this", ".", "@SenSanders", "@BetoORourke"], "text_2_tokenized": ["Its", "seeming", "more", "and", "more", "clear", "that", "we", "just", "havent", "tested", "anywhere", "near", "who", "we", "should", "have", "by", "now", ".", "The", "tests", "are", "just", "now", "easily", "availible", "for", "the", "elite", "class", "and", "so", "many", "of", "them", "already", "have", "it", ".", "The", "time", "for", "a", "meaningful", "containment", "was", "1", "week", "ago", "."]}
-{"id": "0558-containment", "word": "containment", "label_binary": 1, "text_1": "We need to provide containment around professional anxiety, we need to provide containment around organisational anxiety. If the system can do that then SWs can go off and do purposeful work with families #8020campaign @TStibbs @BASW_UK", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 30, "date_1": "2019-03", "text_2": "U.K. Deputy CMO saying containment was right policy at time and widespread testing not necessary. We have a competence issue in U.K. 
#COVID19", "token_idx_2": 7, "text_start_2": 23, "text_end_2": 34, "date_2": "2020-03", "text_1_tokenized": ["We", "need", "to", "provide", "containment", "around", "professional", "anxiety", ",", "we", "need", "to", "provide", "containment", "around", "organisational", "anxiety", ".", "If", "the", "system", "can", "do", "that", "then", "SWs", "can", "go", "off", "and", "do", "purposeful", "work", "with", "families", "#8020campaign", "@TStibbs", "@BASW_UK"], "text_2_tokenized": ["U", ".", "K", ".", "Deputy", "CMO", "saying", "containment", "was", "right", "policy", "at", "time", "and", "widespread", "testing", "not", "necessary", ".", "We", "have", "a", "competence", "issue", "in", "U", ".", "K", ".", "#COVID19"]}
-{"id": "0559-containment", "word": "containment", "label_binary": 1, "text_1": "Use Prisons No longer in use for containment. Deduct 10% aid being sent to these nations . To pay for it. Much needed \"Man Power\" at the borders . \"Green Deal\" ? Ha ! Ha ! \"Climate Change\" ? A Big \"Hoax\" !!", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 44, "date_1": "2019-03", "text_2": "This \"lockdown\" Duterte imposed is going to exhibit military hardware, & based frm the inconsistent pronouncements made by Duterte henchmen, it's going to be one hell of a circus. Pray, tell me if it's abt containment - it's not. It's typical of a Duterte crazy governance.", "token_idx_2": 41, "text_start_2": 210, "text_end_2": 221, "date_2": "2020-03", "text_1_tokenized": ["Use", "Prisons", "No", "longer", "in", "use", "for", "containment", ".", "Deduct", "10", "%", "aid", "being", "sent", "to", "these", "nations", ".", "To", "pay", "for", "it", ".", "Much", "needed", "\"", "Man", "Power", "\"", "at", "the", "borders", ".", "\"", "Green", "Deal", "\"", "?", "Ha", "!", "Ha", "!", "\"", "Climate", "Change", "\"", "?", "A", "Big", "\"", "Hoax", "\"", "!", "!"], "text_2_tokenized": ["This", "\"", "lockdown", "\"", "Duterte", "imposed", "is", "going", "to", "exhibit", "military", "hardware", ",", "&", "based", "frm", "the", "inconsistent", "pronouncements", "made", "by", "Duterte", "henchmen", ",", "it's", "going", "to", "be", "one", "hell", "of", "a", "circus", ".", "Pray", ",", "tell", "me", "if", "it's", "abt", "containment", "-", "it's", "not", ".", "It's", "typical", "of", "a", "Duterte", "crazy", "governance", "."]}
-{"id": "0560-containment", "word": "containment", "label_binary": 1, "text_1": "*Hits Blunt* In a Beetlejuice/Ghostbusters crossover; if Beetlejuice was captured and put in the containment unit, could he get out of someone said his name three times?", "token_idx_1": 19, "text_start_1": 97, "text_end_1": 108, "date_1": "2019-03", "text_2": "Um, popped out to see a neighbour earlier and there was a big crowd outside a local pub and at least two tables occupied at a restaurant- does this 12 week containment rely on only a percentage of people complying with advice to stay home? 
If not we're doomed \ud83d\ude22", "token_idx_2": 33, "text_start_2": 156, "text_end_2": 167, "date_2": "2020-03", "text_1_tokenized": ["*", "Hits", "Blunt", "*", "In", "a", "Beetlejuice", "/", "Ghostbusters", "crossover", ";", "if", "Beetlejuice", "was", "captured", "and", "put", "in", "the", "containment", "unit", ",", "could", "he", "get", "out", "of", "someone", "said", "his", "name", "three", "times", "?"], "text_2_tokenized": ["Um", ",", "popped", "out", "to", "see", "a", "neighbour", "earlier", "and", "there", "was", "a", "big", "crowd", "outside", "a", "local", "pub", "and", "at", "least", "two", "tables", "occupied", "at", "a", "restaurant", "-", "does", "this", "12", "week", "containment", "rely", "on", "only", "a", "percentage", "of", "people", "complying", "with", "advice", "to", "stay", "home", "?", "If", "not", "we're", "doomed", "\ud83d\ude22"]}
-{"id": "0561-containment", "word": "containment", "label_binary": 1, "text_1": "Guys. a few days ago, I was assigned to work with SCP-871. It was really cool at the beginning, but now I can't eat more cake anymore, plz. They let me trapped in a containment chamber with 4 of them and now are 20. SEND HELP", "token_idx_1": 42, "text_start_1": 165, "text_end_1": 176, "date_1": "2019-03", "text_2": "This containment has me not wanting to leave the dam house", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 16, "date_2": "2020-03", "text_1_tokenized": ["Guys", ".", "a", "few", "days", "ago", ",", "I", "was", "assigned", "to", "work", "with", "SCP", "-", "871", ".", "It", "was", "really", "cool", "at", "the", "beginning", ",", "but", "now", "I", "can't", "eat", "more", "cake", "anymore", ",", "plz", ".", "They", "let", "me", "trapped", "in", "a", "containment", "chamber", "with", "4", "of", "them", "and", "now", "are", "20", ".", "SEND", "HELP"], "text_2_tokenized": ["This", "containment", "has", "me", "not", "wanting", "to", "leave", "the", "dam", "house"]}
-{"id": "0562-containment", "word": "containment", "label_binary": 1, "text_1": "There was a small Crack in 682's containment chamber, we sent down a couple of construction workers", "token_idx_1": 9, "text_start_1": 33, "text_end_1": 44, "date_1": "2019-03", "text_2": "Full Episode \ud83c\udfa5: Setting up family offices managing them over for a 3-5k square foot event space in SF are giving up on containment strategies entirely, which still help.", "token_idx_2": 25, "text_start_2": 119, "text_end_2": 130, "date_2": "2020-03", "text_1_tokenized": ["There", "was", "a", "small", "Crack", "in", "682", "'", "s", "containment", "chamber", ",", "we", "sent", "down", "a", "couple", "of", "construction", "workers"], "text_2_tokenized": ["Full", "Episode", "\ud83c\udfa5", ":", "Setting", "up", "family", "offices", "managing", "them", "over", "for", "a", "3-5", "k", "square", "foot", "event", "space", "in", "SF", "are", "giving", "up", "on", "containment", "strategies", "entirely", ",", "which", "still", "help", "."]}
-{"id": "0563-containment", "word": "containment", "label_binary": 1, "text_1": "Oh man that containment building is painful", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 23, "date_1": "2019-03", "text_2": "Rockland County Executive Ed Day on Thursday publicly called for a containment zone and construction of a temporary hospital in eastern Rockland County, noting the Spring Valley and Monsey areas had seen sharp increases in confirmed cases.", "token_idx_2": 11, "text_start_2": 67, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Oh", "man", "that", "containment", "building", "is", "painful"], "text_2_tokenized": ["Rockland", "County", "Executive", "Ed", "Day", "on", "Thursday", "publicly", "called", "for", "a", "containment", "zone", "and", "construction", "of", "a", "temporary", "hospital", "in", "eastern", "Rockland", "County", ",", "noting", "the", "Spring", "Valley", "and", "Monsey", "areas", "had", "seen", "sharp", "increases", "in", "confirmed", "cases", "."]}
-{"id": "0564-containment", "word": "containment", "label_binary": 1, "text_1": "Watched the last episode of Fleabag in the bath which turned out to be a good tactic for tear containment. (I'm still sobbing) #Fleabag", "token_idx_1": 19, "text_start_1": 94, "text_end_1": 105, "date_1": "2019-03", "text_2": "Ehresmann talking about phases of response to COVID-19: containment, mitigation & pandemic management. Still in containment. Now starting to move into where we want to focus on community mitigation, \"where people's personal lives will likely be impacted...even if we are healthy.\"", "token_idx_2": 11, "text_start_2": 56, "text_end_2": 67, "date_2": "2020-03", "text_1_tokenized": ["Watched", "the", "last", "episode", "of", "Fleabag", "in", "the", "bath", "which", "turned", "out", "to", "be", "a", "good", "tactic", "for", "tear", "containment", ".", "(", "I'm", "still", "sobbing", ")", "#Fleabag"], "text_2_tokenized": ["Ehresmann", "talking", "about", "phases", "of", "response", "to", "COVID", "-", "19", ":", "containment", ",", "mitigation", "&", "pandemic", "management", ".", "Still", "in", "containment", ".", "Now", "starting", "to", "move", "into", "where", "we", "want", "to", "focus", "on", "community", "mitigation", ",", "\"", "where", "people's", "personal", "lives", "will", "likely", "be", "impacted", "...", "even", "if", "we", "are", "healthy", ".", "\""]}
-{"id": "0565-containment", "word": "containment", "label_binary": 1, "text_1": "what if \ud83e\udd14 we kissed \ud83d\ude33 in SCP-682's containment chamber pool \ud83d\ude0d", "token_idx_1": 12, "text_start_1": 35, "text_end_1": 46, "date_1": "2019-03", "text_2": "Dr. Anthony Fauci on the coronavirus pandemic: \u201cWhat we need to do with containment and mitigation is to blunt that curve.\" #MTP", "token_idx_2": 16, "text_start_2": 72, "text_end_2": 83, "date_2": "2020-03", "text_1_tokenized": ["what", "if", "\ud83e\udd14", "we", "kissed", "\ud83d\ude33", "in", "SCP", "-", "682", "'", "s", "containment", "chamber", "pool", "\ud83d\ude0d"], "text_2_tokenized": ["Dr", ".", "Anthony", "Fauci", "on", "the", "coronavirus", "pandemic", ":", "\u201c", "What", "we", "need", "to", "do", "with", "containment", "and", "mitigation", "is", "to", "blunt", "that", "curve", ".", "\"", "#MTP"]}
-{"id": "0566-containment", "word": "containment", "label_binary": 0, "text_1": "how did the people who made scp containment breach get someone to make the femur breaker noise because it's honestly the most horrifying thing I've ever heard", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 43, "date_1": "2019-03", "text_2": "Dr. Fauci confirmed the only game changer is a vaccine (hopefully 1-2 years from now). Otherwise, the virus'll most likely return season after season, & the next best option is what South Korea is doing. 
If that (containment) fails, then it's mitigation (what we're doing now).", "token_idx_2": 45, "text_start_2": 217, "text_end_2": 228, "date_2": "2020-03", "text_1_tokenized": ["how", "did", "the", "people", "who", "made", "scp", "containment", "breach", "get", "someone", "to", "make", "the", "femur", "breaker", "noise", "because", "it's", "honestly", "the", "most", "horrifying", "thing", "I've", "ever", "heard"], "text_2_tokenized": ["Dr", ".", "Fauci", "confirmed", "the", "only", "game", "changer", "is", "a", "vaccine", "(", "hopefully", "1-2", "years", "from", "now", ")", ".", "Otherwise", ",", "the", "virus'll", "most", "likely", "return", "season", "after", "season", ",", "&", "the", "next", "best", "option", "is", "what", "South", "Korea", "is", "doing", ".", "If", "that", "(", "containment", ")", "fails", ",", "then", "it's", "mitigation", "(", "what", "we're", "doing", "now", ")", "."]}
-{"id": "0567-containment", "word": "containment", "label_binary": 1, "text_1": "Be rude, do not reason with them, / they panic like words that flee the endless / junctures of containment and release.", "token_idx_1": 21, "text_start_1": 95, "text_end_1": 106, "date_1": "2019-03", "text_2": "Every word out of Trump's Asshole, mouth , is a distorted effort to take no responsibility for anything . He is eminently disposable . He is Chernobyl walking around without containment . Do not listen to anything this creature utters .", "token_idx_2": 31, "text_start_2": 174, "text_end_2": 185, "date_2": "2020-03", "text_1_tokenized": ["Be", "rude", ",", "do", "not", "reason", "with", "them", ",", "/", "they", "panic", "like", "words", "that", "flee", "the", "endless", "/", "junctures", "of", "containment", "and", "release", "."], "text_2_tokenized": ["Every", "word", "out", "of", "Trump's", "Asshole", ",", "mouth", ",", "is", "a", "distorted", "effort", "to", "take", "no", "responsibility", "for", "anything", ".", "He", "is", "eminently", "disposable", ".", "He", "is", "Chernobyl", "walking", "around", "without", "containment", ".", "Do", "not", "listen", "to", "anything", "this", "creature", "utters", "."]}
-{"id": "0568-containment", "word": "containment", "label_binary": 1, "text_1": "Did that containment field really work...... I doubt it that was too easy", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 20, "date_1": "2019-03", "text_2": "\u20661st& joint 1st ministers don't listen to \u2066@BorisJohnson\u2069 he shld locked down 2wks b4 he did to prevent ppl spreading it Ok he wanted to save economy but if he shut everywhere dwn economy wld survive due to containment & there would be less people ill less strain on NHS", "token_idx_2": 42, "text_start_2": 211, "text_end_2": 222, "date_2": "2020-03", "text_1_tokenized": ["Did", "that", "containment", "field", "really", "work", "...", "I", "doubt", "it", "that", "was", "too", "easy"], "text_2_tokenized": ["\u2066", "1st", "&", "joint", "1st", "ministers", "don't", "listen", "to", "\u2066", "@BorisJohnson", "\u2069", "he", "shld", "locked", "down", "2wks", "b4", "he", "did", "to", "prevent", "ppl", "spreading", "it", "Ok", "he", "wanted", "to", "save", "economy", "but", "if", "he", "shut", "everywhere", "dwn", "economy", "wld", "survive", "due", "to", "containment", "&", "there", "would", "be", "less", "people", "ill", "less", "strain", "on", "NHS"]}
-{"id": "0569-containment", "word": "containment", "label_binary": 1, "text_1": "Dear Republicans. Medicaid is a cost containment device. 
Medical care and expenses are less for the taxpayers under Medicaid than under a \u2018Republican Plan' ( don't get sick but if you do, head to the ER & take what you find). Medicaid audits and controls costs. That's really true", "token_idx_1": 7, "text_start_1": 37, "text_end_1": 48, "date_1": "2019-03", "text_2": "When you discover that your house could have fast internet but the containment closed any technical procedure for your house... #cancelledduetocovid19 #Containment", "token_idx_2": 12, "text_start_2": 67, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Dear", "Republicans", ".", "Medicaid", "is", "a", "cost", "containment", "device", ".", "Medical", "care", "and", "expenses", "are", "less", "for", "the", "taxpayers", "under", "Medicaid", "than", "under", "a", "\u2018", "Republican", "Plan", "'", "(", "don't", "get", "sick", "but", "if", "you", "do", ",", "head", "to", "the", "ER", "&", "take", "what", "you", "find", ")", ".", "Medicaid", "audits", "and", "controls", "costs", ".", "That's", "really", "true"], "text_2_tokenized": ["When", "you", "discover", "that", "your", "house", "could", "have", "fast", "internet", "but", "the", "containment", "closed", "any", "technical", "procedure", "for", "your", "house", "...", "#cancelledduetocovid19", "#Containment"]}
-{"id": "0570-containment", "word": "containment", "label_binary": 1, "text_1": "Very thin line between freedom and containment", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 46, "date_1": "2019-03", "text_2": "Pres Trump intros NIH's Fauci, who says with nat'l emergency declaration as many restraints on containment, mitigation, testing being removed. Want to avoid exponential curve of spread. #Trump #coronavirus @anthonymace", "token_idx_2": 16, "text_start_2": 95, "text_end_2": 106, "date_2": "2020-03", "text_1_tokenized": ["Very", "thin", "line", "between", "freedom", "and", "containment"], "text_2_tokenized": ["Pres", "Trump", "intros", "NIH's", "Fauci", ",", "who", "says", "with", "nat'l", "emergency", "declaration", "as", "many", "restraints", "on", "containment", ",", "mitigation", ",", "testing", "being", "removed", ".", "Want", "to", "avoid", "exponential", "curve", "of", "spread", ".", "#Trump", "#coronavirus", "@anthonymace"]}
-{"id": "0571-containment", "word": "containment", "label_binary": 1, "text_1": "why is animal containment still a thing. are we scared to not be the dominant specie? are we so wrapped up in our own egos that we have to have every living creature than is not human locked up in cages??? it's gross. literally disgusting.", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 25, "date_1": "2019-03", "text_2": "I'm not trying to hoard PPE. You have no idea how guilty I feel when I throw away a N95 mask, no matter how containment it is. I save it in a brown bag. 
It's so gross.", "token_idx_2": 26, "text_start_2": 108, "text_end_2": 119, "date_2": "2020-03", "text_1_tokenized": ["why", "is", "animal", "containment", "still", "a", "thing", ".", "are", "we", "scared", "to", "not", "be", "the", "dominant", "specie", "?", "are", "we", "so", "wrapped", "up", "in", "our", "own", "egos", "that", "we", "have", "to", "have", "every", "living", "creature", "than", "is", "not", "human", "locked", "up", "in", "cages", "?", "?", "?", "it's", "gross", ".", "literally", "disgusting", "."], "text_2_tokenized": ["I'm", "not", "trying", "to", "hoard", "PPE", ".", "You", "have", "no", "idea", "how", "guilty", "I", "feel", "when", "I", "throw", "away", "a", "N95", "mask", ",", "no", "matter", "how", "containment", "it", "is", ".", "I", "save", "it", "in", "a", "brown", "bag", ".", "It's", "so", "gross", "."]}
-{"id": "0572-containment", "word": "containment", "label_binary": 1, "text_1": "Southern African nations hit by Cyclone Idai needs urgent attention. If not well managed, humanitarian crises will escalate beyond containment. AU, ECOWAS, NEPAD should scale up support to victims. Let's draw the world's attention to Southern Africa. #CycloneIdai", "token_idx_1": 21, "text_start_1": 131, "text_end_1": 142, "date_1": "2019-03", "text_2": "It's imperative and a must that the cost of tests should be drastically reduced and made affordable. High cost defers people from taking COVID19 tests, which is very important for containment. Rampant free tests should become the norm, including in private labs.", "token_idx_2": 33, "text_start_2": 180, "text_end_2": 191, "date_2": "2020-03", "text_1_tokenized": ["Southern", "African", "nations", "hit", "by", "Cyclone", "Idai", "needs", "urgent", "attention", ".", "If", "not", "well", "managed", ",", "humanitarian", "crises", "will", "escalate", "beyond", "containment", ".", "AU", ",", "ECOWAS", ",", "NEPAD", "should", "scale", "up", "support", "to", "victims", ".", "Let's", "draw", "the", "world's", "attention", "to", "Southern", "Africa", ".", "#CycloneIdai"], "text_2_tokenized": ["It's", "imperative", "and", "a", "must", "that", "the", "cost", "of", "tests", "should", "be", "drastically", "reduced", "and", "made", "affordable", ".", "High", "cost", "defers", "people", "from", "taking", "COVID", "19", "tests", ",", "which", "is", "very", "important", "for", "containment", ".", "Rampant", "free", "tests", "should", "become", "the", "norm", ",", "including", "in", "private", "labs", "."]}
-{"id": "0573-containment", "word": "containment", "label_binary": 0, "text_1": "alien: containment was pretty bad. cool enough premise even if it's not something entirely new or exciting, i just like the Evil Science Person Who Wants Xenomorphs On A Ship trope. this would be good if they did something more interesting with it and if the acting wasn't shit", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 18, "date_1": "2019-03", "text_2": "ProximaX and @learnitude are building a #COVID19 tracing app to help governments with their containment measures. The app will allow governments to identify cases, dispense medication and take action as necessary to help slow the spread of the virulent disease. 
#tech #BUIDL \ud83d\udc47", "token_idx_2": 14, "text_start_2": 92, "text_end_2": 103, "date_2": "2020-03", "text_1_tokenized": ["alien", ":", "containment", "was", "pretty", "bad", ".", "cool", "enough", "premise", "even", "if", "it's", "not", "something", "entirely", "new", "or", "exciting", ",", "i", "just", "like", "the", "Evil", "Science", "Person", "Who", "Wants", "Xenomorphs", "On", "A", "Ship", "trope", ".", "this", "would", "be", "good", "if", "they", "did", "something", "more", "interesting", "with", "it", "and", "if", "the", "acting", "wasn't", "shit"], "text_2_tokenized": ["ProximaX", "and", "@learnitude", "are", "building", "a", "#COVID19", "tracing", "app", "to", "help", "governments", "with", "their", "containment", "measures", ".", "The", "app", "will", "allow", "governments", "to", "identify", "cases", ",", "dispense", "medication", "and", "take", "action", "as", "necessary", "to", "help", "slow", "the", "spread", "of", "the", "virulent", "disease", ".", "#tech", "#BUIDL", "\ud83d\udc47"]}
-{"id": "0574-containment", "word": "containment", "label_binary": 1, "text_1": "My \u201cSara's technique of emotional containment and mental distraction\u201d is not working anymore and I have no clue what to do\ud83e\udd17", "token_idx_1": 6, "text_start_1": 34, "text_end_1": 45, "date_1": "2019-03", "text_2": "#SouthernCameroons been dealing with its own #Corvid19 disaster for 58 years. It's called #Francophonisation. Impact on health & livelihood, especially for the vulnerable - women, elderly, has been devastating. Our containment strategies - social distancing, being used worldwide", "token_idx_2": 36, "text_start_2": 219, "text_end_2": 230, "date_2": "2020-03", "text_1_tokenized": ["My", "\u201c", "Sara's", "technique", "of", "emotional", "containment", "and", "mental", "distraction", "\u201d", "is", "not", "working", "anymore", "and", "I", "have", "no", "clue", "what", "to", "do", "\ud83e\udd17"], "text_2_tokenized": ["#SouthernCameroons", "been", "dealing", "with", "its", "own", "#Corvid19", "disaster", "for", "58", "years", ".", "It's", "called", "#Francophonisation", ".", "Impact", "on", "health", "&", "livelihood", ",", "especially", "for", "the", "vulnerable", "-", "women", ",", "elderly", ",", "has", "been", "devastating", ".", "Our", "containment", "strategies", "-", "social", "distancing", ",", "being", "used", "worldwide"]}
-{"id": "0575-containment", "word": "containment", "label_binary": 1, "text_1": "#BeachFire Update (4/1 AM): Fire is currently 60% contained, resources on scene are working to mop-up residual heat and check/secure containment lines. 
#AZFire", "token_idx_1": 26, "text_start_1": 133, "text_end_1": 144, "date_1": "2019-03", "text_2": "So far, the loudest calls have been for expanded testing, PPEs (and gen safety of HCWs), and social amelioration But let's not forget about other equally important issues: 1) For containment: community-based interventions (contact tracing, isolation and quarantine + safety nets)", "token_idx_2": 37, "text_start_2": 179, "text_end_2": 190, "date_2": "2020-03", "text_1_tokenized": ["#BeachFire", "Update", "(", "4/1", "AM", "):", "Fire", "is", "currently", "60", "%", "contained", ",", "resources", "on", "scene", "are", "working", "to", "mop-up", "residual", "heat", "and", "check", "/", "secure", "containment", "lines", ".", "#AZFire"], "text_2_tokenized": ["So", "far", ",", "the", "loudest", "calls", "have", "been", "for", "expanded", "testing", ",", "PPEs", "(", "and", "gen", "safety", "of", "HCWs", ")", ",", "and", "social", "amelioration", "But", "let's", "not", "forget", "about", "other", "equally", "important", "issues", ":", "1", ")", "For", "containment", ":", "community-based", "interventions", "(", "contact", "tracing", ",", "isolation", "and", "quarantine", "+", "safety", "nets", ")"]}
-{"id": "0576-containment", "word": "containment", "label_binary": 1, "text_1": "With our steel barrier, we should electrically charge it to stop climbers! Just like we use for livestock containment!", "token_idx_1": 20, "text_start_1": 106, "text_end_1": 117, "date_1": "2019-03", "text_2": "Countries with well-functioning and inclusive health systems are more likely to catch an outbreak early when the chances of rapid containment are best - @WHO", "token_idx_2": 20, "text_start_2": 130, "text_end_2": 141, "date_2": "2020-03", "text_1_tokenized": ["With", "our", "steel", "barrier", ",", "we", "should", "electrically", "charge", "it", "to", "stop", "climbers", "!", "Just", "like", "we", "use", "for", "livestock", "containment", "!"], "text_2_tokenized": ["Countries", "with", "well-functioning", "and", "inclusive", "health", "systems", "are", "more", "likely", "to", "catch", "an", "outbreak", "early", "when", "the", "chances", "of", "rapid", "containment", "are", "best", "-", "@WHO"]}
-{"id": "0577-containment", "word": "containment", "label_binary": 0, "text_1": "Evil containment wave!!! Vintage Roshi special!!! #DragonballSuper", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 16, "date_1": "2019-03", "text_2": "Man containment got me telling Mf back up 4 to 6 feet please", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 15, "date_2": "2020-03", "text_1_tokenized": ["Evil", "containment", "wave", "!", "!", "!", "Vintage", "Roshi", "special", "!", "!", "!", "#DragonballSuper"], "text_2_tokenized": ["Man", "containment", "got", "me", "telling", "Mf", "back", "up", "4", "to", "6", "feet", "please"]}
-{"id": "0578-containment", "word": "containment", "label_binary": 1, "text_1": "Oh yeah forgot to tell @redacted_yeet not to let @j_scp049 out of his containment cell anymore. so uhh don't let him out anymore", "token_idx_1": 13, "text_start_1": 70, "text_end_1": 81, "date_1": "2019-03", "text_2": "The funniest part in all of this is maybe we get locked down for another two or three months, containment works and this isn't even noteworthy in history anymore. 
We don't even get to be in textbooks \ud83d\ude2d", "token_idx_2": 20, "text_start_2": 94, "text_end_2": 105, "date_2": "2020-03", "text_1_tokenized": ["Oh", "yeah", "forgot", "to", "tell", "@redacted_yeet", "not", "to", "let", "@j_scp049", "out", "of", "his", "containment", "cell", "anymore", ".", "so", "uhh", "don't", "let", "him", "out", "anymore"], "text_2_tokenized": ["The", "funniest", "part", "in", "all", "of", "this", "is", "maybe", "we", "get", "locked", "down", "for", "another", "two", "or", "three", "months", ",", "containment", "works", "and", "this", "isn't", "even", "noteworthy", "in", "history", "anymore", ".", "We", "don't", "even", "get", "to", "be", "in", "textbooks", "\ud83d\ude2d"]}
-{"id": "3251-virus", "word": "virus", "label_binary": 0, "text_1": "-sakura -sakura but an AI nurse -sakura but an AI nurse that got a virus and decided to run the moon -sakura but there's five of her and they're computer programs the moon sakura created to help her do moon things", "token_idx_1": 17, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-03", "text_2": "This corona virus shit is really fucking with my life right now", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 17, "date_2": "2020-03", "text_1_tokenized": ["-", "sakura", "-", "sakura", "but", "an", "AI", "nurse", "-", "sakura", "but", "an", "AI", "nurse", "that", "got", "a", "virus", "and", "decided", "to", "run", "the", "moon", "-", "sakura", "but", "there's", "five", "of", "her", "and", "they're", "computer", "programs", "the", "moon", "sakura", "created", "to", "help", "her", "do", "moon", "things"], "text_2_tokenized": ["This", "corona", "virus", "shit", "is", "really", "fucking", "with", "my", "life", "right", "now"]}
-{"id": "3252-virus", "word": "virus", "label_binary": 0, "text_1": "End of #Supergirl S3 : Brainy saids that, in the future, Brainic releases a virus which destroys all A.I. - but how does he know that in the here and now?", "token_idx_1": 16, "text_start_1": 76, "text_end_1": 81, "date_1": "2019-03", "text_2": "today has been a hard but also rewarding day. this is the only place I can talk about it but I think I'm relapsing. 
): just wish this virus would end and things could go back to the way they were before", "token_idx_2": 30, "text_start_2": 134, "text_end_2": 139, "date_2": "2020-03", "text_1_tokenized": ["End", "of", "#Supergirl", "S3", ":", "Brainy", "saids", "that", ",", "in", "the", "future", ",", "Brainic", "releases", "a", "virus", "which", "destroys", "all", "A", ".", "I", ".", "-", "but", "how", "does", "he", "know", "that", "in", "the", "here", "and", "now", "?"], "text_2_tokenized": ["today", "has", "been", "a", "hard", "but", "also", "rewarding", "day", ".", "this", "is", "the", "only", "place", "I", "can", "talk", "about", "it", "but", "I", "think", "I'm", "relapsing", ".", "):", "just", "wish", "this", "virus", "would", "end", "and", "things", "could", "go", "back", "to", "the", "way", "they", "were", "before"]}
-{"id": "3253-virus", "word": "virus", "label_binary": 0, "text_1": "What a political crisis smells like to me: Invective laden and overwrought Our political discourse has been Infected by the virus of contumely There's no way back now We must descend through Narrowing bands of the gyre As we circle the drain submerged In chaos outrage and hate", "token_idx_1": 21, "text_start_1": 124, "text_end_1": 129, "date_1": "2019-03", "text_2": "holy fuck i'm so scared a member of the cdc just called me personally and told me i need to isolate myself immediately bc they found out one of the new symptoms of the virus is having a juicy pussy and a great personality", "token_idx_2": 34, "text_start_2": 168, "text_end_2": 173, "date_2": "2020-03", "text_1_tokenized": ["What", "a", "political", "crisis", "smells", "like", "to", "me", ":", "Invective", "laden", "and", "overwrought", "Our", "political", "discourse", "has", "been", "Infected", "by", "the", "virus", "of", "contumely", "There's", "no", "way", "back", "now", "We", "must", "descend", "through", "Narrowing", "bands", "of", "the", "gyre", "As", "we", "circle", "the", "drain", "submerged", "In", "chaos", "outrage", "and", "hate"], "text_2_tokenized": ["holy", "fuck", "i'm", "so", "scared", "a", "member", "of", "the", "cdc", "just", "called", "me", "personally", "and", "told", "me", "i", "need", "to", "isolate", "myself", "immediately", "bc", "they", "found", "out", "one", "of", "the", "new", "symptoms", "of", "the", "virus", "is", "having", "a", "juicy", "pussy", "and", "a", "great", "personality"]}
-{"id": "3254-virus", "word": "virus", "label_binary": 0, "text_1": "dongwoon is such a happy virus I just\ud83d\ude22\ud83d\ude1a\u2764", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-03", "text_2": "Do not take this virus lightly.", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 22, "date_2": "2020-03", "text_1_tokenized": ["dongwoon", "is", "such", "a", "happy", "virus", "I", "just", "\ud83d\ude22", "\ud83d\ude1a", "\u2764"], "text_2_tokenized": ["Do", "not", "take", "this", "virus", "lightly", "."]}
-{"id": "3255-virus", "word": "virus", "label_binary": 0, "text_1": "My son listens to the news on the radio, he catches glimpses on the TV and he overhears conversations. It breaks my heart to think that he will hear us compared to a virus that mustn't be exposed to \u201cinnocent\u201d minds.", "token_idx_1": 35, "text_start_1": 166, "text_end_1": 171, "date_1": "2019-03", "text_2": "I was fw Troy saying I think he gave me the corona virus. An hour later he said \u201cAshley you feel sick fr?\u201d I looked at this man and tried to fake cough .. 
he laughed at me and walked away \ud83d\ude02\ud83d\ude2d", "token_idx_2": 12, "text_start_2": 51, "text_end_2": 56, "date_2": "2020-03", "text_1_tokenized": ["My", "son", "listens", "to", "the", "news", "on", "the", "radio", ",", "he", "catches", "glimpses", "on", "the", "TV", "and", "he", "overhears", "conversations", ".", "It", "breaks", "my", "heart", "to", "think", "that", "he", "will", "hear", "us", "compared", "to", "a", "virus", "that", "mustn't", "be", "exposed", "to", "\u201c", "innocent", "\u201d", "minds", "."], "text_2_tokenized": ["I", "was", "fw", "Troy", "saying", "I", "think", "he", "gave", "me", "the", "corona", "virus", ".", "An", "hour", "later", "he", "said", "\u201c", "Ashley", "you", "feel", "sick", "fr", "?", "\u201d", "I", "looked", "at", "this", "man", "and", "tried", "to", "fake", "cough", "..", "he", "laughed", "at", "me", "and", "walked", "away", "\ud83d\ude02", "\ud83d\ude2d"]}
-{"id": "3256-virus", "word": "virus", "label_binary": 0, "text_1": "my great aunt called my mom and said \u201cmy computer got a virus so I threw it away bc I didn't wanna catch it.\u201d i- \ud83d\ude10", "token_idx_1": 13, "text_start_1": 56, "text_end_1": 61, "date_1": "2019-03", "text_2": "Man I'm tired, I haven't been sad in a very long time but this virus stuff really pisses me off. Just being lockdown to my home unable to earn money, go to the gym and do what I want to do is the worst thing.", "token_idx_2": 15, "text_start_2": 63, "text_end_2": 68, "date_2": "2020-03", "text_1_tokenized": ["my", "great", "aunt", "called", "my", "mom", "and", "said", "\u201c", "my", "computer", "got", "a", "virus", "so", "I", "threw", "it", "away", "bc", "I", "didn't", "wanna", "catch", "it", ".", "\u201d", "i", "-", "\ud83d\ude10"], "text_2_tokenized": ["Man", "I'm", "tired", ",", "I", "haven't", "been", "sad", "in", "a", "very", "long", "time", "but", "this", "virus", "stuff", "really", "pisses", "me", "off", ".", "Just", "being", "lockdown", "to", "my", "home", "unable", "to", "earn", "money", ",", "go", "to", "the", "gym", "and", "do", "what", "I", "want", "to", "do", "is", "the", "worst", "thing", "."]}
-{"id": "3257-virus", "word": "virus", "label_binary": 0, "text_1": "So I foolishly turn on CBS pregame coverage and immediately see a Dook analyst interviewing two Dook players. The ESPN virus \ud83e\udda0 is spreading!", "token_idx_1": 21, "text_start_1": 119, "text_end_1": 124, "date_1": "2019-03", "text_2": "they knew the girls was out this summer so they had somebody spread a virus. i hate men", "token_idx_2": 14, "text_start_2": 70, "text_end_2": 75, "date_2": "2020-03", "text_1_tokenized": ["So", "I", "foolishly", "turn", "on", "CBS", "pregame", "coverage", "and", "immediately", "see", "a", "Dook", "analyst", "interviewing", "two", "Dook", "players", ".", "The", "ESPN", "virus", "\ud83e\udda0", "is", "spreading", "!"], "text_2_tokenized": ["they", "knew", "the", "girls", "was", "out", "this", "summer", "so", "they", "had", "somebody", "spread", "a", "virus", ".", "i", "hate", "men"]}
-{"id": "3258-virus", "word": "virus", "label_binary": 0, "text_1": "Sorry to anyone who tried to access my website and got redirected somewhere else. Social Warfare had a virus with their new update, but it has since been deleted, and we'll be moving to Social Pug.", "token_idx_1": 19, "text_start_1": 103, "text_end_1": 108, "date_1": "2019-03", "text_2": "PanAfrican Unity Movement radio, Tonight 8:00 pm Topic: Covid 19 virus speaking with one of my old professors Dr. Holmes. 
Call in!", "token_idx_2": 13, "text_start_2": 65, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["Sorry", "to", "anyone", "who", "tried", "to", "access", "my", "website", "and", "got", "redirected", "somewhere", "else", ".", "Social", "Warfare", "had", "a", "virus", "with", "their", "new", "update", ",", "but", "it", "has", "since", "been", "deleted", ",", "and", "we'll", "be", "moving", "to", "Social", "Pug", "."], "text_2_tokenized": ["PanAfrican", "Unity", "Movement", "radio", ",", "Tonight", "8:", "00", "pm", "Topic", ":", "Covid", "19", "virus", "speaking", "with", "one", "of", "my", "old", "professors", "Dr", ".", "Holmes", ".", "Call", "in", "!"]}
-{"id": "3259-virus", "word": "virus", "label_binary": 0, "text_1": "I swear to fucking God trend micro is more invasive than a virus!", "token_idx_1": 12, "text_start_1": 59, "text_end_1": 64, "date_1": "2019-03", "text_2": "Ww3, The corona virus, Countries on lockdown, school closed, sports canceled, Tom Brady on the buccaneers, DeAndre Hopkins on the Cardinals, Marcus Mariota on the Raiders, Pop Smoke died and it's only march bro", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 21, "date_2": "2020-03", "text_1_tokenized": ["I", "swear", "to", "fucking", "God", "trend", "micro", "is", "more", "invasive", "than", "a", "virus", "!"], "text_2_tokenized": ["Ww3", ",", "The", "corona", "virus", ",", "Countries", "on", "lockdown", ",", "school", "closed", ",", "sports", "canceled", ",", "Tom", "Brady", "on", "the", "buccaneers", ",", "DeAndre", "Hopkins", "on", "the", "Cardinals", ",", "Marcus", "Mariota", "on", "the", "Raiders", ",", "Pop", "Smoke", "died", "and", "it's", "only", "march", "bro"]}
-{"id": "3260-virus", "word": "virus", "label_binary": 1, "text_1": "Whatever virus K and I caught, it's a real doozy with the chills and aches #bothbedridden \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_1": 1, "text_start_1": 9, "text_end_1": 14, "date_1": "2019-03", "text_2": "Seeing people strike @amazon is absolutely mind blowing Countless amount of mask and gloves each day Rescheduling everyone's times so no one is clocking in at the same time No upt being taken so instead of going to work complaining about the how people have the virus don't go?", "token_idx_2": 46, "text_start_2": 262, "text_end_2": 267, "date_2": "2020-03", "text_1_tokenized": ["Whatever", "virus", "K", "and", "I", "caught", ",", "it's", "a", "real", "doozy", "with", "the", "chills", "and", "aches", "#bothbedridden", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["Seeing", "people", "strike", "@amazon", "is", "absolutely", "mind", "blowing", "Countless", "amount", "of", "mask", "and", "gloves", "each", "day", "Rescheduling", "everyone's", "times", "so", "no", "one", "is", "clocking", "in", "at", "the", "same", "time", "No", "upt", "being", "taken", "so", "instead", "of", "going", "to", "work", "complaining", "about", "the", "how", "people", "have", "the", "virus", "don't", "go", "?"]}
-{"id": "3261-virus", "word": "virus", "label_binary": 0, "text_1": "Let's play a game! Does Sarah have strep or a virus? Tell me your thoughts. Answer around 8 pm!", "token_idx_1": 11, "text_start_1": 46, "text_end_1": 51, "date_1": "2019-03", "text_2": "The governments are acting as if they can control this virus. They can't. It WILL spread. The only thing they can and will end up controlling is the people - which is what they want. 
This virus serves 2 elite goals: depopulation & control, which is why I know it's intentional.", "token_idx_2": 10, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["Let's", "play", "a", "game", "!", "Does", "Sarah", "have", "strep", "or", "a", "virus", "?", "Tell", "me", "your", "thoughts", ".", "Answer", "around", "8", "pm", "!"], "text_2_tokenized": ["The", "governments", "are", "acting", "as", "if", "they", "can", "control", "this", "virus", ".", "They", "can't", ".", "It", "WILL", "spread", ".", "The", "only", "thing", "they", "can", "and", "will", "end", "up", "controlling", "is", "the", "people", "-", "which", "is", "what", "they", "want", ".", "This", "virus", "serves", "2", "elite", "goals", ":", "depopulation", "&", "control", ",", "which", "is", "why", "I", "know", "it's", "intentional", "."]}
-{"id": "3262-virus", "word": "virus", "label_binary": 1, "text_1": "I understand my body is trying to purge itself of this virus but i can i PLEASE not have a coughing fit every five minutes", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 60, "date_1": "2019-03", "text_2": "I often find myself watching the news of this so called pandemic to a virus that has killed way less people than the flu and think to myself, WHAT THE F*** ARE WE DOING!!!!", "token_idx_2": 14, "text_start_2": 70, "text_end_2": 75, "date_2": "2020-03", "text_1_tokenized": ["I", "understand", "my", "body", "is", "trying", "to", "purge", "itself", "of", "this", "virus", "but", "i", "can", "i", "PLEASE", "not", "have", "a", "coughing", "fit", "every", "five", "minutes"], "text_2_tokenized": ["I", "often", "find", "myself", "watching", "the", "news", "of", "this", "so", "called", "pandemic", "to", "a", "virus", "that", "has", "killed", "way", "less", "people", "than", "the", "flu", "and", "think", "to", "myself", ",", "WHAT", "THE", "F", "*", "*", "*", "ARE", "WE", "DOING", "!", "!", "!"]}
-{"id": "3263-virus", "word": "virus", "label_binary": 0, "text_1": "Immanently The recording of developing their attempts to possess humans are a metaphor, it counsel a virus, that humanity", "token_idx_1": 17, "text_start_1": 101, "text_end_1": 106, "date_1": "2019-03", "text_2": "Man, I keep seeing people saying they had no symptoms but actually have the virus.. stay home, distance yourself and you'll be fine. Why doesn't it feel THAT simple then? Lol", "token_idx_2": 15, "text_start_2": 76, "text_end_2": 81, "date_2": "2020-03", "text_1_tokenized": ["Immanently", "The", "recording", "of", "developing", "their", "attempts", "to", "possess", "humans", "are", "a", "metaphor", ",", "it", "counsel", "a", "virus", ",", "that", "humanity"], "text_2_tokenized": ["Man", ",", "I", "keep", "seeing", "people", "saying", "they", "had", "no", "symptoms", "but", "actually", "have", "the", "virus", "..", "stay", "home", ",", "distance", "yourself", "and", "you'll", "be", "fine", ".", "Why", "doesn't", "it", "feel", "THAT", "simple", "then", "?", "Lol"]}
-{"id": "3264-virus", "word": "virus", "label_binary": 1, "text_1": "Jesus I've spread this stomach virus to everyone", "token_idx_1": 5, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-03", "text_2": "2020 has been crazy but Honestly this have been the best year ever in my life !! 
I've been living life even with this shit going on idc my life ain't stopping for no dam virus!!!", "token_idx_2": 36, "text_start_2": 170, "text_end_2": 175, "date_2": "2020-03", "text_1_tokenized": ["Jesus", "I've", "spread", "this", "stomach", "virus", "to", "everyone"], "text_2_tokenized": ["2020", "has", "been", "crazy", "but", "Honestly", "this", "have", "been", "the", "best", "year", "ever", "in", "my", "life", "!", "!", "I've", "been", "living", "life", "even", "with", "this", "shit", "going", "on", "idc", "my", "life", "ain't", "stopping", "for", "no", "dam", "virus", "!", "!", "!"]}
-{"id": "3265-virus", "word": "virus", "label_binary": 0, "text_1": "So, what's going on with #Wattpad ranking system? One day I'm like 40 in virus and now it's not even ranked. Seriously thinking about deleting Wattpad and just trying to traditionally publish or, at least, self-publish.", "token_idx_1": 16, "text_start_1": 73, "text_end_1": 78, "date_1": "2019-03", "text_2": "\"The #coronavirus panic is just that, an irrational panic, based on an unproven RNA test, that has never been connected to a virus. And which won't be connected to a virus unless the virus is purified\" DAVID CROWE", "token_idx_2": 26, "text_start_2": 12, "text_end_2": 17, "date_2": "2020-03", "text_1_tokenized": ["So", ",", "what's", "going", "on", "with", "#Wattpad", "ranking", "system", "?", "One", "day", "I'm", "like", "40", "in", "virus", "and", "now", "it's", "not", "even", "ranked", ".", "Seriously", "thinking", "about", "deleting", "Wattpad", "and", "just", "trying", "to", "traditionally", "publish", "or", ",", "at", "least", ",", "self-publish", "."], "text_2_tokenized": ["\"", "The", "#coronavirus", "panic", "is", "just", "that", ",", "an", "irrational", "panic", ",", "based", "on", "an", "unproven", "RNA", "test", ",", "that", "has", "never", "been", "connected", "to", "a", "virus", ".", "And", "which", "won't", "be", "connected", "to", "a", "virus", "unless", "the", "virus", "is", "purified", "\"", "DAVID", "CROWE"]}
-{"id": "3266-virus", "word": "virus", "label_binary": 1, "text_1": "with all the positive news about HPV vaccine preventing cervical cancer be aware that vulval warts are a possible sign of having HPV virus \ud83e\udda0 & to be sure to get regular smears especially if you have vulval warts!", "token_idx_1": 23, "text_start_1": 133, "text_end_1": 138, "date_1": "2019-03", "text_2": "a friend of mine has been staying at home and even then she has symptoms of the virus and if she doesn't take care of herself now it could get worse so STAY YOUR BITCH ASSES HOME BEFORE YOU CRY BC YOU GOT SICK FOR GOING OUT WITH \u201conly 7 people\u201d", "token_idx_2": 17, "text_start_2": 80, "text_end_2": 85, "date_2": "2020-03", "text_1_tokenized": ["with", "all", "the", "positive", "news", "about", "HPV", "vaccine", "preventing", "cervical", "cancer", "be", "aware", "that", "vulval", "warts", "are", "a", "possible", "sign", "of", "having", "HPV", "virus", "\ud83e\udda0", "&", "to", "be", "sure", "to", "get", "regular", "smears", "especially", "if", "you", "have", "vulval", "warts", "!"], "text_2_tokenized": ["a", "friend", "of", "mine", "has", "been", "staying", "at", "home", "and", "even", "then", "she", "has", "symptoms", "of", "the", "virus", "and", "if", "she", "doesn't", "take", "care", "of", "herself", "now", "it", "could", "get", "worse", "so", "STAY", "YOUR", "BITCH", "ASSES", "HOME", "BEFORE", "YOU", "CRY", "BC", "YOU", "GOT", "SICK", "FOR", "GOING", "OUT", "WITH", "\u201c", "only", "7", "people", "\u201d"]}
-{"id": "3267-virus", "word": "virus", "label_binary": 0, "text_1": "Encryptionware and drive-wipe self destruct virus infected my hard drive. Waited too long and only had one chance. Might have infected other friends' computers. It was a modified alarm program with the virus authorship masked by doge and dogecoin brandings. I lost everything.", "token_idx_1": 5, "text_start_1": 44, "text_end_1": 49, "date_1": "2019-03", "text_2": "My 3 best friends of 33 years are coming to pick me up to hang out. They don't yet know this will be the last chance we'll have to see each other because of my move cross country. I'm worried about the virus risk greatly but I only have this one last chance.", "token_idx_2": 44, "text_start_2": 202, "text_end_2": 207, "date_2": "2020-03", "text_1_tokenized": ["Encryptionware", "and", "drive-wipe", "self", "destruct", "virus", "infected", "my", "hard", "drive", ".", "Waited", "too", "long", "and", "only", "had", "one", "chance", ".", "Might", "have", "infected", "other", "friends", "'", "computers", ".", "It", "was", "a", "modified", "alarm", "program", "with", "the", "virus", "authorship", "masked", "by", "doge", "and", "dogecoin", "brandings", ".", "I", "lost", "everything", "."], "text_2_tokenized": ["My", "3", "best", "friends", "of", "33", "years", "are", "coming", "to", "pick", "me", "up", "to", "hang", "out", ".", "They", "don't", "yet", "know", "this", "will", "be", "the", "last", "chance", "we'll", "have", "to", "see", "each", "other", "because", "of", "my", "move", "cross", "country", ".", "I'm", "worried", "about", "the", "virus", "risk", "greatly", "but", "I", "only", "have", "this", "one", "last", "chance", "."]} -{"id": "3268-virus", "word": "virus", "label_binary": 0, "text_1": "Season after season for the last 7 years we've been saying we need a world class CB and still haven't got one. Having someone like Koulibaly, De Ligt or Varane would solve a lot of our problems and also I'd gladly sell that virus Paul Pogba off to sunny Spain\ud83d\ude34", "token_idx_1": 45, "text_start_1": 224, "text_end_1": 229, "date_1": "2019-03", "text_2": "Now all of a sudden rappers who been locked up got the virus f*ck outta here #Agenda at its finest", "token_idx_2": 12, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["Season", "after", "season", "for", "the", "last", "7", "years", "we've", "been", "saying", "we", "need", "a", "world", "class", "CB", "and", "still", "haven't", "got", "one", ".", "Having", "someone", "like", "Koulibaly", ",", "De", "Ligt", "or", "Varane", "would", "solve", "a", "lot", "of", "our", "problems", "and", "also", "I'd", "gladly", "sell", "that", "virus", "Paul", "Pogba", "off", "to", "sunny", "Spain", "\ud83d\ude34"], "text_2_tokenized": ["Now", "all", "of", "a", "sudden", "rappers", "who", "been", "locked", "up", "got", "the", "virus", "f", "*", "ck", "outta", "here", "#Agenda", "at", "its", "finest"]} -{"id": "3269-virus", "word": "virus", "label_binary": 1, "text_1": "Trump is the human equivalent of the eboli virus.", "token_idx_1": 8, "text_start_1": 43, "text_end_1": 48, "date_1": "2019-03", "text_2": "ngl but with the whole virus/isolation situation plus personal problems, it really feels like the world did a 180 in just a month. 
time's fucking weird.", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 28, "date_2": "2020-03", "text_1_tokenized": ["Trump", "is", "the", "human", "equivalent", "of", "the", "eboli", "virus", "."], "text_2_tokenized": ["ngl", "but", "with", "the", "whole", "virus", "/", "isolation", "situation", "plus", "personal", "problems", ",", "it", "really", "feels", "like", "the", "world", "did", "a", "180", "in", "just", "a", "month", ".", "time's", "fucking", "weird", "."]} -{"id": "3270-virus", "word": "virus", "label_binary": 0, "text_1": "Your kid being sick is one of the most heartbreaking things even when it's just something simple like a virus", "token_idx_1": 19, "text_start_1": 104, "text_end_1": 109, "date_1": "2019-03", "text_2": "Anyone know a speakeasy that's corona virus free?? I need to dance and take tequila shots asap", "token_idx_2": 6, "text_start_2": 38, "text_end_2": 43, "date_2": "2020-03", "text_1_tokenized": ["Your", "kid", "being", "sick", "is", "one", "of", "the", "most", "heartbreaking", "things", "even", "when", "it's", "just", "something", "simple", "like", "a", "virus"], "text_2_tokenized": ["Anyone", "know", "a", "speakeasy", "that's", "corona", "virus", "free", "?", "?", "I", "need", "to", "dance", "and", "take", "tequila", "shots", "asap"]} -{"id": "3271-virus", "word": "virus", "label_binary": 0, "text_1": "Dear Pisces, today you will be beheaded by an alien virus", "token_idx_1": 11, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-03", "text_2": "i am just baffled that there's people out there who never even used baby wipes after using the bathroom, before the virus. I can't imagine using the bathroom without wipes \ud83d\ude33", "token_idx_2": 22, "text_start_2": 116, "text_end_2": 121, "date_2": "2020-03", "text_1_tokenized": ["Dear", "Pisces", ",", "today", "you", "will", "be", "beheaded", "by", "an", "alien", "virus"], "text_2_tokenized": ["i", "am", "just", "baffled", "that", "there's", "people", "out", "there", "who", "never", "even", "used", "baby", "wipes", "after", "using", "the", "bathroom", ",", "before", "the", "virus", ".", "I", "can't", "imagine", "using", "the", "bathroom", "without", "wipes", "\ud83d\ude33"]} -{"id": "3272-virus", "word": "virus", "label_binary": 0, "text_1": "I'm watching @nbcbrooklyn99 season 1 episode 9 Sal's pizza hahaha i love the virus that tells them all the detectives search history. I love this show so much #Brooklyn99", "token_idx_1": 13, "text_start_1": 77, "text_end_1": 82, "date_1": "2019-03", "text_2": "I just want to know how West Virginia of all places has no confirmed cases of the virus \ud83d\ude02 like what makes WEST VIRGINIA so special", "token_idx_2": 17, "text_start_2": 82, "text_end_2": 87, "date_2": "2020-03", "text_1_tokenized": ["I'm", "watching", "@nbcbrooklyn99", "season", "1", "episode", "9", "Sal's", "pizza", "hahaha", "i", "love", "the", "virus", "that", "tells", "them", "all", "the", "detectives", "search", "history", ".", "I", "love", "this", "show", "so", "much", "#Brooklyn99"], "text_2_tokenized": ["I", "just", "want", "to", "know", "how", "West", "Virginia", "of", "all", "places", "has", "no", "confirmed", "cases", "of", "the", "virus", "\ud83d\ude02", "like", "what", "makes", "WEST", "VIRGINIA", "so", "special"]} -{"id": "3273-virus", "word": "virus", "label_binary": 0, "text_1": "RT cjwerleman: This Hindutva terrorist refers to Muslims as a \"virus,\" compares them to pests and has publicly threatened genocide. 
Anywh\u2026 TW DRE HOTM", "token_idx_1": 12, "text_start_1": 63, "text_end_1": 68, "date_1": "2019-03", "text_2": "#CNN enemy of the people..... cheering for deaths of Americans from the virus #Fakenews", "token_idx_2": 13, "text_start_2": 72, "text_end_2": 77, "date_2": "2020-03", "text_1_tokenized": ["RT", "cjwerleman", ":", "This", "Hindutva", "terrorist", "refers", "to", "Muslims", "as", "a", "\"", "virus", ",", "\"", "compares", "them", "to", "pests", "and", "has", "publicly", "threatened", "genocide", ".", "Anywh", "\u2026", "TW", "DRE", "HOTM"], "text_2_tokenized": ["#CNN", "enemy", "of", "the", "people", "...", "cheering", "for", "deaths", "of", "Americans", "from", "the", "virus", "#Fakenews"]} -{"id": "3274-virus", "word": "virus", "label_binary": 0, "text_1": "Someone just said that religion is a mind virus!", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-03", "text_2": "Hearing that Charlotte Figi passed away from the virus is super sad. One of the main reasons I even believed in the power of Cannabis. A lot of my senior thesis was about her story that was so moving. Rest In Peace. \ud83e\udd7a", "token_idx_2": 8, "text_start_2": 49, "text_end_2": 54, "date_2": "2020-03", "text_1_tokenized": ["Someone", "just", "said", "that", "religion", "is", "a", "mind", "virus", "!"], "text_2_tokenized": ["Hearing", "that", "Charlotte", "Figi", "passed", "away", "from", "the", "virus", "is", "super", "sad", ".", "One", "of", "the", "main", "reasons", "I", "even", "believed", "in", "the", "power", "of", "Cannabis", ".", "A", "lot", "of", "my", "senior", "thesis", "was", "about", "her", "story", "that", "was", "so", "moving", ".", "Rest", "In", "Peace", ".", "\ud83e\udd7a"]} -{"id": "3275-virus", "word": "virus", "label_binary": 1, "text_1": "Is there some type of virus going around? Because I'm extremely dizzy & nauseous (& no I'm not pregnant already confirmed that)", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 27, "date_1": "2019-03", "text_2": "yo we gonna have to be telling our homies we got the virus like we got an std or sum now", "token_idx_2": 12, "text_start_2": 53, "text_end_2": 58, "date_2": "2020-03", "text_1_tokenized": ["Is", "there", "some", "type", "of", "virus", "going", "around", "?", "Because", "I'm", "extremely", "dizzy", "&", "nauseous", "(", "&", "no", "I'm", "not", "pregnant", "already", "confirmed", "that", ")"], "text_2_tokenized": ["yo", "we", "gonna", "have", "to", "be", "telling", "our", "homies", "we", "got", "the", "virus", "like", "we", "got", "an", "std", "or", "sum", "now"]} -{"id": "3276-virus", "word": "virus", "label_binary": 1, "text_1": "Someone help me understand - I was sick last month & went to the doctor, no copay required .he didn't give me meds or run tests, told me I'm at the end of the virus so sleep it off and drink fluids. I was in and out. I get a bill for $150. I spent no more than 30 min there. HOW?", "token_idx_1": 37, "text_start_1": 163, "text_end_1": 168, "date_1": "2019-03", "text_2": "Starting Greys Anatomy again from season 1. 
By the time I get caught up on 15 seasons I pray this virus is gone and everyone is healed!", "token_idx_2": 21, "text_start_2": 98, "text_end_2": 103, "date_2": "2020-03", "text_1_tokenized": ["Someone", "help", "me", "understand", "-", "I", "was", "sick", "last", "month", "&", "went", "to", "the", "doctor", ",", "no", "copay", "required", ".", "he", "didn't", "give", "me", "meds", "or", "run", "tests", ",", "told", "me", "I'm", "at", "the", "end", "of", "the", "virus", "so", "sleep", "it", "off", "and", "drink", "fluids", ".", "I", "was", "in", "and", "out", ".", "I", "get", "a", "bill", "for", "$", "150", ".", "I", "spent", "no", "more", "than", "30", "min", "there", ".", "HOW", "?"], "text_2_tokenized": ["Starting", "Greys", "Anatomy", "again", "from", "season", "1", ".", "By", "the", "time", "I", "get", "caught", "up", "on", "15", "seasons", "I", "pray", "this", "virus", "is", "gone", "and", "everyone", "is", "healed", "!"]} -{"id": "3277-virus", "word": "virus", "label_binary": 1, "text_1": "Omg PLEASE HELP ME FIND THIS MANGA: this is a subplot but a group of survivors (maybe from a virus) meets a puppeteer who you find out froze all these people in this \"utopia\" to prevent a virus from spreading out", "token_idx_1": 21, "text_start_1": 93, "text_end_1": 98, "date_1": "2019-03", "text_2": "How I wish I can go back to 2019 cus this virus really sucks", "token_idx_2": 11, "text_start_2": 42, "text_end_2": 47, "date_2": "2020-03", "text_1_tokenized": ["Omg", "PLEASE", "HELP", "ME", "FIND", "THIS", "MANGA", ":", "this", "is", "a", "subplot", "but", "a", "group", "of", "survivors", "(", "maybe", "from", "a", "virus", ")", "meets", "a", "puppeteer", "who", "you", "find", "out", "froze", "all", "these", "people", "in", "this", "\"", "utopia", "\"", "to", "prevent", "a", "virus", "from", "spreading", "out"], "text_2_tokenized": ["How", "I", "wish", "I", "can", "go", "back", "to", "2019", "cus", "this", "virus", "really", "sucks"]} -{"id": "3278-virus", "word": "virus", "label_binary": 0, "text_1": "I have no idea why I remember some things. It's like a damn virus, you try deleting and it keeps popping up", "token_idx_1": 14, "text_start_1": 60, "text_end_1": 65, "date_1": "2019-03", "text_2": "\"Social distancing is a powerful tool\" emphasized by task force. Well, this virus is making to knee us down. Either it is not efficient or needs to be complemented by other tool. Please do not gaslighting us.", "token_idx_2": 16, "text_start_2": 76, "text_end_2": 81, "date_2": "2020-03", "text_1_tokenized": ["I", "have", "no", "idea", "why", "I", "remember", "some", "things", ".", "It's", "like", "a", "damn", "virus", ",", "you", "try", "deleting", "and", "it", "keeps", "popping", "up"], "text_2_tokenized": ["\"", "Social", "distancing", "is", "a", "powerful", "tool", "\"", "emphasized", "by", "task", "force", ".", "Well", ",", "this", "virus", "is", "making", "to", "knee", "us", "down", ".", "Either", "it", "is", "not", "efficient", "or", "needs", "to", "be", "complemented", "by", "other", "tool", ".", "Please", "do", "not", "gaslighting", "us", "."]} -{"id": "3279-virus", "word": "virus", "label_binary": 0, "text_1": "To the people of New Zealand, I'm sorry our virus of mass shootings and radicalism has infected your country. 
I wish you love and healing.", "token_idx_1": 10, "text_start_1": 44, "text_end_1": 49, "date_1": "2019-03", "text_2": "1 Trump, who ignored warnings about the coronavirus for months, blames New York and New Jersey for getting a late start on the virus....He donated 17.8 tons of our medical supplies to China in February during the time he was calling COVID-19 a Hoax", "token_idx_2": 25, "text_start_2": 46, "text_end_2": 51, "date_2": "2020-03", "text_1_tokenized": ["To", "the", "people", "of", "New", "Zealand", ",", "I'm", "sorry", "our", "virus", "of", "mass", "shootings", "and", "radicalism", "has", "infected", "your", "country", ".", "I", "wish", "you", "love", "and", "healing", "."], "text_2_tokenized": ["1", "Trump", ",", "who", "ignored", "warnings", "about", "the", "coronavirus", "for", "months", ",", "blames", "New", "York", "and", "New", "Jersey", "for", "getting", "a", "late", "start", "on", "the", "virus", "...", "He", "donated", "17.8", "tons", "of", "our", "medical", "supplies", "to", "China", "in", "February", "during", "the", "time", "he", "was", "calling", "COVID", "-", "19", "a", "Hoax"]} -{"id": "3280-virus", "word": "virus", "label_binary": 1, "text_1": "Picked up a nasty virus on the road. Had to spend the weekend in bed. Postponed my trip home by a day. @united let me rebook my flights for free. When I went to check out of the hotel this morning, the guy on the desk said, \"No charge for the extra night, you were sick.\"", "token_idx_1": 4, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-03", "text_2": "a global virus really is the perfect metaphor for capitalism", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-03", "text_1_tokenized": ["Picked", "up", "a", "nasty", "virus", "on", "the", "road", ".", "Had", "to", "spend", "the", "weekend", "in", "bed", ".", "Postponed", "my", "trip", "home", "by", "a", "day", ".", "@united", "let", "me", "rebook", "my", "flights", "for", "free", ".", "When", "I", "went", "to", "check", "out", "of", "the", "hotel", "this", "morning", ",", "the", "guy", "on", "the", "desk", "said", ",", "\"", "No", "charge", "for", "the", "extra", "night", ",", "you", "were", "sick", ".", "\""], "text_2_tokenized": ["a", "global", "virus", "really", "is", "the", "perfect", "metaphor", "for", "capitalism"]} -{"id": "3281-virus", "word": "virus", "label_binary": 1, "text_1": "Are people not tired of making movies or games where \"a mysterious virus has devastated the population and resources are scarce.\"", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-03", "text_2": "I'm still thinking about how those 60 choir members got together and 45 of them got the virus. 45 man. That's a really high number for such a small group.", "token_idx_2": 17, "text_start_2": 88, "text_end_2": 93, "date_2": "2020-03", "text_1_tokenized": ["Are", "people", "not", "tired", "of", "making", "movies", "or", "games", "where", "\"", "a", "mysterious", "virus", "has", "devastated", "the", "population", "and", "resources", "are", "scarce", ".", "\""], "text_2_tokenized": ["I'm", "still", "thinking", "about", "how", "those", "60", "choir", "members", "got", "together", "and", "45", "of", "them", "got", "the", "virus", ".", "45", "man", ".", "That's", "a", "really", "high", "number", "for", "such", "a", "small", "group", "."]} -{"id": "3282-virus", "word": "virus", "label_binary": 1, "text_1": "At this point it's inevitable that unvaccinated children and adults will contract the measles virus. 
Don't be stupid, get vaccinated, unless you want your 3 y/o to die.", "token_idx_1": 14, "text_start_1": 94, "text_end_1": 99, "date_1": "2019-03", "text_2": "1/2 Let me just sincerely ask this: How we can stop/prevent/reduce the spread of the virus without social distancing? How can the government help the workforce who can't afford to practice social distancing?", "token_idx_2": 20, "text_start_2": 85, "text_end_2": 90, "date_2": "2020-03", "text_1_tokenized": ["At", "this", "point", "it's", "inevitable", "that", "unvaccinated", "children", "and", "adults", "will", "contract", "the", "measles", "virus", ".", "Don't", "be", "stupid", ",", "get", "vaccinated", ",", "unless", "you", "want", "your", "3", "y", "/", "o", "to", "die", "."], "text_2_tokenized": ["1/2", "Let", "me", "just", "sincerely", "ask", "this", ":", "How", "we", "can", "stop", "/", "prevent", "/", "reduce", "the", "spread", "of", "the", "virus", "without", "social", "distancing", "?", "How", "can", "the", "government", "help", "the", "workforce", "who", "can't", "afford", "to", "practice", "social", "distancing", "?"]} -{"id": "3283-virus", "word": "virus", "label_binary": 0, "text_1": "Khalid - Alive is my new virus \u2764\ufe0f", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-03", "text_2": "I'm hopeful you understand that we NEED social distancing to slow the immediate rise of this virus. Yes, almost everyone will eventually have it before the year ends, we just can't have it all happen at once. Our healthcare would collapse.", "token_idx_2": 16, "text_start_2": 93, "text_end_2": 98, "date_2": "2020-03", "text_1_tokenized": ["Khalid", "-", "Alive", "is", "my", "new", "virus", "\u2764", "\ufe0f"], "text_2_tokenized": ["I'm", "hopeful", "you", "understand", "that", "we", "NEED", "social", "distancing", "to", "slow", "the", "immediate", "rise", "of", "this", "virus", ".", "Yes", ",", "almost", "everyone", "will", "eventually", "have", "it", "before", "the", "year", "ends", ",", "we", "just", "can't", "have", "it", "all", "happen", "at", "once", ".", "Our", "healthcare", "would", "collapse", "."]} -{"id": "3284-virus", "word": "virus", "label_binary": 0, "text_1": "Just clocked a sub 7-minute mile which just proves to me that I'm just a tech conference, a 24-hour virus and one good night's sleep away from greatness.", "token_idx_1": 24, "text_start_1": 100, "text_end_1": 105, "date_1": "2019-03", "text_2": "lockdown everywhere....still no cure.....continue lockdown......hunger begins.....declare that you must take a 'chip' to become immune to the virus...... 
THIS IS LOGICAL, I'M SCARED!!", "token_idx_2": 27, "text_start_2": 142, "text_end_2": 147, "date_2": "2020-03", "text_1_tokenized": ["Just", "clocked", "a", "sub", "7", "-", "minute", "mile", "which", "just", "proves", "to", "me", "that", "I'm", "just", "a", "tech", "conference", ",", "a", "24", "-", "hour", "virus", "and", "one", "good", "night's", "sleep", "away", "from", "greatness", "."], "text_2_tokenized": ["lockdown", "everywhere", "...", "still", "no", "cure", "...", "continue", "lockdown", "...", "hunger", "begins", "...", "declare", "that", "you", "must", "take", "a", "'", "chip", "'", "to", "become", "immune", "to", "the", "virus", "...", "THIS", "IS", "LOGICAL", ",", "I'M", "SCARED", "!", "!"]} -{"id": "3285-virus", "word": "virus", "label_binary": 1, "text_1": "What if the guys brought a virus back from the moon and that's why all the republicans since Nixon have been so stupid and crazy", "token_idx_1": 6, "text_start_1": 27, "text_end_1": 32, "date_1": "2019-03", "text_2": "took a nap and literally dreamed about catching the virus im actually quite fuckin worried", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-03", "text_1_tokenized": ["What", "if", "the", "guys", "brought", "a", "virus", "back", "from", "the", "moon", "and", "that's", "why", "all", "the", "republicans", "since", "Nixon", "have", "been", "so", "stupid", "and", "crazy"], "text_2_tokenized": ["took", "a", "nap", "and", "literally", "dreamed", "about", "catching", "the", "virus", "im", "actually", "quite", "fuckin", "worried"]} -{"id": "3286-virus", "word": "virus", "label_binary": 1, "text_1": "I'm not where I'm supposed to be. I feel it. Like Bruce Willis in 12 Monkeys. Without the virus and the weird plastic tube. #12Monkeys #sofuckinglost #alternateuniverse", "token_idx_1": 21, "text_start_1": 90, "text_end_1": 95, "date_1": "2019-03", "text_2": "Today we lost a dear one to covid19 This make me think , how we are in our home,living , joking about this virus , passed our days,listening to numbers of dead to covid & feel nothing But when u lose a friend,Family , to this then u feel how terrifying & sad this situation is", "token_idx_2": 26, "text_start_2": 107, "text_end_2": 112, "date_2": "2020-03", "text_1_tokenized": ["I'm", "not", "where", "I'm", "supposed", "to", "be", ".", "I", "feel", "it", ".", "Like", "Bruce", "Willis", "in", "12", "Monkeys", ".", "Without", "the", "virus", "and", "the", "weird", "plastic", "tube", ".", "#12Monkeys", "#sofuckinglost", "#alternateuniverse"], "text_2_tokenized": ["Today", "we", "lost", "a", "dear", "one", "to", "covid", "19", "This", "make", "me", "think", ",", "how", "we", "are", "in", "our", "home", ",", "living", ",", "joking", "about", "this", "virus", ",", "passed", "our", "days", ",", "listening", "to", "numbers", "of", "dead", "to", "covid", "&", "feel", "nothing", "But", "when", "u", "lose", "a", "friend", ",", "Family", ",", "to", "this", "then", "u", "feel", "how", "terrifying", "&", "sad", "this", "situation", "is"]} -{"id": "3287-virus", "word": "virus", "label_binary": 1, "text_1": "If your EBV VCA Ab IgG is 521 & your EBV NA Ab IgG is >600 does that mean you are flooded with the virus or just that your immune system is overreacting to a little bit of virus? Asking for a friend. ( My friend knows it means either way she will feel like crap.) 
#lyme #ebv", "token_idx_1": 25, "text_start_1": 106, "text_end_1": 111, "date_1": "2019-03", "text_2": "Did you know there will be many babies born after this virus disaster?", "token_idx_2": 11, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["If", "your", "EBV", "VCA", "Ab", "IgG", "is", "521", "&", "your", "EBV", "NA", "Ab", "IgG", "is", ">", "600", "does", "that", "mean", "you", "are", "flooded", "with", "the", "virus", "or", "just", "that", "your", "immune", "system", "is", "overreacting", "to", "a", "little", "bit", "of", "virus", "?", "Asking", "for", "a", "friend", ".", "(", "My", "friend", "knows", "it", "means", "either", "way", "she", "will", "feel", "like", "crap", ".", ")", "#lyme", "#ebv"], "text_2_tokenized": ["Did", "you", "know", "there", "will", "be", "many", "babies", "born", "after", "this", "virus", "disaster", "?"]} -{"id": "3288-virus", "word": "virus", "label_binary": 1, "text_1": "I'm not feel good at all I got the stomach virus I got a bad headache Get well soon to me \u2014 feeling sick", "token_idx_1": 10, "text_start_1": 43, "text_end_1": 48, "date_1": "2019-03", "text_2": "Man I'm going crazy that first church service after this virus \ud83d\ude4c\ud83c\udffe", "token_idx_2": 10, "text_start_2": 57, "text_end_2": 62, "date_2": "2020-03", "text_1_tokenized": ["I'm", "not", "feel", "good", "at", "all", "I", "got", "the", "stomach", "virus", "I", "got", "a", "bad", "headache", "Get", "well", "soon", "to", "me", "\u2014", "feeling", "sick"], "text_2_tokenized": ["Man", "I'm", "going", "crazy", "that", "first", "church", "service", "after", "this", "virus", "\ud83d\ude4c\ud83c\udffe"]} -{"id": "3289-virus", "word": "virus", "label_binary": 0, "text_1": "There are a lot of good things apparent this morning. Not the least of which it really is looking like spring now and this virus is getting close to passing entirely. #amgrateful #amwriting", "token_idx_1": 25, "text_start_1": 123, "text_end_1": 128, "date_1": "2019-03", "text_2": "If this corona virus can give anybody a sign I pray it gives Paolo Nutini a sign and shows him he's no getting any younger, life's too short and should come back and produce more music or atleast do one final gig \ud83d\udc4d\ud83c\udffb", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["There", "are", "a", "lot", "of", "good", "things", "apparent", "this", "morning", ".", "Not", "the", "least", "of", "which", "it", "really", "is", "looking", "like", "spring", "now", "and", "this", "virus", "is", "getting", "close", "to", "passing", "entirely", ".", "#amgrateful", "#amwriting"], "text_2_tokenized": ["If", "this", "corona", "virus", "can", "give", "anybody", "a", "sign", "I", "pray", "it", "gives", "Paolo", "Nutini", "a", "sign", "and", "shows", "him", "he's", "no", "getting", "any", "younger", ",", "life's", "too", "short", "and", "should", "come", "back", "and", "produce", "more", "music", "or", "atleast", "do", "one", "final", "gig", "\ud83d\udc4d\ud83c\udffb"]} -{"id": "3290-virus", "word": "virus", "label_binary": 0, "text_1": "she's the ex-quisite virus of personality waves w bi-coastal reception that got away", "token_idx_1": 3, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-03", "text_2": "It's bad enough we have to put up with this virus problem, but \"Curb Your Enthusiasm\" has finished it's current run for the season. 
Larry David should be forced by the federal government to make more episodes immediately.", "token_idx_2": 10, "text_start_2": 44, "text_end_2": 49, "date_2": "2020-03", "text_1_tokenized": ["she's", "the", "ex-quisite", "virus", "of", "personality", "waves", "w", "bi-coastal", "reception", "that", "got", "away"], "text_2_tokenized": ["It's", "bad", "enough", "we", "have", "to", "put", "up", "with", "this", "virus", "problem", ",", "but", "\"", "Curb", "Your", "Enthusiasm", "\"", "has", "finished", "it's", "current", "run", "for", "the", "season", ".", "Larry", "David", "should", "be", "forced", "by", "the", "federal", "government", "to", "make", "more", "episodes", "immediately", "."]} -{"id": "3291-virus", "word": "virus", "label_binary": 1, "text_1": "I remember when some of the YouTube comments had things like \"I just have a free Minecraft gift code, click here!\" and there would always be a snarky reply saying it's obviously a virus.", "token_idx_1": 37, "text_start_1": 180, "text_end_1": 185, "date_1": "2019-03", "text_2": "You are three times more likely to get a virus on your computer from a Religious website than from a Porn website.", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 46, "date_2": "2020-03", "text_1_tokenized": ["I", "remember", "when", "some", "of", "the", "YouTube", "comments", "had", "things", "like", "\"", "I", "just", "have", "a", "free", "Minecraft", "gift", "code", ",", "click", "here", "!", "\"", "and", "there", "would", "always", "be", "a", "snarky", "reply", "saying", "it's", "obviously", "a", "virus", "."], "text_2_tokenized": ["You", "are", "three", "times", "more", "likely", "to", "get", "a", "virus", "on", "your", "computer", "from", "a", "Religious", "website", "than", "from", "a", "Porn", "website", "."]} -{"id": "3292-virus", "word": "virus", "label_binary": 1, "text_1": "Very glad that Hunter is available for the game (Huff too) and that Jerome is over his virus. So let's do this Hoos! #UVa #GoHoos", "token_idx_1": 19, "text_start_1": 87, "text_end_1": 92, "date_1": "2019-03", "text_2": "Despite the fact that what these Spring-brakers did was insensitive and unwise in a different setting. They were without knowingly was safer than most of us because no virus can survive in temperatures over 90* unless it was biologically modified and created a Lab? Uh? Again Uh?", "token_idx_2": 29, "text_start_2": 168, "text_end_2": 173, "date_2": "2020-03", "text_1_tokenized": ["Very", "glad", "that", "Hunter", "is", "available", "for", "the", "game", "(", "Huff", "too", ")", "and", "that", "Jerome", "is", "over", "his", "virus", ".", "So", "let's", "do", "this", "Hoos", "!", "#UVa", "#GoHoos"], "text_2_tokenized": ["Despite", "the", "fact", "that", "what", "these", "Spring-brakers", "did", "was", "insensitive", "and", "unwise", "in", "a", "different", "setting", ".", "They", "were", "without", "knowingly", "was", "safer", "than", "most", "of", "us", "because", "no", "virus", "can", "survive", "in", "temperatures", "over", "90", "*", "unless", "it", "was", "biologically", "modified", "and", "created", "a", "Lab", "?", "Uh", "?", "Again", "Uh", "?"]} -{"id": "3293-virus", "word": "virus", "label_binary": 1, "text_1": "[Humans] move to another area, and you multiply ... until every natural resource is consumed. ... There is another organism on this planet that follows the same pattern. ... A virus. Human beings are a disease, a cancer of this planet. 
- The Matrix", "token_idx_1": 33, "text_start_1": 176, "text_end_1": 181, "date_1": "2019-03", "text_2": "and honestly i'm not telling them \u201cpeople are out here dying because of a virus\u201d all they need to know is their safe, healthy and to wash hands.", "token_idx_2": 15, "text_start_2": 74, "text_end_2": 79, "date_2": "2020-03", "text_1_tokenized": ["[", "Humans", "]", "move", "to", "another", "area", ",", "and", "you", "multiply", "...", "until", "every", "natural", "resource", "is", "consumed", ". ...", "There", "is", "another", "organism", "on", "this", "planet", "that", "follows", "the", "same", "pattern", ". ...", "A", "virus", ".", "Human", "beings", "are", "a", "disease", ",", "a", "cancer", "of", "this", "planet", ".", "-", "The", "Matrix"], "text_2_tokenized": ["and", "honestly", "i'm", "not", "telling", "them", "\u201c", "people", "are", "out", "here", "dying", "because", "of", "a", "virus", "\u201d", "all", "they", "need", "to", "know", "is", "their", "safe", ",", "healthy", "and", "to", "wash", "hands", "."]} -{"id": "3294-virus", "word": "virus", "label_binary": 1, "text_1": "How does my doc say \u201cwell you might have the flu or you might just have a virus that is going around\u201d well which fuckin one buddy? Probably important to know", "token_idx_1": 18, "text_start_1": 74, "text_end_1": 79, "date_1": "2019-03", "text_2": "It really bothers me that y'all didn't know washing your hands for 5-10 seconds wasn't effective prior to this virus. It bothers me even more that you dirty mfs still not properly washing your hands.", "token_idx_2": 19, "text_start_2": 111, "text_end_2": 116, "date_2": "2020-03", "text_1_tokenized": ["How", "does", "my", "doc", "say", "\u201c", "well", "you", "might", "have", "the", "flu", "or", "you", "might", "just", "have", "a", "virus", "that", "is", "going", "around", "\u201d", "well", "which", "fuckin", "one", "buddy", "?", "Probably", "important", "to", "know"], "text_2_tokenized": ["It", "really", "bothers", "me", "that", "y'all", "didn't", "know", "washing", "your", "hands", "for", "5-10", "seconds", "wasn't", "effective", "prior", "to", "this", "virus", ".", "It", "bothers", "me", "even", "more", "that", "you", "dirty", "mfs", "still", "not", "properly", "washing", "your", "hands", "."]} -{"id": "3295-virus", "word": "virus", "label_binary": 0, "text_1": "All people on the world. As you know Our Nation Indonesia #RemoveTheNgaciroVirus. This virus is very dangerous for the democratic nation of Indonesia, so please throw in the trash and grave deep ... Once againt #RemoveTheNgaciroVirus", "token_idx_1": 15, "text_start_1": 87, "text_end_1": 92, "date_1": "2019-03", "text_2": "Corona virus got me going to sleep when the sun goes down and waking up when it goes down.....", "token_idx_2": 1, "text_start_2": 7, "text_end_2": 12, "date_2": "2020-03", "text_1_tokenized": ["All", "people", "on", "the", "world", ".", "As", "you", "know", "Our", "Nation", "Indonesia", "#RemoveTheNgaciroVirus", ".", "This", "virus", "is", "very", "dangerous", "for", "the", "democratic", "nation", "of", "Indonesia", ",", "so", "please", "throw", "in", "the", "trash", "and", "grave", "deep", "...", "Once", "againt", "#RemoveTheNgaciroVirus"], "text_2_tokenized": ["Corona", "virus", "got", "me", "going", "to", "sleep", "when", "the", "sun", "goes", "down", "and", "waking", "up", "when", "it", "goes", "down", "..."]} -{"id": "3296-virus", "word": "virus", "label_binary": 0, "text_1": "I need an intervention, @heyriddleriddle! 
It's been two days since I heard \"The More You Know!\" and I cannot stop repeating \"Phoebe P. Peabody Bebe\" and cackling. My pets are scared and my friends are all infected with the #feebeepeepebadeebebe virus. SEND HELP PLEASE", "token_idx_1": 49, "text_start_1": 245, "text_end_1": 250, "date_1": "2019-03", "text_2": "So Prince Charles has the corona virus and Camilla hasn't. Not sharing the same bedroom ? She might be out in the stable! #coronavirusaustralia #RoyalFamily", "token_idx_2": 6, "text_start_2": 33, "text_end_2": 38, "date_2": "2020-03", "text_1_tokenized": ["I", "need", "an", "intervention", ",", "@heyriddleriddle", "!", "It's", "been", "two", "days", "since", "I", "heard", "\"", "The", "More", "You", "Know", "!", "\"", "and", "I", "cannot", "stop", "repeating", "\"", "Phoebe", "P", ".", "Peabody", "Bebe", "\"", "and", "cackling", ".", "My", "pets", "are", "scared", "and", "my", "friends", "are", "all", "infected", "with", "the", "#feebeepeepebadeebebe", "virus", ".", "SEND", "HELP", "PLEASE"], "text_2_tokenized": ["So", "Prince", "Charles", "has", "the", "corona", "virus", "and", "Camilla", "hasn't", ".", "Not", "sharing", "the", "same", "bedroom", "?", "She", "might", "be", "out", "in", "the", "stable", "!", "#coronavirusaustralia", "#RoyalFamily"]} -{"id": "3297-virus", "word": "virus", "label_binary": 1, "text_1": "I just don't understand why all my professors are being such dickheads to me like...I had the stomach virus and missed one fucking day...wtf do you fucking expect from me", "token_idx_1": 20, "text_start_1": 102, "text_end_1": 107, "date_1": "2019-03", "text_2": "Waterfront workers in Melbourne and Henderson (WA) have refused work on health and safety grounds this week. They point out that there is a general fourteen day quarantine on all arrivals to limit the spread of the virus. (thread)", "token_idx_2": 40, "text_start_2": 215, "text_end_2": 220, "date_2": "2020-03", "text_1_tokenized": ["I", "just", "don't", "understand", "why", "all", "my", "professors", "are", "being", "such", "dickheads", "to", "me", "like", "...", "I", "had", "the", "stomach", "virus", "and", "missed", "one", "fucking", "day", "...", "wtf", "do", "you", "fucking", "expect", "from", "me"], "text_2_tokenized": ["Waterfront", "workers", "in", "Melbourne", "and", "Henderson", "(", "WA", ")", "have", "refused", "work", "on", "health", "and", "safety", "grounds", "this", "week", ".", "They", "point", "out", "that", "there", "is", "a", "general", "fourteen", "day", "quarantine", "on", "all", "arrivals", "to", "limit", "the", "spread", "of", "the", "virus", ".", "(", "thread", ")"]} -{"id": "3298-virus", "word": "virus", "label_binary": 1, "text_1": "Which nasty ass bitch gave me a stomach virus??!!!", "token_idx_1": 8, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-03", "text_2": "I'm lucky enough to work for a company that pays me for this time off, but so many people do not have this privilege. People cannot afford to be sick, and it is your responsible to stay home and not spread the virus as it can be spread without symptoms. 
#StayHomeSaveLives", "token_idx_2": 45, "text_start_2": 210, "text_end_2": 215, "date_2": "2020-03", "text_1_tokenized": ["Which", "nasty", "ass", "bitch", "gave", "me", "a", "stomach", "virus", "?", "?", "!", "!", "!"], "text_2_tokenized": ["I'm", "lucky", "enough", "to", "work", "for", "a", "company", "that", "pays", "me", "for", "this", "time", "off", ",", "but", "so", "many", "people", "do", "not", "have", "this", "privilege", ".", "People", "cannot", "afford", "to", "be", "sick", ",", "and", "it", "is", "your", "responsible", "to", "stay", "home", "and", "not", "spread", "the", "virus", "as", "it", "can", "be", "spread", "without", "symptoms", ".", "#StayHomeSaveLives"]} -{"id": "3299-virus", "word": "virus", "label_binary": 0, "text_1": "nothing like getting an email from someone in your past... but all it is is a virus they picked up that sends to everyone in their address book", "token_idx_1": 17, "text_start_1": 78, "text_end_1": 83, "date_1": "2019-03", "text_2": "Everyone is pissing me off on this virus!", "token_idx_2": 7, "text_start_2": 35, "text_end_2": 40, "date_2": "2020-03", "text_1_tokenized": ["nothing", "like", "getting", "an", "email", "from", "someone", "in", "your", "past", "...", "but", "all", "it", "is", "is", "a", "virus", "they", "picked", "up", "that", "sends", "to", "everyone", "in", "their", "address", "book"], "text_2_tokenized": ["Everyone", "is", "pissing", "me", "off", "on", "this", "virus", "!"]} -{"id": "3300-virus", "word": "virus", "label_binary": 1, "text_1": "I hate selfish people that go places when they are really sick. Stay your ass at home and get better & stop spreading that virus around to other people.", "token_idx_1": 25, "text_start_1": 127, "text_end_1": 132, "date_1": "2019-03", "text_2": "Y'all know all these mask y'all making not stopping the virus , but get ya hussle on", "token_idx_2": 10, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-03", "text_1_tokenized": ["I", "hate", "selfish", "people", "that", "go", "places", "when", "they", "are", "really", "sick", ".", "Stay", "your", "ass", "at", "home", "and", "get", "better", "&", "stop", "spreading", "that", "virus", "around", "to", "other", "people", "."], "text_2_tokenized": ["Y'all", "know", "all", "these", "mask", "y'all", "making", "not", "stopping", "the", "virus", ",", "but", "get", "ya", "hussle", "on"]} -{"id": "3301-virus", "word": "virus", "label_binary": 1, "text_1": "thought I had a mild virus But no it was just the start Woke up this morning felling even worse!!", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-03", "text_2": "Robert has the virus , nobody talk to him ty", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["thought", "I", "had", "a", "mild", "virus", "But", "no", "it", "was", "just", "the", "start", "Woke", "up", "this", "morning", "felling", "even", "worse", "!", "!"], "text_2_tokenized": ["Robert", "has", "the", "virus", ",", "nobody", "talk", "to", "him", "ty"]} -{"id": "3302-virus", "word": "virus", "label_binary": 0, "text_1": "Dear citizens of Earth, We're so sorry we fucked up. We're sorry that we have allowed one man to not only divide our country but every country around the globe. He is a virus we are trying to get rid of. It will take time but please forgive us. 
Signed America", "token_idx_1": 36, "text_start_1": 169, "text_end_1": 174, "date_1": "2019-03", "text_2": "The amount of people suffering from depression & dying because of this virus got me sick AF \ud83d\ude22\ud83d\ude29 #NotGonnaLie", "token_idx_2": 12, "text_start_2": 75, "text_end_2": 80, "date_2": "2020-03", "text_1_tokenized": ["Dear", "citizens", "of", "Earth", ",", "We're", "so", "sorry", "we", "fucked", "up", ".", "We're", "sorry", "that", "we", "have", "allowed", "one", "man", "to", "not", "only", "divide", "our", "country", "but", "every", "country", "around", "the", "globe", ".", "He", "is", "a", "virus", "we", "are", "trying", "to", "get", "rid", "of", ".", "It", "will", "take", "time", "but", "please", "forgive", "us", ".", "Signed", "America"], "text_2_tokenized": ["The", "amount", "of", "people", "suffering", "from", "depression", "&", "dying", "because", "of", "this", "virus", "got", "me", "sick", "AF", "\ud83d\ude22", "\ud83d\ude29", "#NotGonnaLie"]} -{"id": "3303-virus", "word": "virus", "label_binary": 0, "text_1": "The biggest mistake of @ncbn govt is irresponsible to the core, Injustice, and neglecting the genuine issues through DASH-BOARD. The virus effected to the DASHBOARD possibly cost his 2019 election-!!!.", "token_idx_1": 23, "text_start_1": 133, "text_end_1": 138, "date_1": "2019-03", "text_2": "I honestly joke about this virus, but the way my asthma is set up has me terrified asf about it. My lungsss go through enough already", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 32, "date_2": "2020-03", "text_1_tokenized": ["The", "biggest", "mistake", "of", "@ncbn", "govt", "is", "irresponsible", "to", "the", "core", ",", "Injustice", ",", "and", "neglecting", "the", "genuine", "issues", "through", "DASH-BOARD", ".", "The", "virus", "effected", "to", "the", "DASHBOARD", "possibly", "cost", "his", "2019", "election", "-", "!", "!", "!", "."], "text_2_tokenized": ["I", "honestly", "joke", "about", "this", "virus", ",", "but", "the", "way", "my", "asthma", "is", "set", "up", "has", "me", "terrified", "asf", "about", "it", ".", "My", "lungsss", "go", "through", "enough", "already"]} -{"id": "3304-virus", "word": "virus", "label_binary": 1, "text_1": "Everyone is miserable at the Phoenix airport right now. All I hear is people complaining.. I'm holding my breath so I don't catch the virus.", "token_idx_1": 26, "text_start_1": 134, "text_end_1": 139, "date_1": "2019-03", "text_2": "I wish my birthday wasn't ruined because of the stupid virus", "token_idx_2": 10, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["Everyone", "is", "miserable", "at", "the", "Phoenix", "airport", "right", "now", ".", "All", "I", "hear", "is", "people", "complaining", "..", "I'm", "holding", "my", "breath", "so", "I", "don't", "catch", "the", "virus", "."], "text_2_tokenized": ["I", "wish", "my", "birthday", "wasn't", "ruined", "because", "of", "the", "stupid", "virus"]} -{"id": "3305-virus", "word": "virus", "label_binary": 1, "text_1": "The Ebola virus is a formidable foe. It seems harmless to think about though. It only has 7 genes and is smaller than a red blood cell. 
A red blood cell is 8000 nm and Ebola is 1400 nm, how can something so seemingly insignificant cause so much destruction?", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-03", "text_2": "remember when trump called the virus a hoax haha lol", "token_idx_2": 5, "text_start_2": 31, "text_end_2": 36, "date_2": "2020-03", "text_1_tokenized": ["The", "Ebola", "virus", "is", "a", "formidable", "foe", ".", "It", "seems", "harmless", "to", "think", "about", "though", ".", "It", "only", "has", "7", "genes", "and", "is", "smaller", "than", "a", "red", "blood", "cell", ".", "A", "red", "blood", "cell", "is", "8000", "nm", "and", "Ebola", "is", "1400", "nm", ",", "how", "can", "something", "so", "seemingly", "insignificant", "cause", "so", "much", "destruction", "?"], "text_2_tokenized": ["remember", "when", "trump", "called", "the", "virus", "a", "hoax", "haha", "lol"]} -{"id": "3306-virus", "word": "virus", "label_binary": 0, "text_1": "Something is up with twitter. Or maybe it's with Kaspersky, because every time a new page is opened up, my anti virus system shouts at me about malicious links being blocked...", "token_idx_1": 24, "text_start_1": 112, "text_end_1": 117, "date_1": "2019-03", "text_2": "Ah perfect people are learning to be racist towards asians for a virus they couldn't control", "token_idx_2": 12, "text_start_2": 65, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["Something", "is", "up", "with", "twitter", ".", "Or", "maybe", "it's", "with", "Kaspersky", ",", "because", "every", "time", "a", "new", "page", "is", "opened", "up", ",", "my", "anti", "virus", "system", "shouts", "at", "me", "about", "malicious", "links", "being", "blocked", "..."], "text_2_tokenized": ["Ah", "perfect", "people", "are", "learning", "to", "be", "racist", "towards", "asians", "for", "a", "virus", "they", "couldn't", "control"]} -{"id": "3307-virus", "word": "virus", "label_binary": 0, "text_1": "dear @SMTOWNGLOBAL can u bring back cuties and happy virus baby jaehyun back? Let him be himself", "token_idx_1": 9, "text_start_1": 53, "text_end_1": 58, "date_1": "2019-03", "text_2": "forever wishing people would stay inside their homes so that this nasty virus would go away!!", "token_idx_2": 12, "text_start_2": 72, "text_end_2": 77, "date_2": "2020-03", "text_1_tokenized": ["dear", "@SMTOWNGLOBAL", "can", "u", "bring", "back", "cuties", "and", "happy", "virus", "baby", "jaehyun", "back", "?", "Let", "him", "be", "himself"], "text_2_tokenized": ["forever", "wishing", "people", "would", "stay", "inside", "their", "homes", "so", "that", "this", "nasty", "virus", "would", "go", "away", "!", "!"]} -{"id": "3308-virus", "word": "virus", "label_binary": 0, "text_1": "OBSTINACY is virus to growth. 
Same holds for MINDSET", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 18, "date_1": "2019-03", "text_2": "Hey now that the virus has already killed way more people than a year's worth of homicides in the US, maybe republicans will get real into fancy soaps and the guns thing was just a phase", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 22, "date_2": "2020-03", "text_1_tokenized": ["OBSTINACY", "is", "virus", "to", "growth", ".", "Same", "holds", "for", "MINDSET"], "text_2_tokenized": ["Hey", "now", "that", "the", "virus", "has", "already", "killed", "way", "more", "people", "than", "a", "year's", "worth", "of", "homicides", "in", "the", "US", ",", "maybe", "republicans", "will", "get", "real", "into", "fancy", "soaps", "and", "the", "guns", "thing", "was", "just", "a", "phase"]} -{"id": "3309-virus", "word": "virus", "label_binary": 0, "text_1": "heads up if you get a notif from @asdesdq5 im checking if the virus link still works", "token_idx_1": 13, "text_start_1": 62, "text_end_1": 67, "date_1": "2019-03", "text_2": "I know a lot of people who are #HighRiskCovid19 who could die if they catch this virus. I know a lot of people who are on the frontlines, saving lives, routinely exposing themselves to high doses of virus, and all the dangers that comes with that.", "token_idx_2": 16, "text_start_2": 81, "text_end_2": 86, "date_2": "2020-03", "text_1_tokenized": ["heads", "up", "if", "you", "get", "a", "notif", "from", "@asdesdq5", "im", "checking", "if", "the", "virus", "link", "still", "works"], "text_2_tokenized": ["I", "know", "a", "lot", "of", "people", "who", "are", "#HighRiskCovid19", "who", "could", "die", "if", "they", "catch", "this", "virus", ".", "I", "know", "a", "lot", "of", "people", "who", "are", "on", "the", "frontlines", ",", "saving", "lives", ",", "routinely", "exposing", "themselves", "to", "high", "doses", "of", "virus", ",", "and", "all", "the", "dangers", "that", "comes", "with", "that", "."]} -{"id": "3310-virus", "word": "virus", "label_binary": 1, "text_1": "Great I have a cold. Cold virus pls get tf out of my body before finals hit, otherwise it's ginger tea all day err day", "token_idx_1": 7, "text_start_1": 26, "text_end_1": 31, "date_1": "2019-03", "text_2": "A lot of people not taking this shit serious at all that's y'all problem now being press to be in mfs faces until one of y'all got that damn virus \ud83d\udcaf", "token_idx_2": 29, "text_start_2": 141, "text_end_2": 146, "date_2": "2020-03", "text_1_tokenized": ["Great", "I", "have", "a", "cold", ".", "Cold", "virus", "pls", "get", "tf", "out", "of", "my", "body", "before", "finals", "hit", ",", "otherwise", "it's", "ginger", "tea", "all", "day", "err", "day"], "text_2_tokenized": ["A", "lot", "of", "people", "not", "taking", "this", "shit", "serious", "at", "all", "that's", "y'all", "problem", "now", "being", "press", "to", "be", "in", "mfs", "faces", "until", "one", "of", "y'all", "got", "that", "damn", "virus", "\ud83d\udcaf"]} -{"id": "3311-virus", "word": "virus", "label_binary": 1, "text_1": "2019 is gross - they closed one of my town's elementary schools due to a stomach virus outbreak like EWWWWWWE. never in my life \ud83e\udd2e", "token_idx_1": 16, "text_start_1": 81, "text_end_1": 86, "date_1": "2019-03", "text_2": "Just so American people know every Democrat voted no on the virus funding. They're trying to slip abortion bill in with the virus bill this is not right. 
So all us American voters that are trying to make ends meet you can thank your Democrats for not voting on a clean bill.", "token_idx_2": 11, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-03", "text_1_tokenized": ["2019", "is", "gross", "-", "they", "closed", "one", "of", "my", "town's", "elementary", "schools", "due", "to", "a", "stomach", "virus", "outbreak", "like", "EWWWWWWE", ".", "never", "in", "my", "life", "\ud83e\udd2e"], "text_2_tokenized": ["Just", "so", "American", "people", "know", "every", "Democrat", "voted", "no", "on", "the", "virus", "funding", ".", "They're", "trying", "to", "slip", "abortion", "bill", "in", "with", "the", "virus", "bill", "this", "is", "not", "right", ".", "So", "all", "us", "American", "voters", "that", "are", "trying", "to", "make", "ends", "meet", "you", "can", "thank", "your", "Democrats", "for", "not", "voting", "on", "a", "clean", "bill", "."]} -{"id": "3312-virus", "word": "virus", "label_binary": 0, "text_1": "Is there a virus going on in the @Timberwolves locker room. I see every week players are out due to injuries or being sick. #timberwolves @22wiggins @KarlTowns ?", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 16, "date_1": "2019-03", "text_2": "The fact that if you think you could possibly have corona virus you can't talk to anyone or get tested, they give you an automated message telling you to self isolate for 7 day's \ud83d\ude15", "token_idx_2": 11, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-03", "text_1_tokenized": ["Is", "there", "a", "virus", "going", "on", "in", "the", "@Timberwolves", "locker", "room", ".", "I", "see", "every", "week", "players", "are", "out", "due", "to", "injuries", "or", "being", "sick", ".", "#timberwolves", "@22wiggins", "@KarlTowns", "?"], "text_2_tokenized": ["The", "fact", "that", "if", "you", "think", "you", "could", "possibly", "have", "corona", "virus", "you", "can't", "talk", "to", "anyone", "or", "get", "tested", ",", "they", "give", "you", "an", "automated", "message", "telling", "you", "to", "self", "isolate", "for", "7", "day's", "\ud83d\ude15"]} -{"id": "3313-virus", "word": "virus", "label_binary": 1, "text_1": "Also I haven't been able to touch a Girl Scout cookie since my stomach virus/migraine/whatever it was 2 weeks ago \ud83d\ude11\ud83d\ude11\ud83d\ude11 I need to get over it", "token_idx_1": 14, "text_start_1": 71, "text_end_1": 76, "date_1": "2019-03", "text_2": "This virus has me scared", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 10, "date_2": "2020-03", "text_1_tokenized": ["Also", "I", "haven't", "been", "able", "to", "touch", "a", "Girl", "Scout", "cookie", "since", "my", "stomach", "virus", "/", "migraine", "/", "whatever", "it", "was", "2", "weeks", "ago", "\ud83d\ude11", "\ud83d\ude11", "\ud83d\ude11", "I", "need", "to", "get", "over", "it"], "text_2_tokenized": ["This", "virus", "has", "me", "scared"]} -{"id": "3314-virus", "word": "virus", "label_binary": 0, "text_1": "By the way, regarding the new Titus Andronicus track, rhyming \"society\" with \"the virus tree\"? Muah!", "token_idx_1": 18, "text_start_1": 82, "text_end_1": 87, "date_1": "2019-03", "text_2": "completely destroyed the roomba in a debate about virus mitigation just now. just crushed her. 
was honestly embarrassing", "token_idx_2": 8, "text_start_2": 50, "text_end_2": 55, "date_2": "2020-03", "text_1_tokenized": ["By", "the", "way", ",", "regarding", "the", "new", "Titus", "Andronicus", "track", ",", "rhyming", "\"", "society", "\"", "with", "\"", "the", "virus", "tree", "\"", "?", "Muah", "!"], "text_2_tokenized": ["completely", "destroyed", "the", "roomba", "in", "a", "debate", "about", "virus", "mitigation", "just", "now", ".", "just", "crushed", "her", ".", "was", "honestly", "embarrassing"]} -{"id": "3315-virus", "word": "virus", "label_binary": 1, "text_1": "Hello from me and my tonsil virus \ud83d\ude37", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 33, "date_1": "2019-03", "text_2": "So my mama just had me let up all the windows while we on the expressway talmbout \u201cthe virus is airborne\u201d...I'm drawing the line", "token_idx_2": 19, "text_start_2": 87, "text_end_2": 92, "date_2": "2020-03", "text_1_tokenized": ["Hello", "from", "me", "and", "my", "tonsil", "virus", "\ud83d\ude37"], "text_2_tokenized": ["So", "my", "mama", "just", "had", "me", "let", "up", "all", "the", "windows", "while", "we", "on", "the", "expressway", "talmbout", "\u201c", "the", "virus", "is", "airborne", "\u201d", "...", "I'm", "drawing", "the", "line"]} -{"id": "3316-virus", "word": "virus", "label_binary": 1, "text_1": "Forced vto bc of a virus hayayaya", "token_idx_1": 5, "text_start_1": 19, "text_end_1": 24, "date_1": "2019-03", "text_2": "The virus can stick to metallic surfaces (hand rails, elevator buttons, some credit cards, door handles) for up to 9 days EVEN after being \u201ccleaned.\u201d I know yall know not to be real touchy with things during this period but be a little more careful with metallic surfaces.", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-03", "text_1_tokenized": ["Forced", "vto", "bc", "of", "a", "virus", "hayayaya"], "text_2_tokenized": ["The", "virus", "can", "stick", "to", "metallic", "surfaces", "(", "hand", "rails", ",", "elevator", "buttons", ",", "some", "credit", "cards", ",", "door", "handles", ")", "for", "up", "to", "9", "days", "EVEN", "after", "being", "\u201c", "cleaned", ".", "\u201d", "I", "know", "yall", "know", "not", "to", "be", "real", "touchy", "with", "things", "during", "this", "period", "but", "be", "a", "little", "more", "careful", "with", "metallic", "surfaces", "."]} -{"id": "3317-virus", "word": "virus", "label_binary": 1, "text_1": "Want to go out for st paddys but with a sprained ankle, cold virus, no bank card or ID, I think the universe is telling me no \ud83c\udf40\ud83c\udf40\ud83c\udf40", "token_idx_1": 14, "text_start_1": 61, "text_end_1": 66, "date_1": "2019-03", "text_2": "US spends the most money on the military but can't secure their safety against a virus?", "token_idx_2": 15, "text_start_2": 81, "text_end_2": 86, "date_2": "2020-03", "text_1_tokenized": ["Want", "to", "go", "out", "for", "st", "paddys", "but", "with", "a", "sprained", "ankle", ",", "cold", "virus", ",", "no", "bank", "card", "or", "ID", ",", "I", "think", "the", "universe", "is", "telling", "me", "no", "\ud83c\udf40", "\ud83c\udf40", "\ud83c\udf40"], "text_2_tokenized": ["US", "spends", "the", "most", "money", "on", "the", "military", "but", "can't", "secure", "their", "safety", "against", "a", "virus", "?"]} -{"id": "3318-virus", "word": "virus", "label_binary": 1, "text_1": "\"It's too intimate.\" \"It's about a flesh eating virus, how is that too intimate?\"", "token_idx_1": 12, "text_start_1": 48, "text_end_1": 53, "date_1": "2019-03", "text_2": "My mom barely started letting me go out and have fun and this bitch ass virus is ruining it.", "token_idx_2": 15, "text_start_2": 72, "text_end_2": 77, "date_2": "2020-03", "text_1_tokenized": ["\"", "It's", "too", "intimate", ".", "\"", "\"", "It's", "about", "a", "flesh", "eating", "virus", ",", "how", "is", "that", "too", "intimate", "?", "\""], "text_2_tokenized": ["My", "mom", "barely", "started", "letting", "me", "go", "out", "and", "have", "fun", "and", "this", "bitch", "ass", "virus", "is", "ruining", "it", "."]} -{"id": "3319-virus", "word": "virus", "label_binary": 0, "text_1": "he said he wanted cybersex i said i didn't want a virus", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 55, "date_1": "2019-03", "text_2": "@ corona virus, i'm tired of fucking crying", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-03", "text_1_tokenized": ["he", "said", "he", "wanted", "cybersex", "i", "said", "i", "didn't", "want", "a", "virus"], "text_2_tokenized": ["@", "corona", "virus", ",", "i'm", "tired", "of", "fucking", "crying"]} -{"id": "3320-virus", "word": "virus", "label_binary": 1, "text_1": "I don't know why it's so hard but to read this textbook and see that I have every bacteria and virus there is really upsets me", "token_idx_1": 20, "text_start_1": 95, "text_end_1": 100, "date_1": "2019-03", "text_2": "i wear lashes every day, but since this virus started i haven't worn them for bout a month.", "token_idx_2": 9, "text_start_2": 40, "text_end_2": 45, "date_2": "2020-03", "text_1_tokenized": ["I", "don't", "know", "why", "it's", "so", "hard", "but", "to", "read", "this", "textbook", "and", "see", "that", "I", "have", "every", "bacteria", "and", "virus", "there", "is", "really", "upsets", "me"], "text_2_tokenized": ["i", "wear", "lashes", "every", "day", ",", "but", "since", "this", "virus", "started", "i", "haven't", "worn", "them", "for", "bout", "a", "month", "."]} -{"id": "3321-virus", "word": "virus", "label_binary": 0, "text_1": "While others are looking amazed for the sleek Samsung S10...specs like camera bla bla bla I was amazed at the #ethereum or should I say #crypto technology wallet virus spreading .... Some people are clueless and afraid of change,hello..the world is changing . Wake up.....
#btc", "token_idx_1": 30, "text_start_1": 162, "text_end_1": 167, "date_1": "2019-03", "text_2": "I was talking to a person at work earlier about the virus and he said more people die from automobile accidents and I asked him if automobile accidents are highly contagious and kill elderly people and he just stopped his train of thought", "token_idx_2": 11, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-03", "text_1_tokenized": ["While", "others", "are", "looking", "amazed", "for", "the", "sleek", "Samsung", "S10", "...", "specs", "like", "camera", "bla", "bla", "bla", "I", "was", "amazed", "at", "the", "#ethereum", "or", "should", "I", "say", "#crypto", "technology", "wallet", "virus", "spreading", "...", "Some", "people", "are", "clueless", "and", "afraid", "of", "change", ",", "hello", "..", "the", "world", "is", "changing", ".", "Wake", "up", "...", "#btc"], "text_2_tokenized": ["I", "was", "talking", "to", "a", "person", "at", "work", "earlier", "about", "the", "virus", "and", "he", "said", "more", "people", "die", "from", "automobile", "accidents", "and", "I", "asked", "him", "if", "automobile", "accidents", "are", "highly", "contagious", "and", "kill", "elderly", "people", "and", "he", "just", "stopped", "his", "train", "of", "thought"]} -{"id": "3322-virus", "word": "virus", "label_binary": 0, "text_1": "Been fighting virus the past few days. Anyone else have this one? Fever, chills, Zendaya is Meechee on loop in my head?", "token_idx_1": 2, "text_start_1": 14, "text_end_1": 19, "date_1": "2019-03", "text_2": "Saratoga Springs Mayor Meg Kelly said number of coronavirus cases in county has risen to 12, virus is now throughout county. Earlier today number was at 10.", "token_idx_2": 17, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-03", "text_1_tokenized": ["Been", "fighting", "virus", "the", "past", "few", "days", ".", "Anyone", "else", "have", "this", "one", "?", "Fever", ",", "chills", ",", "Zendaya", "is", "Meechee", "on", "loop", "in", "my", "head", "?"], "text_2_tokenized": ["Saratoga", "Springs", "Mayor", "Meg", "Kelly", "said", "number", "of", "coronavirus", "cases", "in", "county", "has", "risen", "to", "12", ",", "virus", "is", "now", "throughout", "county", ".", "Earlier", "today", "number", "was", "at", "10", "."]} -{"id": "3323-virus", "word": "virus", "label_binary": 1, "text_1": "Woke up with a stomach virus. Love that for me.", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-03", "text_2": "i am handicap and i cant talk but i can hear. I had a virus attack in my brain at 3 months years old so if i make one sound it means no and if i make two sounds it means yes. I have a disability to read and send text message. 
I have to use Google Translate to read and send messag", "token_idx_2": 15, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-03", "text_1_tokenized": ["Woke", "up", "with", "a", "stomach", "virus", ".", "Love", "that", "for", "me", "."], "text_2_tokenized": ["i", "am", "handicap", "and", "i", "cant", "talk", "but", "i", "can", "hear", ".", "I", "had", "a", "virus", "attack", "in", "my", "brain", "at", "3", "months", "years", "old", "so", "if", "i", "make", "one", "sound", "it", "means", "no", "and", "if", "i", "make", "two", "sounds", "it", "means", "yes", ".", "I", "have", "a", "disability", "to", "read", "and", "send", "text", "message", ".", "I", "have", "to", "use", "Google", "Translate", "to", "read", "and", "send", "messag"]} -{"id": "3324-virus", "word": "virus", "label_binary": 0, "text_1": "my grandma got a virus on the computer looking for a chocolate chip recipe!!! SO NOW OUR IP ADDRESS IS LEAKED !!!!!", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 22, "date_1": "2019-03", "text_2": "One day we will sit and laugh next to a fire with a white claw in hand talking bout how we bought toilet paper to fight corona virus", "token_idx_2": 27, "text_start_2": 127, "text_end_2": 132, "date_2": "2020-03", "text_1_tokenized": ["my", "grandma", "got", "a", "virus", "on", "the", "computer", "looking", "for", "a", "chocolate", "chip", "recipe", "!", "!", "!", "SO", "NOW", "OUR", "IP", "ADDRESS", "IS", "LEAKED", "!", "!", "!"], "text_2_tokenized": ["One", "day", "we", "will", "sit", "and", "laugh", "next", "to", "a", "fire", "with", "a", "white", "claw", "in", "hand", "talking", "bout", "how", "we", "bought", "toilet", "paper", "to", "fight", "corona", "virus"]} -{"id": "3325-virus", "word": "virus", "label_binary": 1, "text_1": "\u201cI want you to know that I'm not scared. Well, not of dying, anyway. It's more forgetting. It's losing myself to this virus, that's what scares me.\u201d", "token_idx_1": 28, "text_start_1": 118, "text_end_1": 123, "date_1": "2019-03", "text_2": "so there is another virus going around... ok we dying", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 25, "date_2": "2020-03", "text_1_tokenized": ["\u201c", "I", "want", "you", "to", "know", "that", "I'm", "not", "scared", ".", "Well", ",", "not", "of", "dying", ",", "anyway", ".", "It's", "more", "forgetting", ".", "It's", "losing", "myself", "to", "this", "virus", ",", "that's", "what", "scares", "me", ".", "\u201d"], "text_2_tokenized": ["so", "there", "is", "another", "virus", "going", "around", "...", "ok", "we", "dying"]} -{"id": "3326-virus", "word": "virus", "label_binary": 1, "text_1": "\ud83c\udf24\ud83c\udf24What a beautiful morning \ud83d\ude00\ud83d\ude00Finally, the wind has dropped & the sun is out \u2600\ufe0fNo guests this morning with me @PaulRTempoFM - Pat has been stricken with this horrible virus that's going round \ud83d\ude2b\ud83d\ude2b\ud83d\ude2b #menopause & #HRT discussion with Gail & Pat rescheduled for next month \ud83d\udc4d", "token_idx_1": 35, "text_start_1": 170, "text_end_1": 175, "date_1": "2019-03", "text_2": "Facebook memes/twitter posts by girls saying this virus got you wishing you had a girl who could cook makes me giggle. Half of you can just read and follow a Pinterest recipe. 
That doesn't count", "token_idx_2": 9, "text_start_2": 50, "text_end_2": 55, "date_2": "2020-03", "text_1_tokenized": ["\ud83c\udf24", "\ud83c\udf24", "What", "a", "beautiful", "morning", "\ud83d\ude00", "\ud83d\ude00", "Finally", ",", "the", "wind", "has", "dropped", "&", "the", "sun", "is", "out", "\u2600", "\ufe0fNo", "guests", "this", "morning", "with", "me", "@PaulRTempoFM", "-", "Pat", "has", "been", "stricken", "with", "this", "horrible", "virus", "that's", "going", "round", "\ud83d\ude2b", "\ud83d\ude2b", "\ud83d\ude2b", "#menopause", "&", "#HRT", "discussion", "with", "Gail", "&", "Pat", "rescheduled", "for", "next", "month", "\ud83d\udc4d"], "text_2_tokenized": ["Facebook", "memes", "/", "twitter", "posts", "by", "girls", "saying", "this", "virus", "got", "you", "wishing", "you", "had", "a", "girl", "who", "could", "cook", "makes", "me", "giggle", ".", "Half", "of", "you", "can", "just", "read", "and", "follow", "a", "Pinterest", "recipe", ".", "That", "doesn't", "count"]} -{"id": "3327-virus", "word": "virus", "label_binary": 1, "text_1": "My virus induced cough gives me a perfect reason to enjoy @JRiegerCo whisky while awaiting the KU game. Medicinal you know.", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-03", "text_2": "My granny just said this was her worst birthday bc of the virus. I feel so bad \ud83d\ude29 I had plans for her & everything & now I can't even surprise her", "token_idx_2": 12, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-03", "text_1_tokenized": ["My", "virus", "induced", "cough", "gives", "me", "a", "perfect", "reason", "to", "enjoy", "@JRiegerCo", "whisky", "while", "awaiting", "the", "KU", "game", ".", "Medicinal", "you", "know", "."], "text_2_tokenized": ["My", "granny", "just", "said", "this", "was", "her", "worst", "birthday", "bc", "of", "the", "virus", ".", "I", "feel", "so", "bad", "\ud83d\ude29", "I", "had", "plans", "for", "her", "&", "everything", "&", "now", "I", "can't", "even", "surprise", "her"]} -{"id": "3328-virus", "word": "virus", "label_binary": 1, "text_1": "I love that girl who survived the tumor virus thingy", "token_idx_1": 8, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-03", "text_2": "I feel like it's inevitable that everyone is going to get the virus no matter what and... I'm a lil scared", "token_idx_2": 12, "text_start_2": 62, "text_end_2": 67, "date_2": "2020-03", "text_1_tokenized": ["I", "love", "that", "girl", "who", "survived", "the", "tumor", "virus", "thingy"], "text_2_tokenized": ["I", "feel", "like", "it's", "inevitable", "that", "everyone", "is", "going", "to", "get", "the", "virus", "no", "matter", "what", "and", "...", "I'm", "a", "lil", "scared"]} -{"id": "3329-virus", "word": "virus", "label_binary": 1, "text_1": "If you're so homophobic, thinking gayness is like a virus or contagious disease, and unable to appreciate art, don't ever stan NCT, don't even stan Kpop, and you'd better get lost! 
NCTzen fandom and any Kpop fandom don't need the existence of people like you.", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-03", "text_2": "If a diarrhea virus breaks out now will people buy up all of the nasal spray?", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-03", "text_1_tokenized": ["If", "you're", "so", "homophobic", ",", "thinking", "gayness", "is", "like", "a", "virus", "or", "contagious", "disease", ",", "and", "unable", "to", "appreciate", "art", ",", "don't", "ever", "stan", "NCT", ",", "don't", "even", "stan", "Kpop", ",", "and", "you'd", "better", "get", "lost", "!", "NCTzen", "fandom", "and", "any", "Kpop", "fandom", "don't", "need", "the", "existence", "of", "people", "like", "you", "."], "text_2_tokenized": ["If", "a", "diarrhea", "virus", "breaks", "out", "now", "will", "people", "buy", "up", "all", "of", "the", "nasal", "spray", "?"]} -{"id": "3330-virus", "word": "virus", "label_binary": 1, "text_1": "Having a toddler with a stomach virus really freaking sucks. And now I'm nauseous too \ud83d\ude05", "token_idx_1": 6, "text_start_1": 32, "text_end_1": 37, "date_1": "2019-03", "text_2": "Jus saw a cardiologist on tv saying that trump and Pence should be kept apart from each other so that the virus has less chance of getting the both of them-be QUIET doctor \ud83d\ude44", "token_idx_2": 21, "text_start_2": 106, "text_end_2": 111, "date_2": "2020-03", "text_1_tokenized": ["Having", "a", "toddler", "with", "a", "stomach", "virus", "really", "freaking", "sucks", ".", "And", "now", "I'm", "nauseous", "too", "\ud83d\ude05"], "text_2_tokenized": ["Jus", "saw", "a", "cardiologist", "on", "tv", "saying", "that", "trump", "and", "Pence", "should", "be", "kept", "apart", "from", "each", "other", "so", "that", "the", "virus", "has", "less", "chance", "of", "getting", "the", "both", "of", "them-be", "QUIET", "doctor", "\ud83d\ude44"]} -{"id": "3331-virus", "word": "virus", "label_binary": 0, "text_1": "Her virus was his awe A magnetic soliloquy Collapsing time into Reckless fun #vss365", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 9, "date_1": "2019-03", "text_2": "Can the virus transmit through the sea water where people are swimming?", "token_idx_2": 2, "text_start_2": 8, "text_end_2": 13, "date_2": "2020-03", "text_1_tokenized": ["Her", "virus", "was", "his", "awe", "A", "magnetic", "soliloquy", "Collapsing", "time", "into", "Reckless", "fun", "#vss365"], "text_2_tokenized": ["Can", "the", "virus", "transmit", "through", "the", "sea", "water", "where", "people", "are", "swimming", "?"]} -{"id": "3332-virus", "word": "virus", "label_binary": 1, "text_1": "I've been seeing the topic about Dr. Sebi reach a dangerous height. If you believe in holistic healthcare, that is your prerogative. It is dangerous to push the idea that Dr. Sebi cured HIV. I would like to send light to everyone who is continuing to fight the virus daily.", "token_idx_1": 54, "text_start_1": 261, "text_end_1": 266, "date_1": "2019-03", "text_2": "My father just told me he read that putting Vick's VapoRub on your nose blocks the virus from entering. 
The older generations are out here seeing our memes and believing them as truth.", "token_idx_2": 16, "text_start_2": 83, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["I've", "been", "seeing", "the", "topic", "about", "Dr", ".", "Sebi", "reach", "a", "dangerous", "height", ".", "If", "you", "believe", "in", "holistic", "healthcare", ",", "that", "is", "your", "prerogative", ".", "It", "is", "dangerous", "to", "push", "the", "idea", "that", "Dr", ".", "Sebi", "cured", "HIV", ".", "I", "would", "like", "to", "send", "light", "to", "everyone", "who", "is", "continuing", "to", "fight", "the", "virus", "daily", "."], "text_2_tokenized": ["My", "father", "just", "told", "me", "he", "read", "that", "putting", "Vick's", "VapoRub", "on", "your", "nose", "blocks", "the", "virus", "from", "entering", ".", "The", "older", "generations", "are", "out", "here", "seeing", "our", "memes", "and", "believing", "them", "as", "truth", "."]} -{"id": "3334-virus", "word": "virus", "label_binary": 1, "text_1": ".......stomach virus is assaulting me.....I'm bedridden today \ud83e\udd22 \ud83e\udd2e \u2014 feeling sick", "token_idx_1": 2, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-03", "text_2": "Gotta admit it; the \"How to stay home and avoid the virus\" commercials are a nice break from the \"Do you need a lawyer\" and \"Ask your Dr about this new medicine\" commercials.", "token_idx_2": 13, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-03", "text_1_tokenized": ["...", "stomach", "virus", "is", "assaulting", "me", "...", "I'm", "bedridden", "today", "\ud83e\udd22", "\ud83e\udd2e", "\u2014", "feeling", "sick"], "text_2_tokenized": ["Gotta", "admit", "it", ";", "the", "\"", "How", "to", "stay", "home", "and", "avoid", "the", "virus", "\"", "commercials", "are", "a", "nice", "break", "from", "the", "\"", "Do", "you", "need", "a", "lawyer", "\"", "and", "\"", "Ask", "your", "Dr", "about", "this", "new", "medicine", "\"", "commercials", "."]} -{"id": "3335-virus", "word": "virus", "label_binary": 0, "text_1": "happy happy HAPPIEST of birthdays to our happy virus, baby jihoon \ud83d\udc9c will forever be indebted to u for how happy u make every single person blessed enough to cross paths with u. im always sorry for what u had to go thru, but im so proud of how strong u have become", "token_idx_1": 8, "text_start_1": 47, "text_end_1": 52, "date_1": "2019-03", "text_2": "This virus and weather trying to \u201cstop my bag\u201d I see. \ud83d\ude44", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 10, "date_2": "2020-03", "text_1_tokenized": ["happy", "happy", "HAPPIEST", "of", "birthdays", "to", "our", "happy", "virus", ",", "baby", "jihoon", "\ud83d\udc9c", "will", "forever", "be", "indebted", "to", "u", "for", "how", "happy", "u", "make", "every", "single", "person", "blessed", "enough", "to", "cross", "paths", "with", "u", ".", "im", "always", "sorry", "for", "what", "u", "had", "to", "go", "thru", ",", "but", "im", "so", "proud", "of", "how", "strong", "u", "have", "become"], "text_2_tokenized": ["This", "virus", "and", "weather", "trying", "to", "\u201c", "stop", "my", "bag", "\u201d", "I", "see", ".", "\ud83d\ude44"]} -{"id": "3336-virus", "word": "virus", "label_binary": 0, "text_1": "Philistines must not be allowed to define culture. When they approach the table, stand up and very firmly lead them away. They must have no opinions, must not \"engage\". A virus does not reform a sick body. 
Neither can a philistine aid an end that is not deformed and ugly.", "token_idx_1": 37, "text_start_1": 171, "text_end_1": 176, "date_1": "2019-03", "text_2": "Let's cheer up medical staffs who struggling their life only to save our's. Give them lot of respects and welcoming them with love and hugs. Enemy of our enemies is not our friend, that is corona virus.", "token_idx_2": 39, "text_start_2": 196, "text_end_2": 201, "date_2": "2020-03", "text_1_tokenized": ["Philistines", "must", "not", "be", "allowed", "to", "define", "culture", ".", "When", "they", "approach", "the", "table", ",", "stand", "up", "and", "very", "firmly", "lead", "them", "away", ".", "They", "must", "have", "no", "opinions", ",", "must", "not", "\"", "engage", "\"", ".", "A", "virus", "does", "not", "reform", "a", "sick", "body", ".", "Neither", "can", "a", "philistine", "aid", "an", "end", "that", "is", "not", "deformed", "and", "ugly", "."], "text_2_tokenized": ["Let's", "cheer", "up", "medical", "staffs", "who", "struggling", "their", "life", "only", "to", "save", "our's", ".", "Give", "them", "lot", "of", "respects", "and", "welcoming", "them", "with", "love", "and", "hugs", ".", "Enemy", "of", "our", "enemies", "is", "not", "our", "friend", ",", "that", "is", "corona", "virus", "."]} -{"id": "3337-virus", "word": "virus", "label_binary": 0, "text_1": "Yet again humanity proves Bill Hicks was right all along... \u2018We're a virus with shoes'", "token_idx_1": 14, "text_start_1": 69, "text_end_1": 74, "date_1": "2019-03", "text_2": "There is a problem with this antibody test allowing ppl to get back to work. If you did disciplined self isolation from the start and never got the virus you would not have antibody-should you be disadvantaged in getting back to normal as a result? #COVID19au @CNN", "token_idx_2": 29, "text_start_2": 148, "text_end_2": 153, "date_2": "2020-03", "text_1_tokenized": ["Yet", "again", "humanity", "proves", "Bill", "Hicks", "was", "right", "all", "along", "...", "\u2018", "We're", "a", "virus", "with", "shoes", "'"], "text_2_tokenized": ["There", "is", "a", "problem", "with", "this", "antibody", "test", "allowing", "ppl", "to", "get", "back", "to", "work", ".", "If", "you", "did", "disciplined", "self", "isolation", "from", "the", "start", "and", "never", "got", "the", "virus", "you", "would", "not", "have", "antibody-should", "you", "be", "disadvantaged", "in", "getting", "back", "to", "normal", "as", "a", "result", "?", "#COVID19au", "@CNN"]} -{"id": "3338-virus", "word": "virus", "label_binary": 1, "text_1": "Nigga...I really have stomach virus...\ud83e\udd2e", "token_idx_1": 6, "text_start_1": 30, "text_end_1": 35, "date_1": "2019-03", "text_2": "Make no mistake. When this illusion of\"racing w/o spectators\"is dispelld&the virus spread due2thousands crowded in2dingy unsanitary OTBs Tues, Sat&Sun it'll b GOVT'S FAULT. This lunacy has been givenGOVT's imprimatur byJRC. 
Is cabinet2turn a blindeye while it quarantines7-8Miles?", "token_idx_2": 19, "text_start_2": 81, "text_end_2": 86, "date_2": "2020-03", "text_1_tokenized": ["Nigga", "...", "I", "really", "have", "stomach", "virus", "...", "\ud83e\udd2e"], "text_2_tokenized": ["Make", "no", "mistake", ".", "When", "this", "illusion", "of", "\"", "racing", "w", "/", "o", "spectators", "\"", "is", "dispelld", "&", "the", "virus", "spread", "due", "2thousands", "crowded", "in2dingy", "unsanitary", "OTBs", "Tues", ",", "Sat", "&", "Sun", "it'll", "b", "GOVT'S", "FAULT", ".", "This", "lunacy", "has", "been", "givenGOVT's", "imprimatur", "byJRC", ".", "Is", "cabinet", "2turn", "a", "blindeye", "while", "it", "quarantines", "7-8", "Miles", "?"]} -{"id": "3339-virus", "word": "virus", "label_binary": 1, "text_1": "Finally over this virus n I'm out of quarantine, so meet me at the mall coppin some shoes and then goin to get some Pho", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-03", "text_2": "the virus not just in health rather in ecomony and political #Covid_19", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-03", "text_1_tokenized": ["Finally", "over", "this", "virus", "n", "I'm", "out", "of", "quarantine", ",", "so", "meet", "me", "at", "the", "mall", "coppin", "some", "shoes", "and", "then", "goin", "to", "get", "some", "Pho"], "text_2_tokenized": ["the", "virus", "not", "just", "in", "health", "rather", "in", "ecomony", "and", "political", "#Covid_19"]} -{"id": "3340-virus", "word": "virus", "label_binary": 0, "text_1": "What if a virus killed the dinosaurs?", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-03", "text_2": "Today, tomorrow and forever there will be one heart that would always beat for you. You know Whose??? Your Own Stupid!!! Happy April Fool's Day. Corona virus \ud83c\udfc3\ud83c\udfc3", "token_idx_2": 35, "text_start_2": 152, "text_end_2": 157, "date_2": "2020-03", "text_1_tokenized": ["What", "if", "a", "virus", "killed", "the", "dinosaurs", "?"], "text_2_tokenized": ["Today", ",", "tomorrow", "and", "forever", "there", "will", "be", "one", "heart", "that", "would", "always", "beat", "for", "you", ".", "You", "know", "Whose", "?", "?", "?", "Your", "Own", "Stupid", "!", "!", "!", "Happy", "April", "Fool's", "Day", ".", "Corona", "virus", "\ud83c\udfc3", "\ud83c\udfc3"]} -{"id": "3341-virus", "word": "virus", "label_binary": 1, "text_1": "So I think I may have a stomach virus!! I was throwing up all day! Then my tummy woke me in the middle of the night with throw up!!", "token_idx_1": 8, "text_start_1": 32, "text_end_1": 37, "date_1": "2019-03", "text_2": "It's really sad how y'all think this outside shit cute. 
It's not a joke people real life dying from this virus and y'all rather go pop that lil hot ass $40 Coochie!", "token_idx_2": 21, "text_start_2": 105, "text_end_2": 110, "date_2": "2020-03", "text_1_tokenized": ["So", "I", "think", "I", "may", "have", "a", "stomach", "virus", "!", "!", "I", "was", "throwing", "up", "all", "day", "!", "Then", "my", "tummy", "woke", "me", "in", "the", "middle", "of", "the", "night", "with", "throw", "up", "!", "!"], "text_2_tokenized": ["It's", "really", "sad", "how", "y'all", "think", "this", "outside", "shit", "cute", ".", "It's", "not", "a", "joke", "people", "real", "life", "dying", "from", "this", "virus", "and", "y'all", "rather", "go", "pop", "that", "lil", "hot", "ass", "$", "40", "Coochie", "!"]} -{"id": "3342-virus", "word": "virus", "label_binary": 1, "text_1": "my mom has a cold but it could be a virus that i could catch so let's pray i don't get sick \ud83d\udd7a\ud83c\udffb this is the time i should be glowing up not down", "token_idx_1": 10, "text_start_1": 36, "text_end_1": 41, "date_1": "2019-03", "text_2": "Complacency was the problem. Most of us didn't take the virus seriously. Many of us still don't. There wouldn't any room for BS excuses like wanting small government if we did. A shared sense of danger is key. Wearing a mask is the best way to share that.", "token_idx_2": 11, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-03", "text_1_tokenized": ["my", "mom", "has", "a", "cold", "but", "it", "could", "be", "a", "virus", "that", "i", "could", "catch", "so", "let's", "pray", "i", "don't", "get", "sick", "\ud83d\udd7a\ud83c\udffb", "this", "is", "the", "time", "i", "should", "be", "glowing", "up", "not", "down"], "text_2_tokenized": ["Complacency", "was", "the", "problem", ".", "Most", "of", "us", "didn't", "take", "the", "virus", "seriously", ".", "Many", "of", "us", "still", "don't", ".", "There", "wouldn't", "any", "room", "for", "BS", "excuses", "like", "wanting", "small", "government", "if", "we", "did", ".", "A", "shared", "sense", "of", "danger", "is", "key", ".", "Wearing", "a", "mask", "is", "the", "best", "way", "to", "share", "that", "."]} -{"id": "3343-virus", "word": "virus", "label_binary": 1, "text_1": "So I'm going to try to get in contact with @13_j10 maby she can tell me more than these fools although I still don't trust her or anyone on her side... They are infected with the virus...", "token_idx_1": 37, "text_start_1": 179, "text_end_1": 184, "date_1": "2019-03", "text_2": "I haven't ran in weeks and before the virus hit, it was in the 60s/70s in Cstat. 
Today it's 87 and I am sweating buckets", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 43, "date_2": "2020-03", "text_1_tokenized": ["So", "I'm", "going", "to", "try", "to", "get", "in", "contact", "with", "@13_j10", "maby", "she", "can", "tell", "me", "more", "than", "these", "fools", "although", "I", "still", "don't", "trust", "her", "or", "anyone", "on", "her", "side", "...", "They", "are", "infected", "with", "the", "virus", "..."], "text_2_tokenized": ["I", "haven't", "ran", "in", "weeks", "and", "before", "the", "virus", "hit", ",", "it", "was", "in", "the", "60s", "/", "70s", "in", "Cstat", ".", "Today", "it's", "87", "and", "I", "am", "sweating", "buckets"]} -{"id": "3344-virus", "word": "virus", "label_binary": 0, "text_1": "Her IG must got a virus..", "token_idx_1": 5, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-03", "text_2": "No orange monkey its not the Chinese virus asshole @realDonaldTrump", "token_idx_2": 7, "text_start_2": 37, "text_end_2": 42, "date_2": "2020-03", "text_1_tokenized": ["Her", "IG", "must", "got", "a", "virus", ".."], "text_2_tokenized": ["No", "orange", "monkey", "its", "not", "the", "Chinese", "virus", "asshole", "@realDonaldTrump"]} -{"id": "3345-virus", "word": "virus", "label_binary": 1, "text_1": "Plantar warts are difficult to get rid of because of how thick the skin on your feet is. Due to its thickness, it's difficult for regular strength creams or other treatments to penetrate deep enough to treat every cell that has been infected with the virus.", "token_idx_1": 47, "text_start_1": 251, "text_end_1": 256, "date_1": "2019-03", "text_2": "Recovering economically from the covid-19 is going to depend on every person being tested to see if they have antibodies to the virus. Those who have already beat the virus need to start returning to work ASAP. #BeatCovidTogether", "token_idx_2": 24, "text_start_2": 128, "text_end_2": 133, "date_2": "2020-03", "text_1_tokenized": ["Plantar", "warts", "are", "difficult", "to", "get", "rid", "of", "because", "of", "how", "thick", "the", "skin", "on", "your", "feet", "is", ".", "Due", "to", "its", "thickness", ",", "it's", "difficult", "for", "regular", "strength", "creams", "or", "other", "treatments", "to", "penetrate", "deep", "enough", "to", "treat", "every", "cell", "that", "has", "been", "infected", "with", "the", "virus", "."], "text_2_tokenized": ["Recovering", "economically", "from", "the", "covid", "-", "19", "is", "going", "to", "depend", "on", "every", "person", "being", "tested", "to", "see", "if", "they", "have", "antibodies", "to", "the", "virus", ".", "Those", "who", "have", "already", "beat", "the", "virus", "need", "to", "start", "returning", "to", "work", "ASAP", ".", "#BeatCovidTogether"]} -{"id": "3346-virus", "word": "virus", "label_binary": 1, "text_1": "Bro noooo, my family passed their stomach virus to me \ud83e\udd74\ud83e\udd2e", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-03", "text_2": "My thoughts on virus. Is we have been advertorial been promoting products since the 60's which eradicate all bacteria and virus'. Making our homes into aseptic suites. 
In doing so our bodies haven't been exposed to illness whether viral or bacterial, resulting in a us", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["Bro", "noooo", ",", "my", "family", "passed", "their", "stomach", "virus", "to", "me", "\ud83e\udd74", "\ud83e\udd2e"], "text_2_tokenized": ["My", "thoughts", "on", "virus", ".", "Is", "we", "have", "been", "advertorial", "been", "promoting", "products", "since", "the", "60", "'", "s", "which", "eradicate", "all", "bacteria", "and", "virus", "'", ".", "Making", "our", "homes", "into", "aseptic", "suites", ".", "In", "doing", "so", "our", "bodies", "haven't", "been", "exposed", "to", "illness", "whether", "viral", "or", "bacterial", ",", "resulting", "in", "a", "us"]} -{"id": "1869-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Something really odd about the Witton Road Mosque attack. If you get caught there, you end up in the morgue. Some random white boy didn't do this. It's also the epicenter of the riots we had some years ago. It feels like calculated provocation.", "token_idx_1": 34, "text_start_1": 161, "text_end_1": 170, "date_1": "2019-03", "text_2": "BREAKING - COVID-19 just killed over 1,000 in Michigan. Detroit had a bad day so far as bad Federal supply chain & Intl airport (Delta's HQ hub & heavy FL Spring Break traffic IMHO) has it becoming a new tragic infection epicenter! Guess DeVos'll have to move her yachts north.", "token_idx_2": 46, "text_start_2": 229, "text_end_2": 238, "date_2": "2020-03", "text_1_tokenized": ["Something", "really", "odd", "about", "the", "Witton", "Road", "Mosque", "attack", ".", "If", "you", "get", "caught", "there", ",", "you", "end", "up", "in", "the", "morgue", ".", "Some", "random", "white", "boy", "didn't", "do", "this", ".", "It's", "also", "the", "epicenter", "of", "the", "riots", "we", "had", "some", "years", "ago", ".", "It", "feels", "like", "calculated", "provocation", "."], "text_2_tokenized": ["BREAKING", "-", "COVID", "-", "19", "just", "killed", "over", "1,000", "in", "Michigan", ".", "Detroit", "had", "a", "bad", "day", "so", "far", "as", "bad", "Federal", "supply", "chain", "&", "Intl", "airport", "(", "Delta's", "HQ", "hub", "&", "heavy", "FL", "Spring", "Break", "traffic", "IMHO", ")", "has", "it", "becoming", "a", "new", "tragic", "infection", "epicenter", "!", "Guess", "DeVos'll", "have", "to", "move", "her", "yachts", "north", "."]} -{"id": "1870-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "NWS: On March 27, 1964 at 5:36pm local time (March 28 at 3:36 UTC) a great earthquake of magnitude 9.2 occurred in the Prince William Sound region of Alaska. The earthquake rupture started approximately 25 km beneath the surface, with its epicenter about 75 miles east of Anc\u2026", "token_idx_1": 48, "text_start_1": 239, "text_end_1": 248, "date_1": "2019-03", "text_2": "Nepal is epicenter of covid-19 seriously???? Really man @aajtak is totally sold out.. 
This is the height of indian media right now..", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 18, "date_2": "2020-03", "text_1_tokenized": ["NWS", ":", "On", "March", "27", ",", "1964", "at", "5:36", "pm", "local", "time", "(", "March", "28", "at", "3:36", "UTC", ")", "a", "great", "earthquake", "of", "magnitude", "9.2", "occurred", "in", "the", "Prince", "William", "Sound", "region", "of", "Alaska", ".", "The", "earthquake", "rupture", "started", "approximately", "25", "km", "beneath", "the", "surface", ",", "with", "its", "epicenter", "about", "75", "miles", "east", "of", "Anc", "\u2026"], "text_2_tokenized": ["Nepal", "is", "epicenter", "of", "covid", "-", "19", "seriously", "?", "?", "?", "Really", "man", "@aajtak", "is", "totally", "sold", "out", "..", "This", "is", "the", "height", "of", "indian", "media", "right", "now", ".."]} -{"id": "1871-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "The DFW area is the epicenter of temp tags. #TaxReturnSeason #CantAffordTheSalesTaxTho", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 29, "date_1": "2019-03", "text_2": "My city had its primary. There were only 5 out of 180 stations open. Lines stretched blocks around parks and streets, not only exposing folks in the lines to each other but to everyone passing by on the street. My city is the COVID19 epicenter in the state.", "token_idx_2": 49, "text_start_2": 234, "text_end_2": 243, "date_2": "2020-03", "text_1_tokenized": ["The", "DFW", "area", "is", "the", "epicenter", "of", "temp", "tags", ".", "#TaxReturnSeason", "#CantAffordTheSalesTaxTho"], "text_2_tokenized": ["My", "city", "had", "its", "primary", ".", "There", "were", "only", "5", "out", "of", "180", "stations", "open", ".", "Lines", "stretched", "blocks", "around", "parks", "and", "streets", ",", "not", "only", "exposing", "folks", "in", "the", "lines", "to", "each", "other", "but", "to", "everyone", "passing", "by", "on", "the", "street", ".", "My", "city", "is", "the", "COVID", "19", "epicenter", "in", "the", "state", "."]} -{"id": "1872-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "A monster of a win for Michigan State hoops and #Spartans fans crammed into the Breslin Center late into the night. How did Tom Izzo's team topple Duke to reach the #FinalFour in Minneapolis? Bill Simonson @Hugeshow joins us from the epicenter of the Madness @AfterHoursCBS", "token_idx_1": 43, "text_start_1": 234, "text_end_1": 243, "date_1": "2019-03", "text_2": "i keep getting so fckin high and having the same realization that we are actually the epicenter of a pandemic rn", "token_idx_2": 16, "text_start_2": 86, "text_end_2": 95, "date_2": "2020-03", "text_1_tokenized": ["A", "monster", "of", "a", "win", "for", "Michigan", "State", "hoops", "and", "#Spartans", "fans", "crammed", "into", "the", "Breslin", "Center", "late", "into", "the", "night", ".", "How", "did", "Tom", "Izzo's", "team", "topple", "Duke", "to", "reach", "the", "#FinalFour", "in", "Minneapolis", "?", "Bill", "Simonson", "@Hugeshow", "joins", "us", "from", "the", "epicenter", "of", "the", "Madness", "@AfterHoursCBS"], "text_2_tokenized": ["i", "keep", "getting", "so", "fckin", "high", "and", "having", "the", "same", "realization", "that", "we", "are", "actually", "the", "epicenter", "of", "a", "pandemic", "rn"]} -{"id": "1873-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Man it's been a year since the epicenter announcement! 
Still working hard to get another shot like that this season and hoping for a chance to give it my all once more.", "token_idx_1": 7, "text_start_1": 31, "text_end_1": 40, "date_1": "2019-03", "text_2": "America has now become the epicenter of the #COVID19 pandemic. More people have tested positive than in any other place in the world. -- @minishmael #Farrakhan #TheTime", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 36, "date_2": "2020-03", "text_1_tokenized": ["Man", "it's", "been", "a", "year", "since", "the", "epicenter", "announcement", "!", "Still", "working", "hard", "to", "get", "another", "shot", "like", "that", "this", "season", "and", "hoping", "for", "a", "chance", "to", "give", "it", "my", "all", "once", "more", "."], "text_2_tokenized": ["America", "has", "now", "become", "the", "epicenter", "of", "the", "#COVID19", "pandemic", ".", "More", "people", "have", "tested", "positive", "than", "in", "any", "other", "place", "in", "the", "world", ".", "-", "-", "@minishmael", "#Farrakhan", "#TheTime"]} -{"id": "1874-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "I've got so many things coming up. A race, Arizona, North Carolina for epicenter, miami for @XBIZ, let's fucking dooooo this.", "token_idx_1": 16, "text_start_1": 71, "text_end_1": 80, "date_1": "2019-03", "text_2": "#London is a ticking time bomb. Wtf is @BorisJohnson doing? Put a curfew in place, close everywhere and sort the food situation out. The rate it's going London will become the new epicenter for the #CoronaVirus. #COVID\u30fc19", "token_idx_2": 36, "text_start_2": 180, "text_end_2": 189, "date_2": "2020-03", "text_1_tokenized": ["I've", "got", "so", "many", "things", "coming", "up", ".", "A", "race", ",", "Arizona", ",", "North", "Carolina", "for", "epicenter", ",", "miami", "for", "@XBIZ", ",", "let's", "fucking", "dooooo", "this", "."], "text_2_tokenized": ["#London", "is", "a", "ticking", "time", "bomb", ".", "Wtf", "is", "@BorisJohnson", "doing", "?", "Put", "a", "curfew", "in", "place", ",", "close", "everywhere", "and", "sort", "the", "food", "situation", "out", ".", "The", "rate", "it's", "going", "London", "will", "become", "the", "new", "epicenter", "for", "the", "#CoronaVirus", ".", "#COVID\u30fc19"]} -{"id": "1875-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Welcome to #Twitter, the epicenter of Indonesian jokes culture.", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 34, "date_1": "2019-03", "text_2": "De Blasio and New York City Health Commissioner Oxiris Barbot took turns telling citizens in February and March that the virus was not as widespread as people thought. 
Data now show that the city is becoming the epicenter for the coronavirus, which has killed 883 New Yorkers.", "token_idx_2": 38, "text_start_2": 212, "text_end_2": 221, "date_2": "2020-03", "text_1_tokenized": ["Welcome", "to", "#Twitter", ",", "the", "epicenter", "of", "Indonesian", "jokes", "culture", "."], "text_2_tokenized": ["De", "Blasio", "and", "New", "York", "City", "Health", "Commissioner", "Oxiris", "Barbot", "took", "turns", "telling", "citizens", "in", "February", "and", "March", "that", "the", "virus", "was", "not", "as", "widespread", "as", "people", "thought", ".", "Data", "now", "show", "that", "the", "city", "is", "becoming", "the", "epicenter", "for", "the", "coronavirus", ",", "which", "has", "killed", "883", "New", "Yorkers", "."]} -{"id": "1876-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "RT MMswayambhu \"davidfrawleyved OpIndia_com Both are suffering from the same pain, thus both nations worked together to wipe out the epicenter of the breeding grounds, training centers, & seminar halls. That's to ensure peace in the region. Myanmar played a pivotal role in m\u2026", "token_idx_1": 22, "text_start_1": 133, "text_end_1": 142, "date_1": "2019-03", "text_2": "America is going to be next Italy and new epicenter of #CoronavirusPandemic by Wednesday", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 51, "date_2": "2020-03", "text_1_tokenized": ["RT", "MMswayambhu", "\"", "davidfrawleyved", "OpIndia_com", "Both", "are", "suffering", "from", "the", "same", "pain", ",", "thus", "both", "nations", "worked", "together", "to", "wipe", "out", "the", "epicenter", "of", "the", "breeding", "grounds", ",", "training", "centers", ",", "&", "seminar", "halls", ".", "That's", "to", "ensure", "peace", "in", "the", "region", ".", "Myanmar", "played", "a", "pivotal", "role", "in", "m", "\u2026"], "text_2_tokenized": ["America", "is", "going", "to", "be", "next", "Italy", "and", "new", "epicenter", "of", "#CoronavirusPandemic", "by", "Wednesday"]} -{"id": "1877-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The burial story of trembling and earthquake restores death at the epicenter of thinking.", "token_idx_1": 11, "text_start_1": 67, "text_end_1": 76, "date_1": "2019-03", "text_2": "I really wanted to go to the corner deli to get some goodies but the way elmhurst hospital is looking (corner of my house) mejor me quedo en casa \ud83d\ude30 13 deaths in one night and we are close to 40,000 confirmed covid-19 cases in NYS.Queens is literally the epicenter STAY HOME PLS!", "token_idx_2": 53, "text_start_2": 254, "text_end_2": 263, "date_2": "2020-03", "text_1_tokenized": ["The", "burial", "story", "of", "trembling", "and", "earthquake", "restores", "death", "at", "the", "epicenter", "of", "thinking", "."], "text_2_tokenized": ["I", "really", "wanted", "to", "go", "to", "the", "corner", "deli", "to", "get", "some", "goodies", "but", "the", "way", "elmhurst", "hospital", "is", "looking", "(", "corner", "of", "my", "house", ")", "mejor", "me", "quedo", "en", "casa", "\ud83d\ude30", "13", "deaths", "in", "one", "night", "and", "we", "are", "close", "to", "40,000", "confirmed", "covid", "-", "19", "cases", "in", "NYS.Queens", "is", "literally", "the", "epicenter", "STAY", "HOME", "PLS", "!"]} -{"id": "1878-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "At the epicenter of long-legged European woman. 
I need to find my wifey.", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 16, "date_1": "2019-03", "text_2": "19 known cases in my daughters town. And she is alone with 2 kids in diapers. This is scary Cases identified in my town and my county is the epicenter of my state. I hope everyone is being abundantly cautious and staying safe.", "token_idx_2": 31, "text_start_2": 141, "text_end_2": 150, "date_2": "2020-03", "text_1_tokenized": ["At", "the", "epicenter", "of", "long-legged", "European", "woman", ".", "I", "need", "to", "find", "my", "wifey", "."], "text_2_tokenized": ["19", "known", "cases", "in", "my", "daughters", "town", ".", "And", "she", "is", "alone", "with", "2", "kids", "in", "diapers", ".", "This", "is", "scary", "Cases", "identified", "in", "my", "town", "and", "my", "county", "is", "the", "epicenter", "of", "my", "state", ".", "I", "hope", "everyone", "is", "being", "abundantly", "cautious", "and", "staying", "safe", "."]} -{"id": "1879-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "We are facing a possible Tsunami @ coast in 231 minutes. People are asked to seek higher grounds until we get another geological review about the earthquake epicenter. Be warned. Stay safe.", "token_idx_1": 28, "text_start_1": 157, "text_end_1": 166, "date_1": "2019-03", "text_2": "2019: -climbed the Great Wall of China -fall break in NYC -saw @AaronTveit on Broadway -spent Thanksgiving in Uganda 2020: -literally no one can go to China -NYC is the epicenter of a global pandemic -Aaron had COVID19 -Broadway is closed indefinitely -can't travel anywhere", "token_idx_2": 38, "text_start_2": 169, "text_end_2": 178, "date_2": "2020-03", "text_1_tokenized": ["We", "are", "facing", "a", "possible", "Tsunami", "@", "coast", "in", "231", "minutes", ".", "People", "are", "asked", "to", "seek", "higher", "grounds", "until", "we", "get", "another", "geological", "review", "about", "the", "earthquake", "epicenter", ".", "Be", "warned", ".", "Stay", "safe", "."], "text_2_tokenized": ["2019", ":", "-", "climbed", "the", "Great", "Wall", "of", "China", "-", "fall", "break", "in", "NYC", "-", "saw", "@AaronTveit", "on", "Broadway", "-", "spent", "Thanksgiving", "in", "Uganda", "2020", ":", "-", "literally", "no", "one", "can", "go", "to", "China", "-", "NYC", "is", "the", "epicenter", "of", "a", "global", "pandemic", "-", "Aaron", "had", "COVID", "19", "-", "Broadway", "is", "closed", "indefinitely", "-", "can't", "travel", "anywhere"]} -{"id": "1880-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "NewsATW Ebola treatment center in Congo reopens after attack An Ebola treatment center located at the epicenter of the current outbreak in eastern Democratic Republic of Congo has resumed operations after it was attacked last month, the country's health ministry said on Sat...", "token_idx_1": 16, "text_start_1": 102, "text_end_1": 111, "date_1": "2019-03", "text_2": "Hey, remember when @realDonaldTrump called this all a Democrat hoax and then said the heat would \u201cburn off the virus\u201d? And now the US is the epicenter of the virus! 
Still want to try & convince us that this unqualified, fake millionaire, orange windbag is doing a great job?", "token_idx_2": 30, "text_start_2": 141, "text_end_2": 150, "date_2": "2020-03", "text_1_tokenized": ["NewsATW", "Ebola", "treatment", "center", "in", "Congo", "reopens", "after", "attack", "An", "Ebola", "treatment", "center", "located", "at", "the", "epicenter", "of", "the", "current", "outbreak", "in", "eastern", "Democratic", "Republic", "of", "Congo", "has", "resumed", "operations", "after", "it", "was", "attacked", "last", "month", ",", "the", "country's", "health", "ministry", "said", "on", "Sat", "..."], "text_2_tokenized": ["Hey", ",", "remember", "when", "@realDonaldTrump", "called", "this", "all", "a", "Democrat", "hoax", "and", "then", "said", "the", "heat", "would", "\u201c", "burn", "off", "the", "virus", "\u201d", "?", "And", "now", "the", "US", "is", "the", "epicenter", "of", "the", "virus", "!", "Still", "want", "to", "try", "&", "convince", "us", "that", "this", "unqualified", ",", "fake", "millionaire", ",", "orange", "windbag", "is", "doing", "a", "great", "job", "?"]} -{"id": "1881-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "At the DMV and this is the epicenter of \u2018can I speak to your manager' energy", "token_idx_1": 7, "text_start_1": 27, "text_end_1": 36, "date_1": "2019-03", "text_2": "\uf0a7A dozen physicians at the epicenter of Italy's Covid-19 outbreak issued a plea to the rest of the world: Because hospitals \u201care themselves becoming sources of [coronavirus] infection,\u201d they urge other countries to deliver care to many patients in their homes.", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 36, "date_2": "2020-03", "text_1_tokenized": ["At", "the", "DMV", "and", "this", "is", "the", "epicenter", "of", "\u2018", "can", "I", "speak", "to", "your", "manager", "'", "energy"], "text_2_tokenized": ["\uf0a7", "A", "dozen", "physicians", "at", "the", "epicenter", "of", "Italy's", "Covid", "-", "19", "outbreak", "issued", "a", "plea", "to", "the", "rest", "of", "the", "world", ":", "Because", "hospitals", "\u201c", "are", "themselves", "becoming", "sources", "of", "[", "coronavirus", "]", "infection", ",", "\u201d", "they", "urge", "other", "countries", "to", "deliver", "care", "to", "many", "patients", "in", "their", "homes", "."]} -{"id": "1882-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "#kodakblack is what happens when a person is reared in the epicenter of death/gun violence...it's normalized in their environment so a person becomes desensitized to it's effects. 
not excusing him just providing a perspective of severe trauma...", "token_idx_1": 11, "text_start_1": 59, "text_end_1": 68, "date_1": "2019-03", "text_2": "The US will soon become the epicenter of COVID19.", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 37, "date_2": "2020-03", "text_1_tokenized": ["#kodakblack", "is", "what", "happens", "when", "a", "person", "is", "reared", "in", "the", "epicenter", "of", "death", "/", "gun", "violence", "...", "it's", "normalized", "in", "their", "environment", "so", "a", "person", "becomes", "desensitized", "to", "it's", "effects", ".", "not", "excusing", "him", "just", "providing", "a", "perspective", "of", "severe", "trauma", "..."], "text_2_tokenized": ["The", "US", "will", "soon", "become", "the", "epicenter", "of", "COVID", "19", "."]} -{"id": "1883-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Xenophobia in AA communities in the US towards other blacks is real, just as the vice versa is in the Caribbean. The biggest difference, however, is that we as black people in America sit at the epicenter of justice for ALL the diaspora against American white supremacy.", "token_idx_1": 40, "text_start_1": 195, "text_end_1": 204, "date_1": "2019-03", "text_2": "The only reason New York is the epicenter for the corona virus is because they test the most.", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 41, "date_2": "2020-03", "text_1_tokenized": ["Xenophobia", "in", "AA", "communities", "in", "the", "US", "towards", "other", "blacks", "is", "real", ",", "just", "as", "the", "vice", "versa", "is", "in", "the", "Caribbean", ".", "The", "biggest", "difference", ",", "however", ",", "is", "that", "we", "as", "black", "people", "in", "America", "sit", "at", "the", "epicenter", "of", "justice", "for", "ALL", "the", "diaspora", "against", "American", "white", "supremacy", "."], "text_2_tokenized": ["The", "only", "reason", "New", "York", "is", "the", "epicenter", "for", "the", "corona", "virus", "is", "because", "they", "test", "the", "most", "."]} -{"id": "1884-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Congratulations to new york city voted the greatest city in the world..again! Nyc, the big apple and the epicenter of the universe!", "token_idx_1": 22, "text_start_1": 105, "text_end_1": 114, "date_1": "2019-03", "text_2": "idk how true but how ironic is it that one of the few neighborhoods presented as part of the epicenter IN the epicenter of THE epicenter of the coronavirus outbreak in the us is corona queens new york", "token_idx_2": 19, "text_start_2": 93, "text_end_2": 102, "date_2": "2020-03", "text_1_tokenized": ["Congratulations", "to", "new", "york", "city", "voted", "the", "greatest", "city", "in", "the", "world", "..", "again", "!", "Nyc", ",", "the", "big", "apple", "and", "the", "epicenter", "of", "the", "universe", "!"], "text_2_tokenized": ["idk", "how", "true", "but", "how", "ironic", "is", "it", "that", "one", "of", "the", "few", "neighborhoods", "presented", "as", "part", "of", "the", "epicenter", "IN", "the", "epicenter", "of", "THE", "epicenter", "of", "the", "coronavirus", "outbreak", "in", "the", "us", "is", "corona", "queens", "new", "york"]} -{"id": "1885-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Holy fuck i can't wait to see bring me the horizon at epicenter", "token_idx_1": 12, "text_start_1": 54, "text_end_1": 63, "date_1": "2019-03", "text_2": "Trump says he's going to slowly open up the economy and people complain to stay home. 
He says he's thinking about putting a Quarantine in place for the epicenter of COVID-19 in the US responsible for over 50% of cases and deaths and people say you can't do that. He just can't win", "token_idx_2": 29, "text_start_2": 152, "text_end_2": 161, "date_2": "2020-03", "text_1_tokenized": ["Holy", "fuck", "i", "can't", "wait", "to", "see", "bring", "me", "the", "horizon", "at", "epicenter"], "text_2_tokenized": ["Trump", "says", "he's", "going", "to", "slowly", "open", "up", "the", "economy", "and", "people", "complain", "to", "stay", "home", ".", "He", "says", "he's", "thinking", "about", "putting", "a", "Quarantine", "in", "place", "for", "the", "epicenter", "of", "COVID", "-", "19", "in", "the", "US", "responsible", "for", "over", "50", "%", "of", "cases", "and", "deaths", "and", "people", "say", "you", "can't", "do", "that", ".", "He", "just", "can't", "win"]} -{"id": "1886-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Star log 513 - In search of the epicenter of this shitshow.", "token_idx_1": 8, "text_start_1": 32, "text_end_1": 41, "date_1": "2019-03", "text_2": "Our country might be the next epicenter of the virus so please do proper hygiene everyone and PRAY!", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 39, "date_2": "2020-03", "text_1_tokenized": ["Star", "log", "513", "-", "In", "search", "of", "the", "epicenter", "of", "this", "shitshow", "."], "text_2_tokenized": ["Our", "country", "might", "be", "the", "next", "epicenter", "of", "the", "virus", "so", "please", "do", "proper", "hygiene", "everyone", "and", "PRAY", "!"]} -{"id": "1887-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "When a society worship &glorify money;and become the epicenter that measure success. Corruption become the catalyst that buy honor.", "token_idx_1": 11, "text_start_1": 57, "text_end_1": 66, "date_1": "2019-03", "text_2": "Trying to write an essay on developmental literacy disorders BREAKING NEWS: Prince Charles has COVID-19 *attempts to read research paper* BREAKING NEWS: Now Boris has it *reads another line of paper* BREAKING NEWS: your home country is new epicenter This isn't going to work", "token_idx_2": 47, "text_start_2": 240, "text_end_2": 249, "date_2": "2020-03", "text_1_tokenized": ["When", "a", "society", "worship", "&", "glorify", "money", ";", "and", "become", "the", "epicenter", "that", "measure", "success", ".", "Corruption", "become", "the", "catalyst", "that", "buy", "honor", "."], "text_2_tokenized": ["Trying", "to", "write", "an", "essay", "on", "developmental", "literacy", "disorders", "BREAKING", "NEWS", ":", "Prince", "Charles", "has", "COVID", "-", "19", "*", "attempts", "to", "read", "research", "paper", "*", "BREAKING", "NEWS", ":", "Now", "Boris", "has", "it", "*", "reads", "another", "line", "of", "paper", "*", "BREAKING", "NEWS", ":", "your", "home", "country", "is", "new", "epicenter", "This", "isn't", "going", "to", "work"]} -{"id": "1888-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Northerners and west coasters call the south the epicenter of American racism then say the most racist shit about african leftist leaders. The 3/5ths compromise wasn't signed in Texas or Alabama. The racism is inherent in America's system and you're trying to escape blame.", "token_idx_1": 8, "text_start_1": 49, "text_end_1": 58, "date_1": "2019-03", "text_2": "My parents and I've agreed to call each other every Sunday now. They wanna make sure I'm okay, since I live in the new COVID epicenter. I wanna check up on them, esp. 
since my mom still has to work No point to this post really, except a reminder to check up on your loved ones", "token_idx_2": 27, "text_start_2": 125, "text_end_2": 134, "date_2": "2020-03", "text_1_tokenized": ["Northerners", "and", "west", "coasters", "call", "the", "south", "the", "epicenter", "of", "American", "racism", "then", "say", "the", "most", "racist", "shit", "about", "african", "leftist", "leaders", ".", "The", "3/5", "ths", "compromise", "wasn't", "signed", "in", "Texas", "or", "Alabama", ".", "The", "racism", "is", "inherent", "in", "America's", "system", "and", "you're", "trying", "to", "escape", "blame", "."], "text_2_tokenized": ["My", "parents", "and", "I've", "agreed", "to", "call", "each", "other", "every", "Sunday", "now", ".", "They", "wanna", "make", "sure", "I'm", "okay", ",", "since", "I", "live", "in", "the", "new", "COVID", "epicenter", ".", "I", "wanna", "check", "up", "on", "them", ",", "esp", ".", "since", "my", "mom", "still", "has", "to", "work", "No", "point", "to", "this", "post", "really", ",", "except", "a", "reminder", "to", "check", "up", "on", "your", "loved", "ones"]} -{"id": "1889-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "i work right by the epicenter and no one ever comes and has lunch with me lol", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 29, "date_1": "2019-03", "text_2": "//So if you don't know Europe is the current epicenter of the pandemic and honestly the hardest part is the fucking boredom", "token_idx_2": 11, "text_start_2": 45, "text_end_2": 54, "date_2": "2020-03", "text_1_tokenized": ["i", "work", "right", "by", "the", "epicenter", "and", "no", "one", "ever", "comes", "and", "has", "lunch", "with", "me", "lol"], "text_2_tokenized": ["/", "/", "So", "if", "you", "don't", "know", "Europe", "is", "the", "current", "epicenter", "of", "the", "pandemic", "and", "honestly", "the", "hardest", "part", "is", "the", "fucking", "boredom"]} -{"id": "1890-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Strive to then same title but began different, And the epicenter to think fashion relief and want, Full off just feeling there Evening News.", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 64, "date_1": "2019-03", "text_2": "In the middle of the epicenter of a pandemic so I figure it's the best time to move. I'm smart.", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["Strive", "to", "then", "same", "title", "but", "began", "different", ",", "And", "the", "epicenter", "to", "think", "fashion", "relief", "and", "want", ",", "Full", "off", "just", "feeling", "there", "Evening", "News", "."], "text_2_tokenized": ["In", "the", "middle", "of", "the", "epicenter", "of", "a", "pandemic", "so", "I", "figure", "it's", "the", "best", "time", "to", "move", ".", "I'm", "smart", "."]} -{"id": "1891-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The best parts about living in CA is being at the source of a cultural epicenter, Wildflowers, and NEVER EVER being jealous of and in fact pitying NE \u201ctaco Tuesday's\u201d", "token_idx_1": 15, "text_start_1": 71, "text_end_1": 80, "date_1": "2019-03", "text_2": "Today's range expected 1.0910 x 1.1082. NY now #Covid19 epicenter - 3,000 deaths. AUD-pairs spiked overnight at extreme #China PMI data - possibly questionable? 
Eco EUR German unemployment still pos., #CoronaVirus no impact yet, USD 9:45 Chicago PMI, 10:00 Consumer Confidence.", "token_idx_2": 10, "text_start_2": 56, "text_end_2": 65, "date_2": "2020-03", "text_1_tokenized": ["The", "best", "parts", "about", "living", "in", "CA", "is", "being", "at", "the", "source", "of", "a", "cultural", "epicenter", ",", "Wildflowers", ",", "and", "NEVER", "EVER", "being", "jealous", "of", "and", "in", "fact", "pitying", "NE", "\u201c", "taco", "Tuesday's", "\u201d"], "text_2_tokenized": ["Today's", "range", "expected", "1.0910", "x", "1.1082", ".", "NY", "now", "#Covid19", "epicenter", "-", "3,000", "deaths", ".", "AUD-pairs", "spiked", "overnight", "at", "extreme", "#China", "PMI", "data", "-", "possibly", "questionable", "?", "Eco", "EUR", "German", "unemployment", "still", "pos", ".", ",", "#CoronaVirus", "no", "impact", "yet", ",", "USD", "9:45", "Chicago", "PMI", ",", "10:00", "Consumer", "Confidence", "."]} -{"id": "1892-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "fun fact, i was extremely close to the epicenter of a 6.1 earthquake a couple years ago. I was playing pokemon and all of a sudden the earth shook and my ceiling fan and wall clock broke. It was a bruh moment for sure.", "token_idx_1": 9, "text_start_1": 39, "text_end_1": 48, "date_1": "2019-03", "text_2": "Looks like India will be the next biggest epicenter of #Covid_19 \ud83e\udd7a\ud83d\ude2b\ud83d\ude14", "token_idx_2": 8, "text_start_2": 42, "text_end_2": 51, "date_2": "2020-03", "text_1_tokenized": ["fun", "fact", ",", "i", "was", "extremely", "close", "to", "the", "epicenter", "of", "a", "6.1", "earthquake", "a", "couple", "years", "ago", ".", "I", "was", "playing", "pokemon", "and", "all", "of", "a", "sudden", "the", "earth", "shook", "and", "my", "ceiling", "fan", "and", "wall", "clock", "broke", ".", "It", "was", "a", "bruh", "moment", "for", "sure", "."], "text_2_tokenized": ["Looks", "like", "India", "will", "be", "the", "next", "biggest", "epicenter", "of", "#Covid_19", "\ud83e\udd7a", "\ud83d\ude2b", "\ud83d\ude14"]} -{"id": "1893-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Kickstarter is the epicenter of high ideas. The video is normally the apex of a person's productivity in this state", "token_idx_1": 3, "text_start_1": 19, "text_end_1": 28, "date_1": "2019-03", "text_2": "I just cant process that I need to write future research plans or teaching plans when literally life and death matters. Researchers inNY must have felt this for some time. 
Seeing the news warning that Philly being the next epicenter, I just cant anymore.", "token_idx_2": 41, "text_start_2": 223, "text_end_2": 232, "date_2": "2020-03", "text_1_tokenized": ["Kickstarter", "is", "the", "epicenter", "of", "high", "ideas", ".", "The", "video", "is", "normally", "the", "apex", "of", "a", "person's", "productivity", "in", "this", "state"], "text_2_tokenized": ["I", "just", "cant", "process", "that", "I", "need", "to", "write", "future", "research", "plans", "or", "teaching", "plans", "when", "literally", "life", "and", "death", "matters", ".", "Researchers", "inNY", "must", "have", "felt", "this", "for", "some", "time", ".", "Seeing", "the", "news", "warning", "that", "Philly", "being", "the", "next", "epicenter", ",", "I", "just", "cant", "anymore", "."]} -{"id": "1894-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "RT UNFPA_Zimbabwe: .Emuia who has been part of a UNZimbabwe delegation to the epicenter of the #CycloneIdai in #Chimanimani has commende\u20261111258273716584448", "token_idx_1": 15, "text_start_1": 78, "text_end_1": 87, "date_1": "2019-03", "text_2": "New York City has been the epicenter of the century's two biggest calamities in 9/11 and the coronavirus pandemic. New Yorkers' pride after this is over will be through the roof and they will have deserved every drop.", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 36, "date_2": "2020-03", "text_1_tokenized": ["RT", "UNFPA_Zimbabwe", ":", ".", "Emuia", "who", "has", "been", "part", "of", "a", "UNZimbabwe", "delegation", "to", "the", "epicenter", "of", "the", "#CycloneIdai", "in", "#Chimanimani", "has", "commende", "\u2026", "11112582737", "16584448"], "text_2_tokenized": ["New", "York", "City", "has", "been", "the", "epicenter", "of", "the", "century's", "two", "biggest", "calamities", "in", "9/11", "and", "the", "coronavirus", "pandemic", ".", "New", "Yorkers", "'", "pride", "after", "this", "is", "over", "will", "be", "through", "the", "roof", "and", "they", "will", "have", "deserved", "every", "drop", "."]} -{"id": "1895-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "I'm the epicenter of every explosion at my store and I'm fucking sick of it", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 17, "date_1": "2019-03", "text_2": "In Gotham, the global epicenter of this pandemic, several times more people have now died of the virus than were killed in the city on 9/11. The piles of corpses are being stored in freezer trucks and dumped in mass graves.", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 31, "date_2": "2020-03", "text_1_tokenized": ["I'm", "the", "epicenter", "of", "every", "explosion", "at", "my", "store", "and", "I'm", "fucking", "sick", "of", "it"], "text_2_tokenized": ["In", "Gotham", ",", "the", "global", "epicenter", "of", "this", "pandemic", ",", "several", "times", "more", "people", "have", "now", "died", "of", "the", "virus", "than", "were", "killed", "in", "the", "city", "on", "9/11", ".", "The", "piles", "of", "corpses", "are", "being", "stored", "in", "freezer", "trucks", "and", "dumped", "in", "mass", "graves", "."]} -{"id": "1896-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Western Mass is the epicenter on college hockey! In the sleepy west, of the woody east. Is valley full, full of pioneer.", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 29, "date_1": "2019-03", "text_2": "How many masks have you shipped to NYC @3M the epicenter of #USA How many? 
New Yorkers would love to know?", "token_idx_2": 10, "text_start_2": 47, "text_end_2": 56, "date_2": "2020-03", "text_1_tokenized": ["Western", "Mass", "is", "the", "epicenter", "on", "college", "hockey", "!", "In", "the", "sleepy", "west", ",", "of", "the", "woody", "east", ".", "Is", "valley", "full", ",", "full", "of", "pioneer", "."], "text_2_tokenized": ["How", "many", "masks", "have", "you", "shipped", "to", "NYC", "@3M", "the", "epicenter", "of", "#USA", "How", "many", "?", "New", "Yorkers", "would", "love", "to", "know", "?"]} -{"id": "1897-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Mad we missed the day time festivities at the epicenter! \u2639\ufe0f we pulled up about 30 mins ago and everybody getting ready to leave", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 55, "date_1": "2019-03", "text_2": "So no country is levying tariff against #China for being the epicenter of the #COVID\u30fc19 pandemic. They are instead looking inward to strengthen their public health systems and improve the lives of their people. Isn't this a good lesson and story to tell to our children unborn?", "token_idx_2": 11, "text_start_2": 61, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["Mad", "we", "missed", "the", "day", "time", "festivities", "at", "the", "epicenter", "!", "\u2639", "\ufe0f", "we", "pulled", "up", "about", "30", "mins", "ago", "and", "everybody", "getting", "ready", "to", "leave"], "text_2_tokenized": ["So", "no", "country", "is", "levying", "tariff", "against", "#China", "for", "being", "the", "epicenter", "of", "the", "#COVID\u30fc19", "pandemic", ".", "They", "are", "instead", "looking", "inward", "to", "strengthen", "their", "public", "health", "systems", "and", "improve", "the", "lives", "of", "their", "people", ".", "Isn't", "this", "a", "good", "lesson", "and", "story", "to", "tell", "to", "our", "children", "unborn", "?"]} -{"id": "1898-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "One thing a tour guide at U.S.C. didn't mention: that the university was at the epicenter of an unfolding college admissions scandal involving federal charges of bribery, cheating & parents who were willing to pay big money to get their children into top colleges.", "token_idx_1": 21, "text_start_1": 80, "text_end_1": 89, "date_1": "2019-03", "text_2": "#coroavirus #misplacedoptimism WHO official said that the U.S. has the potential to become the new epicenter of the global crisis. 
Trump said Tuesday that he wants to end the restrictions by Easter \u2014 April 12 \u2014 and continued to play down the dangers of the pandemic", "token_idx_2": 18, "text_start_2": 99, "text_end_2": 108, "date_2": "2020-03", "text_1_tokenized": ["One", "thing", "a", "tour", "guide", "at", "U", ".", "S", ".", "C", ".", "didn't", "mention", ":", "that", "the", "university", "was", "at", "the", "epicenter", "of", "an", "unfolding", "college", "admissions", "scandal", "involving", "federal", "charges", "of", "bribery", ",", "cheating", "&", "parents", "who", "were", "willing", "to", "pay", "big", "money", "to", "get", "their", "children", "into", "top", "colleges", "."], "text_2_tokenized": ["#coroavirus", "#misplacedoptimism", "WHO", "official", "said", "that", "the", "U", ".", "S", ".", "has", "the", "potential", "to", "become", "the", "new", "epicenter", "of", "the", "global", "crisis", ".", "Trump", "said", "Tuesday", "that", "he", "wants", "to", "end", "the", "restrictions", "by", "Easter", "\u2014", "April", "12", "\u2014", "and", "continued", "to", "play", "down", "the", "dangers", "of", "the", "pandemic"]} -{"id": "1899-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Countering Ebola #misinformation by carefully choosing messengers 1/5 \u201c\u2026near the epidemic's epicenter there is widespread distrust of local politicians, doubt whether Ebola even exists, and concern that the outbreak was fabricated for financial or political gain\u2026.", "token_idx_1": 13, "text_start_1": 92, "text_end_1": 101, "date_1": "2019-03", "text_2": "Doesn't Florida have the equivalent demographic to Italy? The same percentage of an elderly population? And we're a huge epicenter for tourism that goes unchecked even now. Yeah, it's going to be bad.", "token_idx_2": 21, "text_start_2": 121, "text_end_2": 130, "date_2": "2020-03", "text_1_tokenized": ["Countering", "Ebola", "#misinformation", "by", "carefully", "choosing", "messengers", "1/5", "\u201c", "\u2026", "near", "the", "epidemic's", "epicenter", "there", "is", "widespread", "distrust", "of", "local", "politicians", ",", "doubt", "whether", "Ebola", "even", "exists", ",", "and", "concern", "that", "the", "outbreak", "was", "fabricated", "for", "financial", "or", "political", "gain", "\u2026", "."], "text_2_tokenized": ["Doesn't", "Florida", "have", "the", "equivalent", "demographic", "to", "Italy", "?", "The", "same", "percentage", "of", "an", "elderly", "population", "?", "And", "we're", "a", "huge", "epicenter", "for", "tourism", "that", "goes", "unchecked", "even", "now", ".", "Yeah", ",", "it's", "going", "to", "be", "bad", "."]} -{"id": "1900-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Americans should think long & hard, dig far & deep before considering a politician from Chicago - the epicenter of political corruption - for any important office ever again. 
EVER!", "token_idx_1": 19, "text_start_1": 110, "text_end_1": 119, "date_1": "2019-03", "text_2": "20th floor and 5.6 Richter scale earthquake (epicenter) are not fun at all, I grabbed my cat and macbook cause I know my #priorities - and now we are getting some snow in #Zagreb great week \ud83d\udc4c gg Earth", "token_idx_2": 8, "text_start_2": 45, "text_end_2": 54, "date_2": "2020-03", "text_1_tokenized": ["Americans", "should", "think", "long", "&", "hard", ",", "dig", "far", "&", "deep", "before", "considering", "a", "politician", "from", "Chicago", "-", "the", "epicenter", "of", "political", "corruption", "-", "for", "any", "important", "office", "ever", "again", ".", "EVER", "!"], "text_2_tokenized": ["20th", "floor", "and", "5.6", "Richter", "scale", "earthquake", "(", "epicenter", ")", "are", "not", "fun", "at", "all", ",", "I", "grabbed", "my", "cat", "and", "macbook", "cause", "I", "know", "my", "#priorities", "-", "and", "now", "we", "are", "getting", "some", "snow", "in", "#Zagreb", "great", "week", "\ud83d\udc4c", "gg", "Earth"]} -{"id": "1901-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "A light #Earthquake of magnitude 4.8 on the Richter scale was experienced in South Eastern Kenya this evening at 7:21pm. The depth was 9.54km, with its epicenter in Wundanyi, Taita-Taveta County.", "token_idx_1": 30, "text_start_1": 152, "text_end_1": 161, "date_1": "2019-03", "text_2": "The Spirit of NYC is undefeated! People on stoops cheering runners, waving, sending positivity. With that said, we desperately need military support - #NYC is the epicenter of #COVID19 in the US. Too many people, not enough time @USArmy @USMC", "token_idx_2": 31, "text_start_2": 163, "text_end_2": 172, "date_2": "2020-03", "text_1_tokenized": ["A", "light", "#Earthquake", "of", "magnitude", "4.8", "on", "the", "Richter", "scale", "was", "experienced", "in", "South", "Eastern", "Kenya", "this", "evening", "at", "7:21", "pm", ".", "The", "depth", "was", "9.54", "km", ",", "with", "its", "epicenter", "in", "Wundanyi", ",", "Taita-Taveta", "County", "."], "text_2_tokenized": ["The", "Spirit", "of", "NYC", "is", "undefeated", "!", "People", "on", "stoops", "cheering", "runners", ",", "waving", ",", "sending", "positivity", ".", "With", "that", "said", ",", "we", "desperately", "need", "military", "support", "-", "#NYC", "is", "the", "epicenter", "of", "#COVID19", "in", "the", "US", ".", "Too", "many", "people", ",", "not", "enough", "time", "@USArmy", "@USMC"]} -{"id": "1902-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "The US has seen more reports of measles cases in the first three months of 2019 than in the whole of last year, health officials said this week. Thank the evangelical led home-school movement that is the epicenter of anti-science and anti-vax e mythology of all kinds.", "token_idx_1": 39, "text_start_1": 204, "text_end_1": 213, "date_1": "2019-03", "text_2": "we are the epicenter of the pandemic in the us. 
WE ARE", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["The", "US", "has", "seen", "more", "reports", "of", "measles", "cases", "in", "the", "first", "three", "months", "of", "2019", "than", "in", "the", "whole", "of", "last", "year", ",", "health", "officials", "said", "this", "week", ".", "Thank", "the", "evangelical", "led", "home-school", "movement", "that", "is", "the", "epicenter", "of", "anti-science", "and", "anti-vax", "e", "mythology", "of", "all", "kinds", "."], "text_2_tokenized": ["we", "are", "the", "epicenter", "of", "the", "pandemic", "in", "the", "us", ".", "WE", "ARE"]} -{"id": "1903-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "#OperationVarsityBlues: The University of Southern California is at the epicenter of the scandal, with some of the biggest names linked to it, including actress Lori Loughlin and her husband, fashion designer Mossimo Giannulli.", "token_idx_1": 10, "text_start_1": 72, "text_end_1": 81, "date_1": "2019-03", "text_2": "Day 10 of isolation in Bozeman, #Montana and thinking of all of you at the epicenter of the outbreak. Stay strong and keep your eye on the horizon. Smoother seas will come. Until then, batten down the hatches and know that we are all riding out the storm together.", "token_idx_2": 16, "text_start_2": 75, "text_end_2": 84, "date_2": "2020-03", "text_1_tokenized": ["#OperationVarsityBlues", ":", "The", "University", "of", "Southern", "California", "is", "at", "the", "epicenter", "of", "the", "scandal", ",", "with", "some", "of", "the", "biggest", "names", "linked", "to", "it", ",", "including", "actress", "Lori", "Loughlin", "and", "her", "husband", ",", "fashion", "designer", "Mossimo", "Giannulli", "."], "text_2_tokenized": ["Day", "10", "of", "isolation", "in", "Bozeman", ",", "#Montana", "and", "thinking", "of", "all", "of", "you", "at", "the", "epicenter", "of", "the", "outbreak", ".", "Stay", "strong", "and", "keep", "your", "eye", "on", "the", "horizon", ".", "Smoother", "seas", "will", "come", ".", "Until", "then", ",", "batten", "down", "the", "hatches", "and", "know", "that", "we", "are", "all", "riding", "out", "the", "storm", "together", "."]} -{"id": "1904-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "#Italy has cut growth to 0.1% form 1% ...remember where the epicenter of EU balance sheet issues lies...", "token_idx_1": 14, "text_start_1": 60, "text_end_1": 69, "date_1": "2019-03", "text_2": "I know the US is all but certain to become the new epicenter, but a part of me is still thinking Japan's going to be whacked senseless by this in the next week or so too.", "token_idx_2": 12, "text_start_2": 51, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["#Italy", "has", "cut", "growth", "to", "0.1", "%", "form", "1", "%", "...", "remember", "where", "the", "epicenter", "of", "EU", "balance", "sheet", "issues", "lies", "..."], "text_2_tokenized": ["I", "know", "the", "US", "is", "all", "but", "certain", "to", "become", "the", "new", "epicenter", ",", "but", "a", "part", "of", "me", "is", "still", "thinking", "Japan's", "going", "to", "be", "whacked", "senseless", "by", "this", "in", "the", "next", "week", "or", "so", "too", "."]} -{"id": "1905-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "It's April 8, 2019. Exactly 5 years from today, Cleveland will be at the epicenter of a rare 100% solar eclipse, likely drawing tens of thousands of visitors to our city. Where do you want Cleveland to be like by then? 
What do we have to do, starting today, to get there?", "token_idx_1": 17, "text_start_1": 73, "text_end_1": 82, "date_1": "2019-03", "text_2": "NYC is the epicenter crowded as all fuck like sardines that is why numbers are high not to say that 174 hospitals were closed over the years we sure need them now!!!", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 20, "date_2": "2020-03", "text_1_tokenized": ["It's", "April", "8", ",", "2019", ".", "Exactly", "5", "years", "from", "today", ",", "Cleveland", "will", "be", "at", "the", "epicenter", "of", "a", "rare", "100", "%", "solar", "eclipse", ",", "likely", "drawing", "tens", "of", "thousands", "of", "visitors", "to", "our", "city", ".", "Where", "do", "you", "want", "Cleveland", "to", "be", "like", "by", "then", "?", "What", "do", "we", "have", "to", "do", ",", "starting", "today", ",", "to", "get", "there", "?"], "text_2_tokenized": ["NYC", "is", "the", "epicenter", "crowded", "as", "all", "fuck", "like", "sardines", "that", "is", "why", "numbers", "are", "high", "not", "to", "say", "that", "174", "hospitals", "were", "closed", "over", "the", "years", "we", "sure", "need", "them", "now", "!", "!", "!"]} -{"id": "1906-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Pope names Wilton Gregory as archbishop of Washington, DC, epicenter of US sex abuse crisis. | via @AP", "token_idx_1": 11, "text_start_1": 59, "text_end_1": 68, "date_1": "2019-03", "text_2": "Some where in Kisumu Dala: I like your covid smile my Corona, my love for you is quarantined in my heart. You are the epicenter of my happiness, why don't you give me your number that we may talk in isolation.", "token_idx_2": 27, "text_start_2": 118, "text_end_2": 127, "date_2": "2020-03", "text_1_tokenized": ["Pope", "names", "Wilton", "Gregory", "as", "archbishop", "of", "Washington", ",", "DC", ",", "epicenter", "of", "US", "sex", "abuse", "crisis", ".", "|", "via", "@AP"], "text_2_tokenized": ["Some", "where", "in", "Kisumu", "Dala", ":", "I", "like", "your", "covid", "smile", "my", "Corona", ",", "my", "love", "for", "you", "is", "quarantined", "in", "my", "heart", ".", "You", "are", "the", "epicenter", "of", "my", "happiness", ",", "why", "don't", "you", "give", "me", "your", "number", "that", "we", "may", "talk", "in", "isolation", "."]} -{"id": "1907-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "you know you're from LA when you can sleep through an earthquake with the epicenter near you", "token_idx_1": 14, "text_start_1": 74, "text_end_1": 83, "date_1": "2019-03", "text_2": "bruh it was the freaking epicenter", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 34, "date_2": "2020-03", "text_1_tokenized": ["you", "know", "you're", "from", "LA", "when", "you", "can", "sleep", "through", "an", "earthquake", "with", "the", "epicenter", "near", "you"], "text_2_tokenized": ["bruh", "it", "was", "the", "freaking", "epicenter"]} -{"id": "1908-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "If I'm the FBI, I'd have a team if agents surveying JayPro on the nation's 250th. 
Philly is gonna be the epicenter of everything, wouldn't put it past him to try to coordinate something out of jealousy his loser second rate city isn't important enough to have such an event", "token_idx_1": 23, "text_start_1": 105, "text_end_1": 114, "date_1": "2019-03", "text_2": "WHO confirms newlodge as new coronavirus epicenter", "token_idx_2": 6, "text_start_2": 41, "text_end_2": 50, "date_2": "2020-03", "text_1_tokenized": ["If", "I'm", "the", "FBI", ",", "I'd", "have", "a", "team", "if", "agents", "surveying", "JayPro", "on", "the", "nation's", "250th", ".", "Philly", "is", "gonna", "be", "the", "epicenter", "of", "everything", ",", "wouldn't", "put", "it", "past", "him", "to", "try", "to", "coordinate", "something", "out", "of", "jealousy", "his", "loser", "second", "rate", "city", "isn't", "important", "enough", "to", "have", "such", "an", "event"], "text_2_tokenized": ["WHO", "confirms", "newlodge", "as", "new", "coronavirus", "epicenter"]} -{"id": "1909-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "want to go to the epicenter for my bf bday but his birthday is 2 weeks after my due date and I think that's a little much", "token_idx_1": 5, "text_start_1": 18, "text_end_1": 27, "date_1": "2019-03", "text_2": "Update on Rona: Our dearly beloved president just released 15 billion naira to fight rona. 10 to Lagos as being the epicenter and 5 to @NCDCgov We move\ud83d\ude0e\ud83c\uddf3\ud83c\uddec", "token_idx_2": 23, "text_start_2": 116, "text_end_2": 125, "date_2": "2020-03", "text_1_tokenized": ["want", "to", "go", "to", "the", "epicenter", "for", "my", "bf", "bday", "but", "his", "birthday", "is", "2", "weeks", "after", "my", "due", "date", "and", "I", "think", "that's", "a", "little", "much"], "text_2_tokenized": ["Update", "on", "Rona", ":", "Our", "dearly", "beloved", "president", "just", "released", "15", "billion", "naira", "to", "fight", "rona", ".", "10", "to", "Lagos", "as", "being", "the", "epicenter", "and", "5", "to", "@NCDCgov", "We", "move", "\ud83d\ude0e", "\ud83c\uddf3", "\ud83c\uddec"]} -{"id": "1910-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Marduk was the patron deity of the city of Babylon when it became the political epicenter of the Euphrates valley in the 18th century BC.", "token_idx_1": 15, "text_start_1": 80, "text_end_1": 89, "date_1": "2019-03", "text_2": "My Ravi album just got here from Daegu, the epicenter of Korea's outbreak \ud83d\udc40", "token_idx_2": 10, "text_start_2": 44, "text_end_2": 53, "date_2": "2020-03", "text_1_tokenized": ["Marduk", "was", "the", "patron", "deity", "of", "the", "city", "of", "Babylon", "when", "it", "became", "the", "political", "epicenter", "of", "the", "Euphrates", "valley", "in", "the", "18th", "century", "BC", "."], "text_2_tokenized": ["My", "Ravi", "album", "just", "got", "here", "from", "Daegu", ",", "the", "epicenter", "of", "Korea's", "outbreak", "\ud83d\udc40"]} -{"id": "1911-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "God is never far. Sometimes He allows our lives to be stripped down to the bones so we can seek for Him, reach for Him & find Him. Sometimes God in His sovereignty will strip away those earthly things we cling to hardest to get us back to the epicenter of our existence. Him. 
#God", "token_idx_1": 52, "text_start_1": 247, "text_end_1": 256, "date_1": "2019-03", "text_2": "#Wuhan city, the epicenter of the #coronavirus outbreak, said it will require all overseas travellers arriving in the city to be centrally quarantined for 14 days at their own cost from Tuesday.", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 26, "date_2": "2020-03", "text_1_tokenized": ["God", "is", "never", "far", ".", "Sometimes", "He", "allows", "our", "lives", "to", "be", "stripped", "down", "to", "the", "bones", "so", "we", "can", "seek", "for", "Him", ",", "reach", "for", "Him", "&", "find", "Him", ".", "Sometimes", "God", "in", "His", "sovereignty", "will", "strip", "away", "those", "earthly", "things", "we", "cling", "to", "hardest", "to", "get", "us", "back", "to", "the", "epicenter", "of", "our", "existence", ".", "Him", ".", "#God"], "text_2_tokenized": ["#Wuhan", "city", ",", "the", "epicenter", "of", "the", "#coronavirus", "outbreak", ",", "said", "it", "will", "require", "all", "overseas", "travellers", "arriving", "in", "the", "city", "to", "be", "centrally", "quarantined", "for", "14", "days", "at", "their", "own", "cost", "from", "Tuesday", "."]} -{"id": "1912-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "I want nothing more than to go to sleep. I cant bc my sons social life has him out watching basketball with friends and I need to go get him soon. Is this the epicenter of parenthood?", "token_idx_1": 36, "text_start_1": 159, "text_end_1": 168, "date_1": "2019-03", "text_2": "Bengal is done&dusted. Bengal shd be the epicenter of india Worried is a mild word. I am shit scared of Bengal #MamataBanerjee is so much intoxicated with power that she is just finishing Bengal & she is doing on behalf of Sonia who promised her PM chair. @NIA_India @HMOIndia", "token_idx_2": 10, "text_start_2": 45, "text_end_2": 54, "date_2": "2020-03", "text_1_tokenized": ["I", "want", "nothing", "more", "than", "to", "go", "to", "sleep", ".", "I", "cant", "bc", "my", "sons", "social", "life", "has", "him", "out", "watching", "basketball", "with", "friends", "and", "I", "need", "to", "go", "get", "him", "soon", ".", "Is", "this", "the", "epicenter", "of", "parenthood", "?"], "text_2_tokenized": ["Bengal", "is", "done", "&", "dusted", ".", "Bengal", "shd", "be", "the", "epicenter", "of", "india", "Worried", "is", "a", "mild", "word", ".", "I", "am", "shit", "scared", "of", "Bengal", "#MamataBanerjee", "is", "so", "much", "intoxicated", "with", "power", "that", "she", "is", "just", "finishing", "Bengal", "&", "she", "is", "doing", "on", "behalf", "of", "Sonia", "who", "promised", "her", "PM", "chair", ".", "@NIA_India", "@HMOIndia"]} -{"id": "1913-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "At the epicenter of chinatown you will find jehovas witnesses, ppl selling crystals, hoteps, live music, evangelicals, the homeless, rayciss capitols fans, YOU NAME IT", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 16, "date_1": "2019-03", "text_2": "Europe is the epicenter of Ncov and I worry for my brother who's a nurse and taking care of ncov patientssssss but I know he'll be safe. Just worried. 
\ud83d\ude37\ud83d\ude14\ud83d\ude37", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 23, "date_2": "2020-03", "text_1_tokenized": ["At", "the", "epicenter", "of", "chinatown", "you", "will", "find", "jehovas", "witnesses", ",", "ppl", "selling", "crystals", ",", "hoteps", ",", "live", "music", ",", "evangelicals", ",", "the", "homeless", ",", "rayciss", "capitols", "fans", ",", "YOU", "NAME", "IT"], "text_2_tokenized": ["Europe", "is", "the", "epicenter", "of", "Ncov", "and", "I", "worry", "for", "my", "brother", "who's", "a", "nurse", "and", "taking", "care", "of", "ncov", "patientssssss", "but", "I", "know", "he'll", "be", "safe", ".", "Just", "worried", ".", "\ud83d\ude37", "\ud83d\ude14", "\ud83d\ude37"]} -{"id": "1914-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Watched The First Purge. Definitely my least favorite out of the franchise. Weak plot with no info on the research about lawful killings that lead to political confidence in the purge. The racism leaped out in choosing staten island as the experiment epicenter...", "token_idx_1": 45, "text_start_1": 251, "text_end_1": 260, "date_1": "2019-03", "text_2": "What he doesn't understand about this is everything. New York is testing and is the epicenter of all travel in the United States. That's why they have more people that are sick", "token_idx_2": 16, "text_start_2": 84, "text_end_2": 93, "date_2": "2020-03", "text_1_tokenized": ["Watched", "The", "First", "Purge", ".", "Definitely", "my", "least", "favorite", "out", "of", "the", "franchise", ".", "Weak", "plot", "with", "no", "info", "on", "the", "research", "about", "lawful", "killings", "that", "lead", "to", "political", "confidence", "in", "the", "purge", ".", "The", "racism", "leaped", "out", "in", "choosing", "staten", "island", "as", "the", "experiment", "epicenter", "..."], "text_2_tokenized": ["What", "he", "doesn't", "understand", "about", "this", "is", "everything", ".", "New", "York", "is", "testing", "and", "is", "the", "epicenter", "of", "all", "travel", "in", "the", "United", "States", ".", "That's", "why", "they", "have", "more", "people", "that", "are", "sick"]} -{"id": "1915-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Am wondering how #USGS could detect Wundanyi as the epicenter of the earthquake without a word from our very own Met department.....", "token_idx_1": 9, "text_start_1": 52, "text_end_1": 61, "date_1": "2019-03", "text_2": "Lol W.H.O. Just predicted America as the next major epicenter of the virus, but I'm sure we will be good by the end of the week. Trump feels optimistic about it, so we should be fine.", "token_idx_2": 14, "text_start_2": 52, "text_end_2": 61, "date_2": "2020-03", "text_1_tokenized": ["Am", "wondering", "how", "#USGS", "could", "detect", "Wundanyi", "as", "the", "epicenter", "of", "the", "earthquake", "without", "a", "word", "from", "our", "very", "own", "Met", "department", "..."], "text_2_tokenized": ["Lol", "W", ".", "H", ".", "O", ".", "Just", "predicted", "America", "as", "the", "next", "major", "epicenter", "of", "the", "virus", ",", "but", "I'm", "sure", "we", "will", "be", "good", "by", "the", "end", "of", "the", "week", ".", "Trump", "feels", "optimistic", "about", "it", ",", "so", "we", "should", "be", "fine", "."]} -{"id": "1916-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "got a call from denver,epicenter of cyclonebomb,where my friend is. 
she's bugging out!!!", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 32, "date_1": "2019-03", "text_2": "I need to call my family in New York since the epicenter to this shit is in Coney Island. I just don't want to talk to the whole family though", "token_idx_2": 11, "text_start_2": 47, "text_end_2": 56, "date_2": "2020-03", "text_1_tokenized": ["got", "a", "call", "from", "denver", ",", "epicenter", "of", "cyclonebomb", ",", "where", "my", "friend", "is", ".", "she's", "bugging", "out", "!", "!", "!"], "text_2_tokenized": ["I", "need", "to", "call", "my", "family", "in", "New", "York", "since", "the", "epicenter", "to", "this", "shit", "is", "in", "Coney", "Island", ".", "I", "just", "don't", "want", "to", "talk", "to", "the", "whole", "family", "though"]} -{"id": "1917-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Sadly, the other 50% is a safe bet for the epicenter.", "token_idx_1": 12, "text_start_1": 43, "text_end_1": 52, "date_1": "2019-03", "text_2": "I saw parents dropping off and picking up their sweaty children at hockey practice in Wake Forest, NC...the very epicenter of Covid19 along with Durham County. Why is this continuing? The gym at The Factory is closed but not the indoor ice facility. Schools are closed but not ???", "token_idx_2": 22, "text_start_2": 113, "text_end_2": 122, "date_2": "2020-03", "text_1_tokenized": ["Sadly", ",", "the", "other", "50", "%", "is", "a", "safe", "bet", "for", "the", "epicenter", "."], "text_2_tokenized": ["I", "saw", "parents", "dropping", "off", "and", "picking", "up", "their", "sweaty", "children", "at", "hockey", "practice", "in", "Wake", "Forest", ",", "NC", "...", "the", "very", "epicenter", "of", "Covid", "19", "along", "with", "Durham", "County", ".", "Why", "is", "this", "continuing", "?", "The", "gym", "at", "The", "Factory", "is", "closed", "but", "not", "the", "indoor", "ice", "facility", ".", "Schools", "are", "closed", "but", "not", "?", "?", "?"]} -{"id": "1918-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "maybe instead of epicenter next month ill go see sum 41 since ive been getting into them lately", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-03", "text_2": "Philippines will be the next epicenter of COVID-19", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 38, "date_2": "2020-03", "text_1_tokenized": ["maybe", "instead", "of", "epicenter", "next", "month", "ill", "go", "see", "sum", "41", "since", "ive", "been", "getting", "into", "them", "lately"], "text_2_tokenized": ["Philippines", "will", "be", "the", "next", "epicenter", "of", "COVID", "-", "19"]} -{"id": "1919-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Even at the epicenter of understanding, you can plateau and get stuck in a linear mindset that corrodes the idea you're trying to understand.", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 21, "date_1": "2019-03", "text_2": "In Guayaquil, Ecuador's covid19 epicenter, people are dying in their homes\u2026 and those that remain to push the dead bodies in the streets. This is according to media reports. 
And a simple Google search about Ecuador's coronavirus crisis brings this fact to the fore.-MM", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 41, "date_2": "2020-03", "text_1_tokenized": ["Even", "at", "the", "epicenter", "of", "understanding", ",", "you", "can", "plateau", "and", "get", "stuck", "in", "a", "linear", "mindset", "that", "corrodes", "the", "idea", "you're", "trying", "to", "understand", "."], "text_2_tokenized": ["In", "Guayaquil", ",", "Ecuador's", "covid", "19", "epicenter", ",", "people", "are", "dying", "in", "their", "homes", "\u2026", "and", "those", "that", "remain", "to", "push", "the", "dead", "bodies", "in", "the", "streets", ".", "This", "is", "according", "to", "media", "reports", ".", "And", "a", "simple", "Google", "search", "about", "Ecuador's", "coronavirus", "crisis", "brings", "this", "fact", "to", "the", "fore", ".", "-", "MM"]} -{"id": "1920-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Feeling too excited \ud83d\ude18 Just\ud83d\udc4c #2DaysToDSSFoundationMonth \ud83d\udc49Shah Mastana ji chose Sirsa as the epicenter for Spirituality & put the foundation stone of @derasachasauda on 29th April 1948. Still the Same Power is working as Saint @Gurmeetramrahim Ji for the salvation of mankind.", "token_idx_1": 15, "text_start_1": 91, "text_end_1": 100, "date_1": "2019-03", "text_2": "As of Wednesday, more than 90,000 retired and active health care workers had signed up online to volunteer at the epicenter of the pandemic, including 25,000 from outside New York, the governor's office said. @nytimes", "token_idx_2": 21, "text_start_2": 114, "text_end_2": 123, "date_2": "2020-03", "text_1_tokenized": ["Feeling", "too", "excited", "\ud83d\ude18", "Just", "\ud83d\udc4c", "#2DaysToDSSFoundationMonth", "\ud83d\udc49", "Shah", "Mastana", "ji", "chose", "Sirsa", "as", "the", "epicenter", "for", "Spirituality", "&", "put", "the", "foundation", "stone", "of", "@derasachasauda", "on", "29th", "April", "1948", ".", "Still", "the", "Same", "Power", "is", "working", "as", "Saint", "@Gurmeetramrahim", "Ji", "for", "the", "salvation", "of", "mankind", "."], "text_2_tokenized": ["As", "of", "Wednesday", ",", "more", "than", "90,000", "retired", "and", "active", "health", "care", "workers", "had", "signed", "up", "online", "to", "volunteer", "at", "the", "epicenter", "of", "the", "pandemic", ",", "including", "25,000", "from", "outside", "New", "York", ",", "the", "governor's", "office", "said", ".", "@nytimes"]} -{"id": "1921-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Ozzy has a fever. We are in the epicenter of the measles outbreak in NY. Do you think I'm freaking out? Yes, yes I am.", "token_idx_1": 9, "text_start_1": 32, "text_end_1": 41, "date_1": "2019-03", "text_2": "I'm the granddaughter of a nurse who served in the epicenter of the 1918 flu in Philadelphia. Her memories were hair raising. 
How would you like to stack corpses like firewood?", "token_idx_2": 10, "text_start_2": 51, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["Ozzy", "has", "a", "fever", ".", "We", "are", "in", "the", "epicenter", "of", "the", "measles", "outbreak", "in", "NY", ".", "Do", "you", "think", "I'm", "freaking", "out", "?", "Yes", ",", "yes", "I", "am", "."], "text_2_tokenized": ["I'm", "the", "granddaughter", "of", "a", "nurse", "who", "served", "in", "the", "epicenter", "of", "the", "1918", "flu", "in", "Philadelphia", ".", "Her", "memories", "were", "hair", "raising", ".", "How", "would", "you", "like", "to", "stack", "corpses", "like", "firewood", "?"]} -{"id": "1922-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "As someone who works for @skidroworg in LA (the epicenter of homelessness in the US) and a former/future resident of Seattle, I have thought a lot about the \"Seattle Is Dying\" story recently aired by @komonews.", "token_idx_1": 10, "text_start_1": 48, "text_end_1": 57, "date_1": "2019-03", "text_2": "Tom Clancy made 2 The Division games based on a virus . The first game was in NY which is also the epicenter of the Corona Virus & The Division 2 takes played only dc also with quarantine and virus \ud83d\ude2d", "token_idx_2": 22, "text_start_2": 99, "text_end_2": 108, "date_2": "2020-03", "text_1_tokenized": ["As", "someone", "who", "works", "for", "@skidroworg", "in", "LA", "(", "the", "epicenter", "of", "homelessness", "in", "the", "US", ")", "and", "a", "former", "/", "future", "resident", "of", "Seattle", ",", "I", "have", "thought", "a", "lot", "about", "the", "\"", "Seattle", "Is", "Dying", "\"", "story", "recently", "aired", "by", "@komonews", "."], "text_2_tokenized": ["Tom", "Clancy", "made", "2", "The", "Division", "games", "based", "on", "a", "virus", ".", "The", "first", "game", "was", "in", "NY", "which", "is", "also", "the", "epicenter", "of", "the", "Corona", "Virus", "&", "The", "Division", "2", "takes", "played", "only", "dc", "also", "with", "quarantine", "and", "virus", "\ud83d\ude2d"]} -{"id": "1923-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "We should turn Minecraft into a trans epicenter just to annoy notch Itd be like vrchat, but worse", "token_idx_1": 7, "text_start_1": 38, "text_end_1": 47, "date_1": "2019-03", "text_2": "US has 34,000+ cases. NY has almost half that number thereby making NY the epicenter of the outbreak in the US.", "token_idx_2": 15, "text_start_2": 75, "text_end_2": 84, "date_2": "2020-03", "text_1_tokenized": ["We", "should", "turn", "Minecraft", "into", "a", "trans", "epicenter", "just", "to", "annoy", "notch", "Itd", "be", "like", "vrchat", ",", "but", "worse"], "text_2_tokenized": ["US", "has", "34,000+", "cases", ".", "NY", "has", "almost", "half", "that", "number", "thereby", "making", "NY", "the", "epicenter", "of", "the", "outbreak", "in", "the", "US", "."]} -{"id": "1924-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Secondly, there were amazing financial perks that came with being at the epicenter of Jewish religion in the first century. Jews all over the world paid what was called a Temple tax. So, Caiaphas had access to extraordinary wealth. #Shortsighted", "token_idx_1": 13, "text_start_1": 73, "text_end_1": 82, "date_1": "2019-03", "text_2": "They just quarantined the town next to us. The entire town. 41,000 people because it is the epicenter in our county. I'm legit terrified. 
And I know I shouldn't let that fear take over but I'd be content to not leave my house for weeks but no, I have to work on Monday.", "token_idx_2": 19, "text_start_2": 92, "text_end_2": 101, "date_2": "2020-03", "text_1_tokenized": ["Secondly", ",", "there", "were", "amazing", "financial", "perks", "that", "came", "with", "being", "at", "the", "epicenter", "of", "Jewish", "religion", "in", "the", "first", "century", ".", "Jews", "all", "over", "the", "world", "paid", "what", "was", "called", "a", "Temple", "tax", ".", "So", ",", "Caiaphas", "had", "access", "to", "extraordinary", "wealth", ".", "#Shortsighted"], "text_2_tokenized": ["They", "just", "quarantined", "the", "town", "next", "to", "us", ".", "The", "entire", "town", ".", "41,000", "people", "because", "it", "is", "the", "epicenter", "in", "our", "county", ".", "I'm", "legit", "terrified", ".", "And", "I", "know", "I", "shouldn't", "let", "that", "fear", "take", "over", "but", "I'd", "be", "content", "to", "not", "leave", "my", "house", "for", "weeks", "but", "no", ",", "I", "have", "to", "work", "on", "Monday", "."]} -{"id": "1925-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Fascinating book on Africa by John Reader demystifying the 'dark continent', the epicenter of much of life's beginnings. From single to multicellular organisms, mammals and dinosaurs, and the evolution of humans. The continent has seen it all #Africa", "token_idx_1": 15, "text_start_1": 81, "text_end_1": 90, "date_1": "2019-03", "text_2": "FGN should lock down Lagos as the epicenter for the spread of COVID-19. No buses nor flights in and out of Lagos. #totallockdown", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 43, "date_2": "2020-03", "text_1_tokenized": ["Fascinating", "book", "on", "Africa", "by", "John", "Reader", "demystifying", "the", "'", "dark", "continent", "'", ",", "the", "epicenter", "of", "much", "of", "life's", "beginnings", ".", "From", "single", "to", "multicellular", "organisms", ",", "mammals", "and", "dinosaurs", ",", "and", "the", "evolution", "of", "humans", ".", "The", "continent", "has", "seen", "it", "all", "#Africa"], "text_2_tokenized": ["FGN", "should", "lock", "down", "Lagos", "as", "the", "epicenter", "for", "the", "spread", "of", "COVID", "-", "19", ".", "No", "buses", "nor", "flights", "in", "and", "out", "of", "Lagos", ".", "#totallockdown"]} -{"id": "1926-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "How FM stations will roll kesho: PLOT FOR SALE Near earthquake epicenter, Taita Taveta... A quarter going for only 250k! Buy and watch where the earthquake hit on Sunday 24 March! 40% already sold out! Hurry, get yours before another quake hits!", "token_idx_1": 12, "text_start_1": 63, "text_end_1": 72, "date_1": "2019-03", "text_2": "Covid-19 has put a lie to the myth of Western efficiency. From Europe, the pandemic epicenter is moving to the US. My thoughts are with everyone down with Covid-19. We shall overcome. 
Please #StayHome & #staysafe Seek medical help if you have any funny symptoms!", "token_idx_2": 19, "text_start_2": 84, "text_end_2": 93, "date_2": "2020-03", "text_1_tokenized": ["How", "FM", "stations", "will", "roll", "kesho", ":", "PLOT", "FOR", "SALE", "Near", "earthquake", "epicenter", ",", "Taita", "Taveta", "...", "A", "quarter", "going", "for", "only", "250k", "!", "Buy", "and", "watch", "where", "the", "earthquake", "hit", "on", "Sunday", "24", "March", "!", "40", "%", "already", "sold", "out", "!", "Hurry", ",", "get", "yours", "before", "another", "quake", "hits", "!"], "text_2_tokenized": ["Covid", "-", "19", "has", "put", "a", "lie", "to", "the", "myth", "of", "Western", "efficiency", ".", "From", "Europe", ",", "the", "pandemic", "epicenter", "is", "moving", "to", "the", "US", ".", "My", "thoughts", "are", "with", "everyone", "down", "with", "Covid", "-", "19", ".", "We", "shall", "overcome", ".", "Please", "#StayHome", "&", "#staysafe", "Seek", "medical", "help", "if", "you", "have", "any", "funny", "symptoms", "!"]} -{"id": "1927-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Might need to take a trip to the bay. Y'all making it sound like the lit Californian Black epicenter \ud83d\ude02", "token_idx_1": 19, "text_start_1": 91, "text_end_1": 100, "date_1": "2019-03", "text_2": "Seriously, can someone explain to me how could wuhan (the epicenter) go back to normal in such a short period and the US has to hunker down for 18 months! Gates tells us that we will not be back to normal unless we take his vaccine! WTF.", "token_idx_2": 12, "text_start_2": 58, "text_end_2": 67, "date_2": "2020-03", "text_1_tokenized": ["Might", "need", "to", "take", "a", "trip", "to", "the", "bay", ".", "Y'all", "making", "it", "sound", "like", "the", "lit", "Californian", "Black", "epicenter", "\ud83d\ude02"], "text_2_tokenized": ["Seriously", ",", "can", "someone", "explain", "to", "me", "how", "could", "wuhan", "(", "the", "epicenter", ")", "go", "back", "to", "normal", "in", "such", "a", "short", "period", "and", "the", "US", "has", "to", "hunker", "down", "for", "18", "months", "!", "Gates", "tells", "us", "that", "we", "will", "not", "be", "back", "to", "normal", "unless", "we", "take", "his", "vaccine", "!", "WTF", "."]} -{"id": "1928-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "really bummed that i'm not going to the epicenter festival \ud83d\ude44", "token_idx_1": 8, "text_start_1": 40, "text_end_1": 49, "date_1": "2019-03", "text_2": "As Trump considers #newyorkquarantine Gov. Andrew Cuomo is one busy guy currently to find quick fix to the epicenter. America we are with you #CoronavirusPandemic", "token_idx_2": 19, "text_start_2": 107, "text_end_2": 116, "date_2": "2020-03", "text_1_tokenized": ["really", "bummed", "that", "i'm", "not", "going", "to", "the", "epicenter", "festival", "\ud83d\ude44"], "text_2_tokenized": ["As", "Trump", "considers", "#newyorkquarantine", "Gov", ".", "Andrew", "Cuomo", "is", "one", "busy", "guy", "currently", "to", "find", "quick", "fix", "to", "the", "epicenter", ".", "America", "we", "are", "with", "you", "#CoronavirusPandemic"]} -{"id": "1929-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "The tiny slip of land know as Israel was once inhabited by peaceful Arab farmers, Palestinians. Now an epicenter of acrimony as Palestinians fight against Israel's encroaching settler colonialism and oppression. Jews originally settled there,desperate for \u201cpeace\u201d. 
What happened?", "token_idx_1": 20, "text_start_1": 103, "text_end_1": 112, "date_1": "2019-03", "text_2": "If @AOC was so bright, how come NY is the epicenter of the #WuFlu? What did NY do so wrong?", "token_idx_2": 11, "text_start_2": 42, "text_end_2": 51, "date_2": "2020-03", "text_1_tokenized": ["The", "tiny", "slip", "of", "land", "know", "as", "Israel", "was", "once", "inhabited", "by", "peaceful", "Arab", "farmers", ",", "Palestinians", ".", "Now", "an", "epicenter", "of", "acrimony", "as", "Palestinians", "fight", "against", "Israel's", "encroaching", "settler", "colonialism", "and", "oppression", ".", "Jews", "originally", "settled", "there", ",", "desperate", "for", "\u201c", "peace", "\u201d", ".", "What", "happened", "?"], "text_2_tokenized": ["If", "@AOC", "was", "so", "bright", ",", "how", "come", "NY", "is", "the", "epicenter", "of", "the", "#WuFlu", "?", "What", "did", "NY", "do", "so", "wrong", "?"]} -{"id": "1930-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Tuticorin was an epicenter of Anti-BJP, Anti-ADMK BJP protests, yet BJP is contesting there that too against KaniMozhi. BJP are either stupid or have lots of guts to run in Tuticorin.", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-03", "text_2": "I'm just curious, are \u201cessential workers\u201d gonna jump to the front of the line at hospitals or get an extra check or anything? ...When we catch covid-19 that is. It's not like NYC isn't the epicenter. Just thinking out loud. @realDonaldTrump @NYGovCuomo", "token_idx_2": 43, "text_start_2": 189, "text_end_2": 198, "date_2": "2020-03", "text_1_tokenized": ["Tuticorin", "was", "an", "epicenter", "of", "Anti-BJP", ",", "Anti-ADMK", "BJP", "protests", ",", "yet", "BJP", "is", "contesting", "there", "that", "too", "against", "KaniMozhi", ".", "BJP", "are", "either", "stupid", "or", "have", "lots", "of", "guts", "to", "run", "in", "Tuticorin", "."], "text_2_tokenized": ["I'm", "just", "curious", ",", "are", "\u201c", "essential", "workers", "\u201d", "gonna", "jump", "to", "the", "front", "of", "the", "line", "at", "hospitals", "or", "get", "an", "extra", "check", "or", "anything", "?", "...", "When", "we", "catch", "covid", "-", "19", "that", "is", ".", "It's", "not", "like", "NYC", "isn't", "the", "epicenter", ".", "Just", "thinking", "out", "loud", ".", "@realDonaldTrump", "@NYGovCuomo"]} -{"id": "1931-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "NYC the epicenter of nightlife why are ya still going to marthitas enlighten me lmaoooooooooo", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 17, "date_1": "2019-03", "text_2": "We are also at the epicenter of an upcoming mass starvation not that the will be no food in the market,A huge percentage won't have money to buy food has companies to continue to lay off workers due to the ongoing pandemic.", "token_idx_2": 5, "text_start_2": 19, "text_end_2": 28, "date_2": "2020-03", "text_1_tokenized": ["NYC", "the", "epicenter", "of", "nightlife", "why", "are", "ya", "still", "going", "to", "marthitas", "enlighten", "me", "lmaoooooooooo"], "text_2_tokenized": ["We", "are", "also", "at", "the", "epicenter", "of", "an", "upcoming", "mass", "starvation", "not", "that", "the", "will", "be", "no", "food", "in", "the", "market", ",", "A", "huge", "percentage", "won't", "have", "money", "to", "buy", "food", "has", "companies", "to", "continue", "to", "lay", "off", "workers", "due", "to", "the", "ongoing", "pandemic", "."]} -{"id": "1932-epicenter", "word": "epicenter", 
"label_binary": 0, "text_1": "#Selflove as the epicenter to a harmonious global community.", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-03", "text_2": "living in the epicenter of everything that's happening has been beyond mentally exhausting, but it's also taking a huge toll on me physically now, to the point where my anxiety makes it hard for me to eat anything", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 23, "date_2": "2020-03", "text_1_tokenized": ["#Selflove", "as", "the", "epicenter", "to", "a", "harmonious", "global", "community", "."], "text_2_tokenized": ["living", "in", "the", "epicenter", "of", "everything", "that's", "happening", "has", "been", "beyond", "mentally", "exhausting", ",", "but", "it's", "also", "taking", "a", "huge", "toll", "on", "me", "physically", "now", ",", "to", "the", "point", "where", "my", "anxiety", "makes", "it", "hard", "for", "me", "to", "eat", "anything"]} -{"id": "1933-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Once again, the irony of ironies, my TL is the epicenter of discussion about female #boxing. Cue Alanis... LULZ", "token_idx_1": 12, "text_start_1": 47, "text_end_1": 56, "date_1": "2019-03", "text_2": "I'm hearing both Californians are fucked & the virus is clearing up at the epicenter, I've never been more ambiguous about a situation in my life", "token_idx_2": 14, "text_start_2": 79, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["Once", "again", ",", "the", "irony", "of", "ironies", ",", "my", "TL", "is", "the", "epicenter", "of", "discussion", "about", "female", "#boxing", ".", "Cue", "Alanis", "...", "LULZ"], "text_2_tokenized": ["I'm", "hearing", "both", "Californians", "are", "fucked", "&", "the", "virus", "is", "clearing", "up", "at", "the", "epicenter", ",", "I've", "never", "been", "more", "ambiguous", "about", "a", "situation", "in", "my", "life"]} -{"id": "1934-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The all-time epicenter of effortless cool is Billy Dee Williams waking out right now at #StarWarsCelebaration", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 22, "date_1": "2019-03", "text_2": "My mom's abusive bf won't let them stock up on supplies and not letting my mom stay safe. Both my parents are alcoholics & in poor health, my grandparents are not staying at home and live near the epicenter. Jon's mom is being exposed daily to suspected cases. 
I am worried", "token_idx_2": 40, "text_start_2": 201, "text_end_2": 210, "date_2": "2020-03", "text_1_tokenized": ["The", "all-time", "epicenter", "of", "effortless", "cool", "is", "Billy", "Dee", "Williams", "waking", "out", "right", "now", "at", "#StarWarsCelebaration"], "text_2_tokenized": ["My", "mom's", "abusive", "bf", "won't", "let", "them", "stock", "up", "on", "supplies", "and", "not", "letting", "my", "mom", "stay", "safe", ".", "Both", "my", "parents", "are", "alcoholics", "&", "in", "poor", "health", ",", "my", "grandparents", "are", "not", "staying", "at", "home", "and", "live", "near", "the", "epicenter", ".", "Jon's", "mom", "is", "being", "exposed", "daily", "to", "suspected", "cases", ".", "I", "am", "worried"]} -{"id": "1935-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The love of God is the epicenter of Christianity.", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 32, "date_1": "2019-03", "text_2": "China our Best friends,The original epicenter of Covid-19 contained the menace..can't they expedite an extended olive branch to assist us deal with this Beast,,what are friends for?Or we owe them too much to trade the Exorbitant strategic Recovery Work Plan \ud83d\ude2d\ud83d\ude2d #lockdowneffect", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 45, "date_2": "2020-03", "text_1_tokenized": ["The", "love", "of", "God", "is", "the", "epicenter", "of", "Christianity", "."], "text_2_tokenized": ["China", "our", "Best", "friends", ",", "The", "original", "epicenter", "of", "Covid", "-", "19", "contained", "the", "menace", "..", "can't", "they", "expedite", "an", "extended", "olive", "branch", "to", "assist", "us", "deal", "with", "this", "Beast", ",", ",", "what", "are", "friends", "for", "?", "Or", "we", "owe", "them", "too", "much", "to", "trade", "the", "Exorbitant", "strategic", "Recovery", "Work", "Plan", "\ud83d\ude2d", "\ud83d\ude2d", "#lockdowneffect"]} -{"id": "1936-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "we can say objectively that america has set the bar for hamburgers, with its creative epicenter being minnesota", "token_idx_1": 16, "text_start_1": 86, "text_end_1": 95, "date_1": "2019-03", "text_2": "\"United States could become next epicenter of coronavirus crisis, WHO warns, citing \u2018very large acceleration' in infections\" Make America great again Trump' way!", "token_idx_2": 6, "text_start_2": 33, "text_end_2": 42, "date_2": "2020-03", "text_1_tokenized": ["we", "can", "say", "objectively", "that", "america", "has", "set", "the", "bar", "for", "hamburgers", ",", "with", "its", "creative", "epicenter", "being", "minnesota"], "text_2_tokenized": ["\"", "United", "States", "could", "become", "next", "epicenter", "of", "coronavirus", "crisis", ",", "WHO", "warns", ",", "citing", "\u2018", "very", "large", "acceleration", "'", "in", "infections", "\"", "Make", "America", "great", "again", "Trump", "'", "way", "!"]} -{"id": "1937-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Williamsburg being the epicenter of the measles outbreak in NY doesn't surprise me at all when you look at the demographic that has taken over the area \ud83d\ude43\ud83d\ude43", "token_idx_1": 3, "text_start_1": 23, "text_end_1": 32, "date_1": "2019-03", "text_2": "US has surpassed ITALY to become the country with the highest confirmed Coronavirus cases. With New York being the epicenter of the pandemic. This shit is beyond crazy. Thousands of people are dead. I'm beyond mortified \ud83d\udc94.. 
#COVID19KE", "token_idx_2": 20, "text_start_2": 115, "text_end_2": 124, "date_2": "2020-03", "text_1_tokenized": ["Williamsburg", "being", "the", "epicenter", "of", "the", "measles", "outbreak", "in", "NY", "doesn't", "surprise", "me", "at", "all", "when", "you", "look", "at", "the", "demographic", "that", "has", "taken", "over", "the", "area", "\ud83d\ude43", "\ud83d\ude43"], "text_2_tokenized": ["US", "has", "surpassed", "ITALY", "to", "become", "the", "country", "with", "the", "highest", "confirmed", "Coronavirus", "cases", ".", "With", "New", "York", "being", "the", "epicenter", "of", "the", "pandemic", ".", "This", "shit", "is", "beyond", "crazy", ".", "Thousands", "of", "people", "are", "dead", ".", "I'm", "beyond", "mortified", "\ud83d\udc94", "..", "#COVID19KE"]} -{"id": "1938-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "tracing all threads backwards outwards from the epicenter", "token_idx_1": 7, "text_start_1": 48, "text_end_1": 57, "date_1": "2019-03", "text_2": "The United States are not the epicenter of the coronavirus. Add up the total cases in the EU. The 27 nations of the EU make up the epicenter. #COVID19 #CoronavirusOutbreak", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 39, "date_2": "2020-03", "text_1_tokenized": ["tracing", "all", "threads", "backwards", "outwards", "from", "the", "epicenter"], "text_2_tokenized": ["The", "United", "States", "are", "not", "the", "epicenter", "of", "the", "coronavirus", ".", "Add", "up", "the", "total", "cases", "in", "the", "EU", ".", "The", "27", "nations", "of", "the", "EU", "make", "up", "the", "epicenter", ".", "#COVID19", "#CoronavirusOutbreak"]} -{"id": "1939-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "an epicenter is a geodesy: pseudorandom, and unmeriting", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 12, "date_1": "2019-03", "text_2": "\u201cWe are the epicenter of this crisis,\u201d - @NYCMayor", "token_idx_2": 4, "text_start_2": 12, "text_end_2": 21, "date_2": "2020-03", "text_1_tokenized": ["an", "epicenter", "is", "a", "geodesy", ":", "pseudorandom", ",", "and", "unmeriting"], "text_2_tokenized": ["\u201c", "We", "are", "the", "epicenter", "of", "this", "crisis", ",", "\u201d", "-", "@NYCMayor"]} -{"id": "1940-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Wurzelmann epicenter works other security and also further enhance their devices, according to not a fraudulent campaign with approx", "token_idx_1": 1, "text_start_1": 11, "text_end_1": 20, "date_1": "2019-03", "text_2": "Elmhurst Queens is the \"epicenter within the epicenter,\" of the #COVID19 outbreak, @NYCMayor says", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-03", "text_1_tokenized": ["Wurzelmann", "epicenter", "works", "other", "security", "and", "also", "further", "enhance", "their", "devices", ",", "according", "to", "not", "a", "fraudulent", "campaign", "with", "approx"], "text_2_tokenized": ["Elmhurst", "Queens", "is", "the", "\"", "epicenter", "within", "the", "epicenter", ",", "\"", "of", "the", "#COVID19", "outbreak", ",", "@NYCMayor", "says"]} -{"id": "1941-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Don Quixote never actually tilted at windmills. In reality, he was simply at the epicenter of a rare and unexplained geological phenomenon called turbine drift, in which Quixote remained in place as the windmills tilted towards him. 
#FakeWindmillFacts", "token_idx_1": 16, "text_start_1": 81, "text_end_1": 90, "date_1": "2019-03", "text_2": "Delhi will become an epicenter of corona in India #ArrestKejriwal", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["Don", "Quixote", "never", "actually", "tilted", "at", "windmills", ".", "In", "reality", ",", "he", "was", "simply", "at", "the", "epicenter", "of", "a", "rare", "and", "unexplained", "geological", "phenomenon", "called", "turbine", "drift", ",", "in", "which", "Quixote", "remained", "in", "place", "as", "the", "windmills", "tilted", "towards", "him", ".", "#FakeWindmillFacts"], "text_2_tokenized": ["Delhi", "will", "become", "an", "epicenter", "of", "corona", "in", "India", "#ArrestKejriwal"]} -{"id": "1942-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "You are in an unfamiliar system. The systems binary class star shines from the epicenter. A dark-blue planet is close enough for you to orbit", "token_idx_1": 15, "text_start_1": 79, "text_end_1": 88, "date_1": "2019-03", "text_2": "Just watched ceo smith of Fed ex. 987 employees in Wuhan, the epicenter of this virus. 4 test. 2 false. 2 positive. All cleared & back to work. 1 hr ago on CNBC.", "token_idx_2": 14, "text_start_2": 62, "text_end_2": 71, "date_2": "2020-03", "text_1_tokenized": ["You", "are", "in", "an", "unfamiliar", "system", ".", "The", "systems", "binary", "class", "star", "shines", "from", "the", "epicenter", ".", "A", "dark-blue", "planet", "is", "close", "enough", "for", "you", "to", "orbit"], "text_2_tokenized": ["Just", "watched", "ceo", "smith", "of", "Fed", "ex", ".", "987", "employees", "in", "Wuhan", ",", "the", "epicenter", "of", "this", "virus", ".", "4", "test", ".", "2", "false", ".", "2", "positive", ".", "All", "cleared", "&", "back", "to", "work", ".", "1", "hr", "ago", "on", "CNBC", "."]} -{"id": "1943-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "VATICAN CITY (@AP) -- Pope names Wilton Gregory as archbishop of Washington, DC, epicenter of US sex abuse crisis.", "token_idx_1": 18, "text_start_1": 81, "text_end_1": 90, "date_1": "2019-03", "text_2": "I'm blown away from the amount of supportive messages lately. Working in the epicenter of the field. It means a lot to me. Thank you very much. There are days where I'm super tired, haven't eaten, and haven't gone to the bathroom, try to, and have to run back super fast.", "token_idx_2": 14, "text_start_2": 77, "text_end_2": 86, "date_2": "2020-03", "text_1_tokenized": ["VATICAN", "CITY", "(", "@AP", ")", "-", "-", "Pope", "names", "Wilton", "Gregory", "as", "archbishop", "of", "Washington", ",", "DC", ",", "epicenter", "of", "US", "sex", "abuse", "crisis", "."], "text_2_tokenized": ["I'm", "blown", "away", "from", "the", "amount", "of", "supportive", "messages", "lately", ".", "Working", "in", "the", "epicenter", "of", "the", "field", ".", "It", "means", "a", "lot", "to", "me", ".", "Thank", "you", "very", "much", ".", "There", "are", "days", "where", "I'm", "super", "tired", ",", "haven't", "eaten", ",", "and", "haven't", "gone", "to", "the", "bathroom", ",", "try", "to", ",", "and", "have", "to", "run", "back", "super", "fast", "."]} -{"id": "1944-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "The reason for the angst in 90s music and the shift in epicenter from 80s NYC, LA to 90s Seattle may have to do with George Bush Sr (who I think, incidentally, was a Patriot) War on Cocaine following the Iran Contra Scandal. 
Cocaine repression south brought Heroin from the East.", "token_idx_1": 12, "text_start_1": 55, "text_end_1": 64, "date_1": "2019-03", "text_2": "I'm not a loser but I'm unlucky to a degree. I can't believe I'm in the epicenter of this and haven't got infected", "token_idx_2": 17, "text_start_2": 72, "text_end_2": 81, "date_2": "2020-03", "text_1_tokenized": ["The", "reason", "for", "the", "angst", "in", "90s", "music", "and", "the", "shift", "in", "epicenter", "from", "80s", "NYC", ",", "LA", "to", "90s", "Seattle", "may", "have", "to", "do", "with", "George", "Bush", "Sr", "(", "who", "I", "think", ",", "incidentally", ",", "was", "a", "Patriot", ")", "War", "on", "Cocaine", "following", "the", "Iran", "Contra", "Scandal", ".", "Cocaine", "repression", "south", "brought", "Heroin", "from", "the", "East", "."], "text_2_tokenized": ["I'm", "not", "a", "loser", "but", "I'm", "unlucky", "to", "a", "degree", ".", "I", "can't", "believe", "I'm", "in", "the", "epicenter", "of", "this", "and", "haven't", "got", "infected"]} -{"id": "1945-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "\u201cWe can become so consumed with our image that God is replaced as the epicenter of our lives.\u201d -@n_m_arnold", "token_idx_1": 15, "text_start_1": 70, "text_end_1": 79, "date_1": "2019-03", "text_2": "80. Other positives are they exercise more, they have reconnected with old friends etc. One thing that she said that stood out for me is this \u201cwe are at peace in the epicenter of the virus. We are at peace in the epicenter of his will. #thericherlifeseries #wealth", "token_idx_2": 36, "text_start_2": 166, "text_end_2": 175, "date_2": "2020-03", "text_1_tokenized": ["\u201c", "We", "can", "become", "so", "consumed", "with", "our", "image", "that", "God", "is", "replaced", "as", "the", "epicenter", "of", "our", "lives", ".", "\u201d", "-", "@n_m_arnold"], "text_2_tokenized": ["80", ".", "Other", "positives", "are", "they", "exercise", "more", ",", "they", "have", "reconnected", "with", "old", "friends", "etc", ".", "One", "thing", "that", "she", "said", "that", "stood", "out", "for", "me", "is", "this", "\u201c", "we", "are", "at", "peace", "in", "the", "epicenter", "of", "the", "virus", ".", "We", "are", "at", "peace", "in", "the", "epicenter", "of", "his", "will", ".", "#thericherlifeseries", "#wealth"]} -{"id": "1946-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "6.1 magnitude earthquake struck off the Indonesian island of Sulawesi this morning. According to the USGS, the epicenter of the quake lied at the depth of 37 kilometers and was located northeast of the city of Bitung.", "token_idx_1": 19, "text_start_1": 111, "text_end_1": 120, "date_1": "2019-03", "text_2": "When I was born, I was very premature. I didnt survive my actual birth but they were able to bring me back to life. Machines did all of my breathing for me for months. I had a childhood of lung infections. 
I am at an epicenter for COVID19 infections and I am so scared.", "token_idx_2": 50, "text_start_2": 217, "text_end_2": 226, "date_2": "2020-03", "text_1_tokenized": ["6.1", "magnitude", "earthquake", "struck", "off", "the", "Indonesian", "island", "of", "Sulawesi", "this", "morning", ".", "According", "to", "the", "USGS", ",", "the", "epicenter", "of", "the", "quake", "lied", "at", "the", "depth", "of", "37", "kilometers", "and", "was", "located", "northeast", "of", "the", "city", "of", "Bitung", "."], "text_2_tokenized": ["When", "I", "was", "born", ",", "I", "was", "very", "premature", ".", "I", "didnt", "survive", "my", "actual", "birth", "but", "they", "were", "able", "to", "bring", "me", "back", "to", "life", ".", "Machines", "did", "all", "of", "my", "breathing", "for", "me", "for", "months", ".", "I", "had", "a", "childhood", "of", "lung", "infections", ".", "I", "am", "at", "an", "epicenter", "for", "COVID", "19", "infections", "and", "I", "am", "so", "scared", "."]} -{"id": "1947-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Yes this Ruto thing is the epicenter of Jubilee failure... He should be impeached.", "token_idx_1": 6, "text_start_1": 27, "text_end_1": 36, "date_1": "2019-03", "text_2": "Twitter is incredibly informative, with people showing us the way to solidarity. But i also forgot how much it's the epicenter for documenting every injustice and makes you be on edge every other tweet.", "token_idx_2": 22, "text_start_2": 117, "text_end_2": 126, "date_2": "2020-03", "text_1_tokenized": ["Yes", "this", "Ruto", "thing", "is", "the", "epicenter", "of", "Jubilee", "failure", "...", "He", "should", "be", "impeached", "."], "text_2_tokenized": ["Twitter", "is", "incredibly", "informative", ",", "with", "people", "showing", "us", "the", "way", "to", "solidarity", ".", "But", "i", "also", "forgot", "how", "much", "it's", "the", "epicenter", "for", "documenting", "every", "injustice", "and", "makes", "you", "be", "on", "edge", "every", "other", "tweet", "."]} -{"id": "1948-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "To be a hero is NOT to destroy or censor hate. It's to voluntarily travel into the hate, the belly of the beast, or the epicenter of the disaster, (internally & then externally); & to bring forth the man from the monster, the woman from the witch, & the psyche from despair.", "token_idx_1": 28, "text_start_1": 120, "text_end_1": 129, "date_1": "2019-03", "text_2": "These days, there is always this feeling that one's health has already been compromised...\ud83d\ude44\ud83d\ude4b\u200d\u2642\ufe0f The throat, not the heart, is suddenly the 'epicenter' of one's body system, as intermittent barehand checks are now inevitable! 
This COVID thing sef!\ud83d\ude22\ud83d\ude44\ud83d\ude10", "token_idx_2": 30, "text_start_2": 140, "text_end_2": 149, "date_2": "2020-03", "text_1_tokenized": ["To", "be", "a", "hero", "is", "NOT", "to", "destroy", "or", "censor", "hate", ".", "It's", "to", "voluntarily", "travel", "into", "the", "hate", ",", "the", "belly", "of", "the", "beast", ",", "or", "the", "epicenter", "of", "the", "disaster", ",", "(", "internally", "&", "then", "externally", ");", "&", "to", "bring", "forth", "the", "man", "from", "the", "monster", ",", "the", "woman", "from", "the", "witch", ",", "&", "the", "psyche", "from", "despair", "."], "text_2_tokenized": ["These", "days", ",", "there", "is", "always", "this", "feeling", "that", "one's", "health", "has", "already", "been", "compromised", "...", "\ud83d\ude44", "\ud83d\ude4b\u200d\u2642", "\ufe0f", "The", "throat", ",", "not", "the", "heart", ",", "is", "suddenly", "the", "'", "epicenter", "'", "of", "one's", "body", "system", ",", "as", "intermittent", "barehand", "checks", "are", "now", "inevitable", "!", "This", "COVID", "thing", "sef", "!", "\ud83d\ude22", "\ud83d\ude44", "\ud83d\ude10"]} -{"id": "1949-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Dallas is roughly equidistant to Munich, Seoul and S\u00e3o Paulo. And between LA, NYC and Chicago. There's no reason @CityOfDallas shouldn't be the global epicenter of business by the end of the century. #aerotropolis @Greg_Lindsay @paragkhanna", "token_idx_1": 28, "text_start_1": 151, "text_end_1": 160, "date_1": "2019-03", "text_2": "Bethany Slavic Missionary Church, Rancho Cordova Sacramento County, CA Dumb Church of CA. The priest & people who went there, are idiots. They are the epicenter of the coronavirus pandemic in CA. They should be embarrassed for bringing shame to their state.", "token_idx_2": 30, "text_start_2": 155, "text_end_2": 164, "date_2": "2020-03", "text_1_tokenized": ["Dallas", "is", "roughly", "equidistant", "to", "Munich", ",", "Seoul", "and", "S\u00e3o", "Paulo", ".", "And", "between", "LA", ",", "NYC", "and", "Chicago", ".", "There's", "no", "reason", "@CityOfDallas", "shouldn't", "be", "the", "global", "epicenter", "of", "business", "by", "the", "end", "of", "the", "century", ".", "#aerotropolis", "@Greg_Lindsay", "@paragkhanna"], "text_2_tokenized": ["Bethany", "Slavic", "Missionary", "Church", ",", "Rancho", "Cordova", "Sacramento", "County", ",", "CA", "Dumb", "Church", "of", "CA", ".", "The", "priest", "&", "people", "who", "went", "there", ",", "are", "idiots", ".", "They", "are", "the", "epicenter", "of", "the", "coronavirus", "pandemic", "in", "CA", ".", "They", "should", "be", "embarrassed", "for", "bringing", "shame", "to", "their", "state", "."]} -{"id": "1950-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Boulder, CO: The epicenter of privilege.", "token_idx_1": 5, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-03", "text_2": "Also, WEAR A MASK because you could be asymptomatic and contagious! 
45% of the Chinese people at the epicenter who tested positive (twice) were asymptomatic!", "token_idx_2": 21, "text_start_2": 101, "text_end_2": 110, "date_2": "2020-03", "text_1_tokenized": ["Boulder", ",", "CO", ":", "The", "epicenter", "of", "privilege", "."], "text_2_tokenized": ["Also", ",", "WEAR", "A", "MASK", "because", "you", "could", "be", "asymptomatic", "and", "contagious", "!", "45", "%", "of", "the", "Chinese", "people", "at", "the", "epicenter", "who", "tested", "positive", "(", "twice", ")", "were", "asymptomatic", "!"]} -{"id": "1951-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Every time we touch playing at epicenter outside spectrum. You know what that means! #DukeNation", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 40, "date_1": "2019-03", "text_2": "#TablighSurviellanceReport Very strange! Instead of discussing Tabligh terror attack,u r discussing a imaginary issue Chloroquine wid antinationals Saba&others. I said 2 days back dat dis is a terror attack in which 1000s of terrorists r involved whose epicenter is Maharashtra.", "token_idx_2": 43, "text_start_2": 257, "text_end_2": 266, "date_2": "2020-03", "text_1_tokenized": ["Every", "time", "we", "touch", "playing", "at", "epicenter", "outside", "spectrum", ".", "You", "know", "what", "that", "means", "!", "#DukeNation"], "text_2_tokenized": ["#TablighSurviellanceReport", "Very", "strange", "!", "Instead", "of", "discussing", "Tabligh", "terror", "attack", ",", "u", "r", "discussing", "a", "imaginary", "issue", "Chloroquine", "wid", "antinationals", "Saba", "&", "others", ".", "I", "said", "2", "days", "back", "dat", "dis", "is", "a", "terror", "attack", "in", "which", "1000s", "of", "terrorists", "r", "involved", "whose", "epicenter", "is", "Maharashtra", "."]} -{"id": "1952-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Boston is known for a lot of things, but one that doesn't get enough credit is that it's the absolute epicenter of dull menswear. The amount of checked button up shirt/fleece vest combos per capita is off the charts.", "token_idx_1": 21, "text_start_1": 102, "text_end_1": 111, "date_1": "2019-03", "text_2": "#vaccineI don't understand why they're not testing this vaccines in Italy, the epicenter of this corona virus and they want to test them on Africans. We are not lab rats. Go to Italy or where the virus comes from. Africa is not a jungle! 
#AfricansAreNotLabRats #vaccine", "token_idx_2": 13, "text_start_2": 79, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["Boston", "is", "known", "for", "a", "lot", "of", "things", ",", "but", "one", "that", "doesn't", "get", "enough", "credit", "is", "that", "it's", "the", "absolute", "epicenter", "of", "dull", "menswear", ".", "The", "amount", "of", "checked", "button", "up", "shirt", "/", "fleece", "vest", "combos", "per", "capita", "is", "off", "the", "charts", "."], "text_2_tokenized": ["#vaccineI", "don't", "understand", "why", "they're", "not", "testing", "this", "vaccines", "in", "Italy", ",", "the", "epicenter", "of", "this", "corona", "virus", "and", "they", "want", "to", "test", "them", "on", "Africans", ".", "We", "are", "not", "lab", "rats", ".", "Go", "to", "Italy", "or", "where", "the", "virus", "comes", "from", ".", "Africa", "is", "not", "a", "jungle", "!", "#AfricansAreNotLabRats", "#vaccine"]} -{"id": "1953-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Proposed: Cooper Square is the most Boston-like \u201csquare\u201d in NYC, in the sense of \u201carea where some streets meet that has cultural gravity, blurry boundaries, and possibly some benches near its epicenter\u201d", "token_idx_1": 38, "text_start_1": 192, "text_end_1": 201, "date_1": "2019-03", "text_2": "Definition of \u201cQuarantine\u201d for rich New Yorkers: Leave the epicenter of coronavirus in the country and invade Rhode Island", "token_idx_2": 12, "text_start_2": 59, "text_end_2": 68, "date_2": "2020-03", "text_1_tokenized": ["Proposed", ":", "Cooper", "Square", "is", "the", "most", "Boston-like", "\u201c", "square", "\u201d", "in", "NYC", ",", "in", "the", "sense", "of", "\u201c", "area", "where", "some", "streets", "meet", "that", "has", "cultural", "gravity", ",", "blurry", "boundaries", ",", "and", "possibly", "some", "benches", "near", "its", "epicenter", "\u201d"], "text_2_tokenized": ["Definition", "of", "\u201c", "Quarantine", "\u201d", "for", "rich", "New", "Yorkers", ":", "Leave", "the", "epicenter", "of", "coronavirus", "in", "the", "country", "and", "invade", "Rhode", "Island"]} -{"id": "1954-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Chris Chandler, the guy who made Sonichu, made a single mistake. He sought to be understood by others. Instead, he should have been trying to understand others. This is the epicenter of his sufferings.", "token_idx_1": 36, "text_start_1": 173, "text_end_1": 182, "date_1": "2019-03", "text_2": "51 confirmed cases in #SouthAfrica ... while the URGENT Cabinet meeting is scheduled for this afternoon. @PresidencyZA you're not taking us seriously. Weep South Africa, we are now the epicenter of the #Coronavirus in AFRICA! 
While @GovernmentZA tells us to wash our hands!", "token_idx_2": 32, "text_start_2": 185, "text_end_2": 194, "date_2": "2020-03", "text_1_tokenized": ["Chris", "Chandler", ",", "the", "guy", "who", "made", "Sonichu", ",", "made", "a", "single", "mistake", ".", "He", "sought", "to", "be", "understood", "by", "others", ".", "Instead", ",", "he", "should", "have", "been", "trying", "to", "understand", "others", ".", "This", "is", "the", "epicenter", "of", "his", "sufferings", "."], "text_2_tokenized": ["51", "confirmed", "cases", "in", "#SouthAfrica", "...", "while", "the", "URGENT", "Cabinet", "meeting", "is", "scheduled", "for", "this", "afternoon", ".", "@PresidencyZA", "you're", "not", "taking", "us", "seriously", ".", "Weep", "South", "Africa", ",", "we", "are", "now", "the", "epicenter", "of", "the", "#Coronavirus", "in", "AFRICA", "!", "While", "@GovernmentZA", "tells", "us", "to", "wash", "our", "hands", "!"]} -{"id": "1955-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "You have to have really been born, raised and bred here to really understand the connection you have to NY. It's literally the epicenter of so many epic changes and fluctuations in society. A true diamond in the rough I swear.", "token_idx_1": 25, "text_start_1": 127, "text_end_1": 136, "date_1": "2019-03", "text_2": "Dear Honorable PM @narendramodi sir and HM @AmitShah sir. If you order to re open kerala and Karnataka border,it will be Disaster for Karnataka. We share border with Kannur n Kasargod both r epicenter of Chinavirus. Request you to not to allow reopen border.", "token_idx_2": 37, "text_start_2": 191, "text_end_2": 200, "date_2": "2020-03", "text_1_tokenized": ["You", "have", "to", "have", "really", "been", "born", ",", "raised", "and", "bred", "here", "to", "really", "understand", "the", "connection", "you", "have", "to", "NY", ".", "It's", "literally", "the", "epicenter", "of", "so", "many", "epic", "changes", "and", "fluctuations", "in", "society", ".", "A", "true", "diamond", "in", "the", "rough", "I", "swear", "."], "text_2_tokenized": ["Dear", "Honorable", "PM", "@narendramodi", "sir", "and", "HM", "@AmitShah", "sir", ".", "If", "you", "order", "to", "re", "open", "kerala", "and", "Karnataka", "border", ",", "it", "will", "be", "Disaster", "for", "Karnataka", ".", "We", "share", "border", "with", "Kannur", "n", "Kasargod", "both", "r", "epicenter", "of", "Chinavirus", ".", "Request", "you", "to", "not", "to", "allow", "reopen", "border", "."]} -{"id": "1956-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The epicenter of corruption the role model of incompetency @NammaBESCOM @mdbescom , torturing customers day in and day out, the most worthless power company in the country, even village supply is better than your so called world class service in power supply", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 13, "date_1": "2019-03", "text_2": "I'm officially done with my \u201cvoluntary\u201d 14-day self quarantine from my New York trip and all seems good. 
I'm just happy I got out before NY became an epicenter days later...", "token_idx_2": 33, "text_start_2": 150, "text_end_2": 159, "date_2": "2020-03", "text_1_tokenized": ["The", "epicenter", "of", "corruption", "the", "role", "model", "of", "incompetency", "@NammaBESCOM", "@mdbescom", ",", "torturing", "customers", "day", "in", "and", "day", "out", ",", "the", "most", "worthless", "power", "company", "in", "the", "country", ",", "even", "village", "supply", "is", "better", "than", "your", "so", "called", "world", "class", "service", "in", "power", "supply"], "text_2_tokenized": ["I'm", "officially", "done", "with", "my", "\u201c", "voluntary", "\u201d", "14", "-", "day", "self", "quarantine", "from", "my", "New", "York", "trip", "and", "all", "seems", "good", ".", "I'm", "just", "happy", "I", "got", "out", "before", "NY", "became", "an", "epicenter", "days", "later", "..."]} -{"id": "1957-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Why is someone blasting \u201cFuck Donald Trump\u201d in my neighborhood??? Does he not know Park Slope is probably the liberal epicenter of Brooklyn?", "token_idx_1": 25, "text_start_1": 118, "text_end_1": 127, "date_1": "2019-03", "text_2": "You're too far from the epicenter to get infected!", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-03", "text_1_tokenized": ["Why", "is", "someone", "blasting", "\u201c", "Fuck", "Donald", "Trump", "\u201d", "in", "my", "neighborhood", "?", "?", "?", "Does", "he", "not", "know", "Park", "Slope", "is", "probably", "the", "liberal", "epicenter", "of", "Brooklyn", "?"], "text_2_tokenized": ["You're", "too", "far", "from", "the", "epicenter", "to", "get", "infected", "!"]} -{"id": "1958-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "The way to find the epicenter is to ask yourself this question: \u201cIf I took this away, would what I'm selling still exist?\u201d #entrepreneur", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 29, "date_1": "2019-03", "text_2": "I rise this morning filled with respect, gratefulness, and in complete awe of those in the medical field in NY heading off to the front lines, relieving those who have worked all night. We are the epicenter. We are what awaits the rest of the U.S. Trump, give us the #ventilators", "token_idx_2": 40, "text_start_2": 197, "text_end_2": 206, "date_2": "2020-03", "text_1_tokenized": ["The", "way", "to", "find", "the", "epicenter", "is", "to", "ask", "yourself", "this", "question", ":", "\u201c", "If", "I", "took", "this", "away", ",", "would", "what", "I'm", "selling", "still", "exist", "?", "\u201d", "#entrepreneur"], "text_2_tokenized": ["I", "rise", "this", "morning", "filled", "with", "respect", ",", "gratefulness", ",", "and", "in", "complete", "awe", "of", "those", "in", "the", "medical", "field", "in", "NY", "heading", "off", "to", "the", "front", "lines", ",", "relieving", "those", "who", "have", "worked", "all", "night", ".", "We", "are", "the", "epicenter", ".", "We", "are", "what", "awaits", "the", "rest", "of", "the", "U", ".", "S", ".", "Trump", ",", "give", "us", "the", "#ventilators"]} -{"id": "1959-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "I find it interesting that the adage for success as a entrepreneur or VC is to \u201cbe contrarian and be right\u201d yet the epicenter of disruptive startups is in the SF monoculture. 
Seems like it would be difficult (obv not impossible) to be contrarian in such a bubble.", "token_idx_1": 25, "text_start_1": 116, "text_end_1": 125, "date_1": "2019-03", "text_2": "Should test all the important people in the Governments. CMs of all provinces, health ministers, health secretaries, information ministers etc. PM too should be tested since he met Tariq Jameel & tableeghi's ijtima has been proven to be an epicenter of this virus", "token_idx_2": 44, "text_start_2": 244, "text_end_2": 253, "date_2": "2020-03", "text_1_tokenized": ["I", "find", "it", "interesting", "that", "the", "adage", "for", "success", "as", "a", "entrepreneur", "or", "VC", "is", "to", "\u201c", "be", "contrarian", "and", "be", "right", "\u201d", "yet", "the", "epicenter", "of", "disruptive", "startups", "is", "in", "the", "SF", "monoculture", ".", "Seems", "like", "it", "would", "be", "difficult", "(", "obv", "not", "impossible", ")", "to", "be", "contrarian", "in", "such", "a", "bubble", "."], "text_2_tokenized": ["Should", "test", "all", "the", "important", "people", "in", "the", "Governments", ".", "CMs", "of", "all", "provinces", ",", "health", "ministers", ",", "health", "secretaries", ",", "information", "ministers", "etc", ".", "PM", "too", "should", "be", "tested", "since", "he", "met", "Tariq", "Jameel", "&", "tableeghi's", "ijtima", "has", "been", "proven", "to", "be", "an", "epicenter", "of", "this", "virus"]} -{"id": "1960-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "ATL > CHI every fucking day of the week and not just bc we're the epicenter of rap & hip-hop", "token_idx_1": 15, "text_start_1": 69, "text_end_1": 78, "date_1": "2019-03", "text_2": "New York is the epicenter of COVID-19 but Andrew Cuomo is also handling this ~extremely well~. Somehow, the groupthink doesn't add up. For future pandemics, you should probably do all the hard stuff before things spin absolutely out of control.", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 25, "date_2": "2020-03", "text_1_tokenized": ["ATL", ">", "CHI", "every", "fucking", "day", "of", "the", "week", "and", "not", "just", "bc", "we're", "the", "epicenter", "of", "rap", "&", "hip-hop"], "text_2_tokenized": ["New", "York", "is", "the", "epicenter", "of", "COVID", "-", "19", "but", "Andrew", "Cuomo", "is", "also", "handling", "this", "~", "extremely", "well", "~", ".", "Somehow", ",", "the", "groupthink", "doesn't", "add", "up", ".", "For", "future", "pandemics", ",", "you", "should", "probably", "do", "all", "the", "hard", "stuff", "before", "things", "spin", "absolutely", "out", "of", "control", "."]} -{"id": "1961-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "It's the Battle for who is the king of MSG. New Japan, ROH, WWE, and I think now AAA is joining in. 
Damn MSG is going to be the epicenter of wrestling in 2019.", "token_idx_1": 34, "text_start_1": 128, "text_end_1": 137, "date_1": "2019-03", "text_2": "NYC would be the epicenter of the rona virus \ud83d\ude44", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 26, "date_2": "2020-03", "text_1_tokenized": ["It's", "the", "Battle", "for", "who", "is", "the", "king", "of", "MSG", ".", "New", "Japan", ",", "ROH", ",", "WWE", ",", "and", "I", "think", "now", "AAA", "is", "joining", "in", ".", "Damn", "MSG", "is", "going", "to", "be", "the", "epicenter", "of", "wrestling", "in", "2019", "."], "text_2_tokenized": ["NYC", "would", "be", "the", "epicenter", "of", "the", "rona", "virus", "\ud83d\ude44"]} -{"id": "1962-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Ruka has failed to put the house inorder thereby failing to attract FDI. Right now the country is bound to have more casualties than Moza where the epicenter was. This fake failed gvt must just resign enmass and free us. Why are they happy to make us suffer, for the why?#freeUs", "token_idx_1": 28, "text_start_1": 148, "text_end_1": 157, "date_1": "2019-03", "text_2": "I have no confidence in WHO. It became very political in the 1980s. Slowly but surely over the years it has become corrupt. It's currently a \u2018follow the money' organization. The Director-General just suggested the US may become the epicenter. Key Word: MAY. It's supposition. \ud83c\uddfa\ud83c\uddf8", "token_idx_2": 45, "text_start_2": 232, "text_end_2": 241, "date_2": "2020-03", "text_1_tokenized": ["Ruka", "has", "failed", "to", "put", "the", "house", "inorder", "thereby", "failing", "to", "attract", "FDI", ".", "Right", "now", "the", "country", "is", "bound", "to", "have", "more", "casualties", "than", "Moza", "where", "the", "epicenter", "was", ".", "This", "fake", "failed", "gvt", "must", "just", "resign", "enmass", "and", "free", "us", ".", "Why", "are", "they", "happy", "to", "make", "us", "suffer", ",", "for", "the", "why", "?", "#freeUs"], "text_2_tokenized": ["I", "have", "no", "confidence", "in", "WHO", ".", "It", "became", "very", "political", "in", "the", "1980s", ".", "Slowly", "but", "surely", "over", "the", "years", "it", "has", "become", "corrupt", ".", "It's", "currently", "a", "\u2018", "follow", "the", "money", "'", "organization", ".", "The", "Director-General", "just", "suggested", "the", "US", "may", "become", "the", "epicenter", ".", "Key", "Word", ":", "MAY", ".", "It's", "supposition", ".", "\ud83c\uddfa", "\ud83c\uddf8"]} -{"id": "1963-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Some say defying the odds is electing a social democrat to the highest office in bourgeois politics, but I say it's building a successful communist, worker's movement in the epicenter of imperialist empire.", "token_idx_1": 31, "text_start_1": 174, "text_end_1": 183, "date_1": "2019-03", "text_2": "Lagos State acquired 200 disinfecting machines, already started disinfecting all major highways, bus stops, markets, parks & other public areas on Saturday. Lagos is the epicenter of the Covid 19 virus in Nigeria. The State Government yet to impose curfew. 
#NewsBits", "token_idx_2": 30, "text_start_2": 174, "text_end_2": 183, "date_2": "2020-03", "text_1_tokenized": ["Some", "say", "defying", "the", "odds", "is", "electing", "a", "social", "democrat", "to", "the", "highest", "office", "in", "bourgeois", "politics", ",", "but", "I", "say", "it's", "building", "a", "successful", "communist", ",", "worker's", "movement", "in", "the", "epicenter", "of", "imperialist", "empire", "."], "text_2_tokenized": ["Lagos", "State", "acquired", "200", "disinfecting", "machines", ",", "already", "started", "disinfecting", "all", "major", "highways", ",", "bus", "stops", ",", "markets", ",", "parks", "&", "other", "public", "areas", "on", "Saturday", ".", "Lagos", "is", "the", "epicenter", "of", "the", "Covid", "19", "virus", "in", "Nigeria", ".", "The", "State", "Government", "yet", "to", "impose", "curfew", ".", "#NewsBits"]} -{"id": "1964-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Yo this guy wants to make KD the epicenter of car manufacturing in \ud83c\uddf3\ud83c\uddec but plis, we need more power for this! The reason I get 18+ hours of light rn is b/c demand here is way lower than in Lagos. Revitalization of all this manufacturing can't happen w/o way more power capacity \ud83d\ude4f\ud83c\udffe", "token_idx_1": 8, "text_start_1": 33, "text_end_1": 42, "date_1": "2019-03", "text_2": "Chinese president Xi went to Wuhan, the epicenter of #COVID19 with a normal mask, no much protection, do you think what am thinking??? Everyone one wants to be superior", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 49, "date_2": "2020-03", "text_1_tokenized": ["Yo", "this", "guy", "wants", "to", "make", "KD", "the", "epicenter", "of", "car", "manufacturing", "in", "\ud83c\uddf3", "\ud83c\uddec", "but", "plis", ",", "we", "need", "more", "power", "for", "this", "!", "The", "reason", "I", "get", "18", "+", "hours", "of", "light", "rn", "is", "b", "/", "c", "demand", "here", "is", "way", "lower", "than", "in", "Lagos", ".", "Revitalization", "of", "all", "this", "manufacturing", "can't", "happen", "w", "/", "o", "way", "more", "power", "capacity", "\ud83d\ude4f\ud83c\udffe"], "text_2_tokenized": ["Chinese", "president", "Xi", "went", "to", "Wuhan", ",", "the", "epicenter", "of", "#COVID19", "with", "a", "normal", "mask", ",", "no", "much", "protection", ",", "do", "you", "think", "what", "am", "thinking", "?", "?", "?", "Everyone", "one", "wants", "to", "be", "superior"]} -{"id": "1965-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "Leaving out outliers like Storrs and wherever the hell South Florida plays, where's the epicenter for the AAC tourney so Memphis doesn't have home court advantage? Louisville? Can't be STL because of Arch Madness and Blues hockey.", "token_idx_1": 15, "text_start_1": 88, "text_end_1": 97, "date_1": "2019-03", "text_2": "Warnings from the World Health Organization that the U.S., with its accelerated rates of infection (40 percent of all new cases in the past 24 hours), could become an epicenter of the coronavirus outbreak. 
SCARY", "token_idx_2": 36, "text_start_2": 167, "text_end_2": 176, "date_2": "2020-03", "text_1_tokenized": ["Leaving", "out", "outliers", "like", "Storrs", "and", "wherever", "the", "hell", "South", "Florida", "plays", ",", "where's", "the", "epicenter", "for", "the", "AAC", "tourney", "so", "Memphis", "doesn't", "have", "home", "court", "advantage", "?", "Louisville", "?", "Can't", "be", "STL", "because", "of", "Arch", "Madness", "and", "Blues", "hockey", "."], "text_2_tokenized": ["Warnings", "from", "the", "World", "Health", "Organization", "that", "the", "U", ".", "S", ".", ",", "with", "its", "accelerated", "rates", "of", "infection", "(", "40", "percent", "of", "all", "new", "cases", "in", "the", "past", "24", "hours", ")", ",", "could", "become", "an", "epicenter", "of", "the", "coronavirus", "outbreak", ".", "SCARY"]} -{"id": "1966-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Holt schools is the epicenter of sexual assualt. I've heard so many stories about teachers, and people's partners literally just being predators. It's like a breeding ground for predators dude", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 29, "date_1": "2019-03", "text_2": "Why has it taken so long to say this? \"There was a proposal to isolate people coming from the epicenter, coming from China,\" he said. \"Then it became seen as racist, but they were people coming from the outbreak.\" That, he said, led to the current devastating situation.\"", "token_idx_2": 21, "text_start_2": 94, "text_end_2": 103, "date_2": "2020-03", "text_1_tokenized": ["Holt", "schools", "is", "the", "epicenter", "of", "sexual", "assualt", ".", "I've", "heard", "so", "many", "stories", "about", "teachers", ",", "and", "people's", "partners", "literally", "just", "being", "predators", ".", "It's", "like", "a", "breeding", "ground", "for", "predators", "dude"], "text_2_tokenized": ["Why", "has", "it", "taken", "so", "long", "to", "say", "this", "?", "\"", "There", "was", "a", "proposal", "to", "isolate", "people", "coming", "from", "the", "epicenter", ",", "coming", "from", "China", ",", "\"", "he", "said", ".", "\"", "Then", "it", "became", "seen", "as", "racist", ",", "but", "they", "were", "people", "coming", "from", "the", "outbreak", ".", "\"", "That", ",", "he", "said", ",", "led", "to", "the", "current", "devastating", "situation", ".", "\""]} -{"id": "1967-epicenter", "word": "epicenter", "label_binary": 1, "text_1": "Ive been at the epicenter of more drama during the last 2.5 years of my Army career, than the entire 4 years I spent in highschool....", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 25, "date_1": "2019-03", "text_2": "It's silly seeing all these athletes and celebrities able to get tested for Covid. 
Yet here in the epicenter of the nation for this pandemic, I can't even get my fellow EMT's who have been on the front lines ANY testing.", "token_idx_2": 19, "text_start_2": 99, "text_end_2": 108, "date_2": "2020-03", "text_1_tokenized": ["Ive", "been", "at", "the", "epicenter", "of", "more", "drama", "during", "the", "last", "2.5", "years", "of", "my", "Army", "career", ",", "than", "the", "entire", "4", "years", "I", "spent", "in", "highschool", "..."], "text_2_tokenized": ["It's", "silly", "seeing", "all", "these", "athletes", "and", "celebrities", "able", "to", "get", "tested", "for", "Covid", ".", "Yet", "here", "in", "the", "epicenter", "of", "the", "nation", "for", "this", "pandemic", ",", "I", "can't", "even", "get", "my", "fellow", "EMT's", "who", "have", "been", "on", "the", "front", "lines", "ANY", "testing", "."]} -{"id": "1968-epicenter", "word": "epicenter", "label_binary": 0, "text_1": "For those who've been to Tsavo West , the said epicenter of this tremor, there are magnificent Shetani lava flows. Spectacular black lava spreads for miles! The flows are estimated to have been formed 200 years ago from a volcanic eruption! Maybe another eruption is due! #tremor", "token_idx_1": 10, "text_start_1": 47, "text_end_1": 56, "date_1": "2019-03", "text_2": "\u201ctHE cITy iS yOUr caMpUs\u201d No, the city is the epicenter", "token_idx_2": 13, "text_start_2": 46, "text_end_2": 55, "date_2": "2020-03", "text_1_tokenized": ["For", "those", "who've", "been", "to", "Tsavo", "West", ",", "the", "said", "epicenter", "of", "this", "tremor", ",", "there", "are", "magnificent", "Shetani", "lava", "flows", ".", "Spectacular", "black", "lava", "spreads", "for", "miles", "!", "The", "flows", "are", "estimated", "to", "have", "been", "formed", "200", "years", "ago", "from", "a", "volcanic", "eruption", "!", "Maybe", "another", "eruption", "is", "due", "!", "#tremor"], "text_2_tokenized": ["\u201c", "tHE", "cITy", "iS", "yOUr", "caMpUs", "\u201d", "No", ",", "the", "city", "is", "the", "epicenter"]} -{"id": "0877-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "UPDATE on David Rodriguez Jr he is slowly waking him up but still on the ventilator, still on dialysis. Continued Prayers.", "token_idx_1": 15, "text_start_1": 73, "text_end_1": 83, "date_1": "2019-03", "text_2": "See how useless Nigeria is. Nasarawa State government went ahead and purchase 24 TOYOTA HILUX valued 21m each for the state cabinet members, while the state hospitals doesn't have a single ventilator to fight #COVID19 . 
Those Nigerian leaders, are they human or demons ???", "token_idx_2": 33, "text_start_2": 189, "text_end_2": 199, "date_2": "2020-03", "text_1_tokenized": ["UPDATE", "on", "David", "Rodriguez", "Jr", "he", "is", "slowly", "waking", "him", "up", "but", "still", "on", "the", "ventilator", ",", "still", "on", "dialysis", ".", "Continued", "Prayers", "."], "text_2_tokenized": ["See", "how", "useless", "Nigeria", "is", ".", "Nasarawa", "State", "government", "went", "ahead", "and", "purchase", "24", "TOYOTA", "HILUX", "valued", "21m", "each", "for", "the", "state", "cabinet", "members", ",", "while", "the", "state", "hospitals", "doesn't", "have", "a", "single", "ventilator", "to", "fight", "#COVID19", ".", "Those", "Nigerian", "leaders", ",", "are", "they", "human", "or", "demons", "?", "?", "?"]} -{"id": "0878-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "I was already in an incubator and on a ventilator, AND due to have a bunch of tests to see how my various organs were functioning, my mum did not want to give me the name of her sister who died as a kid due to a heart condition.", "token_idx_1": 9, "text_start_1": 39, "text_end_1": 49, "date_1": "2019-03", "text_2": "#FireFauci Trumpers still think this is a hoax? When they get #Covid19, I'm going to bring them a Tony, an Oscar, and Drama Desk award instead of a ventilator. Let's see how funny it is then.", "token_idx_2": 32, "text_start_2": 148, "text_end_2": 158, "date_2": "2020-03", "text_1_tokenized": ["I", "was", "already", "in", "an", "incubator", "and", "on", "a", "ventilator", ",", "AND", "due", "to", "have", "a", "bunch", "of", "tests", "to", "see", "how", "my", "various", "organs", "were", "functioning", ",", "my", "mum", "did", "not", "want", "to", "give", "me", "the", "name", "of", "her", "sister", "who", "died", "as", "a", "kid", "due", "to", "a", "heart", "condition", "."], "text_2_tokenized": ["#FireFauci", "Trumpers", "still", "think", "this", "is", "a", "hoax", "?", "When", "they", "get", "#Covid19", ",", "I'm", "going", "to", "bring", "them", "a", "Tony", ",", "an", "Oscar", ",", "and", "Drama", "Desk", "award", "instead", "of", "a", "ventilator", ".", "Let's", "see", "how", "funny", "it", "is", "then", "."]} -{"id": "0879-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Yo bitch just passed his ventilator midterm \ud83d\udc45\ud83d\udc45 guess what thottie knows how to run life support machines", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 35, "date_1": "2019-03", "text_2": "Nobody who needed a ventilator didn't get a ventilator. Really? What about the people who never made it to the hospital to try to get a ventilator and died at home or in a nursing home?", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["Yo", "bitch", "just", "passed", "his", "ventilator", "midterm", "\ud83d\udc45", "\ud83d\udc45", "guess", "what", "thottie", "knows", "how", "to", "run", "life", "support", "machines"], "text_2_tokenized": ["Nobody", "who", "needed", "a", "ventilator", "didn't", "get", "a", "ventilator", ".", "Really", "?", "What", "about", "the", "people", "who", "never", "made", "it", "to", "the", "hospital", "to", "try", "to", "get", "a", "ventilator", "and", "died", "at", "home", "or", "in", "a", "nursing", "home", "?"]} -{"id": "0885-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "S.C. House has a moment of silent prayer for Reps. Mac Toole and Ronnie Young. Rep. 
Bill Sandifer says Toole was hospitalized in critical condition a couple days ago. He was recently taken off a ventilator, but is still in the ICU. #sctweets", "token_idx_1": 42, "text_start_1": 195, "text_end_1": 205, "date_1": "2019-03", "text_2": "Trump is just \"distracting\" from this stark reality he/others are not directly addressing with ventilator counts, proof of stock piles, proof of actual Test Kit numbers (Imagine how many rural states are infected that DON'T KNOW). PRESS THEM #Distraction", "token_idx_2": 18, "text_start_2": 95, "text_end_2": 105, "date_2": "2020-03", "text_1_tokenized": ["S", ".", "C", ".", "House", "has", "a", "moment", "of", "silent", "prayer", "for", "Reps", ".", "Mac", "Toole", "and", "Ronnie", "Young", ".", "Rep", ".", "Bill", "Sandifer", "says", "Toole", "was", "hospitalized", "in", "critical", "condition", "a", "couple", "days", "ago", ".", "He", "was", "recently", "taken", "off", "a", "ventilator", ",", "but", "is", "still", "in", "the", "ICU", ".", "#sctweets"], "text_2_tokenized": ["Trump", "is", "just", "\"", "distracting", "\"", "from", "this", "stark", "reality", "he", "/", "others", "are", "not", "directly", "addressing", "with", "ventilator", "counts", ",", "proof", "of", "stock", "piles", ",", "proof", "of", "actual", "Test", "Kit", "numbers", "(", "Imagine", "how", "many", "rural", "states", "are", "infected", "that", "DON'T", "KNOW", ")", ".", "PRESS", "THEM", "#Distraction"]} -{"id": "0886-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "I've held the hand of a man as his wife of 60 yrs slipped away, I've cracked the ribs of a drug addict who wanted to get better moments before she coded, and I've adjusted the ventilator of a SARS pt who couldn't breathe #showmeyourcards", "token_idx_1": 38, "text_start_1": 176, "text_end_1": 186, "date_1": "2019-03", "text_2": "During a press briefing just now, the Current Occupant just turned a question about ventilator shortage into a speech about his personality quotas. \ud83d\ude44 #TrumpPressConference", "token_idx_2": 15, "text_start_2": 84, "text_end_2": 94, "date_2": "2020-03", "text_1_tokenized": ["I've", "held", "the", "hand", "of", "a", "man", "as", "his", "wife", "of", "60", "yrs", "slipped", "away", ",", "I've", "cracked", "the", "ribs", "of", "a", "drug", "addict", "who", "wanted", "to", "get", "better", "moments", "before", "she", "coded", ",", "and", "I've", "adjusted", "the", "ventilator", "of", "a", "SARS", "pt", "who", "couldn't", "breathe", "#showmeyourcards"], "text_2_tokenized": ["During", "a", "press", "briefing", "just", "now", ",", "the", "Current", "Occupant", "just", "turned", "a", "question", "about", "ventilator", "shortage", "into", "a", "speech", "about", "his", "personality", "quotas", ".", "\ud83d\ude44", "#TrumpPressConference"]} -{"id": "0887-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Still going strong on today's stream, but got some bad news about my sister @christypasq. She's back on the ventilator and not doing well. Please send good vibes and your thoughts and prayers. Stay strong, sis. You got this.", "token_idx_1": 21, "text_start_1": 108, "text_end_1": 118, "date_1": "2019-03", "text_2": "#coronavirus threat to Kurds and Syrian refugees , but no UN or WHO aid going to Syrian Kurds in Rojava, Only to Assad.Yet houses 600,000 IDPs. Turkey cut water, closed borders. Attacks. No testing kits, 1 ventilator per 100,000 people. 
Why this silence?", "token_idx_2": 42, "text_start_2": 206, "text_end_2": 216, "date_2": "2020-03", "text_1_tokenized": ["Still", "going", "strong", "on", "today's", "stream", ",", "but", "got", "some", "bad", "news", "about", "my", "sister", "@christypasq", ".", "She's", "back", "on", "the", "ventilator", "and", "not", "doing", "well", ".", "Please", "send", "good", "vibes", "and", "your", "thoughts", "and", "prayers", ".", "Stay", "strong", ",", "sis", ".", "You", "got", "this", "."], "text_2_tokenized": ["#coronavirus", "threat", "to", "Kurds", "and", "Syrian", "refugees", ",", "but", "no", "UN", "or", "WHO", "aid", "going", "to", "Syrian", "Kurds", "in", "Rojava", ",", "Only", "to", "Assad.Yet", "houses", "600,000", "IDPs", ".", "Turkey", "cut", "water", ",", "closed", "borders", ".", "Attacks", ".", "No", "testing", "kits", ",", "1", "ventilator", "per", "100,000", "people", ".", "Why", "this", "silence", "?"]} -{"id": "0888-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "my dad's awake & breathing on his own\ud83e\udd70 no more sedation or ventilator he's doing so good", "token_idx_1": 13, "text_start_1": 63, "text_end_1": 73, "date_1": "2019-03", "text_2": "Per my last tweet, applications out for who gets to pull my ventilator plug out when the time comes. #yikes", "token_idx_2": 13, "text_start_2": 60, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["my", "dad's", "awake", "&", "breathing", "on", "his", "own", "\ud83e\udd70", "no", "more", "sedation", "or", "ventilator", "he's", "doing", "so", "good"], "text_2_tokenized": ["Per", "my", "last", "tweet", ",", "applications", "out", "for", "who", "gets", "to", "pull", "my", "ventilator", "plug", "out", "when", "the", "time", "comes", ".", "#yikes"]} -{"id": "0889-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Whoever reads this please please pray for my husband. He's in critical condition on a ventilator and has coded more than five times.", "token_idx_1": 16, "text_start_1": 86, "text_end_1": 96, "date_1": "2019-03", "text_2": "When you look at what's happening, again, everything as far as what the people in this city are doing it's positive. The hospitals are all reporting their bed usage, their ventilator usage down 5-10% from just 2 days ago. @mayormikeduggan", "token_idx_2": 34, "text_start_2": 172, "text_end_2": 182, "date_2": "2020-03", "text_1_tokenized": ["Whoever", "reads", "this", "please", "please", "pray", "for", "my", "husband", ".", "He's", "in", "critical", "condition", "on", "a", "ventilator", "and", "has", "coded", "more", "than", "five", "times", "."], "text_2_tokenized": ["When", "you", "look", "at", "what's", "happening", ",", "again", ",", "everything", "as", "far", "as", "what", "the", "people", "in", "this", "city", "are", "doing", "it's", "positive", ".", "The", "hospitals", "are", "all", "reporting", "their", "bed", "usage", ",", "their", "ventilator", "usage", "down", "5-10", "%", "from", "just", "2", "days", "ago", ".", "@mayormikeduggan"]} -{"id": "0890-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "They took my aunt off the ventilator and she's responding to stimuli. 
I hope this is a good sign, but I know no matter what this is going to be a long, hard road for her, so I just keep praying for strength for her to get through it, and to know she's not alone.", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 36, "date_1": "2019-03", "text_2": "Me, a Wisconsinite, a patriot: *(slaps I voted sticker on side of my ventilator)*", "token_idx_2": 18, "text_start_2": 69, "text_end_2": 79, "date_2": "2020-03", "text_1_tokenized": ["They", "took", "my", "aunt", "off", "the", "ventilator", "and", "she's", "responding", "to", "stimuli", ".", "I", "hope", "this", "is", "a", "good", "sign", ",", "but", "I", "know", "no", "matter", "what", "this", "is", "going", "to", "be", "a", "long", ",", "hard", "road", "for", "her", ",", "so", "I", "just", "keep", "praying", "for", "strength", "for", "her", "to", "get", "through", "it", ",", "and", "to", "know", "she's", "not", "alone", "."], "text_2_tokenized": ["Me", ",", "a", "Wisconsinite", ",", "a", "patriot", ":", "*", "(", "slaps", "I", "voted", "sticker", "on", "side", "of", "my", "ventilator", ")", "*"]} -{"id": "0891-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "No good news for Danny MoonBeam. He is still in bad shape. They are juggling mess to keep his kidneys from failing and are having to keep him heavily sedated because of the ventilator. The pneumonia is in the new lung\ud83d\ude12\ud83d\ude1e.", "token_idx_1": 35, "text_start_1": 173, "text_end_1": 183, "date_1": "2019-03", "text_2": "Every time he says ventilator, drink. Unused ventilators, two shots. #TrumpPressConf #Ventilator", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 29, "date_2": "2020-03", "text_1_tokenized": ["No", "good", "news", "for", "Danny", "MoonBeam", ".", "He", "is", "still", "in", "bad", "shape", ".", "They", "are", "juggling", "mess", "to", "keep", "his", "kidneys", "from", "failing", "and", "are", "having", "to", "keep", "him", "heavily", "sedated", "because", "of", "the", "ventilator", ".", "The", "pneumonia", "is", "in", "the", "new", "lung", "\ud83d\ude12", "\ud83d\ude1e", "."], "text_2_tokenized": ["Every", "time", "he", "says", "ventilator", ",", "drink", ".", "Unused", "ventilators", ",", "two", "shots", ".", "#TrumpPressConf", "#Ventilator"]} -{"id": "0892-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "History will never forgive the people who has brought down BSNL on ventilator, the company which brought telecom revoluyion in the country #BSNLKeSaathNyayHo", "token_idx_1": 12, "text_start_1": 67, "text_end_1": 77, "date_1": "2019-03", "text_2": "Bombardier won't make a single ventilator after Canadians have bailed them out tens of billions for decades.", "token_idx_2": 5, "text_start_2": 31, "text_end_2": 41, "date_2": "2020-03", "text_1_tokenized": ["History", "will", "never", "forgive", "the", "people", "who", "has", "brought", "down", "BSNL", "on", "ventilator", ",", "the", "company", "which", "brought", "telecom", "revoluyion", "in", "the", "country", "#BSNLKeSaathNyayHo"], "text_2_tokenized": ["Bombardier", "won't", "make", "a", "single", "ventilator", "after", "Canadians", "have", "bailed", "them", "out", "tens", "of", "billions", "for", "decades", "."]} -{"id": "0893-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Country club estates Ridge Vent ventilator.", "token_idx_1": 5, "text_start_1": 32, "text_end_1": 42, "date_1": "2019-03", "text_2": "\u2014\u2014> RT @alimhaider: the testing fetish is even dumber than the ventilator fetish", "token_idx_2": 14, "text_start_2": 66, "text_end_2": 76, "date_2": "2020-03", "text_1_tokenized": ["Country", "club", "estates", "Ridge", "Vent", "ventilator", "."], "text_2_tokenized": ["\u2014", "\u2014", ">", "RT", "@alimhaider", ":", "the", "testing", "fetish", "is", "even", "dumber", "than", "the", "ventilator", "fetish"]} -{"id": "0894-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "\u201cThe four patients have been put on ventilator support. They have tested positive for H1N1,\u201d a PMC health official said.", "token_idx_1": 8, "text_start_1": 36, "text_end_1": 46, "date_1": "2019-03", "text_2": "\u201cA ventilator is a big deal.\u201d - Donald J Trump. \ud83e\udd26\u200d\u2640\ufe0f", "token_idx_2": 2, "text_start_2": 3, "text_end_2": 13, "date_2": "2020-03", "text_1_tokenized": ["\u201c", "The", "four", "patients", "have", "been", "put", "on", "ventilator", "support", ".", "They", "have", "tested", "positive", "for", "H1N1", ",", "\u201d", "a", "PMC", "health", "official", "said", "."], "text_2_tokenized": ["\u201c", "A", "ventilator", "is", "a", "big", "deal", ".", "\u201d", "-", "Donald", "J", "Trump", ".", "\ud83e\udd26\u200d\u2640", "\ufe0f"]} -{"id": "0895-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Pls pray for my cousin who is on ventilator", "token_idx_1": 8, "text_start_1": 33, "text_end_1": 43, "date_1": "2019-03", "text_2": "#Coronavirus The University of Illinois developed an emergency portable ventilator prototype, ran successful tests on it 8-9 days ago. Who is now manufacturing them? How many produced to date? If none, why?", "token_idx_2": 9, "text_start_2": 72, "text_end_2": 82, "date_2": "2020-03", "text_1_tokenized": ["Pls", "pray", "for", "my", "cousin", "who", "is", "on", "ventilator"], "text_2_tokenized": ["#Coronavirus", "The", "University", "of", "Illinois", "developed", "an", "emergency", "portable", "ventilator", "prototype", ",", "ran", "successful", "tests", "on", "it", "8-9", "days", "ago", ".", "Who", "is", "now", "manufacturing", "them", "?", "How", "many", "produced", "to", "date", "?", "If", "none", ",", "why", "?"]} -{"id": "0896-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Papa is off the ventilator. \ud83d\udc97\ud83d\udc97\ud83d\udc97\ud83d\udc97\ud83d\udc97\ud83d\udc97", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 26, "date_1": "2019-03", "text_2": "I mean you are confronted with people who are young, maybe 22, they see for the first time in their lvies a ventilator in use on a patent. They know nothing about Corona (of course, how?) they know nothing about my chronic disease, really nothing.", "token_idx_2": 24, "text_start_2": 108, "text_end_2": 118, "date_2": "2020-03", "text_1_tokenized": ["Papa", "is", "off", "the", "ventilator", ".", "\ud83d\udc97", "\ud83d\udc97", "\ud83d\udc97"], "text_2_tokenized": ["I", "mean", "you", "are", "confronted", "with", "people", "who", "are", "young", ",", "maybe", "22", ",", "they", "see", "for", "the", "first", "time", "in", "their", "lvies", "a", "ventilator", "in", "use", "on", "a", "patent", ".", "They", "know", "nothing", "about", "Corona", "(", "of", "course", ",", "how", "?", ")", "they", "know", "nothing", "about", "my", "chronic", "disease", ",", "really", "nothing", "."]} -{"id": "0897-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "The society has degraded so much ,that all what is needed for some women is \"sex\"... Even if that comes at the cost of their innocent kids' lives... 
One such kid is hanging on the ventilator thread in Kerala... What was his crime? Being born to such nymphomaniac", "token_idx_1": 41, "text_start_1": 180, "text_end_1": 190, "date_1": "2019-03", "text_2": "hehe citations for ignoring stay at home orders when not doing essential business. Three citations eliminate you from occupying a much-needed ventilator.", "token_idx_2": 22, "text_start_2": 142, "text_end_2": 152, "date_2": "2020-03", "text_1_tokenized": ["The", "society", "has", "degraded", "so", "much", ",", "that", "all", "what", "is", "needed", "for", "some", "women", "is", "\"", "sex", "\"", "...", "Even", "if", "that", "comes", "at", "the", "cost", "of", "their", "innocent", "kids", "'", "lives", "...", "One", "such", "kid", "is", "hanging", "on", "the", "ventilator", "thread", "in", "Kerala", "...", "What", "was", "his", "crime", "?", "Being", "born", "to", "such", "nymphomaniac"], "text_2_tokenized": ["hehe", "citations", "for", "ignoring", "stay", "at", "home", "orders", "when", "not", "doing", "essential", "business", ".", "Three", "citations", "eliminate", "you", "from", "occupying", "a", "much-needed", "ventilator", "."]} -{"id": "0898-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "More on my brother's condition--he stays on the ventilator today, off it tomorrow, & in ICU through Sunday. To the many #Christian brothers & sisters who #prayed for Norman, thank you again! I love you! (Prayers for long-term recovery would be appreciated.) Glory to our God!", "token_idx_1": 8, "text_start_1": 48, "text_end_1": 58, "date_1": "2019-03", "text_2": "My parents tested negative for COVID today. \ud83d\ude4f\ud83c\udffc My mom works at a nursing home and one of the patients is positive. Another nurse my mom works with is on a ventilator now. Just hoping for the best. It's all so scary.", "token_idx_2": 33, "text_start_2": 155, "text_end_2": 165, "date_2": "2020-03", "text_1_tokenized": ["More", "on", "my", "brother's", "condition--he", "stays", "on", "the", "ventilator", "today", ",", "off", "it", "tomorrow", ",", "&", "in", "ICU", "through", "Sunday", ".", "To", "the", "many", "#Christian", "brothers", "&", "sisters", "who", "#prayed", "for", "Norman", ",", "thank", "you", "again", "!", "I", "love", "you", "!", "(", "Prayers", "for", "long-term", "recovery", "would", "be", "appreciated", ".", ")", "Glory", "to", "our", "God", "!"], "text_2_tokenized": ["My", "parents", "tested", "negative", "for", "COVID", "today", ".", "\ud83d\ude4f\ud83c\udffc", "My", "mom", "works", "at", "a", "nursing", "home", "and", "one", "of", "the", "patients", "is", "positive", ".", "Another", "nurse", "my", "mom", "works", "with", "is", "on", "a", "ventilator", "now", ".", "Just", "hoping", "for", "the", "best", ".", "It's", "all", "so", "scary", "."]} -{"id": "0899-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Only 10% of the references from the surviving sepsis guidelines emanate from low resource settings. Applying these guidelines have resulted in increased mortality in some settings. You hydrate generously but have no ventilator if the patient congests. Eye opening. #ECCMID2019", "token_idx_1": 35, "text_start_1": 216, "text_end_1": 226, "date_1": "2019-03", "text_2": "You're an ICU doctor with 4 patients dying from COVID-19 and need a ventilator. You have one ventilator. 
Who gets it?", "token_idx_2": 15, "text_start_2": 68, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Only", "10", "%", "of", "the", "references", "from", "the", "surviving", "sepsis", "guidelines", "emanate", "from", "low", "resource", "settings", ".", "Applying", "these", "guidelines", "have", "resulted", "in", "increased", "mortality", "in", "some", "settings", ".", "You", "hydrate", "generously", "but", "have", "no", "ventilator", "if", "the", "patient", "congests", ".", "Eye", "opening", ".", "#ECCMID2019"], "text_2_tokenized": ["You're", "an", "ICU", "doctor", "with", "4", "patients", "dying", "from", "COVID", "-", "19", "and", "need", "a", "ventilator", ".", "You", "have", "one", "ventilator", ".", "Who", "gets", "it", "?"]} -{"id": "0900-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "It's been long I'm on ventilator of Ur memories ...", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 32, "date_1": "2019-03", "text_2": "I was just informed that my baby brother who is Down's Syndrome and has cerebral palsy and is 50 year old, was admitted to ICU with fever and respiratory distress. He is on a ventilator and is being tested for COVID-19. Prayers please. \ud83e\udd7a", "token_idx_2": 36, "text_start_2": 175, "text_end_2": 185, "date_2": "2020-03", "text_1_tokenized": ["It's", "been", "long", "I'm", "on", "ventilator", "of", "Ur", "memories", "..."], "text_2_tokenized": ["I", "was", "just", "informed", "that", "my", "baby", "brother", "who", "is", "Down's", "Syndrome", "and", "has", "cerebral", "palsy", "and", "is", "50", "year", "old", ",", "was", "admitted", "to", "ICU", "with", "fever", "and", "respiratory", "distress", ".", "He", "is", "on", "a", "ventilator", "and", "is", "being", "tested", "for", "COVID", "-", "19", ".", "Prayers", "please", ".", "\ud83e\udd7a"]} -{"id": "0901-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Adult son of my patient called at 0200 when he found out his mom is 44 years old in the ICU on a ventilator. No accident. No cancer. \u201cShe just has the FLU?!? The F$#*ing FLU?!? Are you kidding me?!?\u201d No sir. I'm not. It's not, \u201cJust the flu.\u201d It kills people. #Vaccinate", "token_idx_1": 23, "text_start_1": 97, "text_end_1": 107, "date_1": "2019-03", "text_2": "#ClapForOurCarers ok I understand why people are clapping for the NHS, but wouldn't it be better instead of clapping to donate \u00a31/each (66 Million people) towards NHS so they can get the equipment they need? 
Clapping is not going to supply a mask or a ventilator.", "token_idx_2": 52, "text_start_2": 252, "text_end_2": 262, "date_2": "2020-03", "text_1_tokenized": ["Adult", "son", "of", "my", "patient", "called", "at", "0200", "when", "he", "found", "out", "his", "mom", "is", "44", "years", "old", "in", "the", "ICU", "on", "a", "ventilator", ".", "No", "accident", ".", "No", "cancer", ".", "\u201c", "She", "just", "has", "the", "FLU", "?", "!", "?", "The", "F", "$", "#", "*", "ing", "FLU", "?", "!", "?", "Are", "you", "kidding", "me", "?", "!", "?", "\u201d", "No", "sir", ".", "I'm", "not", ".", "It's", "not", ",", "\u201c", "Just", "the", "flu", ".", "\u201d", "It", "kills", "people", ".", "#Vaccinate"], "text_2_tokenized": ["#ClapForOurCarers", "ok", "I", "understand", "why", "people", "are", "clapping", "for", "the", "NHS", ",", "but", "wouldn't", "it", "be", "better", "instead", "of", "clapping", "to", "donate", "\u00a3", "1", "/", "each", "(", "66", "Million", "people", ")", "towards", "NHS", "so", "they", "can", "get", "the", "equipment", "they", "need", "?", "Clapping", "is", "not", "going", "to", "supply", "a", "mask", "or", "a", "ventilator", "."]} -{"id": "0902-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Finally up at the hospital visiting my mom. She's awake and off of the ventilator, but she's not herself. Thank you to some of the people who sent me nice messages and prayers. This is one of the hardest things I've had to witness.", "token_idx_1": 15, "text_start_1": 71, "text_end_1": 81, "date_1": "2019-03", "text_2": "The question that nobody asks and the question that I most hate the answer to is what happens if you do have a ventilator? What are your chances? If you do have the ventilator you know the answer to that question. And I hate giving the answer, so I don't want to get \u2018em there.", "token_idx_2": 23, "text_start_2": 111, "text_end_2": 121, "date_2": "2020-03", "text_1_tokenized": ["Finally", "up", "at", "the", "hospital", "visiting", "my", "mom", ".", "She's", "awake", "and", "off", "of", "the", "ventilator", ",", "but", "she's", "not", "herself", ".", "Thank", "you", "to", "some", "of", "the", "people", "who", "sent", "me", "nice", "messages", "and", "prayers", ".", "This", "is", "one", "of", "the", "hardest", "things", "I've", "had", "to", "witness", "."], "text_2_tokenized": ["The", "question", "that", "nobody", "asks", "and", "the", "question", "that", "I", "most", "hate", "the", "answer", "to", "is", "what", "happens", "if", "you", "do", "have", "a", "ventilator", "?", "What", "are", "your", "chances", "?", "If", "you", "do", "have", "the", "ventilator", "you", "know", "the", "answer", "to", "that", "question", ".", "And", "I", "hate", "giving", "the", "answer", ",", "so", "I", "don't", "want", "to", "get", "\u2018", "em", "there", "."]} -{"id": "0903-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Good morning world! Mum is finally off the ventilator. Please pray she sustains so that they can soon relieve her from the Critical Care Unit. 
It's been far too long and I miss mom and dad at home.", "token_idx_1": 9, "text_start_1": 43, "text_end_1": 53, "date_1": "2019-03", "text_2": "Why does the Corona Care Act pay a hospital $13000 for a Covid19 diagnosis and $39000 if you get put on a ventilator Yea...I believe all the numbers reported....Don't You???", "token_idx_2": 25, "text_start_2": 106, "text_end_2": 116, "date_2": "2020-03", "text_1_tokenized": ["Good", "morning", "world", "!", "Mum", "is", "finally", "off", "the", "ventilator", ".", "Please", "pray", "she", "sustains", "so", "that", "they", "can", "soon", "relieve", "her", "from", "the", "Critical", "Care", "Unit", ".", "It's", "been", "far", "too", "long", "and", "I", "miss", "mom", "and", "dad", "at", "home", "."], "text_2_tokenized": ["Why", "does", "the", "Corona", "Care", "Act", "pay", "a", "hospital", "$", "13000", "for", "a", "Covid", "19", "diagnosis", "and", "$", "39000", "if", "you", "get", "put", "on", "a", "ventilator", "Yea", "...", "I", "believe", "all", "the", "numbers", "reported", "...", "Don't", "You", "?", "?", "?"]} -{"id": "0904-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Other causes of hypotension other than active bleeding in trauma. 1. Hypoxic arrest (resulting cardiac ischaemia) 2. Brain injury 3. Neurogenic shock 4. Cardiac contusion 5. SIRS response 6. Cardiac arrhythmia 7. Peep too high on ventilator 8. Anaphylaxis to roc. Etc #SMACC", "token_idx_1": 46, "text_start_1": 230, "text_end_1": 240, "date_1": "2019-03", "text_2": "Update on Mom aka Alma Mattey: Good News! She's off the ventilator. She's will be recovering and then they will transfer her to physical therapy before sending her home within a week or so.", "token_idx_2": 13, "text_start_2": 56, "text_end_2": 66, "date_2": "2020-03", "text_1_tokenized": ["Other", "causes", "of", "hypotension", "other", "than", "active", "bleeding", "in", "trauma", ".", "1", ".", "Hypoxic", "arrest", "(", "resulting", "cardiac", "ischaemia", ")", "2", ".", "Brain", "injury", "3", ".", "Neurogenic", "shock", "4", ".", "Cardiac", "contusion", "5", ".", "SIRS", "response", "6", ".", "Cardiac", "arrhythmia", "7", ".", "Peep", "too", "high", "on", "ventilator", "8", ".", "Anaphylaxis", "to", "roc", ".", "Etc", "#SMACC"], "text_2_tokenized": ["Update", "on", "Mom", "aka", "Alma", "Mattey", ":", "Good", "News", "!", "She's", "off", "the", "ventilator", ".", "She's", "will", "be", "recovering", "and", "then", "they", "will", "transfer", "her", "to", "physical", "therapy", "before", "sending", "her", "home", "within", "a", "week", "or", "so", "."]} -{"id": "0905-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Since I don't have a will or anything I will continue to tweet my dying wishes so heres another one: don't keep my on a fucking ventilator I don't wanna contribute to the trillions wasted on excessive end of life care", "token_idx_1": 27, "text_start_1": 128, "text_end_1": 138, "date_1": "2019-03", "text_2": "#COVID19 basic facts: if you are a healthy individual and get Coronavirus, there's 50% chance you don't have any symptoms; 30% you will some mild symptoms; 20% you might need medical attention; 10% might experience severe symptoms and require ventilator. 
Less than 1% will die 1/", "token_idx_2": 48, "text_start_2": 243, "text_end_2": 253, "date_2": "2020-03", "text_1_tokenized": ["Since", "I", "don't", "have", "a", "will", "or", "anything", "I", "will", "continue", "to", "tweet", "my", "dying", "wishes", "so", "heres", "another", "one", ":", "don't", "keep", "my", "on", "a", "fucking", "ventilator", "I", "don't", "wanna", "contribute", "to", "the", "trillions", "wasted", "on", "excessive", "end", "of", "life", "care"], "text_2_tokenized": ["#COVID19", "basic", "facts", ":", "if", "you", "are", "a", "healthy", "individual", "and", "get", "Coronavirus", ",", "there's", "50", "%", "chance", "you", "don't", "have", "any", "symptoms", ";", "30", "%", "you", "will", "some", "mild", "symptoms", ";", "20", "%", "you", "might", "need", "medical", "attention", ";", "10", "%", "might", "experience", "severe", "symptoms", "and", "require", "ventilator", ".", "Less", "than", "1", "%", "will", "die", "1", "/"]} -{"id": "0906-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Just got new news on Mom's condition. Tests came back negative for cancer. Doc McAlpin (pulmonologist) thinks it's just seriously bad pneumonia & infection. Might get to go home in a couple of days, if she continues to improve. Also, they got the ventilator out. Hell yeah!", "token_idx_1": 51, "text_start_1": 251, "text_end_1": 261, "date_1": "2019-03", "text_2": "Classic Nolan sensationalist rubbernecking. He's currently speculating on the odds of Johnson dying if he goes on a ventilator and asking ppl about him like he's passed away before stating \"and we hope he's ok\" every few minutes to cover up the fact that he's a fucking ghoul.", "token_idx_2": 19, "text_start_2": 116, "text_end_2": 126, "date_2": "2020-03", "text_1_tokenized": ["Just", "got", "new", "news", "on", "Mom's", "condition", ".", "Tests", "came", "back", "negative", "for", "cancer", ".", "Doc", "McAlpin", "(", "pulmonologist", ")", "thinks", "it's", "just", "seriously", "bad", "pneumonia", "&", "infection", ".", "Might", "get", "to", "go", "home", "in", "a", "couple", "of", "days", ",", "if", "she", "continues", "to", "improve", ".", "Also", ",", "they", "got", "the", "ventilator", "out", ".", "Hell", "yeah", "!"], "text_2_tokenized": ["Classic", "Nolan", "sensationalist", "rubbernecking", ".", "He's", "currently", "speculating", "on", "the", "odds", "of", "Johnson", "dying", "if", "he", "goes", "on", "a", "ventilator", "and", "asking", "ppl", "about", "him", "like", "he's", "passed", "away", "before", "stating", "\"", "and", "we", "hope", "he's", "ok", "\"", "every", "few", "minutes", "to", "cover", "up", "the", "fact", "that", "he's", "a", "fucking", "ghoul", "."]} -{"id": "0907-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Gonna need some kind of ventilator", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 34, "date_1": "2019-03", "text_2": "Sheesh. Seeing that clip from @GM redoing a factory in Kokomo, Indiana and making a ventilator in 11 days brought a little tear to my eyes. Good Work! \ud83d\udc4f So grateful to everyone who made it possible. 
@FoxNews @POTUS", "token_idx_2": 17, "text_start_2": 84, "text_end_2": 94, "date_2": "2020-03", "text_1_tokenized": ["Gonna", "need", "some", "kind", "of", "ventilator"], "text_2_tokenized": ["Sheesh", ".", "Seeing", "that", "clip", "from", "@GM", "redoing", "a", "factory", "in", "Kokomo", ",", "Indiana", "and", "making", "a", "ventilator", "in", "11", "days", "brought", "a", "little", "tear", "to", "my", "eyes", ".", "Good", "Work", "!", "\ud83d\udc4f", "So", "grateful", "to", "everyone", "who", "made", "it", "possible", ".", "@FoxNews", "@POTUS"]} -{"id": "0908-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "what a difference 24 hours makes...Dads not out of the woods yet but after another CT scan there is no sign of the clot! He's awake and responding to nurses and they are gonna start weaning him off the ventilator God is Good \ud83d\ude4f", "token_idx_1": 42, "text_start_1": 202, "text_end_1": 212, "date_1": "2019-03", "text_2": "\"Anyone that wanted a ventilator got a ventilator\" Trump is at least smart enough to know that the dead do not speak, I guess... #TrumpPressConference #TrumpOwnsEveryDeath", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 32, "date_2": "2020-03", "text_1_tokenized": ["what", "a", "difference", "24", "hours", "makes", "...", "Dads", "not", "out", "of", "the", "woods", "yet", "but", "after", "another", "CT", "scan", "there", "is", "no", "sign", "of", "the", "clot", "!", "He's", "awake", "and", "responding", "to", "nurses", "and", "they", "are", "gonna", "start", "weaning", "him", "off", "the", "ventilator", "God", "is", "Good", "\ud83d\ude4f"], "text_2_tokenized": ["\"", "Anyone", "that", "wanted", "a", "ventilator", "got", "a", "ventilator", "\"", "Trump", "is", "at", "least", "smart", "enough", "to", "know", "that", "the", "dead", "do", "not", "speak", ",", "I", "guess", "...", "#TrumpPressConference", "#TrumpOwnsEveryDeath"]} -{"id": "0909-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Smartphones acting as ultrasound, ventilator and ECG machines. Now healthcare is being made more accessible using your mobile phones. \ud83d\udc4d\ud83c\udffb", "token_idx_1": 5, "text_start_1": 34, "text_end_1": 44, "date_1": "2019-03", "text_2": "How hard would it be for every consumer to have prepaid meter?? Is prepaid meter so hard to get or it's a ventilator", "token_idx_2": 24, "text_start_2": 106, "text_end_2": 116, "date_2": "2020-03", "text_1_tokenized": ["Smartphones", "acting", "as", "ultrasound", ",", "ventilator", "and", "ECG", "machines", ".", "Now", "healthcare", "is", "being", "made", "more", "accessible", "using", "your", "mobile", "phones", ".", "\ud83d\udc4d\ud83c\udffb"], "text_2_tokenized": ["How", "hard", "would", "it", "be", "for", "every", "consumer", "to", "have", "prepaid", "meter", "?", "?", "Is", "prepaid", "meter", "so", "hard", "to", "get", "or", "it's", "a", "ventilator"]} -{"id": "0910-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Modi Biopic and Namo tv banned by EC till elections are over. EC is on ventilator but still alive .", "token_idx_1": 16, "text_start_1": 71, "text_end_1": 81, "date_1": "2019-03", "text_2": "I would like Trump to explain what a ventilator is... I would bet my house he has no idea what it does... 
#COVIDCanada", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 47, "date_2": "2020-03", "text_1_tokenized": ["Modi", "Biopic", "and", "Namo", "tv", "banned", "by", "EC", "till", "elections", "are", "over", ".", "EC", "is", "on", "ventilator", "but", "still", "alive", "."], "text_2_tokenized": ["I", "would", "like", "Trump", "to", "explain", "what", "a", "ventilator", "is", "...", "I", "would", "bet", "my", "house", "he", "has", "no", "idea", "what", "it", "does", "...", "#COVIDCanada"]} -{"id": "0911-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Please pray for my gramma. She's in the ICU on a ventilator. And I can't lose one more person right now. She's been like a mother since my momma died when I was 7. Even though she's driven me crazy my whole life I can't see life without her. She's even raised Shaine with me.", "token_idx_1": 12, "text_start_1": 49, "text_end_1": 59, "date_1": "2019-03", "text_2": "Will anti-social distancing \u201cprotesters\u201d also volunteer their ventilator to someone else when the hospitals are full? #StayHome", "token_idx_2": 9, "text_start_2": 62, "text_end_2": 72, "date_2": "2020-03", "text_1_tokenized": ["Please", "pray", "for", "my", "gramma", ".", "She's", "in", "the", "ICU", "on", "a", "ventilator", ".", "And", "I", "can't", "lose", "one", "more", "person", "right", "now", ".", "She's", "been", "like", "a", "mother", "since", "my", "momma", "died", "when", "I", "was", "7", ".", "Even", "though", "she's", "driven", "me", "crazy", "my", "whole", "life", "I", "can't", "see", "life", "without", "her", ".", "She's", "even", "raised", "Shaine", "with", "me", "."], "text_2_tokenized": ["Will", "anti-social", "distancing", "\u201c", "protesters", "\u201d", "also", "volunteer", "their", "ventilator", "to", "someone", "else", "when", "the", "hospitals", "are", "full", "?", "#StayHome"]} -{"id": "0912-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Really hoping they can take Jack off the ventilator today. 
There's no telling when he'll actually be ready but please keep your fingers crossed for us, everyone.", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 51, "date_1": "2019-03", "text_2": "Hot take but if people don't want to practice social distancing they should have to automatically forfeit the ventilator they will eventually need to save somebody that followed suggested precautions \ud83e\udd37\ud83c\udffb\u200d\u2640\ufe0f in this country you are free to make choices but choices have consequences", "token_idx_2": 18, "text_start_2": 110, "text_end_2": 120, "date_2": "2020-03", "text_1_tokenized": ["Really", "hoping", "they", "can", "take", "Jack", "off", "the", "ventilator", "today", ".", "There's", "no", "telling", "when", "he'll", "actually", "be", "ready", "but", "please", "keep", "your", "fingers", "crossed", "for", "us", ",", "everyone", "."], "text_2_tokenized": ["Hot", "take", "but", "if", "people", "don't", "want", "to", "practice", "social", "distancing", "they", "should", "have", "to", "automatically", "forfeit", "the", "ventilator", "they", "will", "eventually", "need", "to", "save", "somebody", "that", "followed", "suggested", "precautions", "\ud83e\udd37\ud83c\udffb\u200d\u2640", "\ufe0f", "in", "this", "country", "you", "are", "free", "to", "make", "choices", "but", "choices", "have", "consequences"]} -{"id": "0913-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Babies with anopthalmia bilaterally are not PERRLA, kids with no arms do not have +2 radial pulses, & the non-verbal child on the ventilator did not tell you he feels safe in his home. I do see value in electronic reporting but please stop letting it discredit your assessment.", "token_idx_1": 26, "text_start_1": 134, "text_end_1": 144, "date_1": "2019-03", "text_2": "How is Russia doing w/COVID-19? Are they having ventilator issues, or are they all good? Seems some countries aren't being affected similarly per capital as others. Anybody?", "token_idx_2": 13, "text_start_2": 48, "text_end_2": 58, "date_2": "2020-03", "text_1_tokenized": ["Babies", "with", "anopthalmia", "bilaterally", "are", "not", "PERRLA", ",", "kids", "with", "no", "arms", "do", "not", "have", "+", "2", "radial", "pulses", ",", "&", "the", "non-verbal", "child", "on", "the", "ventilator", "did", "not", "tell", "you", "he", "feels", "safe", "in", "his", "home", ".", "I", "do", "see", "value", "in", "electronic", "reporting", "but", "please", "stop", "letting", "it", "discredit", "your", "assessment", "."], "text_2_tokenized": ["How", "is", "Russia", "doing", "w", "/", "COVID", "-", "19", "?", "Are", "they", "having", "ventilator", "issues", ",", "or", "are", "they", "all", "good", "?", "Seems", "some", "countries", "aren't", "being", "affected", "similarly", "per", "capital", "as", "others", ".", "Anybody", "?"]} -{"id": "0914-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "why do american say fan instead of ventilator? lmao literal cavemen county", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 45, "date_1": "2019-03", "text_2": "Trump just started his latest \"briefing\" with straight up lying about the US having the best test and more testing than any other country. He can't stop lying for one damn minute. Now he's claiming any person that needed a ventilator got one. 
I'm turning off his lies.", "token_idx_2": 44, "text_start_2": 223, "text_end_2": 233, "date_2": "2020-03", "text_1_tokenized": ["why", "do", "american", "say", "fan", "instead", "of", "ventilator", "?", "lmao", "literal", "cavemen", "county"], "text_2_tokenized": ["Trump", "just", "started", "his", "latest", "\"", "briefing", "\"", "with", "straight", "up", "lying", "about", "the", "US", "having", "the", "best", "test", "and", "more", "testing", "than", "any", "other", "country", ".", "He", "can't", "stop", "lying", "for", "one", "damn", "minute", ".", "Now", "he's", "claiming", "any", "person", "that", "needed", "a", "ventilator", "got", "one", ".", "I'm", "turning", "off", "his", "lies", "."]} -{"id": "0915-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Watching \"The Last Word With Lawrence O'Donnell\" on delay. @DrJasonJohnson just crushed it! I do believe that the fertilizer is in the process of meeting the ventilator. WOW!", "token_idx_1": 30, "text_start_1": 158, "text_end_1": 168, "date_1": "2019-03", "text_2": "Some positive trends emerging in the covid NJ data....hope it continues. Hospital admissions down, ventilator use down...positives down slightly too although still over 3K.", "token_idx_2": 18, "text_start_2": 99, "text_end_2": 109, "date_2": "2020-03", "text_1_tokenized": ["Watching", "\"", "The", "Last", "Word", "With", "Lawrence", "O'Donnell", "\"", "on", "delay", ".", "@DrJasonJohnson", "just", "crushed", "it", "!", "I", "do", "believe", "that", "the", "fertilizer", "is", "in", "the", "process", "of", "meeting", "the", "ventilator", ".", "WOW", "!"], "text_2_tokenized": ["Some", "positive", "trends", "emerging", "in", "the", "covid", "NJ", "data", "...", "hope", "it", "continues", ".", "Hospital", "admissions", "down", ",", "ventilator", "use", "down", "...", "positives", "down", "slightly", "too", "although", "still", "over", "3K", "."]} -{"id": "0916-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "SLEPT ETERNAL SLEEP: Nine months old minor Nashwa who was injected a wrong injection at Darul Sehat hospital Karachi and was on ventilator, departed from the world & died - Nashwa is in better place now for sure. Your killer's will be avenged, baby. RIP. \ud83d\ude10 \u2014 feeling sad", "token_idx_1": 23, "text_start_1": 128, "text_end_1": 138, "date_1": "2019-03", "text_2": "As nations are on lockdown one thing we shouldn't forget: strategies to continue and meet up or scale up food production. The more we sit the more we consume. 
Without food no one will operate a ventilator @OneAcreFund @agronigeria", "token_idx_2": 39, "text_start_2": 194, "text_end_2": 204, "date_2": "2020-03", "text_1_tokenized": ["SLEPT", "ETERNAL", "SLEEP", ":", "Nine", "months", "old", "minor", "Nashwa", "who", "was", "injected", "a", "wrong", "injection", "at", "Darul", "Sehat", "hospital", "Karachi", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", "&", "died", "-", "Nashwa", "is", "in", "better", "place", "now", "for", "sure", ".", "Your", "killer's", "will", "be", "avenged", ",", "baby", ".", "RIP", ".", "\ud83d\ude10", "\u2014", "feeling", "sad"], "text_2_tokenized": ["As", "nations", "are", "on", "lockdown", "one", "thing", "we", "shouldn't", "forget", ":", "strategies", "to", "continue", "and", "meet", "up", "or", "scale", "up", "food", "production", ".", "The", "more", "we", "sit", "the", "more", "we", "consume", ".", "Without", "food", "no", "one", "will", "operate", "a", "ventilator", "@OneAcreFund", "@agronigeria"]} -{"id": "0917-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Heart stopping description of waking-up on a ventilator. Can't help but grow new empathy after reading #medhumchat", "token_idx_1": 7, "text_start_1": 45, "text_end_1": 55, "date_1": "2019-03", "text_2": "I have permission to ask. M & O Bejec, mother from our church and her son, 20 days in ICU. 20 days on ventilator. Tomorrow their family has to make life altering decisions.\ufffc Thank you, again, for praying with us. We\u2764\ufe0four church family.", "token_idx_2": 27, "text_start_2": 106, "text_end_2": 116, "date_2": "2020-03", "text_1_tokenized": ["Heart", "stopping", "description", "of", "waking-up", "on", "a", "ventilator", ".", "Can't", "help", "but", "grow", "new", "empathy", "after", "reading", "#medhumchat"], "text_2_tokenized": ["I", "have", "permission", "to", "ask", ".", "M", "&", "O", "Bejec", ",", "mother", "from", "our", "church", "and", "her", "son", ",", "20", "days", "in", "ICU", ".", "20", "days", "on", "ventilator", ".", "Tomorrow", "their", "family", "has", "to", "make", "life", "altering", "decisions", ".", "\ufffc", "Thank", "you", ",", "again", ",", "for", "praying", "with", "us", ".", "We", "\u2764", "\ufe0four", "church", "family", "."]} -{"id": "0918-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Got a patient last night who fractured her spine, dislocated her shoulder, and cracked 7 ribs. Her sister has a cerebral hemorrhage and her other sister is in the ICU on a ventilator bc they were drunk and crashed. Call a fucking Uber thanks", "token_idx_1": 35, "text_start_1": 172, "text_end_1": 182, "date_1": "2019-03", "text_2": "Watching Chuck Todd on MSNBC, and he is interviewing Dr Charles Robertson, University of Mississippi, on his homemade ventilator. 
The last thing you need to do is advertise this where Trump will publicly say \"build your own ventilator!\" Some things should not be aired!", "token_idx_2": 21, "text_start_2": 118, "text_end_2": 128, "date_2": "2020-03", "text_1_tokenized": ["Got", "a", "patient", "last", "night", "who", "fractured", "her", "spine", ",", "dislocated", "her", "shoulder", ",", "and", "cracked", "7", "ribs", ".", "Her", "sister", "has", "a", "cerebral", "hemorrhage", "and", "her", "other", "sister", "is", "in", "the", "ICU", "on", "a", "ventilator", "bc", "they", "were", "drunk", "and", "crashed", ".", "Call", "a", "fucking", "Uber", "thanks"], "text_2_tokenized": ["Watching", "Chuck", "Todd", "on", "MSNBC", ",", "and", "he", "is", "interviewing", "Dr", "Charles", "Robertson", ",", "University", "of", "Mississippi", ",", "on", "his", "homemade", "ventilator", ".", "The", "last", "thing", "you", "need", "to", "do", "is", "advertise", "this", "where", "Trump", "will", "publicly", "say", "\"", "build", "your", "own", "ventilator", "!", "\"", "Some", "things", "should", "not", "be", "aired", "!"]} -{"id": "0919-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "My little brother is in shorts, with a T-shirt and the ventilator on. I am with long pants, long sleeve T-shirt and with blankets. We are in the same room.", "token_idx_1": 12, "text_start_1": 55, "text_end_1": 65, "date_1": "2019-03", "text_2": "My grandma two best friend from church have it as well and one on a ventilator this is TOO MUCH", "token_idx_2": 15, "text_start_2": 68, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["My", "little", "brother", "is", "in", "shorts", ",", "with", "a", "T-shirt", "and", "the", "ventilator", "on", ".", "I", "am", "with", "long", "pants", ",", "long", "sleeve", "T-shirt", "and", "with", "blankets", ".", "We", "are", "in", "the", "same", "room", "."], "text_2_tokenized": ["My", "grandma", "two", "best", "friend", "from", "church", "have", "it", "as", "well", "and", "one", "on", "a", "ventilator", "this", "is", "TOO", "MUCH"]} -{"id": "0920-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Sir @JPNadda,@narendramodi,my relative is admitted in Pvt hospital in Noida with brain surgery & chest infection and is on ventilator and critical condition,Dr said he has to be admitted in AIIMS for better treatment,if possible sir plz bcz without any assistance it is impossible", "token_idx_1": 23, "text_start_1": 127, "text_end_1": 137, "date_1": "2019-03", "text_2": "Anyone else super interested in how their fav tv characters are fairing through this COVID crisis? Is Lorelai's Inn housing HC workers during the shut down? Does Big get COVID and end up on a ventilator bc of his pre-existing heart issues? 
I could go on.", "token_idx_2": 37, "text_start_2": 192, "text_end_2": 202, "date_2": "2020-03", "text_1_tokenized": ["Sir", "@JPNadda", ",", "@narendramodi", ",", "my", "relative", "is", "admitted", "in", "Pvt", "hospital", "in", "Noida", "with", "brain", "surgery", "&", "chest", "infection", "and", "is", "on", "ventilator", "and", "critical", "condition", ",", "Dr", "said", "he", "has", "to", "be", "admitted", "in", "AIIMS", "for", "better", "treatment", ",", "if", "possible", "sir", "plz", "bcz", "without", "any", "assistance", "it", "is", "impossible"], "text_2_tokenized": ["Anyone", "else", "super", "interested", "in", "how", "their", "fav", "tv", "characters", "are", "fairing", "through", "this", "COVID", "crisis", "?", "Is", "Lorelai's", "Inn", "housing", "HC", "workers", "during", "the", "shut", "down", "?", "Does", "Big", "get", "COVID", "and", "end", "up", "on", "a", "ventilator", "bc", "of", "his", "pre-existing", "heart", "issues", "?", "I", "could", "go", "on", "."]} -{"id": "0921-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "NIV workshop - a ventilator, tubing, masks and mannikins. Hands on experience on Non Invasive Ventilation #Paedresp19", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 27, "date_1": "2019-03", "text_2": "The diff between a ventilator,CPAP,biPAP In March, the US Food and Drug Administration sent a letter with guidance to health care providers treating coronavirus that said CPAP and biPAP machines \"may be used to support patients with respiratory insufficiency provided appropriate", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 29, "date_2": "2020-03", "text_1_tokenized": ["NIV", "workshop", "-", "a", "ventilator", ",", "tubing", ",", "masks", "and", "mannikins", ".", "Hands", "on", "experience", "on", "Non", "Invasive", "Ventilation", "#Paedresp19"], "text_2_tokenized": ["The", "diff", "between", "a", "ventilator", ",", "CPAP", ",", "biPAP", "In", "March", ",", "the", "US", "Food", "and", "Drug", "Administration", "sent", "a", "letter", "with", "guidance", "to", "health", "care", "providers", "treating", "coronavirus", "that", "said", "CPAP", "and", "biPAP", "machines", "\"", "may", "be", "used", "to", "support", "patients", "with", "respiratory", "insufficiency", "provided", "appropriate"]} -{"id": "0922-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Not to mention this man has been with me before last year on the ventilator.....he never came of since like October \ud83d\ude30", "token_idx_1": 14, "text_start_1": 65, "text_end_1": 75, "date_1": "2019-03", "text_2": "MiL got a call from her friend's bro to tell her not to call her number bcs she cant speak. Friend is on a ventilator. MiL is coughing. 
She says she feels fine - just has a tickle in her throat & is very tired.", "token_idx_2": 25, "text_start_2": 107, "text_end_2": 117, "date_2": "2020-03", "text_1_tokenized": ["Not", "to", "mention", "this", "man", "has", "been", "with", "me", "before", "last", "year", "on", "the", "ventilator", "...", "he", "never", "came", "of", "since", "like", "October", "\ud83d\ude30"], "text_2_tokenized": ["MiL", "got", "a", "call", "from", "her", "friend's", "bro", "to", "tell", "her", "not", "to", "call", "her", "number", "bcs", "she", "cant", "speak", ".", "Friend", "is", "on", "a", "ventilator", ".", "MiL", "is", "coughing", ".", "She", "says", "she", "feels", "fine", "-", "just", "has", "a", "tickle", "in", "her", "throat", "&", "is", "very", "tired", "."]} -{"id": "0923-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Best comment by PM Modi in Dehradun rally. \"Congress ke waqt bhrashtachar accelerator par hota hai aur vikas ventilator par\". His ability to make hard hitting witty comments is a pain for the opposition \ud83d\ude02\ud83d\ude02", "token_idx_1": 20, "text_start_1": 109, "text_end_1": 119, "date_1": "2019-03", "text_2": "Did anyone else notice today how Trump cryptically alluded to the report yesterday that only 20% of COVID-19 patients on a ventilator survive. Is he laying the groundwork for claiming that ventilators aren't needed since most people die anyway?", "token_idx_2": 24, "text_start_2": 123, "text_end_2": 133, "date_2": "2020-03", "text_1_tokenized": ["Best", "comment", "by", "PM", "Modi", "in", "Dehradun", "rally", ".", "\"", "Congress", "ke", "waqt", "bhrashtachar", "accelerator", "par", "hota", "hai", "aur", "vikas", "ventilator", "par", "\"", ".", "His", "ability", "to", "make", "hard", "hitting", "witty", "comments", "is", "a", "pain", "for", "the", "opposition", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["Did", "anyone", "else", "notice", "today", "how", "Trump", "cryptically", "alluded", "to", "the", "report", "yesterday", "that", "only", "20", "%", "of", "COVID", "-", "19", "patients", "on", "a", "ventilator", "survive", ".", "Is", "he", "laying", "the", "groundwork", "for", "claiming", "that", "ventilators", "aren't", "needed", "since", "most", "people", "die", "anyway", "?"]} -{"id": "0924-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Somebody said that somehow #Rocket might have gotten kicked by someone, and my answer is If I find out who I will put them in ICU on a ventilator!!!!", "token_idx_1": 29, "text_start_1": 135, "text_end_1": 145, "date_1": "2019-03", "text_2": "Coronavirus: #Tory Government increases ventilator capacity by just 200 in one week \u2013 despite health secretary's Lie ... 1,500 more would be available", "token_idx_2": 5, "text_start_2": 40, "text_end_2": 50, "date_2": "2020-03", "text_1_tokenized": ["Somebody", "said", "that", "somehow", "#Rocket", "might", "have", "gotten", "kicked", "by", "someone", ",", "and", "my", "answer", "is", "If", "I", "find", "out", "who", "I", "will", "put", "them", "in", "ICU", "on", "a", "ventilator", "!", "!", "!"], "text_2_tokenized": ["Coronavirus", ":", "#Tory", "Government", "increases", "ventilator", "capacity", "by", "just", "200", "in", "one", "week", "\u2013", "despite", "health", "secretary's", "Lie", "...", "1,500", "more", "would", "be", "available"]} -{"id": "0925-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "slowly detoriating, he struggles to breathe without the mechanical ventilator. 
as time goes by we all knew there's only one outcome and so it did. it might be for the best and could also be not. but his family's decision matters the most. rest in peace tatay alvez.", "token_idx_1": 10, "text_start_1": 67, "text_end_1": 77, "date_1": "2019-03", "text_2": "Just reading in the evening standard two thirds of patients given a ventilator have died showing just how near death you have to be to get one . I've also read sleep apnea boxes are very helpful in treating covid early on with a good success rate", "token_idx_2": 12, "text_start_2": 68, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["slowly", "detoriating", ",", "he", "struggles", "to", "breathe", "without", "the", "mechanical", "ventilator", ".", "as", "time", "goes", "by", "we", "all", "knew", "there's", "only", "one", "outcome", "and", "so", "it", "did", ".", "it", "might", "be", "for", "the", "best", "and", "could", "also", "be", "not", ".", "but", "his", "family's", "decision", "matters", "the", "most", ".", "rest", "in", "peace", "tatay", "alvez", "."], "text_2_tokenized": ["Just", "reading", "in", "the", "evening", "standard", "two", "thirds", "of", "patients", "given", "a", "ventilator", "have", "died", "showing", "just", "how", "near", "death", "you", "have", "to", "be", "to", "get", "one", ".", "I've", "also", "read", "sleep", "apnea", "boxes", "are", "very", "helpful", "in", "treating", "covid", "early", "on", "with", "a", "good", "success", "rate"]} -{"id": "0926-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Every 80s horror/sci-fi film had a giant ventilator fan. I believe it was an Executive Order from Ronald Reagan #SatMat", "token_idx_1": 9, "text_start_1": 41, "text_end_1": 51, "date_1": "2019-03", "text_2": "\ud83c\udfb6 Here comes the sun \ud83c\udfb6 is stuck in my head - been hearing it often this week whenever a COVID patient is off the ventilator or gets better/discharged, which says a lot since I'm on night shift. All the praise and glory to God!", "token_idx_2": 25, "text_start_2": 113, "text_end_2": 123, "date_2": "2020-03", "text_1_tokenized": ["Every", "80s", "horror", "/", "sci-fi", "film", "had", "a", "giant", "ventilator", "fan", ".", "I", "believe", "it", "was", "an", "Executive", "Order", "from", "Ronald", "Reagan", "#SatMat"], "text_2_tokenized": ["\ud83c\udfb6", "Here", "comes", "the", "sun", "\ud83c\udfb6", "is", "stuck", "in", "my", "head", "-", "been", "hearing", "it", "often", "this", "week", "whenever", "a", "COVID", "patient", "is", "off", "the", "ventilator", "or", "gets", "better", "/", "discharged", ",", "which", "says", "a", "lot", "since", "I'm", "on", "night", "shift", ".", "All", "the", "praise", "and", "glory", "to", "God", "!"]} -{"id": "0927-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Interesting takeaways from \"Headspace\" today. 1. When directed to let my mind do what it wants: it felt like a ping pong ball was let loose in my head at high speed. but no specific thoughts 2. During focused breathing I was reminded of the ventilator dad was on at the end.", "token_idx_1": 52, "text_start_1": 241, "text_end_1": 251, "date_1": "2019-03", "text_2": "My step-father's brother-in-law just passed away after a week of being on the ventilator. They took him off and said there was nothing more they could do. Twenty minutes later, his heart stopped. He had heart issues & high BP so COVID attacked him hard. So sad for his family. 
\ud83d\ude22", "token_idx_2": 13, "text_start_2": 78, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["Interesting", "takeaways", "from", "\"", "Headspace", "\"", "today", ".", "1", ".", "When", "directed", "to", "let", "my", "mind", "do", "what", "it", "wants", ":", "it", "felt", "like", "a", "ping", "pong", "ball", "was", "let", "loose", "in", "my", "head", "at", "high", "speed", ".", "but", "no", "specific", "thoughts", "2", ".", "During", "focused", "breathing", "I", "was", "reminded", "of", "the", "ventilator", "dad", "was", "on", "at", "the", "end", "."], "text_2_tokenized": ["My", "step-father's", "brother-in-law", "just", "passed", "away", "after", "a", "week", "of", "being", "on", "the", "ventilator", ".", "They", "took", "him", "off", "and", "said", "there", "was", "nothing", "more", "they", "could", "do", ".", "Twenty", "minutes", "later", ",", "his", "heart", "stopped", ".", "He", "had", "heart", "issues", "&", "high", "BP", "so", "COVID", "attacked", "him", "hard", ".", "So", "sad", "for", "his", "family", ".", "\ud83d\ude22"]} -{"id": "0928-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "So I just bought 400 euro worth of pc stuff xd . New processor and a ventilator for it I could say I bought myself something for my b day xp", "token_idx_1": 16, "text_start_1": 69, "text_end_1": 79, "date_1": "2019-03", "text_2": "My old radio car partner, Mike Sasuk, moved into @StonyBrookMed ICU on Mar 25, on a ventilator for 18 days, was moved off the unit to a standard room today. A miraculous recovery from COVID. #NYPD #BabylonFD", "token_idx_2": 19, "text_start_2": 84, "text_end_2": 94, "date_2": "2020-03", "text_1_tokenized": ["So", "I", "just", "bought", "400", "euro", "worth", "of", "pc", "stuff", "xd", ".", "New", "processor", "and", "a", "ventilator", "for", "it", "I", "could", "say", "I", "bought", "myself", "something", "for", "my", "b", "day", "xp"], "text_2_tokenized": ["My", "old", "radio", "car", "partner", ",", "Mike", "Sasuk", ",", "moved", "into", "@StonyBrookMed", "ICU", "on", "Mar", "25", ",", "on", "a", "ventilator", "for", "18", "days", ",", "was", "moved", "off", "the", "unit", "to", "a", "standard", "room", "today", ".", "A", "miraculous", "recovery", "from", "COVID", ".", "#NYPD", "#BabylonFD"]} -{"id": "0929-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "#HumanRightsViolations Where are human rights activists today? #SadhviPragyaSinghThakur #SadhviPragyaThakur was beaten by male police & tortured for 24 days witout any proof,had to be on a ventilator, and was acquitted 9 yrs later. Who will give her those 9 yrs back?", "token_idx_1": 29, "text_start_1": 193, "text_end_1": 203, "date_1": "2019-03", "text_2": "Can't get a fucking ventilator but he's got 29 million unproven pills set aside! 
#PressConference", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 30, "date_2": "2020-03", "text_1_tokenized": ["#HumanRightsViolations", "Where", "are", "human", "rights", "activists", "today", "?", "#SadhviPragyaSinghThakur", "#SadhviPragyaThakur", "was", "beaten", "by", "male", "police", "&", "tortured", "for", "24", "days", "witout", "any", "proof", ",", "had", "to", "be", "on", "a", "ventilator", ",", "and", "was", "acquitted", "9", "yrs", "later", ".", "Who", "will", "give", "her", "those", "9", "yrs", "back", "?"], "text_2_tokenized": ["Can't", "get", "a", "fucking", "ventilator", "but", "he's", "got", "29", "million", "unproven", "pills", "set", "aside", "!", "#PressConference"]} -{"id": "0930-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "My grandma had open heart surgery today. She is out of surgery but she's on a ventilator... I got to see her and it was hard. I couldn't go in the room all the way at first. We're letting her rest, but it's going to be a long road to recovery...\u2764", "token_idx_1": 17, "text_start_1": 78, "text_end_1": 88, "date_1": "2019-03", "text_2": "I just found out I now know a person that is expected to die tonight from the coronavirus. He is 80 and has refused the ventilator. And he has infected at least two of his adult children and most likely another. They are dear friends of mine. We all went sky diving together.", "token_idx_2": 26, "text_start_2": 120, "text_end_2": 130, "date_2": "2020-03", "text_1_tokenized": ["My", "grandma", "had", "open", "heart", "surgery", "today", ".", "She", "is", "out", "of", "surgery", "but", "she's", "on", "a", "ventilator", "...", "I", "got", "to", "see", "her", "and", "it", "was", "hard", ".", "I", "couldn't", "go", "in", "the", "room", "all", "the", "way", "at", "first", ".", "We're", "letting", "her", "rest", ",", "but", "it's", "going", "to", "be", "a", "long", "road", "to", "recovery", "...", "\u2764"], "text_2_tokenized": ["I", "just", "found", "out", "I", "now", "know", "a", "person", "that", "is", "expected", "to", "die", "tonight", "from", "the", "coronavirus", ".", "He", "is", "80", "and", "has", "refused", "the", "ventilator", ".", "And", "he", "has", "infected", "at", "least", "two", "of", "his", "adult", "children", "and", "most", "likely", "another", ".", "They", "are", "dear", "friends", "of", "mine", ".", "We", "all", "went", "sky", "diving", "together", "."]} -{"id": "0931-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "I'm having mixed emotions right now, my cousin just died, i didn't even know he's sick, then I'm working for 4 days straight on a ventilator patient, now I'm seeing Minseok telling to not forget him and to wait for him.. I'm 2tired to process this feeling \ud83d\ude2d", "token_idx_1": 28, "text_start_1": 130, "text_end_1": 140, "date_1": "2019-03", "text_2": "PM of UK and a common man in the same ICU! Dave Hunt, a London software salesperson, stayed in the same hospital ICU as UK Prime Minister Boris Johnson while sick with COVID-19. 
Hunt recounted his experience there, included being placed on a ventilator and seeing another patient.", "token_idx_2": 50, "text_start_2": 242, "text_end_2": 252, "date_2": "2020-03", "text_1_tokenized": ["I'm", "having", "mixed", "emotions", "right", "now", ",", "my", "cousin", "just", "died", ",", "i", "didn't", "even", "know", "he's", "sick", ",", "then", "I'm", "working", "for", "4", "days", "straight", "on", "a", "ventilator", "patient", ",", "now", "I'm", "seeing", "Minseok", "telling", "to", "not", "forget", "him", "and", "to", "wait", "for", "him", "..", "I'm", "2tired", "to", "process", "this", "feeling", "\ud83d\ude2d"], "text_2_tokenized": ["PM", "of", "UK", "and", "a", "common", "man", "in", "the", "same", "ICU", "!", "Dave", "Hunt", ",", "a", "London", "software", "salesperson", ",", "stayed", "in", "the", "same", "hospital", "ICU", "as", "UK", "Prime", "Minister", "Boris", "Johnson", "while", "sick", "with", "COVID", "-", "19", ".", "Hunt", "recounted", "his", "experience", "there", ",", "included", "being", "placed", "on", "a", "ventilator", "and", "seeing", "another", "patient", "."]} -{"id": "0932-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Ummmm so I just finished inking my project for Lang and I just realized my ventilator doesn't look like a ventilator and I CANT FIX IT", "token_idx_1": 15, "text_start_1": 75, "text_end_1": 85, "date_1": "2019-03", "text_2": "Honest question. If you need a ventilator, is it already too late?", "token_idx_2": 7, "text_start_2": 31, "text_end_2": 41, "date_2": "2020-03", "text_1_tokenized": ["Ummmm", "so", "I", "just", "finished", "inking", "my", "project", "for", "Lang", "and", "I", "just", "realized", "my", "ventilator", "doesn't", "look", "like", "a", "ventilator", "and", "I", "CANT", "FIX", "IT"], "text_2_tokenized": ["Honest", "question", ".", "If", "you", "need", "a", "ventilator", ",", "is", "it", "already", "too", "late", "?"]} -{"id": "0933-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "#edu6323 Just another example of using tech for good. My dad has been in ICU since last Friday.He just came off the ventilator yesterday and has some confusion which is normal after being this sick. He has an amazing connection with his dog Jac. Facetime = familiarty= healing", "token_idx_1": 23, "text_start_1": 116, "text_end_1": 126, "date_1": "2019-03", "text_2": "I has major surgery on March 13, 2020. I was then released 3 days later. Everything was fine until 2 days after and I was rushed back up the University Of Iowa Hospitals And Clinics. 
I was septic, hypoxic, and was then placed on a ventilator.", "token_idx_2": 51, "text_start_2": 231, "text_end_2": 241, "date_2": "2020-03", "text_1_tokenized": ["#edu6323", "Just", "another", "example", "of", "using", "tech", "for", "good", ".", "My", "dad", "has", "been", "in", "ICU", "since", "last", "Friday.He", "just", "came", "off", "the", "ventilator", "yesterday", "and", "has", "some", "confusion", "which", "is", "normal", "after", "being", "this", "sick", ".", "He", "has", "an", "amazing", "connection", "with", "his", "dog", "Jac", ".", "Facetime", "=", "familiarty", "=", "healing"], "text_2_tokenized": ["I", "has", "major", "surgery", "on", "March", "13", ",", "2020", ".", "I", "was", "then", "released", "3", "days", "later", ".", "Everything", "was", "fine", "until", "2", "days", "after", "and", "I", "was", "rushed", "back", "up", "the", "University", "Of", "Iowa", "Hospitals", "And", "Clinics", ".", "I", "was", "septic", ",", "hypoxic", ",", "and", "was", "then", "placed", "on", "a", "ventilator", "."]} -{"id": "0934-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "This ventilator shaft...I'm wildly attracted to it. #MeTVStarTrek", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 15, "date_1": "2019-03", "text_2": "If you want a job doing, get NASA to do it. The space agency has developed a ventilator for coronavirus in just 37 days. It's now passed medical tests and looks set to be given final approval for manufacturing early next week.", "token_idx_2": 19, "text_start_2": 77, "text_end_2": 87, "date_2": "2020-03", "text_1_tokenized": ["This", "ventilator", "shaft", "...", "I'm", "wildly", "attracted", "to", "it", ".", "#MeTVStarTrek"], "text_2_tokenized": ["If", "you", "want", "a", "job", "doing", ",", "get", "NASA", "to", "do", "it", ".", "The", "space", "agency", "has", "developed", "a", "ventilator", "for", "coronavirus", "in", "just", "37", "days", ".", "It's", "now", "passed", "medical", "tests", "and", "looks", "set", "to", "be", "given", "final", "approval", "for", "manufacturing", "early", "next", "week", "."]} -{"id": "0935-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Poch presumably running to get his boots on there. If anyone needs me I'm lying down in a darkened room on a ventilator for the next 15.", "token_idx_1": 23, "text_start_1": 109, "text_end_1": 119, "date_1": "2019-03", "text_2": "I honestly pray Trump enforces a nationwide 2 week shutdown. Y'all mf not gone listen until it's ya own mother connected to a ventilator...", "token_idx_2": 24, "text_start_2": 126, "text_end_2": 136, "date_2": "2020-03", "text_1_tokenized": ["Poch", "presumably", "running", "to", "get", "his", "boots", "on", "there", ".", "If", "anyone", "needs", "me", "I'm", "lying", "down", "in", "a", "darkened", "room", "on", "a", "ventilator", "for", "the", "next", "15", "."], "text_2_tokenized": ["I", "honestly", "pray", "Trump", "enforces", "a", "nationwide", "2", "week", "shutdown", ".", "Y'all", "mf", "not", "gone", "listen", "until", "it's", "ya", "own", "mother", "connected", "to", "a", "ventilator", "..."]} -{"id": "0936-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "5 years ago, my dads ventilator got weaned down to 45% from 50%. 
That might not of seemed like a lot but to us it was\u2764\ufe0f\u2764\ufe0f", "token_idx_1": 6, "text_start_1": 21, "text_end_1": 31, "date_1": "2019-03", "text_2": "I have a scratchy throat so of course my brain assumes I will be on a ventilator tomorrow", "token_idx_2": 16, "text_start_2": 70, "text_end_2": 80, "date_2": "2020-03", "text_1_tokenized": ["5", "years", "ago", ",", "my", "dads", "ventilator", "got", "weaned", "down", "to", "45", "%", "from", "50", "%", ".", "That", "might", "not", "of", "seemed", "like", "a", "lot", "but", "to", "us", "it", "was", "\u2764", "\ufe0f", "\u2764", "\ufe0f"], "text_2_tokenized": ["I", "have", "a", "scratchy", "throat", "so", "of", "course", "my", "brain", "assumes", "I", "will", "be", "on", "a", "ventilator", "tomorrow"]} -{"id": "0937-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Nawaz Sharif medical report as presented before SC shows him suffering extreme danger with acute heart problem almost on ventilator but actually he attends political meerings for hours without oxygen", "token_idx_1": 19, "text_start_1": 121, "text_end_1": 131, "date_1": "2019-03", "text_2": "Reports are out that hospitals get paid more money to label a death as \u201cCobid-19\u201d without a diagnosis. So numbers are drastically skewed. \u201c$13,000 for a Covid related admission and $39,000 if they are placed on a ventilator.\u201d", "token_idx_2": 46, "text_start_2": 213, "text_end_2": 223, "date_2": "2020-03", "text_1_tokenized": ["Nawaz", "Sharif", "medical", "report", "as", "presented", "before", "SC", "shows", "him", "suffering", "extreme", "danger", "with", "acute", "heart", "problem", "almost", "on", "ventilator", "but", "actually", "he", "attends", "political", "meerings", "for", "hours", "without", "oxygen"], "text_2_tokenized": ["Reports", "are", "out", "that", "hospitals", "get", "paid", "more", "money", "to", "label", "a", "death", "as", "\u201c", "Cobid", "-", "19", "\u201d", "without", "a", "diagnosis", ".", "So", "numbers", "are", "drastically", "skewed", ".", "\u201c", "$", "13,000", "for", "a", "Covid", "related", "admission", "and", "$", "39,000", "if", "they", "are", "placed", "on", "a", "ventilator", ".", "\u201d"]} -{"id": "0938-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Y'all pls send a prayer up for my family friend. Their house burned down & her two kids are on the ventilator \ud83d\ude29 this lady just went into remission from breast cancer & my heart is just so heavy for her. Please pray for her y'all! \ud83d\ude4f\ud83c\udffd\ud83d\ude4f\ud83c\udffd\ud83d\ude4f\ud83c\udffd\ud83d\ude4f\ud83c\udffd", "token_idx_1": 22, "text_start_1": 103, "text_end_1": 113, "date_1": "2019-03", "text_2": "I really hope that every single one of these Lansing protestors don't need to be put on a ventilator in the near future. They lost that right when they willingly disobeyed the Stay At Home order. 
#michiganshutdown", "token_idx_2": 18, "text_start_2": 90, "text_end_2": 100, "date_2": "2020-03", "text_1_tokenized": ["Y'all", "pls", "send", "a", "prayer", "up", "for", "my", "family", "friend", ".", "Their", "house", "burned", "down", "&", "her", "two", "kids", "are", "on", "the", "ventilator", "\ud83d\ude29", "this", "lady", "just", "went", "into", "remission", "from", "breast", "cancer", "&", "my", "heart", "is", "just", "so", "heavy", "for", "her", ".", "Please", "pray", "for", "her", "y'all", "!", "\ud83d\ude4f\ud83c\udffd", "\ud83d\ude4f\ud83c\udffd", "\ud83d\ude4f\ud83c\udffd", "\ud83d\ude4f\ud83c\udffd"], "text_2_tokenized": ["I", "really", "hope", "that", "every", "single", "one", "of", "these", "Lansing", "protestors", "don't", "need", "to", "be", "put", "on", "a", "ventilator", "in", "the", "near", "future", ".", "They", "lost", "that", "right", "when", "they", "willingly", "disobeyed", "the", "Stay", "At", "Home", "order", ".", "#michiganshutdown"]} -{"id": "0939-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Dear <creator, deity, god, universe, whatever>, Stop letting people just be alive enough to be vegetative on a ventilator. Kthxbye. Me.", "token_idx_1": 25, "text_start_1": 117, "text_end_1": 127, "date_1": "2019-03", "text_2": "In kenya we have brilliant minds. Only that the successive corrupt regimes never empower our own. Recently KU made a ventilator, just today, @ToyotaKenya made another one. But we will still set aside Ksh 40b to import the same from china Mutahi #LipaNaMpesa #lockdownextension", "token_idx_2": 22, "text_start_2": 117, "text_end_2": 127, "date_2": "2020-03", "text_1_tokenized": ["Dear", "<", "creator", ",", "deity", ",", "god", ",", "universe", ",", "whatever", ">", ",", "Stop", "letting", "people", "just", "be", "alive", "enough", "to", "be", "vegetative", "on", "a", "ventilator", ".", "Kthxbye", ".", "Me", "."], "text_2_tokenized": ["In", "kenya", "we", "have", "brilliant", "minds", ".", "Only", "that", "the", "successive", "corrupt", "regimes", "never", "empower", "our", "own", ".", "Recently", "KU", "made", "a", "ventilator", ",", "just", "today", ",", "@ToyotaKenya", "made", "another", "one", ".", "But", "we", "will", "still", "set", "aside", "Ksh", "40b", "to", "import", "the", "same", "from", "china", "Mutahi", "#LipaNaMpesa", "#lockdownextension"]} -{"id": "0940-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "I stand corrected, she's still on the ventilator her daughter just said she has to make the decision to take her off & she \u201ccan't do it\u201d \ud83e\udd7a", "token_idx_1": 8, "text_start_1": 38, "text_end_1": 48, "date_1": "2019-03", "text_2": "what if @Starbucks made a venti sized latte, and called it a \u201cventi-latte\u201d..... 
like a ventilator", "token_idx_2": 19, "text_start_2": 87, "text_end_2": 97, "date_2": "2020-03", "text_1_tokenized": ["I", "stand", "corrected", ",", "she's", "still", "on", "the", "ventilator", "her", "daughter", "just", "said", "she", "has", "to", "make", "the", "decision", "to", "take", "her", "off", "&", "she", "\u201c", "can't", "do", "it", "\u201d", "\ud83e\udd7a"], "text_2_tokenized": ["what", "if", "@Starbucks", "made", "a", "venti", "sized", "latte", ",", "and", "called", "it", "a", "\u201c", "venti-latte", "\u201d", "...", "like", "a", "ventilator"]} -{"id": "0941-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "The 7-year-old boy, who was on ventilator support at a medical college at Kolencherry near Ernakulam with severe brain injury and damage to skull following brutal attack by mother's partner has been declared dead at 11.35 am #ThodupuzhaAttack", "token_idx_1": 9, "text_start_1": 31, "text_end_1": 41, "date_1": "2019-03", "text_2": "The Devil went down to Georgia He was looking for a soul to steal But then he stopped and got a haircut And contracted Corona and was put on a ventilator", "token_idx_2": 30, "text_start_2": 143, "text_end_2": 153, "date_2": "2020-03", "text_1_tokenized": ["The", "7", "-", "year-old", "boy", ",", "who", "was", "on", "ventilator", "support", "at", "a", "medical", "college", "at", "Kolencherry", "near", "Ernakulam", "with", "severe", "brain", "injury", "and", "damage", "to", "skull", "following", "brutal", "attack", "by", "mother's", "partner", "has", "been", "declared", "dead", "at", "11.35", "am", "#ThodupuzhaAttack"], "text_2_tokenized": ["The", "Devil", "went", "down", "to", "Georgia", "He", "was", "looking", "for", "a", "soul", "to", "steal", "But", "then", "he", "stopped", "and", "got", "a", "haircut", "And", "contracted", "Corona", "and", "was", "put", "on", "a", "ventilator"]} -{"id": "0942-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Its been a rough day. Last year on this day, I watched my granddaddy be taken off the ventilator and I watched his body shut down... I've tried to keep my mind off it all day \ud83d\ude2d", "token_idx_1": 20, "text_start_1": 86, "text_end_1": 96, "date_1": "2019-03", "text_2": "Good night all. Just had it with the whole CoVid19 panic. Crazy arseholes. Half the death are from wrong ventilator settings and panicking people and doctors.", "token_idx_2": 23, "text_start_2": 105, "text_end_2": 115, "date_2": "2020-03", "text_1_tokenized": ["Its", "been", "a", "rough", "day", ".", "Last", "year", "on", "this", "day", ",", "I", "watched", "my", "granddaddy", "be", "taken", "off", "the", "ventilator", "and", "I", "watched", "his", "body", "shut", "down", "...", "I've", "tried", "to", "keep", "my", "mind", "off", "it", "all", "day", "\ud83d\ude2d"], "text_2_tokenized": ["Good", "night", "all", ".", "Just", "had", "it", "with", "the", "whole", "CoVid", "19", "panic", ".", "Crazy", "arseholes", ".", "Half", "the", "death", "are", "from", "wrong", "ventilator", "settings", "and", "panicking", "people", "and", "doctors", "."]} -{"id": "0943-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Doctors just removed the ventilator. My Grandpa's life is in God's hands now \ud83d\ude4f", "token_idx_1": 4, "text_start_1": 25, "text_end_1": 35, "date_1": "2019-03", "text_2": "It became ever so real today. My granddaughters other grandad passed away this morning. Been on a ventilator for a couple of days. 63 years old. Same age as me. 
It's sad and crazy all at the same time.", "token_idx_2": 19, "text_start_2": 98, "text_end_2": 108, "date_2": "2020-03", "text_1_tokenized": ["Doctors", "just", "removed", "the", "ventilator", ".", "My", "Grandpa's", "life", "is", "in", "God's", "hands", "now", "\ud83d\ude4f"], "text_2_tokenized": ["It", "became", "ever", "so", "real", "today", ".", "My", "granddaughters", "other", "grandad", "passed", "away", "this", "morning", ".", "Been", "on", "a", "ventilator", "for", "a", "couple", "of", "days", ".", "63", "years", "old", ".", "Same", "age", "as", "me", ".", "It's", "sad", "and", "crazy", "all", "at", "the", "same", "time", "."]} -{"id": "0944-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "The respiratory therapist lowered the setting on the ventilator and she lasted for 6 hours!!! I really needed this good news today", "token_idx_1": 8, "text_start_1": 53, "text_end_1": 63, "date_1": "2019-03", "text_2": "This is obvious, but here goes: Trump keeps saying journalists were rooting for ventilator shortages and are thrilled that the virus is widespread. This is preposterous and insulting. We are Americans too. Our friends and relatives are sick too, and are working on the frontlines.", "token_idx_2": 15, "text_start_2": 80, "text_end_2": 90, "date_2": "2020-03", "text_1_tokenized": ["The", "respiratory", "therapist", "lowered", "the", "setting", "on", "the", "ventilator", "and", "she", "lasted", "for", "6", "hours", "!", "!", "!", "I", "really", "needed", "this", "good", "news", "today"], "text_2_tokenized": ["This", "is", "obvious", ",", "but", "here", "goes", ":", "Trump", "keeps", "saying", "journalists", "were", "rooting", "for", "ventilator", "shortages", "and", "are", "thrilled", "that", "the", "virus", "is", "widespread", ".", "This", "is", "preposterous", "and", "insulting", ".", "We", "are", "Americans", "too", ".", "Our", "friends", "and", "relatives", "are", "sick", "too", ",", "and", "are", "working", "on", "the", "frontlines", "."]} -{"id": "0945-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Post night 1 of 4. Feel jet lagged. Look like I need Tazocin and a ventilator. #TuesdayMorning only three more to go \ud83e\udd74", "token_idx_1": 17, "text_start_1": 67, "text_end_1": 77, "date_1": "2019-03", "text_2": "Ok America! I have a question! When someone is on a ventilator and has Coronid 19, are we cleaning this thing enough that the machine is NOT spreading it???", "token_idx_2": 13, "text_start_2": 52, "text_end_2": 62, "date_2": "2020-03", "text_1_tokenized": ["Post", "night", "1", "of", "4", ".", "Feel", "jet", "lagged", ".", "Look", "like", "I", "need", "Tazocin", "and", "a", "ventilator", ".", "#TuesdayMorning", "only", "three", "more", "to", "go", "\ud83e\udd74"], "text_2_tokenized": ["Ok", "America", "!", "I", "have", "a", "question", "!", "When", "someone", "is", "on", "a", "ventilator", "and", "has", "Coronid", "19", ",", "are", "we", "cleaning", "this", "thing", "enough", "that", "the", "machine", "is", "NOT", "spreading", "it", "?", "?", "?"]} -{"id": "0946-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Doctor: I'm going to have to pull the plug. Me: But, why? Doctor: because this ventilator is more important than charging your phone.", "token_idx_1": 21, "text_start_1": 79, "text_end_1": 89, "date_1": "2019-03", "text_2": "Sooo...all day yesterday, I went around my city to purchase a box of ventilator masks to no avail. Today, I found one shop and they were $45 for ten. Nope. 
I'll take my chances and stay indoors...", "token_idx_2": 16, "text_start_2": 69, "text_end_2": 79, "date_2": "2020-03", "text_1_tokenized": ["Doctor", ":", "I'm", "going", "to", "have", "to", "pull", "the", "plug", ".", "Me", ":", "But", ",", "why", "?", "Doctor", ":", "because", "this", "ventilator", "is", "more", "important", "than", "charging", "your", "phone", "."], "text_2_tokenized": ["Sooo", "...", "all", "day", "yesterday", ",", "I", "went", "around", "my", "city", "to", "purchase", "a", "box", "of", "ventilator", "masks", "to", "no", "avail", ".", "Today", ",", "I", "found", "one", "shop", "and", "they", "were", "$", "45", "for", "ten", ".", "Nope", ".", "I'll", "take", "my", "chances", "and", "stay", "indoors", "..."]} -{"id": "0947-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "This time last week my mum was in a coma, breathing through a ventilator in ICU, with a broken neck, back an many other injury's. Today she just phoned me off her mobile \u201ciya luvvv\u201d. Someone is deffo watching over us you know, better than winning the lottery this \ud83d\udc95\ud83d\udc95\ud83d\udc95", "token_idx_1": 14, "text_start_1": 62, "text_end_1": 72, "date_1": "2019-03", "text_2": "Now I'm told my aunt (and godmother) has within 48 hrs, been admitted to hospital, deteriorated, been intubated, placed on a ventilator. This is getting really scary now 2/2", "token_idx_2": 27, "text_start_2": 125, "text_end_2": 135, "date_2": "2020-03", "text_1_tokenized": ["This", "time", "last", "week", "my", "mum", "was", "in", "a", "coma", ",", "breathing", "through", "a", "ventilator", "in", "ICU", ",", "with", "a", "broken", "neck", ",", "back", "an", "many", "other", "injury's", ".", "Today", "she", "just", "phoned", "me", "off", "her", "mobile", "\u201c", "iya", "luvvv", "\u201d", ".", "Someone", "is", "deffo", "watching", "over", "us", "you", "know", ",", "better", "than", "winning", "the", "lottery", "this", "\ud83d\udc95", "\ud83d\udc95", "\ud83d\udc95"], "text_2_tokenized": ["Now", "I'm", "told", "my", "aunt", "(", "and", "godmother", ")", "has", "within", "48", "hrs", ",", "been", "admitted", "to", "hospital", ",", "deteriorated", ",", "been", "intubated", ",", "placed", "on", "a", "ventilator", ".", "This", "is", "getting", "really", "scary", "now", "2/2"]} -{"id": "0948-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "My dad had a successful transplant and is preparing to be extubated from his ventilator, they believe he will be able to breathe on his own at some point today. He is awake, aware and pain being managed by his doctors. The new heart is steadily beating on its own \u2764\ufe0f", "token_idx_1": 14, "text_start_1": 77, "text_end_1": 87, "date_1": "2019-03", "text_2": "Did I really just hear Trump say it takes \u201cyears\u201d to make a ventilator? And not even a minute later say \u201cI cannot tell a lie??? 
I have no words.", "token_idx_2": 15, "text_start_2": 60, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["My", "dad", "had", "a", "successful", "transplant", "and", "is", "preparing", "to", "be", "extubated", "from", "his", "ventilator", ",", "they", "believe", "he", "will", "be", "able", "to", "breathe", "on", "his", "own", "at", "some", "point", "today", ".", "He", "is", "awake", ",", "aware", "and", "pain", "being", "managed", "by", "his", "doctors", ".", "The", "new", "heart", "is", "steadily", "beating", "on", "its", "own", "\u2764", "\ufe0f"], "text_2_tokenized": ["Did", "I", "really", "just", "hear", "Trump", "say", "it", "takes", "\u201c", "years", "\u201d", "to", "make", "a", "ventilator", "?", "And", "not", "even", "a", "minute", "later", "say", "\u201c", "I", "cannot", "tell", "a", "lie", "?", "?", "?", "I", "have", "no", "words", "."]} -{"id": "0949-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "hi guys i think they'll be able to take my mom off the ventilator today!!", "token_idx_1": 13, "text_start_1": 55, "text_end_1": 65, "date_1": "2019-03", "text_2": "According to reports Boris Johnson PM is intensive care. So that means he may be on a ventilator (or at least a CPAP system). Survival rate on a ventilator is about 50% with Covid19. I don't wish death on anybody. I'm wishing all the best to the PM and hope he gets through this.", "token_idx_2": 18, "text_start_2": 86, "text_end_2": 96, "date_2": "2020-03", "text_1_tokenized": ["hi", "guys", "i", "think", "they'll", "be", "able", "to", "take", "my", "mom", "off", "the", "ventilator", "today", "!", "!"], "text_2_tokenized": ["According", "to", "reports", "Boris", "Johnson", "PM", "is", "intensive", "care", ".", "So", "that", "means", "he", "may", "be", "on", "a", "ventilator", "(", "or", "at", "least", "a", "CPAP", "system", ")", ".", "Survival", "rate", "on", "a", "ventilator", "is", "about", "50", "%", "with", "Covid", "19", ".", "I", "don't", "wish", "death", "on", "anybody", ".", "I'm", "wishing", "all", "the", "best", "to", "the", "PM", "and", "hope", "he", "gets", "through", "this", "."]} -{"id": "0950-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "2. Bout an hour ago we got the call that it had spread unbelievable quickly up into her abdomen and organs..she went on a ventilator briefly so they could get the immediate family there to say goodbye. She has since been removed and is gone at 41. 
We are all just stunned..", "token_idx_1": 27, "text_start_1": 122, "text_end_1": 132, "date_1": "2019-03", "text_2": "Which country can't make a ventilator?", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 37, "date_2": "2020-03", "text_1_tokenized": ["2", ".", "Bout", "an", "hour", "ago", "we", "got", "the", "call", "that", "it", "had", "spread", "unbelievable", "quickly", "up", "into", "her", "abdomen", "and", "organs", "..", "she", "went", "on", "a", "ventilator", "briefly", "so", "they", "could", "get", "the", "immediate", "family", "there", "to", "say", "goodbye", ".", "She", "has", "since", "been", "removed", "and", "is", "gone", "at", "41", ".", "We", "are", "all", "just", "stunned", ".."], "text_2_tokenized": ["Which", "country", "can't", "make", "a", "ventilator", "?"]} -{"id": "0951-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "A Congress term is one in which corruption remains on the elevator while development goes down to the ventilator: Prime Minister Modi", "token_idx_1": 18, "text_start_1": 102, "text_end_1": 112, "date_1": "2019-03", "text_2": "My neighbor is currently on a ventilator with Covid-19. Eastside: Take this virus seriously. Stay inside \ud83d\ude4f\ud83c\udffc", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 40, "date_2": "2020-03", "text_1_tokenized": ["A", "Congress", "term", "is", "one", "in", "which", "corruption", "remains", "on", "the", "elevator", "while", "development", "goes", "down", "to", "the", "ventilator", ":", "Prime", "Minister", "Modi"], "text_2_tokenized": ["My", "neighbor", "is", "currently", "on", "a", "ventilator", "with", "Covid", "-", "19", ".", "Eastside", ":", "Take", "this", "virus", "seriously", ".", "Stay", "inside", "\ud83d\ude4f\ud83c\udffc"]} -{"id": "0952-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Nine months old minor Nashwa who was injected a wrong injection at #DarulSehat hospital and was on ventilator, departed from the world & died - Nashwa is in better place now for sure. Your killer's will be avenged, baby. RIP. \u0625\u0650\u0646\u064e\u0651\u0627 \u0644\u0650\u0644\u0651\u0647\u0650 \u0648\u064e\u0625\u0650\u0646\u064e\u0651\u0640\u0627 \u0625\u0650\u0644\u064e\u064a\u0652\u0647\u0650 \u0631\u064e\u0627\u062c\u0650\u0639\u0648\u0646\u064e..", "token_idx_1": 17, "text_start_1": 99, "text_end_1": 109, "date_1": "2019-03", "text_2": "Excited for tonight's Wrestlemania main event. 
The Undertaker and Ric Flair are going to fight for the last ventilator in Florida.", "token_idx_2": 19, "text_start_2": 108, "text_end_2": 118, "date_2": "2020-03", "text_1_tokenized": ["Nine", "months", "old", "minor", "Nashwa", "who", "was", "injected", "a", "wrong", "injection", "at", "#DarulSehat", "hospital", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", "&", "died", "-", "Nashwa", "is", "in", "better", "place", "now", "for", "sure", ".", "Your", "killer's", "will", "be", "avenged", ",", "baby", ".", "RIP", ".", "\u0625\u0650\u0646\u064e\u0651\u0627", "\u0644\u0650\u0644\u0651\u0647\u0650", "\u0648\u064e\u0625\u0650\u0646\u064e\u0651\u0640\u0627", "\u0625\u0650\u0644\u064e\u064a\u0652\u0647\u0650", "\u0631\u064e\u0627\u062c\u0650\u0639\u0648\u0646\u064e", ".."], "text_2_tokenized": ["Excited", "for", "tonight's", "Wrestlemania", "main", "event", ".", "The", "Undertaker", "and", "Ric", "Flair", "are", "going", "to", "fight", "for", "the", "last", "ventilator", "in", "Florida", "."]} -{"id": "0953-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Why tf is the newborn baby on a fucking ventilator after that attack on 37th & keystone last night.....I hate people", "token_idx_1": 9, "text_start_1": 40, "text_end_1": 50, "date_1": "2019-03", "text_2": "Did I just hear Trump right? \"In the US, no person who has needed a ventilator has been denied a ventilator.\"", "token_idx_2": 18, "text_start_2": 68, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Why", "tf", "is", "the", "newborn", "baby", "on", "a", "fucking", "ventilator", "after", "that", "attack", "on", "37th", "&", "keystone", "last", "night", "...", "I", "hate", "people"], "text_2_tokenized": ["Did", "I", "just", "hear", "Trump", "right", "?", "\"", "In", "the", "US", ",", "no", "person", "who", "has", "needed", "a", "ventilator", "has", "been", "denied", "a", "ventilator", ".", "\""]} -{"id": "0954-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "SLEPT ETERNAL SLEEP : 9 months old Nishwa who was injected a wrong injection at Darul Sehat Hospital and was on ventilator, departed from the world & died - Nishwa is now in better place for sure Your killer's will be avenged, baby. RIP", "token_idx_1": 21, "text_start_1": 112, "text_end_1": 122, "date_1": "2019-03", "text_2": "After 11 days on a ventilator my friend is off and starting to breathe on his own. Hooked up to oxygen still and slightly sedated but getting better. Praying hard and he can use more. 
Not out of the woods yet.", "token_idx_2": 5, "text_start_2": 19, "text_end_2": 29, "date_2": "2020-03", "text_1_tokenized": ["SLEPT", "ETERNAL", "SLEEP", ":", "9", "months", "old", "Nishwa", "who", "was", "injected", "a", "wrong", "injection", "at", "Darul", "Sehat", "Hospital", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", "&", "died", "-", "Nishwa", "is", "now", "in", "better", "place", "for", "sure", "Your", "killer's", "will", "be", "avenged", ",", "baby", ".", "RIP"], "text_2_tokenized": ["After", "11", "days", "on", "a", "ventilator", "my", "friend", "is", "off", "and", "starting", "to", "breathe", "on", "his", "own", ".", "Hooked", "up", "to", "oxygen", "still", "and", "slightly", "sedated", "but", "getting", "better", ".", "Praying", "hard", "and", "he", "can", "use", "more", ".", "Not", "out", "of", "the", "woods", "yet", "."]} -{"id": "0955-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "\u203c\ufe0fPLEASE READ\u203c\ufe0f My Grandfather, Andrei Buckareff was rushed to the hospital on April 9th in Paraguay as he was vomiting blood. He has been in ICU there for internal bleeding and is currently sedated and on a ventilator.", "token_idx_1": 42, "text_start_1": 208, "text_end_1": 218, "date_1": "2019-03", "text_2": "After watching today's briefing, Sleepy Joe would have been sent to a Hospital I.C.U. and put on a ventilator. There is no way Sleepy Joe could keep up.", "token_idx_2": 24, "text_start_2": 99, "text_end_2": 109, "date_2": "2020-03", "text_1_tokenized": ["\u203c", "\ufe0fPLEASE", "READ", "\u203c", "\ufe0f", "My", "Grandfather", ",", "Andrei", "Buckareff", "was", "rushed", "to", "the", "hospital", "on", "April", "9th", "in", "Paraguay", "as", "he", "was", "vomiting", "blood", ".", "He", "has", "been", "in", "ICU", "there", "for", "internal", "bleeding", "and", "is", "currently", "sedated", "and", "on", "a", "ventilator", "."], "text_2_tokenized": ["After", "watching", "today's", "briefing", ",", "Sleepy", "Joe", "would", "have", "been", "sent", "to", "a", "Hospital", "I", ".", "C", ".", "U", ".", "and", "put", "on", "a", "ventilator", ".", "There", "is", "no", "way", "Sleepy", "Joe", "could", "keep", "up", "."]} -{"id": "0956-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Power of the tongue is a real thing idk what nobody say. My lil brother was unconscious & on a ventilator 4 breathing yesterday & I said he was gon be straight b4 I ever heard what the verdict from doctors were. & boom, he off the ventilator & back straight\ud83d\udcaa\ud83c\udffd", "token_idx_1": 21, "text_start_1": 99, "text_end_1": 109, "date_1": "2019-03", "text_2": "PEOPLE ARE SO FUCKING STUPID. 
those that are protesting or support protesting opening the economy early - social media delete and a fucking petition started by me that you don't deserve a ventilator when you get sick BC of your stupidity!!!!!!!", "token_idx_2": 33, "text_start_2": 188, "text_end_2": 198, "date_2": "2020-03", "text_1_tokenized": ["Power", "of", "the", "tongue", "is", "a", "real", "thing", "idk", "what", "nobody", "say", ".", "My", "lil", "brother", "was", "unconscious", "&", "on", "a", "ventilator", "4", "breathing", "yesterday", "&", "I", "said", "he", "was", "gon", "be", "straight", "b4", "I", "ever", "heard", "what", "the", "verdict", "from", "doctors", "were", ".", "&", "boom", ",", "he", "off", "the", "ventilator", "&", "back", "straight", "\ud83d\udcaa\ud83c\udffd"], "text_2_tokenized": ["PEOPLE", "ARE", "SO", "FUCKING", "STUPID", ".", "those", "that", "are", "protesting", "or", "support", "protesting", "opening", "the", "economy", "early", "-", "social", "media", "delete", "and", "a", "fucking", "petition", "started", "by", "me", "that", "you", "don't", "deserve", "a", "ventilator", "when", "you", "get", "sick", "BC", "of", "your", "stupidity", "!", "!", "!"]} -{"id": "0957-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Dead body on ventilator to show him alive: Rafel case.@DrGPradhan", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 23, "date_1": "2019-03", "text_2": "After 27 days on the ventilator, my brother in law, was just taken off and is breathing on his own. This is a miracle. #miracleshappen #CovidCrush", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 31, "date_2": "2020-03", "text_1_tokenized": ["Dead", "body", "on", "ventilator", "to", "show", "him", "alive", ":", "Rafel", "case", ".", "@DrGPradhan"], "text_2_tokenized": ["After", "27", "days", "on", "the", "ventilator", ",", "my", "brother", "in", "law", ",", "was", "just", "taken", "off", "and", "is", "breathing", "on", "his", "own", ".", "This", "is", "a", "miracle", ".", "#miracleshappen", "#CovidCrush"]} -{"id": "0958-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "if u have some health problems, while it's PREVENTABLE, do not hesitate to visit your doctor, u'll never know what's going to come :( a spare of money for laboratories and check-ups is better than to pay a large sum for a mechanical ventilator", "token_idx_1": 46, "text_start_1": 233, "text_end_1": 243, "date_1": "2019-03", "text_2": "\"About 20-40%, maybe sometimes 60% (of people with coronavirus) need some kind of hospitalization, and about 6-10% need ICU care and being on a ventilator,\" Bono said. 
She added that many people can treat themselves at home with plenty of hydration and cough medicine.", "token_idx_2": 32, "text_start_2": 144, "text_end_2": 154, "date_2": "2020-03", "text_1_tokenized": ["if", "u", "have", "some", "health", "problems", ",", "while", "it's", "PREVENTABLE", ",", "do", "not", "hesitate", "to", "visit", "your", "doctor", ",", "u'll", "never", "know", "what's", "going", "to", "come", ":(", "a", "spare", "of", "money", "for", "laboratories", "and", "check-ups", "is", "better", "than", "to", "pay", "a", "large", "sum", "for", "a", "mechanical", "ventilator"], "text_2_tokenized": ["\"", "About", "20-40", "%", ",", "maybe", "sometimes", "60", "%", "(", "of", "people", "with", "coronavirus", ")", "need", "some", "kind", "of", "hospitalization", ",", "and", "about", "6-10", "%", "need", "ICU", "care", "and", "being", "on", "a", "ventilator", ",", "\"", "Bono", "said", ".", "She", "added", "that", "many", "people", "can", "treat", "themselves", "at", "home", "with", "plenty", "of", "hydration", "and", "cough", "medicine", "."]} -{"id": "0959-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "SLEPT ETERNAL SLEEP: Nine months old minor Nashwa who was injected a wrong injection at Darul Sehat hospital Karachi and was on ventilator, departed from the world & died - Nashwa is in better place now for sure. Your killer's will be avenged, baby. RIP. #\u0688\u0627\u06a9\u0679\u0631\u0632_\u06cc\u0627_\u0642\u0627\u062a\u0644", "token_idx_1": 23, "text_start_1": 128, "text_end_1": 138, "date_1": "2019-03", "text_2": "What's the survival rate for someone that has been put on a ventilator? I read a few things online and it doesn't appear to be very high.", "token_idx_2": 12, "text_start_2": 60, "text_end_2": 70, "date_2": "2020-03", "text_1_tokenized": ["SLEPT", "ETERNAL", "SLEEP", ":", "Nine", "months", "old", "minor", "Nashwa", "who", "was", "injected", "a", "wrong", "injection", "at", "Darul", "Sehat", "hospital", "Karachi", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", "&", "died", "-", "Nashwa", "is", "in", "better", "place", "now", "for", "sure", ".", "Your", "killer's", "will", "be", "avenged", ",", "baby", ".", "RIP", ".", "#\u0688\u0627\u06a9\u0679\u0631\u0632_\u06cc\u0627_\u0642\u0627\u062a\u0644"], "text_2_tokenized": ["What's", "the", "survival", "rate", "for", "someone", "that", "has", "been", "put", "on", "a", "ventilator", "?", "I", "read", "a", "few", "things", "online", "and", "it", "doesn't", "appear", "to", "be", "very", "high", "."]} -{"id": "0960-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Each ventilator is built by one member of staff and takes only 2hours! @NWTStransport @KIDSNTS @Embrace_SCH no freebies though!!", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 15, "date_1": "2019-03", "text_2": "My country is not ready to handle if it gets more bad. We have only 700 ventilator installed 600 are in private hospital and govt has 500 which are jot installed yet. 
But they are not enough.", "token_idx_2": 17, "text_start_2": 72, "text_end_2": 82, "date_2": "2020-03", "text_1_tokenized": ["Each", "ventilator", "is", "built", "by", "one", "member", "of", "staff", "and", "takes", "only", "2hours", "!", "@NWTStransport", "@KIDSNTS", "@Embrace_SCH", "no", "freebies", "though", "!", "!"], "text_2_tokenized": ["My", "country", "is", "not", "ready", "to", "handle", "if", "it", "gets", "more", "bad", ".", "We", "have", "only", "700", "ventilator", "installed", "600", "are", "in", "private", "hospital", "and", "govt", "has", "500", "which", "are", "jot", "installed", "yet", ".", "But", "they", "are", "not", "enough", "."]} -{"id": "0961-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Update on Luis: Mostly everything is still the same from yesterday, vitals are all good, except for his heart rate but nothing to be alarmed.. The results from his \" fungi\" test has not come in yet and he still is on the ventilator so his body can rest and heal.. Still prayers!!", "token_idx_1": 48, "text_start_1": 221, "text_end_1": 231, "date_1": "2019-03", "text_2": "Here's a good idea. If YOU think the economy & a pandemic is a balancing act, that \u201cwe can't be closed forever\u201d Then fuckin go outside. Gather in large groups. But if you ignore #covid and then get it, you don't get a hospital bed or ventilator. Sign here moron x______________", "token_idx_2": 53, "text_start_2": 238, "text_end_2": 248, "date_2": "2020-03", "text_1_tokenized": ["Update", "on", "Luis", ":", "Mostly", "everything", "is", "still", "the", "same", "from", "yesterday", ",", "vitals", "are", "all", "good", ",", "except", "for", "his", "heart", "rate", "but", "nothing", "to", "be", "alarmed", "..", "The", "results", "from", "his", "\"", "fungi", "\"", "test", "has", "not", "come", "in", "yet", "and", "he", "still", "is", "on", "the", "ventilator", "so", "his", "body", "can", "rest", "and", "heal", "..", "Still", "prayers", "!", "!"], "text_2_tokenized": ["Here's", "a", "good", "idea", ".", "If", "YOU", "think", "the", "economy", "&", "a", "pandemic", "is", "a", "balancing", "act", ",", "that", "\u201c", "we", "can't", "be", "closed", "forever", "\u201d", "Then", "fuckin", "go", "outside", ".", "Gather", "in", "large", "groups", ".", "But", "if", "you", "ignore", "#covid", "and", "then", "get", "it", ",", "you", "don't", "get", "a", "hospital", "bed", "or", "ventilator", ".", "Sign", "here", "moron", "x___"]} -{"id": "0962-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Today was a big day for Bekah! After 15 days on ECMO, she was able to be weaned off of the machine. She is now only being supported by a ventilator! She even sat up. 
She's still a little foggy sedation & frustrated that none of us are good lip readers, but this is great progress!", "token_idx_1": 33, "text_start_1": 137, "text_end_1": 147, "date_1": "2019-03", "text_2": "My hope is that if and when RUMP catches the 'hoax' there won't be one ventilator anywhere around to save his life.", "token_idx_2": 17, "text_start_2": 71, "text_end_2": 81, "date_2": "2020-03", "text_1_tokenized": ["Today", "was", "a", "big", "day", "for", "Bekah", "!", "After", "15", "days", "on", "ECMO", ",", "she", "was", "able", "to", "be", "weaned", "off", "of", "the", "machine", ".", "She", "is", "now", "only", "being", "supported", "by", "a", "ventilator", "!", "She", "even", "sat", "up", ".", "She's", "still", "a", "little", "foggy", "sedation", "&", "frustrated", "that", "none", "of", "us", "are", "good", "lip", "readers", ",", "but", "this", "is", "great", "progress", "!"], "text_2_tokenized": ["My", "hope", "is", "that", "if", "and", "when", "RUMP", "catches", "the", "'", "hoax", "'", "there", "won't", "be", "one", "ventilator", "anywhere", "around", "to", "save", "his", "life", "."]} -{"id": "0963-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Having the patient paralyzed enough, with adequate sedation, to tolerate any ventilator mode is a cornerstone to lung rescue. @DarrenBraude #CCTMC19", "token_idx_1": 13, "text_start_1": 77, "text_end_1": 87, "date_1": "2019-03", "text_2": "Internal fountain sources tell me Boris put on a ventilator. (CNS - Cortomuso Network Staff)", "token_idx_2": 9, "text_start_2": 49, "text_end_2": 59, "date_2": "2020-03", "text_1_tokenized": ["Having", "the", "patient", "paralyzed", "enough", ",", "with", "adequate", "sedation", ",", "to", "tolerate", "any", "ventilator", "mode", "is", "a", "cornerstone", "to", "lung", "rescue", ".", "@DarrenBraude", "#CCTMC19"], "text_2_tokenized": ["Internal", "fountain", "sources", "tell", "me", "Boris", "put", "on", "a", "ventilator", ".", "(", "CNS", "-", "Cortomuso", "Network", "Staff", ")"]} -{"id": "0964-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Just tipped half a bottle of cactus jacks down the sink and now my kitchen smells like Granny Smith apples \ud83d\ude0d\ud83e\udd70 mrs hinch is on a fucking ventilator", "token_idx_1": 28, "text_start_1": 136, "text_end_1": 146, "date_1": "2019-03", "text_2": "I spoke with the Medical Examiner today. My husband was positive for COVID-19. My daughter also tested positive for COVID-19. She is now off the ventilator and no longer sedated. I will be going home soon to continue getting well in self isolation. 
This disease is not a hoax.", "token_idx_2": 32, "text_start_2": 145, "text_end_2": 155, "date_2": "2020-03", "text_1_tokenized": ["Just", "tipped", "half", "a", "bottle", "of", "cactus", "jacks", "down", "the", "sink", "and", "now", "my", "kitchen", "smells", "like", "Granny", "Smith", "apples", "\ud83d\ude0d", "\ud83e\udd70", "mrs", "hinch", "is", "on", "a", "fucking", "ventilator"], "text_2_tokenized": ["I", "spoke", "with", "the", "Medical", "Examiner", "today", ".", "My", "husband", "was", "positive", "for", "COVID", "-", "19", ".", "My", "daughter", "also", "tested", "positive", "for", "COVID", "-", "19", ".", "She", "is", "now", "off", "the", "ventilator", "and", "no", "longer", "sedated", ".", "I", "will", "be", "going", "home", "soon", "to", "continue", "getting", "well", "in", "self", "isolation", ".", "This", "disease", "is", "not", "a", "hoax", "."]} -{"id": "0965-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Nine months old minor Nashwa who was injected a wrong injection at Darul Sehat hospital Karachi and was on ventilator, departed from the world. May her soul rest in eternal peace. Our deepest condolences with the family of #Nashwa", "token_idx_1": 19, "text_start_1": 107, "text_end_1": 117, "date_1": "2019-03", "text_2": "one ventilator costs like 50,000 Us Dollars...and we have 1042 cases .Ghana we get money like that? you sanso lift the Lockdown.", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 14, "date_2": "2020-03", "text_1_tokenized": ["Nine", "months", "old", "minor", "Nashwa", "who", "was", "injected", "a", "wrong", "injection", "at", "Darul", "Sehat", "hospital", "Karachi", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", ".", "May", "her", "soul", "rest", "in", "eternal", "peace", ".", "Our", "deepest", "condolences", "with", "the", "family", "of", "#Nashwa"], "text_2_tokenized": ["one", "ventilator", "costs", "like", "50,000", "Us", "Dollars", "...", "and", "we", "have", "1042", "cases", ".", "Ghana", "we", "get", "money", "like", "that", "?", "you", "sanso", "lift", "the", "Lockdown", "."]} -{"id": "0966-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Likely going to say my goodbyes. \"I just talked to the nurse and she said she is not going to live thru this hospital stay. They want me to make arrangements to come out there. 
They have her on all the meds they can and are keeping her on the ventilator.\"", "token_idx_1": 54, "text_start_1": 243, "text_end_1": 253, "date_1": "2019-03", "text_2": "If I took a drink every time this orange asshole says ventilator, I'd be drunk before 7.", "token_idx_2": 11, "text_start_2": 54, "text_end_2": 64, "date_2": "2020-03", "text_1_tokenized": ["Likely", "going", "to", "say", "my", "goodbyes", ".", "\"", "I", "just", "talked", "to", "the", "nurse", "and", "she", "said", "she", "is", "not", "going", "to", "live", "thru", "this", "hospital", "stay", ".", "They", "want", "me", "to", "make", "arrangements", "to", "come", "out", "there", ".", "They", "have", "her", "on", "all", "the", "meds", "they", "can", "and", "are", "keeping", "her", "on", "the", "ventilator", ".", "\""], "text_2_tokenized": ["If", "I", "took", "a", "drink", "every", "time", "this", "orange", "asshole", "says", "ventilator", ",", "I'd", "be", "drunk", "before", "7", "."]} -{"id": "0967-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "This is Aasiya Saher d/o Safiuddin from Adilabad she's in Asra hospital in ventilator her family cannot effort the expenses her father has died last month her mother is tailor kindly please help them @KTRTRS", "token_idx_1": 15, "text_start_1": 75, "text_end_1": 85, "date_1": "2019-03", "text_2": "My partners Nan is on a ventilator with suspected sepsis and Covid-19 We've been told to expect the worst She raised him Earlier in the ambulance the paramedic handed over the phone, Tom sung You Are My Sunshine Mary I'm so sorry we can't be there for you", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 34, "date_2": "2020-03", "text_1_tokenized": ["This", "is", "Aasiya", "Saher", "d", "/", "o", "Safiuddin", "from", "Adilabad", "she's", "in", "Asra", "hospital", "in", "ventilator", "her", "family", "cannot", "effort", "the", "expenses", "her", "father", "has", "died", "last", "month", "her", "mother", "is", "tailor", "kindly", "please", "help", "them", "@KTRTRS"], "text_2_tokenized": ["My", "partners", "Nan", "is", "on", "a", "ventilator", "with", "suspected", "sepsis", "and", "Covid", "-", "19", "We've", "been", "told", "to", "expect", "the", "worst", "She", "raised", "him", "Earlier", "in", "the", "ambulance", "the", "paramedic", "handed", "over", "the", "phone", ",", "Tom", "sung", "You", "Are", "My", "Sunshine", "Mary", "I'm", "so", "sorry", "we", "can't", "be", "there", "for", "you"]} -{"id": "0968-ventilator", "word": "ventilator", "label_binary": 0, "text_1": "Last month my banker asked me to make a FD as his targets for 18-19 were 'on the ventilator'. I told him he'll need to return the favour. Y'day, I told him he needs to send me 120 patients in 19-20 @ 'no cost EMI' of 10/month. Safe to say he isn't asking for favours hereafter.", "token_idx_1": 19, "text_start_1": 81, "text_end_1": 91, "date_1": "2019-03", "text_2": "Just found out a patient in my fairly small city hospital in Florida was on a ventilator with COVID19, he was on HCQ and azithromycin and improving. Developed prolonged QT, went into V-tach and died. Hospital now considering giving it to staff prophylactically. Sigh. 
Idk man.....", "token_idx_2": 16, "text_start_2": 78, "text_end_2": 88, "date_2": "2020-03", "text_1_tokenized": ["Last", "month", "my", "banker", "asked", "me", "to", "make", "a", "FD", "as", "his", "targets", "for", "18-19", "were", "'", "on", "the", "ventilator", "'", ".", "I", "told", "him", "he'll", "need", "to", "return", "the", "favour", ".", "Y'day", ",", "I", "told", "him", "he", "needs", "to", "send", "me", "120", "patients", "in", "19-20", "@", "'", "no", "cost", "EMI", "'", "of", "10", "/", "month", ".", "Safe", "to", "say", "he", "isn't", "asking", "for", "favours", "hereafter", "."], "text_2_tokenized": ["Just", "found", "out", "a", "patient", "in", "my", "fairly", "small", "city", "hospital", "in", "Florida", "was", "on", "a", "ventilator", "with", "COVID", "19", ",", "he", "was", "on", "HCQ", "and", "azithromycin", "and", "improving", ".", "Developed", "prolonged", "QT", ",", "went", "into", "V-tach", "and", "died", ".", "Hospital", "now", "considering", "giving", "it", "to", "staff", "prophylactically", ".", "Sigh", ".", "Idk", "man", "..."]} -{"id": "0969-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Dr. J \"I love a good urban legend. You could put the organs on a ventilator long enough to coordinate the donation.\" [S01E07]", "token_idx_1": 18, "text_start_1": 65, "text_end_1": 75, "date_1": "2019-03", "text_2": "#China should transfer immediately all know-how to other Countries, ventilator , mask machines plans, all other critical supplies and machines! #Covid_19", "token_idx_2": 10, "text_start_2": 68, "text_end_2": 78, "date_2": "2020-03", "text_1_tokenized": ["Dr", ".", "J", "\"", "I", "love", "a", "good", "urban", "legend", ".", "You", "could", "put", "the", "organs", "on", "a", "ventilator", "long", "enough", "to", "coordinate", "the", "donation", ".", "\"", "[", "S01E07", "]"], "text_2_tokenized": ["#China", "should", "transfer", "immediately", "all", "know-how", "to", "other", "Countries", ",", "ventilator", ",", "mask", "machines", "plans", ",", "all", "other", "critical", "supplies", "and", "machines", "!", "#Covid_19"]} -{"id": "0970-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Feeling grateful that my cousin is alive but he's still on a ventilator and they are doing dialysis a couple times a week \ud83d\ude4f\ud83c\udffb I hope he pulls through...I won't be home until further notice.", "token_idx_1": 12, "text_start_1": 61, "text_end_1": 71, "date_1": "2019-03", "text_2": "\"A ventilator is a big deal.\" Ok, drink.", "token_idx_2": 2, "text_start_2": 3, "text_end_2": 13, "date_2": "2020-03", "text_1_tokenized": ["Feeling", "grateful", "that", "my", "cousin", "is", "alive", "but", "he's", "still", "on", "a", "ventilator", "and", "they", "are", "doing", "dialysis", "a", "couple", "times", "a", "week", "\ud83d\ude4f\ud83c\udffb", "I", "hope", "he", "pulls", "through", "...", "I", "won't", "be", "home", "until", "further", "notice", "."], "text_2_tokenized": ["\"", "A", "ventilator", "is", "a", "big", "deal", ".", "\"", "Ok", ",", "drink", "."]} -{"id": "0971-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "SLEPT ETERNAL SLEEP: Nine months old minor Nashwa who was injected a wrong injection at Darul Sehat hospital Karachi and was on ventilator, departed from the world & died - Nashwa is in better place now for sure. Your killer's will be avenged, baby. 
RIP #JusticeForNishwa", "token_idx_1": 23, "text_start_1": 128, "text_end_1": 138, "date_1": "2019-03", "text_2": "Every patient in the United States who needed a ventilator, got a ventilator! \ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83c\uddfa\ud83c\uddf8\ud83d\udcaa\ud83c\udffc", "token_idx_2": 9, "text_start_2": 48, "text_end_2": 58, "date_2": "2020-03", "text_1_tokenized": ["SLEPT", "ETERNAL", "SLEEP", ":", "Nine", "months", "old", "minor", "Nashwa", "who", "was", "injected", "a", "wrong", "injection", "at", "Darul", "Sehat", "hospital", "Karachi", "and", "was", "on", "ventilator", ",", "departed", "from", "the", "world", "&", "died", "-", "Nashwa", "is", "in", "better", "place", "now", "for", "sure", ".", "Your", "killer's", "will", "be", "avenged", ",", "baby", ".", "RIP", "#JusticeForNishwa"], "text_2_tokenized": ["Every", "patient", "in", "the", "United", "States", "who", "needed", "a", "ventilator", ",", "got", "a", "ventilator", "!", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83d\udcaa\ud83c\udffc"]} -{"id": "0972-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "100 EVMs not working. Is the accountability in @ECI dead. Can we run an ITU with ventilator machines not working, or Dialysis machine malfunctioning!", "token_idx_1": 18, "text_start_1": 81, "text_end_1": 91, "date_1": "2019-03", "text_2": "I wonder if you have a better chance of getting a ventilator if you're admitted with a heart attack?", "token_idx_2": 11, "text_start_2": 50, "text_end_2": 60, "date_2": "2020-03", "text_1_tokenized": ["100", "EVMs", "not", "working", ".", "Is", "the", "accountability", "in", "@ECI", "dead", ".", "Can", "we", "run", "an", "ITU", "with", "ventilator", "machines", "not", "working", ",", "or", "Dialysis", "machine", "malfunctioning", "!"], "text_2_tokenized": ["I", "wonder", "if", "you", "have", "a", "better", "chance", "of", "getting", "a", "ventilator", "if", "you're", "admitted", "with", "a", "heart", "attack", "?"]} -{"id": "0973-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Steroids in sepsis + mech ventilation: no clear mortality benefit, but less ventilator time/earlier discharge although I wonder if powered for secondary outcomes. But if so, why not? Patient wins, system wins with no downside. What's the catch? 
#smacc", "token_idx_1": 14, "text_start_1": 76, "text_end_1": 86, "date_1": "2019-03", "text_2": "You hoes gonna be on a ventilator keep playing around.", "token_idx_2": 6, "text_start_2": 23, "text_end_2": 33, "date_2": "2020-03", "text_1_tokenized": ["Steroids", "in", "sepsis", "+", "mech", "ventilation", ":", "no", "clear", "mortality", "benefit", ",", "but", "less", "ventilator", "time", "/", "earlier", "discharge", "although", "I", "wonder", "if", "powered", "for", "secondary", "outcomes", ".", "But", "if", "so", ",", "why", "not", "?", "Patient", "wins", ",", "system", "wins", "with", "no", "downside", ".", "What's", "the", "catch", "?", "#smacc"], "text_2_tokenized": ["You", "hoes", "gonna", "be", "on", "a", "ventilator", "keep", "playing", "around", "."]} -{"id": "0974-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Can't wait until hunny is off his ventilator and has a clear mouth!", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 44, "date_1": "2019-03", "text_2": "I'd still like to know of those dying with Covid 19 how many were given a place in intensive care and/or a ventilator. Eventually I'd like all the age and health stats on what care people got so we can see the reality of what keeping ITU excess capacity meant.", "token_idx_2": 24, "text_start_2": 107, "text_end_2": 117, "date_2": "2020-03", "text_1_tokenized": ["Can't", "wait", "until", "hunny", "is", "off", "his", "ventilator", "and", "has", "a", "clear", "mouth", "!"], "text_2_tokenized": ["I'd", "still", "like", "to", "know", "of", "those", "dying", "with", "Covid", "19", "how", "many", "were", "given", "a", "place", "in", "intensive", "care", "and", "/", "or", "a", "ventilator", ".", "Eventually", "I'd", "like", "all", "the", "age", "and", "health", "stats", "on", "what", "care", "people", "got", "so", "we", "can", "see", "the", "reality", "of", "what", "keeping", "ITU", "excess", "capacity", "meant", "."]} -{"id": "0975-ventilator", "word": "ventilator", "label_binary": 1, "text_1": "Comeback in less than 24 hrs. I've got my ventilator machine ready in case. A quick prayer for my ovaries in advance, please and thank you.", "token_idx_1": 10, "text_start_1": 42, "text_end_1": 52, "date_1": "2019-03", "text_2": "85% of people with the virus That go on ventilator die, they have been finding out it cause Pulmonary edema...", "token_idx_2": 10, "text_start_2": 40, "text_end_2": 50, "date_2": "2020-03", "text_1_tokenized": ["Comeback", "in", "less", "than", "24", "hrs", ".", "I've", "got", "my", "ventilator", "machine", "ready", "in", "case", ".", "A", "quick", "prayer", "for", "my", "ovaries", "in", "advance", ",", "please", "and", "thank", "you", "."], "text_2_tokenized": ["85", "%", "of", "people", "with", "the", "virus", "That", "go", "on", "ventilator", "die", ",", "they", "have", "been", "finding", "out", "it", "cause", "Pulmonary", "edema", "..."]} -{"id": "1373-villager", "word": "villager", "label_binary": 0, "text_1": "me, looking at villager, link, and pokemon trainer, \u201ci'm sorry but.. 
it's joker time now...\u201d", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 23, "date_1": "2019-04", "text_2": "i hate when im taking one of these personality quizzes and someone asks me which lyric i am and i have to read through an entire page of text just to properly assess what controversial animal crossing villager i am", "token_idx_2": 37, "text_start_2": 201, "text_end_2": 209, "date_2": "2020-04", "text_1_tokenized": ["me", ",", "looking", "at", "villager", ",", "link", ",", "and", "pokemon", "trainer", ",", "\u201c", "i'm", "sorry", "but", "..", "it's", "joker", "time", "now", "...", "\u201d"], "text_2_tokenized": ["i", "hate", "when", "im", "taking", "one", "of", "these", "personality", "quizzes", "and", "someone", "asks", "me", "which", "lyric", "i", "am", "and", "i", "have", "to", "read", "through", "an", "entire", "page", "of", "text", "just", "to", "properly", "assess", "what", "controversial", "animal", "crossing", "villager", "i", "am"]} -{"id": "1374-villager", "word": "villager", "label_binary": 0, "text_1": "My villager was sitting at 4.49 gsp how tf am I not in elite yet", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 11, "date_1": "2019-04", "text_2": "I got ART in the mail from a villager. It was real, and its in my Museum!", "token_idx_2": 8, "text_start_2": 29, "text_end_2": 37, "date_2": "2020-04", "text_1_tokenized": ["My", "villager", "was", "sitting", "at", "4.49", "gsp", "how", "tf", "am", "I", "not", "in", "elite", "yet"], "text_2_tokenized": ["I", "got", "ART", "in", "the", "mail", "from", "a", "villager", ".", "It", "was", "real", ",", "and", "its", "in", "my", "Museum", "!"]} -{"id": "1375-villager", "word": "villager", "label_binary": 0, "text_1": "3CR: CLUE 3: Part 2 Where can you find an unsophisticated villager?", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 66, "date_1": "2019-04", "text_2": "i rlly need to more variety w my villager personalities but most of my favourites r normals... AH!!", "token_idx_2": 8, "text_start_2": 33, "text_end_2": 41, "date_2": "2020-04", "text_1_tokenized": ["3CR", ":", "CLUE", "3", ":", "Part", "2", "Where", "can", "you", "find", "an", "unsophisticated", "villager", "?"], "text_2_tokenized": ["i", "rlly", "need", "to", "more", "variety", "w", "my", "villager", "personalities", "but", "most", "of", "my", "favourites", "r", "normals", "...", "AH", "!", "!"]} -{"id": "1376-villager", "word": "villager", "label_binary": 0, "text_1": "acpc is trying to cheer me up bc ive gotten my 3rd villager map in a row from gulliver ;\u00d7;", "token_idx_1": 12, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-04", "text_2": "opened up city folk and played for like 5 mins and lucky asked me to bring a gift to another villager. been playing new horizons for 230+ hours and ive gotten like TWO \u201cplease catch this fish for me\u201d requests \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d HELLO??? give me errands!! 
\ud83e\udd32\ud83e\udd32\ud83e\udd32", "token_idx_2": 20, "text_start_2": 93, "text_end_2": 101, "date_2": "2020-04", "text_1_tokenized": ["acpc", "is", "trying", "to", "cheer", "me", "up", "bc", "ive", "gotten", "my", "3rd", "villager", "map", "in", "a", "row", "from", "gulliver", ";", "\u00d7", ";"], "text_2_tokenized": ["opened", "up", "city", "folk", "and", "played", "for", "like", "5", "mins", "and", "lucky", "asked", "me", "to", "bring", "a", "gift", "to", "another", "villager", ".", "been", "playing", "new", "horizons", "for", "230", "+", "hours", "and", "ive", "gotten", "like", "TWO", "\u201c", "please", "catch", "this", "fish", "for", "me", "\u201d", "requests", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "HELLO", "?", "?", "?", "give", "me", "errands", "!", "!", "\ud83e\udd32", "\ud83e\udd32", "\ud83e\udd32"]} -{"id": "1377-villager", "word": "villager", "label_binary": 0, "text_1": "Hey @notch can I get a villager \u201chnggg\u201d from you", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 31, "date_1": "2019-04", "text_2": "please don't shame me for this but i've not had 1 cute villager move to my island i think i'm cursed", "token_idx_2": 12, "text_start_2": 55, "text_end_2": 63, "date_2": "2020-04", "text_1_tokenized": ["Hey", "@notch", "can", "I", "get", "a", "villager", "\u201c", "hnggg", "\u201d", "from", "you"], "text_2_tokenized": ["please", "don't", "shame", "me", "for", "this", "but", "i've", "not", "had", "1", "cute", "villager", "move", "to", "my", "island", "i", "think", "i'm", "cursed"]} -{"id": "1378-villager", "word": "villager", "label_binary": 0, "text_1": "There is one villager called Ali Naka on these streets, tell him a Liverpool fan is looking for him. He just hibernated", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 21, "date_1": "2019-04", "text_2": "molly is the cutest villager on animal crossing this is not up for debate", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["There", "is", "one", "villager", "called", "Ali", "Naka", "on", "these", "streets", ",", "tell", "him", "a", "Liverpool", "fan", "is", "looking", "for", "him", ".", "He", "just", "hibernated"], "text_2_tokenized": ["molly", "is", "the", "cutest", "villager", "on", "animal", "crossing", "this", "is", "not", "up", "for", "debate"]} -{"id": "1379-villager", "word": "villager", "label_binary": 1, "text_1": "i wish i still had images of my old animal crossing new leaf villager he was good boy", "token_idx_1": 13, "text_start_1": 61, "text_end_1": 69, "date_1": "2019-04", "text_2": "How does villager trading in New horizons even work like tf", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-04", "text_1_tokenized": ["i", "wish", "i", "still", "had", "images", "of", "my", "old", "animal", "crossing", "new", "leaf", "villager", "he", "was", "good", "boy"], "text_2_tokenized": ["How", "does", "villager", "trading", "in", "New", "horizons", "even", "work", "like", "tf"]} -{"id": "1380-villager", "word": "villager", "label_binary": 1, "text_1": "A new villager moved in on top of my field of tulips. I had my only black tulip in there!! \ud83d\ude2d", "token_idx_1": 2, "text_start_1": 6, "text_end_1": 14, "date_1": "2019-04", "text_2": "I think I have a plan to get Sherb. Time travel and kick out Gruff. Get random villager (or one of the lesser popular ones I want, like Agnes, Chevre, or Muffy). Then, time travel and offer up Raymond for Sherb. 
(Because I've heard you cant kick out your most recent villager??)\ud83e\udd37\ud83c\udffc\u200d\u2640\ufe0f", "token_idx_2": 19, "text_start_2": 79, "text_end_2": 87, "date_2": "2020-04", "text_1_tokenized": ["A", "new", "villager", "moved", "in", "on", "top", "of", "my", "field", "of", "tulips", ".", "I", "had", "my", "only", "black", "tulip", "in", "there", "!", "!", "\ud83d\ude2d"], "text_2_tokenized": ["I", "think", "I", "have", "a", "plan", "to", "get", "Sherb", ".", "Time", "travel", "and", "kick", "out", "Gruff", ".", "Get", "random", "villager", "(", "or", "one", "of", "the", "lesser", "popular", "ones", "I", "want", ",", "like", "Agnes", ",", "Chevre", ",", "or", "Muffy", ")", ".", "Then", ",", "time", "travel", "and", "offer", "up", "Raymond", "for", "Sherb", ".", "(", "Because", "I've", "heard", "you", "cant", "kick", "out", "your", "most", "recent", "villager", "?", "?", ")", "\ud83e\udd37\ud83c\udffc\u200d\u2640", "\ufe0f"]} -{"id": "1381-villager", "word": "villager", "label_binary": 1, "text_1": "Pocket Camp craft times are crazy. It takes about 6 hours to make a margherita pizza. Not to mention I changed my mind about inviting the villager that needs it. Though I enjoy a margherita pizza, so I'll still probably use it...", "token_idx_1": 28, "text_start_1": 138, "text_end_1": 146, "date_1": "2019-04", "text_2": "video game struggle tweet accs r only funny when theyre for dumb things that happen ingame (ie getting the villager glitch or getting stuck somewhere), not when theyre just putting ppl out in the open to be made fun of", "token_idx_2": 20, "text_start_2": 107, "text_end_2": 115, "date_2": "2020-04", "text_1_tokenized": ["Pocket", "Camp", "craft", "times", "are", "crazy", ".", "It", "takes", "about", "6", "hours", "to", "make", "a", "margherita", "pizza", ".", "Not", "to", "mention", "I", "changed", "my", "mind", "about", "inviting", "the", "villager", "that", "needs", "it", ".", "Though", "I", "enjoy", "a", "margherita", "pizza", ",", "so", "I'll", "still", "probably", "use", "it", "..."], "text_2_tokenized": ["video", "game", "struggle", "tweet", "accs", "r", "only", "funny", "when", "theyre", "for", "dumb", "things", "that", "happen", "ingame", "(", "ie", "getting", "the", "villager", "glitch", "or", "getting", "stuck", "somewhere", ")", ",", "not", "when", "theyre", "just", "putting", "ppl", "out", "in", "the", "open", "to", "be", "made", "fun", "of"]} -{"id": "1382-villager", "word": "villager", "label_binary": 1, "text_1": "A group of knights must escort a helpful villager to the boat, except one of them is a mob boss", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 49, "date_1": "2019-04", "text_2": "i made classic horror outfits to display in my store & i log in today & my favorite villager was wearing the freddy krueger sweater \ud83e\udd7a", "token_idx_2": 18, "text_start_2": 92, "text_end_2": 100, "date_2": "2020-04", "text_1_tokenized": ["A", "group", "of", "knights", "must", "escort", "a", "helpful", "villager", "to", "the", "boat", ",", "except", "one", "of", "them", "is", "a", "mob", "boss"], "text_2_tokenized": ["i", "made", "classic", "horror", "outfits", "to", "display", "in", "my", "store", "&", "i", "log", "in", "today", "&", "my", "favorite", "villager", "was", "wearing", "the", "freddy", "krueger", "sweater", "\ud83e\udd7a"]} -{"id": "1383-villager", "word": "villager", "label_binary": 0, "text_1": "Every villager who talks to a named character: says their line. Me: YOU'RE GOING TO DIE. YES YOU, SPECIFICALLY. 
#gameofthrones #gameofsnark", "token_idx_1": 1, "text_start_1": 6, "text_end_1": 14, "date_1": "2019-04", "text_2": "sorry 2 say it but raymond isn't even that cute and by far not the best cat villager", "token_idx_2": 17, "text_start_2": 76, "text_end_2": 84, "date_2": "2020-04", "text_1_tokenized": ["Every", "villager", "who", "talks", "to", "a", "named", "character", ":", "says", "their", "line", ".", "Me", ":", "YOU'RE", "GOING", "TO", "DIE", ".", "YES", "YOU", ",", "SPECIFICALLY", ".", "#gameofthrones", "#gameofsnark"], "text_2_tokenized": ["sorry", "2", "say", "it", "but", "raymond", "isn't", "even", "that", "cute", "and", "by", "far", "not", "the", "best", "cat", "villager"]} -{"id": "1384-villager", "word": "villager", "label_binary": 0, "text_1": "Who puts cinnamon in rice? Now this shit tastes like a bun because I am an unrefined villager", "token_idx_1": 18, "text_start_1": 85, "text_end_1": 93, "date_1": "2019-04", "text_2": "i want the illiterate villager dammit", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 30, "date_2": "2020-04", "text_1_tokenized": ["Who", "puts", "cinnamon", "in", "rice", "?", "Now", "this", "shit", "tastes", "like", "a", "bun", "because", "I", "am", "an", "unrefined", "villager"], "text_2_tokenized": ["i", "want", "the", "illiterate", "villager", "dammit"]} -{"id": "1385-villager", "word": "villager", "label_binary": 0, "text_1": "Who remembers I like trains and villager news?", "token_idx_1": 6, "text_start_1": 32, "text_end_1": 40, "date_1": "2019-04", "text_2": "The amount of good furniture I've missed on nook islands while villager hunting is upsetting. First time I shook a tree on one and I get the red imperial bed \ud83d\ude0d", "token_idx_2": 11, "text_start_2": 63, "text_end_2": 71, "date_2": "2020-04", "text_1_tokenized": ["Who", "remembers", "I", "like", "trains", "and", "villager", "news", "?"], "text_2_tokenized": ["The", "amount", "of", "good", "furniture", "I've", "missed", "on", "nook", "islands", "while", "villager", "hunting", "is", "upsetting", ".", "First", "time", "I", "shook", "a", "tree", "on", "one", "and", "I", "get", "the", "red", "imperial", "bed", "\ud83d\ude0d"]} -{"id": "1386-villager", "word": "villager", "label_binary": 1, "text_1": "When you only have one villager in your #acnl game that you actually like. \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d #caneveryonegobuttangy", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 31, "date_1": "2019-04", "text_2": "gonna see if i can get a villager to move out soon and then hunt for audie", "token_idx_2": 7, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-04", "text_1_tokenized": ["When", "you", "only", "have", "one", "villager", "in", "your", "#acnl", "game", "that", "you", "actually", "like", ".", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "#caneveryonegobuttangy"], "text_2_tokenized": ["gonna", "see", "if", "i", "can", "get", "a", "villager", "to", "move", "out", "soon", "and", "then", "hunt", "for", "audie"]} -{"id": "1387-villager", "word": "villager", "label_binary": 0, "text_1": "getting pay statements on a friday when you don't get paid until monday is very... *minecraft villager noise*", "token_idx_1": 18, "text_start_1": 94, "text_end_1": 102, "date_1": "2019-04", "text_2": "This won't matter to anyone outside of AC twitter but I spent exactly 2 of my 40 NMT for a villager. I got sherb immediately, and then a cranky old goat. Obviously I chose the old man goat. But it was really hard to leave Sherb. Anyways. 
What do I do with my remaining tickets??", "token_idx_2": 20, "text_start_2": 91, "text_end_2": 99, "date_2": "2020-04", "text_1_tokenized": ["getting", "pay", "statements", "on", "a", "friday", "when", "you", "don't", "get", "paid", "until", "monday", "is", "very", "...", "*", "minecraft", "villager", "noise", "*"], "text_2_tokenized": ["This", "won't", "matter", "to", "anyone", "outside", "of", "AC", "twitter", "but", "I", "spent", "exactly", "2", "of", "my", "40", "NMT", "for", "a", "villager", ".", "I", "got", "sherb", "immediately", ",", "and", "then", "a", "cranky", "old", "goat", ".", "Obviously", "I", "chose", "the", "old", "man", "goat", ".", "But", "it", "was", "really", "hard", "to", "leave", "Sherb", ".", "Anyways", ".", "What", "do", "I", "do", "with", "my", "remaining", "tickets", "?", "?"]} -{"id": "1388-villager", "word": "villager", "label_binary": 0, "text_1": "My dad and grandparents escaped to NYC via Morocco. Both their families died at Auschwitz. My Nana's sister (blonde like me) was taken in by a Christian family but shot 2 days before liberation when a villager saw her and yelled \u2018Jew' in the street. #HolocaustRemembranceDay", "token_idx_1": 40, "text_start_1": 201, "text_end_1": 209, "date_1": "2019-04", "text_2": "nothing but respect for ppl who have all their villager houses in an organized way", "token_idx_2": 9, "text_start_2": 47, "text_end_2": 55, "date_2": "2020-04", "text_1_tokenized": ["My", "dad", "and", "grandparents", "escaped", "to", "NYC", "via", "Morocco", ".", "Both", "their", "families", "died", "at", "Auschwitz", ".", "My", "Nana's", "sister", "(", "blonde", "like", "me", ")", "was", "taken", "in", "by", "a", "Christian", "family", "but", "shot", "2", "days", "before", "liberation", "when", "a", "villager", "saw", "her", "and", "yelled", "\u2018", "Jew", "'", "in", "the", "street", ".", "#HolocaustRemembranceDay"], "text_2_tokenized": ["nothing", "but", "respect", "for", "ppl", "who", "have", "all", "their", "villager", "houses", "in", "an", "organized", "way"]} -{"id": "1389-villager", "word": "villager", "label_binary": 0, "text_1": "Yeah giving birth hurts or whatever but have you ever lost a villager to a boar lure in AoE2", "token_idx_1": 12, "text_start_1": 61, "text_end_1": 69, "date_1": "2019-04", "text_2": "I need a cat villager asap so I can give them the cat grass I keep getting from balloons", "token_idx_2": 4, "text_start_2": 13, "text_end_2": 21, "date_2": "2020-04", "text_1_tokenized": ["Yeah", "giving", "birth", "hurts", "or", "whatever", "but", "have", "you", "ever", "lost", "a", "villager", "to", "a", "boar", "lure", "in", "AoE", "2"], "text_2_tokenized": ["I", "need", "a", "cat", "villager", "asap", "so", "I", "can", "give", "them", "the", "cat", "grass", "I", "keep", "getting", "from", "balloons"]} -{"id": "1390-villager", "word": "villager", "label_binary": 0, "text_1": "They're making a live action minecraft movie and I swear if they don't cast Danny Devito as a villager I'm not watching \ud83d\ude24", "token_idx_1": 18, "text_start_1": 94, "text_end_1": 102, "date_1": "2019-04", "text_2": "Ok cool cats and kittens, anyone got a crafting villager rn? I wanna learn DIY stuffs! 
#acnh", "token_idx_2": 10, "text_start_2": 48, "text_end_2": 56, "date_2": "2020-04", "text_1_tokenized": ["They're", "making", "a", "live", "action", "minecraft", "movie", "and", "I", "swear", "if", "they", "don't", "cast", "Danny", "Devito", "as", "a", "villager", "I'm", "not", "watching", "\ud83d\ude24"], "text_2_tokenized": ["Ok", "cool", "cats", "and", "kittens", ",", "anyone", "got", "a", "crafting", "villager", "rn", "?", "I", "wanna", "learn", "DIY", "stuffs", "!", "#acnh"]} -{"id": "1391-villager", "word": "villager", "label_binary": 0, "text_1": "If you remember your OG Animal Crossing villager squad you are cool with me.", "token_idx_1": 7, "text_start_1": 40, "text_end_1": 48, "date_1": "2019-04", "text_2": "Raymond is a shitty villager and I genuinely don't understand why everyone wants him so badly", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["If", "you", "remember", "your", "OG", "Animal", "Crossing", "villager", "squad", "you", "are", "cool", "with", "me", "."], "text_2_tokenized": ["Raymond", "is", "a", "shitty", "villager", "and", "I", "genuinely", "don't", "understand", "why", "everyone", "wants", "him", "so", "badly"]} -{"id": "1392-villager", "word": "villager", "label_binary": 0, "text_1": "what happens when a villager uses a bed in the nether or end", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 28, "date_1": "2019-04", "text_2": "walker is the best villager", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 27, "date_2": "2020-04", "text_1_tokenized": ["what", "happens", "when", "a", "villager", "uses", "a", "bed", "in", "the", "nether", "or", "end"], "text_2_tokenized": ["walker", "is", "the", "best", "villager"]} -{"id": "1393-villager", "word": "villager", "label_binary": 1, "text_1": "It's obvious that some of you have never played the role of villager #5 and it shows.", "token_idx_1": 12, "text_start_1": 60, "text_end_1": 68, "date_1": "2019-04", "text_2": "government assign me an animal crossing villager", "token_idx_2": 6, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-04", "text_1_tokenized": ["It's", "obvious", "that", "some", "of", "you", "have", "never", "played", "the", "role", "of", "villager", "#", "5", "and", "it", "shows", "."], "text_2_tokenized": ["government", "assign", "me", "an", "animal", "crossing", "villager"]} -{"id": "1394-villager", "word": "villager", "label_binary": 0, "text_1": "\"hi, I'm a random, shadowy villager from a Hellboy story. 
any mythology I reference as I walk-and-talk with Mr Hellboy through this church/forest/dungeon is the actual, factual truth, and an element from that mythology will kill me/my family/village before the story is over.\"", "token_idx_1": 8, "text_start_1": 27, "text_end_1": 35, "date_1": "2019-04", "text_2": "it's villager hunting time lads", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 13, "date_2": "2020-04", "text_1_tokenized": ["\"", "hi", ",", "I'm", "a", "random", ",", "shadowy", "villager", "from", "a", "Hellboy", "story", ".", "any", "mythology", "I", "reference", "as", "I", "walk-and-talk", "with", "Mr", "Hellboy", "through", "this", "church", "/", "forest", "/", "dungeon", "is", "the", "actual", ",", "factual", "truth", ",", "and", "an", "element", "from", "that", "mythology", "will", "kill", "me", "/", "my", "family", "/", "village", "before", "the", "story", "is", "over", ".", "\""], "text_2_tokenized": ["it's", "villager", "hunting", "time", "lads"]} -{"id": "1395-villager", "word": "villager", "label_binary": 0, "text_1": "If you hear a villager talk about how wealthy a certain person is, and if the person is speaking in kikuyu, to stress his point and ensure it sinks, he will not fail to mention that the said person, is as wealthy as a monkey, now what does a monkey own to be so honored?", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 22, "date_1": "2019-04", "text_2": "give me the ability to re arrange villager furniture #acnh", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-04", "text_1_tokenized": ["If", "you", "hear", "a", "villager", "talk", "about", "how", "wealthy", "a", "certain", "person", "is", ",", "and", "if", "the", "person", "is", "speaking", "in", "kikuyu", ",", "to", "stress", "his", "point", "and", "ensure", "it", "sinks", ",", "he", "will", "not", "fail", "to", "mention", "that", "the", "said", "person", ",", "is", "as", "wealthy", "as", "a", "monkey", ",", "now", "what", "does", "a", "monkey", "own", "to", "be", "so", "honored", "?"], "text_2_tokenized": ["give", "me", "the", "ability", "to", "re", "arrange", "villager", "furniture", "#acnh"]} -{"id": "1396-villager", "word": "villager", "label_binary": 1, "text_1": "haru as villager is mad cute tho wtf baby axe girl", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 16, "date_1": "2019-04", "text_2": "Nintendo needs to stop being cowards and make a plague doctor Animal Crossing villager already.", "token_idx_2": 13, "text_start_2": 78, "text_end_2": 86, "date_2": "2020-04", "text_1_tokenized": ["haru", "as", "villager", "is", "mad", "cute", "tho", "wtf", "baby", "axe", "girl"], "text_2_tokenized": ["Nintendo", "needs", "to", "stop", "being", "cowards", "and", "make", "a", "plague", "doctor", "Animal", "Crossing", "villager", "already", "."]} -{"id": "1397-villager", "word": "villager", "label_binary": 0, "text_1": "i keep getting that minecraft villager ad on twitch and it's making me want to play minecraft again. 
successful advertising!", "token_idx_1": 5, "text_start_1": 30, "text_end_1": 38, "date_1": "2019-04", "text_2": "If Animal Crossing releases as much as ONE MORE peppy rabbit villager, I'm legit gonna fucking lose it.", "token_idx_2": 11, "text_start_2": 61, "text_end_2": 69, "date_2": "2020-04", "text_1_tokenized": ["i", "keep", "getting", "that", "minecraft", "villager", "ad", "on", "twitch", "and", "it's", "making", "me", "want", "to", "play", "minecraft", "again", ".", "successful", "advertising", "!"], "text_2_tokenized": ["If", "Animal", "Crossing", "releases", "as", "much", "as", "ONE", "MORE", "peppy", "rabbit", "villager", ",", "I'm", "legit", "gonna", "fucking", "lose", "it", "."]} -{"id": "1398-villager", "word": "villager", "label_binary": 0, "text_1": "Someone drove by with a forklift, hooting, the villager in me started screaming \"re rekisha ditamaaaati! Cabbage'eeee, matsapane le ona a gona\" \ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude02 I just couldn't help it", "token_idx_1": 10, "text_start_1": 47, "text_end_1": 55, "date_1": "2019-04", "text_2": "i want a penguin villager so bad omg... all of them are so cute \ud83e\udd7a", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 25, "date_2": "2020-04", "text_1_tokenized": ["Someone", "drove", "by", "with", "a", "forklift", ",", "hooting", ",", "the", "villager", "in", "me", "started", "screaming", "\"", "re", "rekisha", "ditamaaaati", "!", "Cabbage'eeee", ",", "matsapane", "le", "ona", "a", "gona", "\"", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02", "I", "just", "couldn't", "help", "it"], "text_2_tokenized": ["i", "want", "a", "penguin", "villager", "so", "bad", "omg", "...", "all", "of", "them", "are", "so", "cute", "\ud83e\udd7a"]} -{"id": "1399-villager", "word": "villager", "label_binary": 0, "text_1": "When a villager asked the BJP leader, \"what did you do in the last 5 years? He said, \"Bolo Bharat Mata Ki Jai.\".....\ud83d\udea9\ud83d\udea9", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 15, "date_1": "2019-04", "text_2": "Why does Animal Crossing not allow the player to make every villager wear pants?", "token_idx_2": 11, "text_start_2": 60, "text_end_2": 68, "date_2": "2020-04", "text_1_tokenized": ["When", "a", "villager", "asked", "the", "BJP", "leader", ",", "\"", "what", "did", "you", "do", "in", "the", "last", "5", "years", "?", "He", "said", ",", "\"", "Bolo", "Bharat", "Mata", "Ki", "Jai", ".", "\"", "...", "\ud83d\udea9", "\ud83d\udea9"], "text_2_tokenized": ["Why", "does", "Animal", "Crossing", "not", "allow", "the", "player", "to", "make", "every", "villager", "wear", "pants", "?"]} -{"id": "1400-villager", "word": "villager", "label_binary": 1, "text_1": "im an animal crossing villager on the inside", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-04", "text_2": "if anyone has raymond, or nmts in bulk please lmk!! 
raymond is my boyfriends dreamie and he has fang up for trade rn \ud83e\udd7a i really want him to get his dream villager since hes been giving up on acnh ever since he tried island hopping for him :<", "token_idx_2": 35, "text_start_2": 154, "text_end_2": 162, "date_2": "2020-04", "text_1_tokenized": ["im", "an", "animal", "crossing", "villager", "on", "the", "inside"], "text_2_tokenized": ["if", "anyone", "has", "raymond", ",", "or", "nmts", "in", "bulk", "please", "lmk", "!", "!", "raymond", "is", "my", "boyfriends", "dreamie", "and", "he", "has", "fang", "up", "for", "trade", "rn", "\ud83e\udd7a", "i", "really", "want", "him", "to", "get", "his", "dream", "villager", "since", "hes", "been", "giving", "up", "on", "acnh", "ever", "since", "he", "tried", "island", "hopping", "for", "him", ":", "<"]} -{"id": "1401-villager", "word": "villager", "label_binary": 0, "text_1": "Cursed thought of the day: Minecraft villager noses pop off and become baby villagers.", "token_idx_1": 7, "text_start_1": 37, "text_end_1": 45, "date_1": "2019-04", "text_2": "i got a clay amiibo card :> there's a villager called fang and i wanted him too because same family name!!!! but he's $64 hdjbfjkdhasbf", "token_idx_2": 10, "text_start_2": 41, "text_end_2": 49, "date_2": "2020-04", "text_1_tokenized": ["Cursed", "thought", "of", "the", "day", ":", "Minecraft", "villager", "noses", "pop", "off", "and", "become", "baby", "villagers", "."], "text_2_tokenized": ["i", "got", "a", "clay", "amiibo", "card", ":", ">", "there's", "a", "villager", "called", "fang", "and", "i", "wanted", "him", "too", "because", "same", "family", "name", "!", "!", "!", "but", "he's", "$", "64", "hdjbfjkdhasbf"]} -{"id": "1402-villager", "word": "villager", "label_binary": 1, "text_1": "when i was rewriting theo for my isekai story i was a lot happier with her but apparently my sister didn't like her much because she seemed like just a villager B type and i got sad :\"\"\") preferences i guess but still orz", "token_idx_1": 30, "text_start_1": 152, "text_end_1": 160, "date_1": "2019-04", "text_2": "ACNH question. If a villager moves out, can I invite them back with Amiibo? If so, would it be that same exact villager? 
Like would they know me?", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["when", "i", "was", "rewriting", "theo", "for", "my", "isekai", "story", "i", "was", "a", "lot", "happier", "with", "her", "but", "apparently", "my", "sister", "didn't", "like", "her", "much", "because", "she", "seemed", "like", "just", "a", "villager", "B", "type", "and", "i", "got", "sad", ":", "\"", "\"", "\"", ")", "preferences", "i", "guess", "but", "still", "orz"], "text_2_tokenized": ["ACNH", "question", ".", "If", "a", "villager", "moves", "out", ",", "can", "I", "invite", "them", "back", "with", "Amiibo", "?", "If", "so", ",", "would", "it", "be", "that", "same", "exact", "villager", "?", "Like", "would", "they", "know", "me", "?"]} -{"id": "1403-villager", "word": "villager", "label_binary": 0, "text_1": "i can become a good voice actor, i am adding mr bean's \u201cTEDDDYYY!!!\u201d to the list of voices and lines i can flawlessly imitate, next to my minecraft villager \u201chhhmmmm\u201d sound", "token_idx_1": 35, "text_start_1": 148, "text_end_1": 156, "date_1": "2019-04", "text_2": "man 'cute pastel aesthetic' ac players give me bad feelings coz theyre the ones gushing over giving maid dresses to raymond/marshal and shit on 'ugly' villager as if they committed tax fraud", "token_idx_2": 31, "text_start_2": 151, "text_end_2": 159, "date_2": "2020-04", "text_1_tokenized": ["i", "can", "become", "a", "good", "voice", "actor", ",", "i", "am", "adding", "mr", "bean's", "\u201c", "TEDDDYYY", "!", "!", "!", "\u201d", "to", "the", "list", "of", "voices", "and", "lines", "i", "can", "flawlessly", "imitate", ",", "next", "to", "my", "minecraft", "villager", "\u201c", "hhhmmmm", "\u201d", "sound"], "text_2_tokenized": ["man", "'", "cute", "pastel", "aesthetic", "'", "ac", "players", "give", "me", "bad", "feelings", "coz", "theyre", "the", "ones", "gushing", "over", "giving", "maid", "dresses", "to", "raymond", "/", "marshal", "and", "shit", "on", "'", "ugly", "'", "villager", "as", "if", "they", "committed", "tax", "fraud"]} -{"id": "1404-villager", "word": "villager", "label_binary": 1, "text_1": "bfjdhd acnl prompting me for villager names forcing me to name mz big eyes miriam", "token_idx_1": 5, "text_start_1": 29, "text_end_1": 37, "date_1": "2019-04", "text_2": "A fun minigame I like to play in Animal Crossing when I give a villager a bug or a fish is to guess whether they'll thank me for the pet or thank me for the snack.", "token_idx_2": 14, "text_start_2": 63, "text_end_2": 71, "date_2": "2020-04", "text_1_tokenized": ["bfjdhd", "acnl", "prompting", "me", "for", "villager", "names", "forcing", "me", "to", "name", "mz", "big", "eyes", "miriam"], "text_2_tokenized": ["A", "fun", "minigame", "I", "like", "to", "play", "in", "Animal", "Crossing", "when", "I", "give", "a", "villager", "a", "bug", "or", "a", "fish", "is", "to", "guess", "whether", "they'll", "thank", "me", "for", "the", "pet", "or", "thank", "me", "for", "the", "snack", "."]} -{"id": "1405-villager", "word": "villager", "label_binary": 1, "text_1": "every damn villager in Jak and Daxter are greedy bastards. 90 precursor orbs to \u201cfund\u201d your campaign?! 90 precursor orbs so you can go on your expedition?!?! 
I know damn well you aren't leaving that hut", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 19, "date_1": "2019-04", "text_2": "whats your favourite AC villager that isn't \"\"popular\"\" example: not Raymond or Marshal", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 32, "date_2": "2020-04", "text_1_tokenized": ["every", "damn", "villager", "in", "Jak", "and", "Daxter", "are", "greedy", "bastards", ".", "90", "precursor", "orbs", "to", "\u201c", "fund", "\u201d", "your", "campaign", "?", "!", "90", "precursor", "orbs", "so", "you", "can", "go", "on", "your", "expedition", "?", "!", "?", "!", "I", "know", "damn", "well", "you", "aren't", "leaving", "that", "hut"], "text_2_tokenized": ["whats", "your", "favourite", "AC", "villager", "that", "isn't", "\"", "\"", "popular", "\"", "\"", "example", ":", "not", "Raymond", "or", "Marshal"]} -{"id": "1406-villager", "word": "villager", "label_binary": 1, "text_1": "While playing Minecraft, I looked over at Alistair's screen as he poured lava onto a villager. Me: \u201cDude, that's a little dark.\u201d Alistair, without looking away from the screen: \u201cThey make the silliest noises when you set them on fire.\u201d Me: \u201cWhat?\u201d A: \u201cWhat?\u201d \ud83d\ude27\ud83d\ude27\ud83d\ude35 #Minecraft", "token_idx_1": 16, "text_start_1": 85, "text_end_1": 93, "date_1": "2019-04", "text_2": "I did have an idea tho for anyone who wants to Tell me who you think is the ugliest villager And I'll make porn of them As a fuck you for calling them ugly Jk but still dont judge them I'll show you they can be sexy Except Graham Fuck Graham", "token_idx_2": 19, "text_start_2": 84, "text_end_2": 92, "date_2": "2020-04", "text_1_tokenized": ["While", "playing", "Minecraft", ",", "I", "looked", "over", "at", "Alistair's", "screen", "as", "he", "poured", "lava", "onto", "a", "villager", ".", "Me", ":", "\u201c", "Dude", ",", "that's", "a", "little", "dark", ".", "\u201d", "Alistair", ",", "without", "looking", "away", "from", "the", "screen", ":", "\u201c", "They", "make", "the", "silliest", "noises", "when", "you", "set", "them", "on", "fire", ".", "\u201d", "Me", ":", "\u201c", "What", "?", "\u201d", "A", ":", "\u201c", "What", "?", "\u201d", "\ud83d\ude27", "\ud83d\ude27", "\ud83d\ude35", "#Minecraft"], "text_2_tokenized": ["I", "did", "have", "an", "idea", "tho", "for", "anyone", "who", "wants", "to", "Tell", "me", "who", "you", "think", "is", "the", "ugliest", "villager", "And", "I'll", "make", "porn", "of", "them", "As", "a", "fuck", "you", "for", "calling", "them", "ugly", "Jk", "but", "still", "dont", "judge", "them", "I'll", "show", "you", "they", "can", "be", "sexy", "Except", "Graham", "Fuck", "Graham"]} -{"id": "1407-villager", "word": "villager", "label_binary": 0, "text_1": "Can't sleep thinking about how a villager complained about stalling in 2019", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 41, "date_1": "2019-04", "text_2": "Should I time travel, move like eight villager houses and re-terraform like 70% of my shit", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 46, "date_2": "2020-04", "text_1_tokenized": ["Can't", "sleep", "thinking", "about", "how", "a", "villager", "complained", "about", "stalling", "in", "2019"], "text_2_tokenized": ["Should", "I", "time", "travel", ",", "move", "like", "eight", "villager", "houses", "and", "re-terraform", "like", "70", "%", "of", "my", "shit"]} -{"id": "1408-villager", "word": "villager", "label_binary": 1, "text_1": "Boy FFXIV sure does love its \"local villager complete waste 
of space and it's up to you to fix them\" quest lines", "token_idx_1": 8, "text_start_1": 36, "text_end_1": 44, "date_1": "2019-04", "text_2": "I had 15 tickets I was ready to go villager hunting with and i got 2 money rock islands and my 3rd island was Merengue ;; lucky lucky", "token_idx_2": 9, "text_start_2": 35, "text_end_2": 43, "date_2": "2020-04", "text_1_tokenized": ["Boy", "FFXIV", "sure", "does", "love", "its", "\"", "local", "villager", "complete", "waste", "of", "space", "and", "it's", "up", "to", "you", "to", "fix", "them", "\"", "quest", "lines"], "text_2_tokenized": ["I", "had", "15", "tickets", "I", "was", "ready", "to", "go", "villager", "hunting", "with", "and", "i", "got", "2", "money", "rock", "islands", "and", "my", "3rd", "island", "was", "Merengue", ";", ";", "lucky", "lucky"]} -{"id": "1409-villager", "word": "villager", "label_binary": 0, "text_1": "In neighbor loud speaker is played. On query knew that an old lady died in evening. So nirgun songs will be played during whole night. But I never heard abt observation of such ritual on death of old person despite being villager. In funeral procession done. EB castes do so.", "token_idx_1": 44, "text_start_1": 221, "text_end_1": 229, "date_1": "2019-04", "text_2": "i accidentally bullied a villager in animal crossing by pushing him, and made him sad i sent him an apology letter with some peaches, i hope he forgives me", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-04", "text_1_tokenized": ["In", "neighbor", "loud", "speaker", "is", "played", ".", "On", "query", "knew", "that", "an", "old", "lady", "died", "in", "evening", ".", "So", "nirgun", "songs", "will", "be", "played", "during", "whole", "night", ".", "But", "I", "never", "heard", "abt", "observation", "of", "such", "ritual", "on", "death", "of", "old", "person", "despite", "being", "villager", ".", "In", "funeral", "procession", "done", ".", "EB", "castes", "do", "so", "."], "text_2_tokenized": ["i", "accidentally", "bullied", "a", "villager", "in", "animal", "crossing", "by", "pushing", "him", ",", "and", "made", "him", "sad", "i", "sent", "him", "an", "apology", "letter", "with", "some", "peaches", ",", "i", "hope", "he", "forgives", "me"]} -{"id": "1410-villager", "word": "villager", "label_binary": 1, "text_1": "MC BS: [may] found out if you smack a villager, they /will/ still sell to you, but will increase their prices. im going to smack this guy again and see if the prices keep going up", "token_idx_1": 12, "text_start_1": 38, "text_end_1": 46, "date_1": "2019-04", "text_2": "I don't know, man. 
My opinion is probably super biased because I can only play Pocket Camp but, like, just play and hoard stuff until you get the villager you want????", "token_idx_2": 32, "text_start_2": 146, "text_end_2": 154, "date_2": "2020-04", "text_1_tokenized": ["MC", "BS", ":", "[", "may", "]", "found", "out", "if", "you", "smack", "a", "villager", ",", "they", "/", "will", "/", "still", "sell", "to", "you", ",", "but", "will", "increase", "their", "prices", ".", "im", "going", "to", "smack", "this", "guy", "again", "and", "see", "if", "the", "prices", "keep", "going", "up"], "text_2_tokenized": ["I", "don't", "know", ",", "man", ".", "My", "opinion", "is", "probably", "super", "biased", "because", "I", "can", "only", "play", "Pocket", "Camp", "but", ",", "like", ",", "just", "play", "and", "hoard", "stuff", "until", "you", "get", "the", "villager", "you", "want", "?", "?", "?"]} -{"id": "1411-villager", "word": "villager", "label_binary": 0, "text_1": "Leaving at 9.... o man... Minecraft villager noise", "token_idx_1": 8, "text_start_1": 36, "text_end_1": 44, "date_1": "2019-04", "text_2": "I got the villager I wanted the most randomly in ACNH \ud83d\ude33", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 18, "date_2": "2020-04", "text_1_tokenized": ["Leaving", "at", "9", "...", "o", "man", "...", "Minecraft", "villager", "noise"], "text_2_tokenized": ["I", "got", "the", "villager", "I", "wanted", "the", "most", "randomly", "in", "ACNH", "\ud83d\ude33"]} -{"id": "1412-villager", "word": "villager", "label_binary": 1, "text_1": "I will punt the villager child like a FOOTBAAAALLLLLLLL", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 24, "date_1": "2019-04", "text_2": "tex has been giving me looks after I got him mad, I visited his house the other day and he was all happy with my other villager that was there but he kept giving me the side eye and seemed annoyed lmao, BOY BYE!!!!", "token_idx_2": 27, "text_start_2": 119, "text_end_2": 127, "date_2": "2020-04", "text_1_tokenized": ["I", "will", "punt", "the", "villager", "child", "like", "a", "FOOTBAAAALLLLLLLL"], "text_2_tokenized": ["tex", "has", "been", "giving", "me", "looks", "after", "I", "got", "him", "mad", ",", "I", "visited", "his", "house", "the", "other", "day", "and", "he", "was", "all", "happy", "with", "my", "other", "villager", "that", "was", "there", "but", "he", "kept", "giving", "me", "the", "side", "eye", "and", "seemed", "annoyed", "lmao", ",", "BOY", "BYE", "!", "!", "!"]} -{"id": "1413-villager", "word": "villager", "label_binary": 0, "text_1": "I just found out that I'm technically a villager and idk what to do with this information", "token_idx_1": 8, "text_start_1": 40, "text_end_1": 48, "date_1": "2019-04", "text_2": "I wanna comment on everyone's \u201ctell me your favorite villager and I'll...\u201d but I don't wanna fill everyone's tl w me just saying \u201cFlora\u201d a bunch lol", "token_idx_2": 10, "text_start_2": 53, "text_end_2": 61, "date_2": "2020-04", "text_1_tokenized": ["I", "just", "found", "out", "that", "I'm", "technically", "a", "villager", "and", "idk", "what", "to", "do", "with", "this", "information"], "text_2_tokenized": ["I", "wanna", "comment", "on", "everyone's", "\u201c", "tell", "me", "your", "favorite", "villager", "and", "I'll", "...", "\u201d", "but", "I", "don't", "wanna", "fill", "everyone's", "tl", "w", "me", "just", "saying", "\u201c", "Flora", "\u201d", "a", "bunch", "lol"]} -{"id": "1414-villager", "word": "villager", "label_binary": 1, "text_1": "Shopping list: \u2022 bread \u2022 
oramges \u2022Friends in real life who are as supportive as villager friends in animal crossing \u2022egg", "token_idx_1": 17, "text_start_1": 80, "text_end_1": 88, "date_1": "2019-04", "text_2": "i haven't had a new villager in forever.... how long does it take for someone to wanna move out \ud83d\ude2d", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["Shopping", "list", ":", "\u2022", "bread", "\u2022", "oramges", "\u2022", "Friends", "in", "real", "life", "who", "are", "as", "supportive", "as", "villager", "friends", "in", "animal", "crossing", "\u2022", "egg"], "text_2_tokenized": ["i", "haven't", "had", "a", "new", "villager", "in", "forever", "...", "how", "long", "does", "it", "take", "for", "someone", "to", "wanna", "move", "out", "\ud83d\ude2d"]} -{"id": "1415-villager", "word": "villager", "label_binary": 1, "text_1": "Pretty crazy that the villager can fit an entire TV in his pocket but he can't fit a few dozen beetles in them", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-04", "text_2": "how do you give a villager to someone? I'm going to use an amiibo to get them to move first. Does a person have to come to talk to the villager when their things are all packed into boxes?", "token_idx_2": 5, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-04", "text_1_tokenized": ["Pretty", "crazy", "that", "the", "villager", "can", "fit", "an", "entire", "TV", "in", "his", "pocket", "but", "he", "can't", "fit", "a", "few", "dozen", "beetles", "in", "them"], "text_2_tokenized": ["how", "do", "you", "give", "a", "villager", "to", "someone", "?", "I'm", "going", "to", "use", "an", "amiibo", "to", "get", "them", "to", "move", "first", ".", "Does", "a", "person", "have", "to", "come", "to", "talk", "to", "the", "villager", "when", "their", "things", "are", "all", "packed", "into", "boxes", "?"]} -{"id": "1416-villager", "word": "villager", "label_binary": 0, "text_1": "\u231a\ufe0e You don't know how but one day, you found yourself trapped and living in fairy tale. Cinderella's story, to be exact. You are no one, just unnamed villager who has nothing to do with the story. You also an acquaintance of Cinderella, the step mother and step sisters. ++", "token_idx_1": 34, "text_start_1": 150, "text_end_1": 158, "date_1": "2019-04", "text_2": "Some of my favorite villager interactions in #ACNH : A Thread. #AnimalCrossing", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["\u231a", "\ufe0e", "You", "don't", "know", "how", "but", "one", "day", ",", "you", "found", "yourself", "trapped", "and", "living", "in", "fairy", "tale", ".", "Cinderella's", "story", ",", "to", "be", "exact", ".", "You", "are", "no", "one", ",", "just", "unnamed", "villager", "who", "has", "nothing", "to", "do", "with", "the", "story", ".", "You", "also", "an", "acquaintance", "of", "Cinderella", ",", "the", "step", "mother", "and", "step", "sisters", ".", "+", "+"], "text_2_tokenized": ["Some", "of", "my", "favorite", "villager", "interactions", "in", "#ACNH", ":", "A", "Thread", ".", "#AnimalCrossing"]} -{"id": "1417-villager", "word": "villager", "label_binary": 0, "text_1": "Tonight we overhauled #BloodElegance blood container items, implemented blood colours, and finally expanded away from creature attributes into full blown blood types, ranging from \"human\", to \"villager\", to the fantastical \"dragon\"! 
Status effects and blood splatter next week!", "token_idx_1": 33, "text_start_1": 193, "text_end_1": 201, "date_1": "2019-04", "text_2": "I never realised Apollo is so popular as a villager?! And I had him on my island... Could have sold him for some sweet, sweet bells when he moved out\u2026 Gfdi\u2026\u2026\u2026..", "token_idx_2": 9, "text_start_2": 43, "text_end_2": 51, "date_2": "2020-04", "text_1_tokenized": ["Tonight", "we", "overhauled", "#BloodElegance", "blood", "container", "items", ",", "implemented", "blood", "colours", ",", "and", "finally", "expanded", "away", "from", "creature", "attributes", "into", "full", "blown", "blood", "types", ",", "ranging", "from", "\"", "human", "\"", ",", "to", "\"", "villager", "\"", ",", "to", "the", "fantastical", "\"", "dragon", "\"", "!", "Status", "effects", "and", "blood", "splatter", "next", "week", "!"], "text_2_tokenized": ["I", "never", "realised", "Apollo", "is", "so", "popular", "as", "a", "villager", "?", "!", "And", "I", "had", "him", "on", "my", "island", "...", "Could", "have", "sold", "him", "for", "some", "sweet", ",", "sweet", "bells", "when", "he", "moved", "out", "\u2026", "Gfdi", "\u2026", "\u2026", "\u2026", ".."]} -{"id": "1418-villager", "word": "villager", "label_binary": 0, "text_1": "The new minecraft update is reall fun. Villages got a much needed update, building up villages is much more easy an fun, and being able to pick villager jobs is great. Just fix the broken chuck loading alright.", "token_idx_1": 30, "text_start_1": 144, "text_end_1": 152, "date_1": "2019-04", "text_2": "unpopular opinion: you can like or dislike whatever villager you want , it doesnt say anything about your personality or you as a person, it is a game about paying fictional debt to a raccoon landlord \ud83d\ude43", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 60, "date_2": "2020-04", "text_1_tokenized": ["The", "new", "minecraft", "update", "is", "reall", "fun", ".", "Villages", "got", "a", "much", "needed", "update", ",", "building", "up", "villages", "is", "much", "more", "easy", "an", "fun", ",", "and", "being", "able", "to", "pick", "villager", "jobs", "is", "great", ".", "Just", "fix", "the", "broken", "chuck", "loading", "alright", "."], "text_2_tokenized": ["unpopular", "opinion", ":", "you", "can", "like", "or", "dislike", "whatever", "villager", "you", "want", ",", "it", "doesnt", "say", "anything", "about", "your", "personality", "or", "you", "as", "a", "person", ",", "it", "is", "a", "game", "about", "paying", "fictional", "debt", "to", "a", "raccoon", "landlord", "\ud83d\ude43"]} -{"id": "1419-villager", "word": "villager", "label_binary": 0, "text_1": "Just played a 20 minute game of smash bros. Someone got to a part of the custom stage basically no one else could intentionally because villager recovery and then waited for the rest of us to kill each other. On sideways Joy-Cons. 
I hate everyone.", "token_idx_1": 26, "text_start_1": 136, "text_end_1": 144, "date_1": "2019-04", "text_2": "flo loves to sing............he's my favorite villager in kyunland", "token_idx_2": 8, "text_start_2": 46, "text_end_2": 54, "date_2": "2020-04", "text_1_tokenized": ["Just", "played", "a", "20", "minute", "game", "of", "smash", "bros", ".", "Someone", "got", "to", "a", "part", "of", "the", "custom", "stage", "basically", "no", "one", "else", "could", "intentionally", "because", "villager", "recovery", "and", "then", "waited", "for", "the", "rest", "of", "us", "to", "kill", "each", "other", ".", "On", "sideways", "Joy-Cons", ".", "I", "hate", "everyone", "."], "text_2_tokenized": ["flo", "loves", "to", "sing", "...", "he's", "my", "favorite", "villager", "in", "kyunland"]} -{"id": "1420-villager", "word": "villager", "label_binary": 1, "text_1": "\u00d7New AU Link\u00d7 \u25cf Post-Game BotW. Follows the main story normally. \u25cf Due to a occurence in the world of Fire Emblem Heroes, Link is Summoned to Askr while he was on a quest from a villager. \u00d7More Details to be added as story progresses\u00d7", "token_idx_1": 41, "text_start_1": 178, "text_end_1": 186, "date_1": "2019-04", "text_2": "Apparently Marina is moving into Nate's old plot. Which is fine, I suppose, but I was looking forward to island hopping and finding a new villager myself \ud83d\ude2d", "token_idx_2": 28, "text_start_2": 138, "text_end_2": 146, "date_2": "2020-04", "text_1_tokenized": ["\u00d7", "New", "AU", "Link", "\u00d7", "\u25cf", "Post-Game", "BotW", ".", "Follows", "the", "main", "story", "normally", ".", "\u25cf", "Due", "to", "a", "occurence", "in", "the", "world", "of", "Fire", "Emblem", "Heroes", ",", "Link", "is", "Summoned", "to", "Askr", "while", "he", "was", "on", "a", "quest", "from", "a", "villager", ".", "\u00d7", "More", "Details", "to", "be", "added", "as", "story", "progresses", "\u00d7"], "text_2_tokenized": ["Apparently", "Marina", "is", "moving", "into", "Nate's", "old", "plot", ".", "Which", "is", "fine", ",", "I", "suppose", ",", "but", "I", "was", "looking", "forward", "to", "island", "hopping", "and", "finding", "a", "new", "villager", "myself", "\ud83d\ude2d"]} -{"id": "1421-villager", "word": "villager", "label_binary": 1, "text_1": "i think it's cool that vinny vinesauce and i use the same villager color in smash", "token_idx_1": 12, "text_start_1": 58, "text_end_1": 66, "date_1": "2019-04", "text_2": "I wish Nintendo would have it to where if you find a villager who moved out from your island , on a deserted island. They should be able to remember being in your island. 
I hate that they act like you never existed", "token_idx_2": 12, "text_start_2": 53, "text_end_2": 61, "date_2": "2020-04", "text_1_tokenized": ["i", "think", "it's", "cool", "that", "vinny", "vinesauce", "and", "i", "use", "the", "same", "villager", "color", "in", "smash"], "text_2_tokenized": ["I", "wish", "Nintendo", "would", "have", "it", "to", "where", "if", "you", "find", "a", "villager", "who", "moved", "out", "from", "your", "island", ",", "on", "a", "deserted", "island", ".", "They", "should", "be", "able", "to", "remember", "being", "in", "your", "island", ".", "I", "hate", "that", "they", "act", "like", "you", "never", "existed"]} -{"id": "1422-villager", "word": "villager", "label_binary": 0, "text_1": "Bought a drink, I ask for a straw then the cashier(female) tells me,\" REAL MEN DON'T USE STRAWS\" The seemed a villager so I ask her ,\"which men do you even know?\" She replied,\"Strong men, like builders\" Just walked away, could've gotten it fired even.", "token_idx_1": 28, "text_start_1": 110, "text_end_1": 118, "date_1": "2019-04", "text_2": "Just want my last Animal Crossing villager to be a wolf so bad \ud83e\udd7a", "token_idx_2": 6, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-04", "text_1_tokenized": ["Bought", "a", "drink", ",", "I", "ask", "for", "a", "straw", "then", "the", "cashier", "(", "female", ")", "tells", "me", ",", "\"", "REAL", "MEN", "DON'T", "USE", "STRAWS", "\"", "The", "seemed", "a", "villager", "so", "I", "ask", "her", ",", "\"", "which", "men", "do", "you", "even", "know", "?", "\"", "She", "replied", ",", "\"", "Strong", "men", ",", "like", "builders", "\"", "Just", "walked", "away", ",", "could've", "gotten", "it", "fired", "even", "."], "text_2_tokenized": ["Just", "want", "my", "last", "Animal", "Crossing", "villager", "to", "be", "a", "wolf", "so", "bad", "\ud83e\udd7a"]} -{"id": "1423-villager", "word": "villager", "label_binary": 0, "text_1": "Tryna figure out which Minecraft villager I am today", "token_idx_1": 5, "text_start_1": 33, "text_end_1": 41, "date_1": "2019-04", "text_2": "Sherb is my new villager and i love him already", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 24, "date_2": "2020-04", "text_1_tokenized": ["Tryna", "figure", "out", "which", "Minecraft", "villager", "I", "am", "today"], "text_2_tokenized": ["Sherb", "is", "my", "new", "villager", "and", "i", "love", "him", "already"]} -{"id": "1424-villager", "word": "villager", "label_binary": 0, "text_1": "Omg What should I name my villager ???", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 34, "date_1": "2019-04", "text_2": "Taken from @mayorstetson ! reply with a selfie and ill tell you which villager you look like!! as long as I know your face ~", "token_idx_2": 13, "text_start_2": 70, "text_end_2": 78, "date_2": "2020-04", "text_1_tokenized": ["Omg", "What", "should", "I", "name", "my", "villager", "?", "?", "?"], "text_2_tokenized": ["Taken", "from", "@mayorstetson", "!", "reply", "with", "a", "selfie", "and", "ill", "tell", "you", "which", "villager", "you", "look", "like", "!", "!", "as", "long", "as", "I", "know", "your", "face", "~"]} -{"id": "1425-villager", "word": "villager", "label_binary": 0, "text_1": "Congress and Mahamilawat (Maha Gathbandhan or grand alliance) gave headlines to Pakistan media. Whether you people will speak a language that is liked by #Pakistan. 
Now even a villager of #Gujarat speaks like that, says PM @narendramodi #LokSabhaElections2019", "token_idx_1": 32, "text_start_1": 176, "text_end_1": 184, "date_1": "2019-04", "text_2": "give me your favorite Animal Crossing villager, now, I need to know", "token_idx_2": 6, "text_start_2": 38, "text_end_2": 46, "date_2": "2020-04", "text_1_tokenized": ["Congress", "and", "Mahamilawat", "(", "Maha", "Gathbandhan", "or", "grand", "alliance", ")", "gave", "headlines", "to", "Pakistan", "media", ".", "Whether", "you", "people", "will", "speak", "a", "language", "that", "is", "liked", "by", "#Pakistan", ".", "Now", "even", "a", "villager", "of", "#Gujarat", "speaks", "like", "that", ",", "says", "PM", "@narendramodi", "#LokSabhaElections2019"], "text_2_tokenized": ["give", "me", "your", "favorite", "Animal", "Crossing", "villager", ",", "now", ",", "I", "need", "to", "know"]} -{"id": "1426-villager", "word": "villager", "label_binary": 1, "text_1": "biff as an animal crossing villager", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 35, "date_1": "2019-04", "text_2": "the way bob isn't even a favorite villager of mine so i can just trade him for 100+ tickets at any moment", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-04", "text_1_tokenized": ["biff", "as", "an", "animal", "crossing", "villager"], "text_2_tokenized": ["the", "way", "bob", "isn't", "even", "a", "favorite", "villager", "of", "mine", "so", "i", "can", "just", "trade", "him", "for", "100", "+", "tickets", "at", "any", "moment"]} -{"id": "1427-villager", "word": "villager", "label_binary": 0, "text_1": "I'm afraid of playing Minecraft with the new villager update since they changed all the blacksmith areas", "token_idx_1": 8, "text_start_1": 45, "text_end_1": 53, "date_1": "2019-04", "text_2": "Who's your favorite villager and why? \ud83d\udc95", "token_idx_2": 3, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["I'm", "afraid", "of", "playing", "Minecraft", "with", "the", "new", "villager", "update", "since", "they", "changed", "all", "the", "blacksmith", "areas"], "text_2_tokenized": ["Who's", "your", "favorite", "villager", "and", "why", "?", "\ud83d\udc95"]} -{"id": "1428-villager", "word": "villager", "label_binary": 0, "text_1": "I have a pea sized brain, watching Kept a japanese villager main against a wolf I noticed he did a lloid at short hop hieght so when it was reflected it would go over his head WHY HAVEN'T I THOUGHT OF THAT BEFORE, I'M SO DUMB", "token_idx_1": 11, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-04", "text_2": "to all my new (or old) followers: comment ur fav villager or interesting facts about yourself/island! 
i wanna get to know all of you\ud83e\udd7a\ud83e\udd0e & if you guys ever need anyone my dms are open\ud83c\udf43", "token_idx_2": 13, "text_start_2": 49, "text_end_2": 57, "date_2": "2020-04", "text_1_tokenized": ["I", "have", "a", "pea", "sized", "brain", ",", "watching", "Kept", "a", "japanese", "villager", "main", "against", "a", "wolf", "I", "noticed", "he", "did", "a", "lloid", "at", "short", "hop", "hieght", "so", "when", "it", "was", "reflected", "it", "would", "go", "over", "his", "head", "WHY", "HAVEN'T", "I", "THOUGHT", "OF", "THAT", "BEFORE", ",", "I'M", "SO", "DUMB"], "text_2_tokenized": ["to", "all", "my", "new", "(", "or", "old", ")", "followers", ":", "comment", "ur", "fav", "villager", "or", "interesting", "facts", "about", "yourself", "/", "island", "!", "i", "wanna", "get", "to", "know", "all", "of", "you", "\ud83e\udd7a", "\ud83e\udd0e", "&", "if", "you", "guys", "ever", "need", "anyone", "my", "dms", "are", "open", "\ud83c\udf43"]} -{"id": "1429-villager", "word": "villager", "label_binary": 0, "text_1": "Poor homeless villager in #CycloneFani hit Odisha told #OTV \"Lots of politicians came before election, made big promises .. After Cyclone no one came\" behind them were the broken hut of there .. .. Just heard ..", "token_idx_1": 2, "text_start_1": 14, "text_end_1": 22, "date_1": "2019-04", "text_2": "If anyone wants Flora as a villager in #ACNH let me know. She is moving off my island.", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 35, "date_2": "2020-04", "text_1_tokenized": ["Poor", "homeless", "villager", "in", "#CycloneFani", "hit", "Odisha", "told", "#OTV", "\"", "Lots", "of", "politicians", "came", "before", "election", ",", "made", "big", "promises", "..", "After", "Cyclone", "no", "one", "came", "\"", "behind", "them", "were", "the", "broken", "hut", "of", "there", ".. ..", "Just", "heard", ".."], "text_2_tokenized": ["If", "anyone", "wants", "Flora", "as", "a", "villager", "in", "#ACNH", "let", "me", "know", ".", "She", "is", "moving", "off", "my", "island", "."]} -{"id": "1430-villager", "word": "villager", "label_binary": 0, "text_1": "*insert villager hmm here* Idk what to do with my rp account", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 16, "date_1": "2019-04", "text_2": "This disappointment when you hit A to slap a villager with your net but accidentally end up TALKING to them, ending your streak of ignoring them for a week (except to hit them ofc)", "token_idx_2": 9, "text_start_2": 45, "text_end_2": 53, "date_2": "2020-04", "text_1_tokenized": ["*", "insert", "villager", "hmm", "here", "*", "Idk", "what", "to", "do", "with", "my", "rp", "account"], "text_2_tokenized": ["This", "disappointment", "when", "you", "hit", "A", "to", "slap", "a", "villager", "with", "your", "net", "but", "accidentally", "end", "up", "TALKING", "to", "them", ",", "ending", "your", "streak", "of", "ignoring", "them", "for", "a", "week", "(", "except", "to", "hit", "them", "ofc", ")"]} -{"id": "1431-villager", "word": "villager", "label_binary": 1, "text_1": "comment ur fav animal crossing villager so i can make stamps!", "token_idx_1": 5, "text_start_1": 31, "text_end_1": 39, "date_1": "2019-04", "text_2": "Bless all this animal crossing content on my timeline even though I dont have it myself Browsing it 3am in the morning is surprisingly very calming Someone just caught that super rare fish? You go bud Finally got that villager to move in? 
Hell yeah", "token_idx_2": 40, "text_start_2": 218, "text_end_2": 226, "date_2": "2020-04", "text_1_tokenized": ["comment", "ur", "fav", "animal", "crossing", "villager", "so", "i", "can", "make", "stamps", "!"], "text_2_tokenized": ["Bless", "all", "this", "animal", "crossing", "content", "on", "my", "timeline", "even", "though", "I", "dont", "have", "it", "myself", "Browsing", "it", "3am", "in", "the", "morning", "is", "surprisingly", "very", "calming", "Someone", "just", "caught", "that", "super", "rare", "fish", "?", "You", "go", "bud", "Finally", "got", "that", "villager", "to", "move", "in", "?", "Hell", "yeah"]} -{"id": "1432-villager", "word": "villager", "label_binary": 0, "text_1": "Just saw a local maid (villager) doing video chat. The world is very different than you'd expect \ud83d\ude05", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 31, "date_1": "2019-04", "text_2": "Just told a story about Drake and bf thought I was talking about a) the rapper, then b) Drake Bell, but it was really c) my Animal Crossing villager, who I consider a friend. Lonely :/", "token_idx_2": 33, "text_start_2": 140, "text_end_2": 148, "date_2": "2020-04", "text_1_tokenized": ["Just", "saw", "a", "local", "maid", "(", "villager", ")", "doing", "video", "chat", ".", "The", "world", "is", "very", "different", "than", "you'd", "expect", "\ud83d\ude05"], "text_2_tokenized": ["Just", "told", "a", "story", "about", "Drake", "and", "bf", "thought", "I", "was", "talking", "about", "a", ")", "the", "rapper", ",", "then", "b", ")", "Drake", "Bell", ",", "but", "it", "was", "really", "c", ")", "my", "Animal", "Crossing", "villager", ",", "who", "I", "consider", "a", "friend", ".", "Lonely", ":/"]} -{"id": "1433-villager", "word": "villager", "label_binary": 0, "text_1": "Going live a little bit late today, but we're going to be playing in Ephemera again! I scanned in another villager offscreen yesterday (and did a few other thing as well)! Who do you think I scanned in? Join the stream and see! \ud83d\ude38", "token_idx_1": 22, "text_start_1": 106, "text_end_1": 114, "date_1": "2019-04", "text_2": "im trying to get rid of a villager so i caged her in and i put pitfall seeds at the entrance of her house \ud83d\ude0a", "token_idx_2": 7, "text_start_2": 26, "text_end_2": 34, "date_2": "2020-04", "text_1_tokenized": ["Going", "live", "a", "little", "bit", "late", "today", ",", "but", "we're", "going", "to", "be", "playing", "in", "Ephemera", "again", "!", "I", "scanned", "in", "another", "villager", "offscreen", "yesterday", "(", "and", "did", "a", "few", "other", "thing", "as", "well", ")", "!", "Who", "do", "you", "think", "I", "scanned", "in", "?", "Join", "the", "stream", "and", "see", "!", "\ud83d\ude38"], "text_2_tokenized": ["im", "trying", "to", "get", "rid", "of", "a", "villager", "so", "i", "caged", "her", "in", "and", "i", "put", "pitfall", "seeds", "at", "the", "entrance", "of", "her", "house", "\ud83d\ude0a"]} -{"id": "1434-villager", "word": "villager", "label_binary": 0, "text_1": "Talk about #inclusive and use of #bsl The Jungle Book @DerbyTheatre was absolutely incredible!! Well done to cast and crew. Special shout to Hannah, amazing wolf/monkey/villager!!", "token_idx_1": 33, "text_start_1": 169, "text_end_1": 177, "date_1": "2019-04", "text_2": "Anyone have lucky NOT READY TO MOVE OR IN BOXES. I want him so bad. 
I got a full plot right now so I'm going to try to make my villager move as soon as possible #DodoCode #acnh #ACNH", "token_idx_2": 32, "text_start_2": 127, "text_end_2": 135, "date_2": "2020-04", "text_1_tokenized": ["Talk", "about", "#inclusive", "and", "use", "of", "#bsl", "The", "Jungle", "Book", "@DerbyTheatre", "was", "absolutely", "incredible", "!", "!", "Well", "done", "to", "cast", "and", "crew", ".", "Special", "shout", "to", "Hannah", ",", "amazing", "wolf", "/", "monkey", "/", "villager", "!", "!"], "text_2_tokenized": ["Anyone", "have", "lucky", "NOT", "READY", "TO", "MOVE", "OR", "IN", "BOXES", ".", "I", "want", "him", "so", "bad", ".", "I", "got", "a", "full", "plot", "right", "now", "so", "I'm", "going", "to", "try", "to", "make", "my", "villager", "move", "as", "soon", "as", "possible", "#DodoCode", "#acnh", "#ACNH"]} -{"id": "1435-villager", "word": "villager", "label_binary": 1, "text_1": "New villager species that I'd like to see in Animal Crossing Switch: - Lop Bunnies - Fluffy Cats (like persian cats) - Pointy Ear Dogs (like shiba inus) - Chinchillas - Guinea Pigs - Ferrets And as a special character: - Red Panda - Toucan - Stork - Lynx", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 12, "date_1": "2019-04", "text_2": "I'm worried about getting a snooty villager bc I want all my villagers to get along... I have a jock and 2 lazies so I'm worried they won't get along...", "token_idx_2": 6, "text_start_2": 35, "text_end_2": 43, "date_2": "2020-04", "text_1_tokenized": ["New", "villager", "species", "that", "I'd", "like", "to", "see", "in", "Animal", "Crossing", "Switch", ":", "-", "Lop", "Bunnies", "-", "Fluffy", "Cats", "(", "like", "persian", "cats", ")", "-", "Pointy", "Ear", "Dogs", "(", "like", "shiba", "inus", ")", "-", "Chinchillas", "-", "Guinea", "Pigs", "-", "Ferrets", "And", "as", "a", "special", "character", ":", "-", "Red", "Panda", "-", "Toucan", "-", "Stork", "-", "Lynx"], "text_2_tokenized": ["I'm", "worried", "about", "getting", "a", "snooty", "villager", "bc", "I", "want", "all", "my", "villagers", "to", "get", "along", "...", "I", "have", "a", "jock", "and", "2", "lazies", "so", "I'm", "worried", "they", "won't", "get", "along", "..."]} -{"id": "1436-villager", "word": "villager", "label_binary": 1, "text_1": "oh to be an animal crossing villager", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 36, "date_1": "2019-04", "text_2": "nothing can explain the joy I feel when a villager runs to you to tell you something but when you talk to them they go \u201coh sorry I was so excited to see you I forgot what I wanted to ask you about!\u201d", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 50, "date_2": "2020-04", "text_1_tokenized": ["oh", "to", "be", "an", "animal", "crossing", "villager"], "text_2_tokenized": ["nothing", "can", "explain", "the", "joy", "I", "feel", "when", "a", "villager", "runs", "to", "you", "to", "tell", "you", "something", "but", "when", "you", "talk", "to", "them", "they", "go", "\u201c", "oh", "sorry", "I", "was", "so", "excited", "to", "see", "you", "I", "forgot", "what", "I", "wanted", "to", "ask", "you", "about", "!", "\u201d"]} -{"id": "1437-villager", "word": "villager", "label_binary": 1, "text_1": "my damn pride in time traveling to get lots of bushes has cost me a good villager. sighs.", "token_idx_1": 16, "text_start_1": 73, "text_end_1": 81, "date_1": "2019-04", "text_2": "animal crossing oomfs or moots can i buy nook mile tickets off someone?? i have quite a lot of bells and want to go villager hunting!! 
also i have hazel in a box if anyone wants her :-0", "token_idx_2": 26, "text_start_2": 116, "text_end_2": 124, "date_2": "2020-04", "text_1_tokenized": ["my", "damn", "pride", "in", "time", "traveling", "to", "get", "lots", "of", "bushes", "has", "cost", "me", "a", "good", "villager", ".", "sighs", "."], "text_2_tokenized": ["animal", "crossing", "oomfs", "or", "moots", "can", "i", "buy", "nook", "mile", "tickets", "off", "someone", "?", "?", "i", "have", "quite", "a", "lot", "of", "bells", "and", "want", "to", "go", "villager", "hunting", "!", "!", "also", "i", "have", "hazel", "in", "a", "box", "if", "anyone", "wants", "her", ":", "-", "0"]}
-{"id": "1438-villager", "word": "villager", "label_binary": 0, "text_1": "you ever just turn up the pottsfield song really loud and close your eyes and imagine you're actually there, a dancing pumpkin villager at the festival??? that's the mood today and i have no idea why", "token_idx_1": 23, "text_start_1": 127, "text_end_1": 135, "date_1": "2019-04", "text_2": "I made a villager i didnt like leave i kinda feel bad now lol", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-04", "text_1_tokenized": ["you", "ever", "just", "turn", "up", "the", "pottsfield", "song", "really", "loud", "and", "close", "your", "eyes", "and", "imagine", "you're", "actually", "there", ",", "a", "dancing", "pumpkin", "villager", "at", "the", "festival", "?", "?", "?", "that's", "the", "mood", "today", "and", "i", "have", "no", "idea", "why"], "text_2_tokenized": ["I", "made", "a", "villager", "i", "didnt", "like", "leave", "i", "kinda", "feel", "bad", "now", "lol"]}
-{"id": "1439-villager", "word": "villager", "label_binary": 0, "text_1": "never thought i could i could find something more unfun in smash than villager turns out laggy villager that spams the bowling ball takes the cake", "token_idx_1": 13, "text_start_1": 70, "text_end_1": 78, "date_1": "2019-04", "text_2": "We went to an island for a villager and got AUDIE AND THE IRONWOOD DRESSER DIY?!?!? ugh", "token_idx_2": 7, "text_start_2": 27, "text_end_2": 35, "date_2": "2020-04", "text_1_tokenized": ["never", "thought", "i", "could", "i", "could", "find", "something", "more", "unfun", "in", "smash", "than", "villager", "turns", "out", "laggy", "villager", "that", "spams", "the", "bowling", "ball", "takes", "the", "cake"], "text_2_tokenized": ["We", "went", "to", "an", "island", "for", "a", "villager", "and", "got", "AUDIE", "AND", "THE", "IRONWOOD", "DRESSER", "DIY", "?", "!", "?", "!", "?", "ugh"]}
-{"id": "1440-villager", "word": "villager", "label_binary": 1, "text_1": "so glad to see that @Sora_Sakurai is enjoying #StardewValley too, a hardworking person such as him couldn't be better matched with such a slow paced game. Enjoy yourself Mr. Sakurai, and tell us who your favorite villager is!", "token_idx_1": 40, "text_start_1": 213, "text_end_1": 221, "date_1": "2019-04", "text_2": "i want to eat an orange the way the ac villager does: unhinged, peel on, three bites", "token_idx_2": 10, "text_start_2": 39, "text_end_2": 47, "date_2": "2020-04", "text_1_tokenized": ["so", "glad", "to", "see", "that", "@Sora_Sakurai", "is", "enjoying", "#StardewValley", "too", ",", "a", "hardworking", "person", "such", "as", "him", "couldn't", "be", "better", "matched", "with", "such", "a", "slow", "paced", "game", ".", "Enjoy", "yourself", "Mr", ".", "Sakurai", ",", "and", "tell", "us", "who", "your", "favorite", "villager", "is", "!"], "text_2_tokenized": ["i", "want", "to", "eat", "an", "orange", "the", "way", "the", "ac", "villager", "does", ":", "unhinged", ",", "peel", "on", ",", "three", "bites"]}
-{"id": "1441-villager", "word": "villager", "label_binary": 1, "text_1": "Here, help me pick the tenth villager I'm going to cheat into my Animal Crossing town.", "token_idx_1": 7, "text_start_1": 29, "text_end_1": 37, "date_1": "2019-04", "text_2": "went to about 8 islands to find a new villager and theyre all lame as fuck. such a waste of miles \ud83d\ude11", "token_idx_2": 9, "text_start_2": 38, "text_end_2": 46, "date_2": "2020-04", "text_1_tokenized": ["Here", ",", "help", "me", "pick", "the", "tenth", "villager", "I'm", "going", "to", "cheat", "into", "my", "Animal", "Crossing", "town", "."], "text_2_tokenized": ["went", "to", "about", "8", "islands", "to", "find", "a", "new", "villager", "and", "theyre", "all", "lame", "as", "fuck", ".", "such", "a", "waste", "of", "miles", "\ud83d\ude11"]}
-{"id": "1442-villager", "word": "villager", "label_binary": 0, "text_1": "IM BACK..... snake vs villager", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-04", "text_2": "you know ur going through it when ur villager asks if you're doing okay and u start crying", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 45, "date_2": "2020-04", "text_1_tokenized": ["IM", "BACK", "...", "snake", "vs", "villager"], "text_2_tokenized": ["you", "know", "ur", "going", "through", "it", "when", "ur", "villager", "asks", "if", "you're", "doing", "okay", "and", "u", "start", "crying"]}
-{"id": "1443-villager", "word": "villager", "label_binary": 0, "text_1": "Now on KET: Father Brown: Father Brown senses something sinister is afoot when Mrs. McCarthy's friend is the latest villager to die in her sleep.", "token_idx_1": 22, "text_start_1": 116, "text_end_1": 124, "date_1": "2019-04", "text_2": "just got my favorite villager in animal crossing for 20k bells it's the steal of the century", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 29, "date_2": "2020-04", "text_1_tokenized": ["Now", "on", "KET", ":", "Father", "Brown", ":", "Father", "Brown", "senses", "something", "sinister", "is", "afoot", "when", "Mrs", ".", "McCarthy's", "friend", "is", "the", "latest", "villager", "to", "die", "in", "her", "sleep", "."], "text_2_tokenized": ["just", "got", "my", "favorite", "villager", "in", "animal", "crossing", "for", "20k", "bells", "it's", "the", "steal", "of", "the", "century"]}
-{"id": "1444-villager", "word": "villager", "label_binary": 0, "text_1": "i cured a baby zombie villager, then it turned back into a zombie, so i tried to cure it again, and it took my potion and my apple and vanished >:(", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-04", "text_2": "i am once again moving my villager houses", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 34, "date_2": "2020-04", "text_1_tokenized": ["i", "cured", "a", "baby", "zombie", "villager", ",", "then", "it", "turned", "back", "into", "a", "zombie", ",", "so", "i", "tried", "to", "cure", "it", "again", ",", "and", "it", "took", "my", "potion", "and", "my", "apple", "and", "vanished", ">:("], "text_2_tokenized": ["i", "am", "once", "again", "moving", "my", "villager", "houses"]}
-{"id": "1445-villager", "word": "villager", "label_binary": 0, "text_1": "_SHOURITSU fortnite gf &amp;lt; roblox gf &amp;lt; minecraft gf &amp;lt; god &amp;lt; villager gf", "token_idx_1": 28, "text_start_1": 102, "text_end_1": 110, "date_1": "2019-04", "text_2": "i've been timeskipping for a villager to leave for 7 hours or something now and i'm getting frustrated lmao", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 37, "date_2": "2020-04", "text_1_tokenized": ["_SHOURITSU", "fortnite", "gf", "&", "amp", ";", "lt", ";", "roblox", "gf", "&", "amp", ";", "lt", ";", "minecraft", "gf", "&", "amp", ";", "lt", ";", "god", "&", "amp", ";", "lt", ";", "villager", "gf"], "text_2_tokenized": ["i've", "been", "timeskipping", "for", "a", "villager", "to", "leave", "for", "7", "hours", "or", "something", "now", "and", "i'm", "getting", "frustrated", "lmao"]}
-{"id": "1446-villager", "word": "villager", "label_binary": 1, "text_1": "also. the way the farmer villager and cartographer villager like to stand near my map and talk smack right in front of me", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 33, "date_1": "2019-04", "text_2": "there is an animal crossing villager with my deadname and every time i see them im just like \"guh\"", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 36, "date_2": "2020-04", "text_1_tokenized": ["also", ".", "the", "way", "the", "farmer", "villager", "and", "cartographer", "villager", "like", "to", "stand", "near", "my", "map", "and", "talk", "smack", "right", "in", "front", "of", "me"], "text_2_tokenized": ["there", "is", "an", "animal", "crossing", "villager", "with", "my", "deadname", "and", "every", "time", "i", "see", "them", "im", "just", "like", "\"", "guh", "\""]}
-{"id": "1447-villager", "word": "villager", "label_binary": 0, "text_1": "I'm just gonna say it..... *minecraft villager noise*", "token_idx_1": 8, "text_start_1": 38, "text_end_1": 46, "date_1": "2019-04", "text_2": "My dad really be dogging me \ud83d\ude02\ud83d\ude02 I put my natural hair in twisties and he said I look like a villager from the bushes of Ghana \ud83d\ude2d", "token_idx_2": 22, "text_start_2": 91, "text_end_2": 99, "date_2": "2020-04", "text_1_tokenized": ["I'm", "just", "gonna", "say", "it", "...", "*", "minecraft", "villager", "noise", "*"], "text_2_tokenized": ["My", "dad", "really", "be", "dogging", "me", "\ud83d\ude02", "\ud83d\ude02", "I", "put", "my", "natural", "hair", "in", "twisties", "and", "he", "said", "I", "look", "like", "a", "villager", "from", "the", "bushes", "of", "Ghana", "\ud83d\ude2d"]}
-{"id": "1448-villager", "word": "villager", "label_binary": 0, "text_1": "i think my fav villager type in pocket camp is hip... what's yours!!! TELL ME, FRIENDS!", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 23, "date_1": "2019-04", "text_2": "anyone knows if i send a wallpaper/flooring to a villager (not starter) either in person or by mail, is it 100% guaranteed that they'll use it in their house?", "token_idx_2": 11, "text_start_2": 49, "text_end_2": 57, "date_2": "2020-04", "text_1_tokenized": ["i", "think", "my", "fav", "villager", "type", "in", "pocket", "camp", "is", "hip", "...", "what's", "yours", "!", "!", "!", "TELL", "ME", ",", "FRIENDS", "!"], "text_2_tokenized": ["anyone", "knows", "if", "i", "send", "a", "wallpaper", "/", "flooring", "to", "a", "villager", "(", "not", "starter", ")", "either", "in", "person", "or", "by", "mail", ",", "is", "it", "100", "%", "guaranteed", "that", "they'll", "use", "it", "in", "their", "house", "?"]}
-{"id": "1449-villager", "word": "villager", "label_binary": 0, "text_1": "Two police personnel were killed and a villager was injured on Saturday evening when Naxals opened fire on them in Chhattisgarh's Bijapur district. When is India going to term them as #CommunistTerrorists and isolate them? #UrbanNaxals", "token_idx_1": 7, "text_start_1": 39, "text_end_1": 47, "date_1": "2019-04", "text_2": "I'm visiting a friend who has a villager I had and loved in New Leaf and meeting her all over again was like the beginning of a reincarnation story.", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 40, "date_2": "2020-04", "text_1_tokenized": ["Two", "police", "personnel", "were", "killed", "and", "a", "villager", "was", "injured", "on", "Saturday", "evening", "when", "Naxals", "opened", "fire", "on", "them", "in", "Chhattisgarh's", "Bijapur", "district", ".", "When", "is", "India", "going", "to", "term", "them", "as", "#CommunistTerrorists", "and", "isolate", "them", "?", "#UrbanNaxals"], "text_2_tokenized": ["I'm", "visiting", "a", "friend", "who", "has", "a", "villager", "I", "had", "and", "loved", "in", "New", "Leaf", "and", "meeting", "her", "all", "over", "again", "was", "like", "the", "beginning", "of", "a", "reincarnation", "story", "."]}
-{"id": "1450-villager", "word": "villager", "label_binary": 0, "text_1": "teen dirk meeting ultimate dirk is actually super interesting to think about (Minecraft villager noise)", "token_idx_1": 14, "text_start_1": 88, "text_end_1": 96, "date_1": "2019-04", "text_2": "gonna go villager hunting hope i find someone cute", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-04", "text_1_tokenized": ["teen", "dirk", "meeting", "ultimate", "dirk", "is", "actually", "super", "interesting", "to", "think", "about", "(", "Minecraft", "villager", "noise", ")"], "text_2_tokenized": ["gonna", "go", "villager", "hunting", "hope", "i", "find", "someone", "cute"]}
-{"id": "1451-villager", "word": "villager", "label_binary": 1, "text_1": "I decided to do my celebration for completing a bridge in my acnl town at 8am and thus only one villager came. Rip", "token_idx_1": 20, "text_start_1": 96, "text_end_1": 104, "date_1": "2019-04", "text_2": "pausing my campsite villager hunt because i got a shooting star announcement... my first Ever.", "token_idx_2": 3, "text_start_2": 20, "text_end_2": 28, "date_2": "2020-04", "text_1_tokenized": ["I", "decided", "to", "do", "my", "celebration", "for", "completing", "a", "bridge", "in", "my", "acnl", "town", "at", "8am", "and", "thus", "only", "one", "villager", "came", ".", "Rip"], "text_2_tokenized": ["pausing", "my", "campsite", "villager", "hunt", "because", "i", "got", "a", "shooting", "star", "announcement", "...", "my", "first", "Ever", "."]}
-{"id": "1453-villager", "word": "villager", "label_binary": 0, "text_1": "7.killed and 8.injuries innocent villager in Kyauktan-Rathedaung Township Rakhine State at this morning by Myanmar military LIR(22).", "token_idx_1": 4, "text_start_1": 33, "text_end_1": 41, "date_1": "2019-04", "text_2": "getting into ac villager trade is really weird because i want to give people things they want for free, but also people on Site For It should know better than to offer 2nmt and what is basically petty cash at this point for a villager they want", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 24, "date_2": "2020-04", "text_1_tokenized": ["7.killed", "and", "8.injuries", "innocent", "villager", "in", "Kyauktan-Rathedaung", "Township", "Rakhine", "State", "at", "this", "morning", "by", "Myanmar", "military", "LIR", "(", "22", ")", "."], "text_2_tokenized": ["getting", "into", "ac", "villager", "trade", "is", "really", "weird", "because", "i", "want", "to", "give", "people", "things", "they", "want", "for", "free", ",", "but", "also", "people", "on", "Site", "For", "It", "should", "know", "better", "than", "to", "offer", "2nmt", "and", "what", "is", "basically", "petty", "cash", "at", "this", "point", "for", "a", "villager", "they", "want"]}
-{"id": "1454-villager", "word": "villager", "label_binary": 0, "text_1": "Goomba for her next mate is our main form of chloe price villager.", "token_idx_1": 12, "text_start_1": 57, "text_end_1": 65, "date_1": "2019-04", "text_2": "my favorite snooty villager is Purrl. She was in my gamecube animal crossing town (she lived in the same acre as Freya), and also in new leaf", "token_idx_2": 3, "text_start_2": 19, "text_end_2": 27, "date_2": "2020-04", "text_1_tokenized": ["Goomba", "for", "her", "next", "mate", "is", "our", "main", "form", "of", "chloe", "price", "villager", "."], "text_2_tokenized": ["my", "favorite", "snooty", "villager", "is", "Purrl", ".", "She", "was", "in", "my", "gamecube", "animal", "crossing", "town", "(", "she", "lived", "in", "the", "same", "acre", "as", "Freya", ")", ",", "and", "also", "in", "new", "leaf"]}
-{"id": "1455-villager", "word": "villager", "label_binary": 0, "text_1": "*minecraft villager sound* I want to walk around at night but I'm too dummy thicc and the clap of my ass cheeks alerts the zombies", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 19, "date_1": "2019-04", "text_2": "Apollo is Transgender because when I first discovered I was nonbinary I played pocket camp and changed my gender on the app to boy and then I deemed him my favorite villager.", "token_idx_2": 31, "text_start_2": 165, "text_end_2": 173, "date_2": "2020-04", "text_1_tokenized": ["*", "minecraft", "villager", "sound", "*", "I", "want", "to", "walk", "around", "at", "night", "but", "I'm", "too", "dummy", "thicc", "and", "the", "clap", "of", "my", "ass", "cheeks", "alerts", "the", "zombies"], "text_2_tokenized": ["Apollo", "is", "Transgender", "because", "when", "I", "first", "discovered", "I", "was", "nonbinary", "I", "played", "pocket", "camp", "and", "changed", "my", "gender", "on", "the", "app", "to", "boy", "and", "then", "I", "deemed", "him", "my", "favorite", "villager", "."]}
-{"id": "1456-villager", "word": "villager", "label_binary": 0, "text_1": "Minecraft sure brings out the inner evil inside us all. 1. I kill a villager after curing it for thinking my house was theirs. 2. I burn a forest to try and clear some tries. 3. I kidnap a villager from a different village because i'm so desperate for a librarian.", "token_idx_1": 16, "text_start_1": 68, "text_end_1": 76, "date_1": "2019-04", "text_2": "\"(Insert villager) is making (insert rare recipe) 150k bells entry! Need to pay my house loan!\" \"Can I pwease come to your island? \ud83d\udc49\ud83d\udc48\ud83d\ude33\" \"Sure! Its 150k bells entry!\" \"Yup. Nvm.\" Do y'all even read the posts??", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-04", "text_1_tokenized": ["Minecraft", "sure", "brings", "out", "the", "inner", "evil", "inside", "us", "all", ".", "1", ".", "I", "kill", "a", "villager", "after", "curing", "it", "for", "thinking", "my", "house", "was", "theirs", ".", "2", ".", "I", "burn", "a", "forest", "to", "try", "and", "clear", "some", "tries", ".", "3", ".", "I", "kidnap", "a", "villager", "from", "a", "different", "village", "because", "i'm", "so", "desperate", "for", "a", "librarian", "."], "text_2_tokenized": ["\"", "(", "Insert", "villager", ")", "is", "making", "(", "insert", "rare", "recipe", ")", "150k", "bells", "entry", "!", "Need", "to", "pay", "my", "house", "loan", "!", "\"", "\"", "Can", "I", "pwease", "come", "to", "your", "island", "?", "\ud83d\udc49", "\ud83d\udc48", "\ud83d\ude33", "\"", "\"", "Sure", "!", "Its", "150k", "bells", "entry", "!", "\"", "\"", "Yup", ".", "Nvm", ".", "\"", "Do", "y'all", "even", "read", "the", "posts", "?", "?"]}
-{"id": "1457-villager", "word": "villager", "label_binary": 1, "text_1": "I have a villager who keeps being mean to me >:( don't make me report you to isabelle!!!!", "token_idx_1": 3, "text_start_1": 9, "text_end_1": 17, "date_1": "2019-04", "text_2": "Off on a villager hunt with no one in mind! Just looking a cute lil someone to come live on K\u00f6rte\ud83e\udd70", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-04", "text_1_tokenized": ["I", "have", "a", "villager", "who", "keeps", "being", "mean", "to", "me", ">:(", "don't", "make", "me", "report", "you", "to", "isabelle", "!", "!", "!"], "text_2_tokenized": ["Off", "on", "a", "villager", "hunt", "with", "no", "one", "in", "mind", "!", "Just", "looking", "a", "cute", "lil", "someone", "to", "come", "live", "on", "K\u00f6rte", "\ud83e\udd70"]}
-{"id": "1458-villager", "word": "villager", "label_binary": 1, "text_1": "So there's been rumblings of a Nintendo Direct this week and all I want to see is Rover walk up and sit down next to the villager on an airplane.", "token_idx_1": 26, "text_start_1": 121, "text_end_1": 129, "date_1": "2019-04", "text_2": "my villager is a butch queen.", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 11, "date_2": "2020-04", "text_1_tokenized": ["So", "there's", "been", "rumblings", "of", "a", "Nintendo", "Direct", "this", "week", "and", "all", "I", "want", "to", "see", "is", "Rover", "walk", "up", "and", "sit", "down", "next", "to", "the", "villager", "on", "an", "airplane", "."], "text_2_tokenized": ["my", "villager", "is", "a", "butch", "queen", "."]}
-{"id": "1459-villager", "word": "villager", "label_binary": 0, "text_1": "In 18th-century England, the life expectancy of a common villager was long as you're having fun while driving two cars.", "token_idx_1": 12, "text_start_1": 57, "text_end_1": 65, "date_1": "2019-04", "text_2": "the worst part about your favourite villager being lazy is that they go to bed too early smh", "token_idx_2": 6, "text_start_2": 36, "text_end_2": 44, "date_2": "2020-04", "text_1_tokenized": ["In", "18th", "-", "century", "England", ",", "the", "life", "expectancy", "of", "a", "common", "villager", "was", "long", "as", "you're", "having", "fun", "while", "driving", "two", "cars", "."], "text_2_tokenized": ["the", "worst", "part", "about", "your", "favourite", "villager", "being", "lazy", "is", "that", "they", "go", "to", "bed", "too", "early", "smh"]}
-{"id": "1460-villager", "word": "villager", "label_binary": 0, "text_1": "I'm just a villager. Was playing Minecraft until lights went out. :(", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 19, "date_1": "2019-04", "text_2": "Oh my god, my best friend Olivia has been thinking about moving away since I've been giving another villager lots of attention (cause I heard you could make someone leave by spam talking to them) #ACNH", "token_idx_2": 19, "text_start_2": 100, "text_end_2": 108, "date_2": "2020-04", "text_1_tokenized": ["I'm", "just", "a", "villager", ".", "Was", "playing", "Minecraft", "until", "lights", "went", "out", ".", ":("], "text_2_tokenized": ["Oh", "my", "god", ",", "my", "best", "friend", "Olivia", "has", "been", "thinking", "about", "moving", "away", "since", "I've", "been", "giving", "another", "villager", "lots", "of", "attention", "(", "cause", "I", "heard", "you", "could", "make", "someone", "leave", "by", "spam", "talking", "to", "them", ")", "#ACNH"]}
-{"id": "1461-villager", "word": "villager", "label_binary": 0, "text_1": "This is one hell of a joke. An innocent villager was told by the Imam of his mosque that every supplication he makes before the iftar in Ramadan will be accepted by Allah. He prayed on the first day of fasting \u201cO Allah, make tomorrow's day the Eid's day.", "token_idx_1": 10, "text_start_1": 40, "text_end_1": 48, "date_1": "2019-04", "text_2": "i might do a nmt giveaway if my villager hunt keeps going as well as it has been,,", "token_idx_2": 8, "text_start_2": 32, "text_end_2": 40, "date_2": "2020-04", "text_1_tokenized": ["This", "is", "one", "hell", "of", "a", "joke", ".", "An", "innocent", "villager", "was", "told", "by", "the", "Imam", "of", "his", "mosque", "that", "every", "supplication", "he", "makes", "before", "the", "iftar", "in", "Ramadan", "will", "be", "accepted", "by", "Allah", ".", "He", "prayed", "on", "the", "first", "day", "of", "fasting", "\u201c", "O", "Allah", ",", "make", "tomorrow's", "day", "the", "Eid's", "day", "."], "text_2_tokenized": ["i", "might", "do", "a", "nmt", "giveaway", "if", "my", "villager", "hunt", "keeps", "going", "as", "well", "as", "it", "has", "been", ",", ","]}
-{"id": "1462-villager", "word": "villager", "label_binary": 0, "text_1": "me, a poor starving minecrafter on the brink of death: (takes one single potato from a villager crop) jannah, a cop: iron golem, kill this fool.", "token_idx_1": 19, "text_start_1": 87, "text_end_1": 95, "date_1": "2019-04", "text_2": "i think its fucking crazy that Dom, the sheep that looks like he's always just about to cry, is a jock villager", "token_idx_2": 23, "text_start_2": 103, "text_end_2": 111, "date_2": "2020-04", "text_1_tokenized": ["me", ",", "a", "poor", "starving", "minecrafter", "on", "the", "brink", "of", "death", ":", "(", "takes", "one", "single", "potato", "from", "a", "villager", "crop", ")", "jannah", ",", "a", "cop", ":", "iron", "golem", ",", "kill", "this", "fool", "."], "text_2_tokenized": ["i", "think", "its", "fucking", "crazy", "that", "Dom", ",", "the", "sheep", "that", "looks", "like", "he's", "always", "just", "about", "to", "cry", ",", "is", "a", "jock", "villager"]}
-{"id": "1463-villager", "word": "villager", "label_binary": 0, "text_1": "\u201cwhen i put my hands in my sweatshirt pocket i feel like a minecraft villager\u201d", "token_idx_1": 15, "text_start_1": 69, "text_end_1": 77, "date_1": "2019-04", "text_2": "Once upon a time there was a hairy villager who fell in love with a tall peasant. Their love was ugly. Thus, they decided to scurry off together in the ocean. In this place they were finally free to be with one another. After five hours, they gave birth to a young creature.", "token_idx_2": 8, "text_start_2": 35, "text_end_2": 43, "date_2": "2020-04", "text_1_tokenized": ["\u201c", "when", "i", "put", "my", "hands", "in", "my", "sweatshirt", "pocket", "i", "feel", "like", "a", "minecraft", "villager", "\u201d"], "text_2_tokenized": ["Once", "upon", "a", "time", "there", "was", "a", "hairy", "villager", "who", "fell", "in", "love", "with", "a", "tall", "peasant", ".", "Their", "love", "was", "ugly", ".", "Thus", ",", "they", "decided", "to", "scurry", "off", "together", "in", "the", "ocean", ".", "In", "this", "place", "they", "were", "finally", "free", "to", "be", "with", "one", "another", ".", "After", "five", "hours", ",", "they", "gave", "birth", "to", "a", "young", "creature", "."]}
-{"id": "1464-villager", "word": "villager", "label_binary": 0, "text_1": "Hopefully the universal projectile nerf makes Gren villager MU close to even. And Gren snake MU at least to 6-4. The fact that projectiles oppresses shield less theoretically will make these matchups slightly better but I don't know", "token_idx_1": 7, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-04", "text_2": "I decided to bid on a card on eBay \ud83d\ude4f I set my max bid so let's pray that someone doesn't love this villager more than me \ud83d\ude2d", "token_idx_2": 23, "text_start_2": 99, "text_end_2": 107, "date_2": "2020-04", "text_1_tokenized": ["Hopefully", "the", "universal", "projectile", "nerf", "makes", "Gren", "villager", "MU", "close", "to", "even", ".", "And", "Gren", "snake", "MU", "at", "least", "to", "6-4", ".", "The", "fact", "that", "projectiles", "oppresses", "shield", "less", "theoretically", "will", "make", "these", "matchups", "slightly", "better", "but", "I", "don't", "know"], "text_2_tokenized": ["I", "decided", "to", "bid", "on", "a", "card", "on", "eBay", "\ud83d\ude4f", "I", "set", "my", "max", "bid", "so", "let's", "pray", "that", "someone", "doesn't", "love", "this", "villager", "more", "than", "me", "\ud83d\ude2d"]}
-{"id": "1465-villager", "word": "villager", "label_binary": 1, "text_1": "I CANT believe I really got turned into a cute animal crossing villager with a cute swirly tail and flowery clothes i'm emo...\u2026\u2026\u2026\u2026...", "token_idx_1": 12, "text_start_1": 63, "text_end_1": 71, "date_1": "2019-04", "text_2": "I need to stop running around #AnimalCrossingNewHorizions with my axe out... \"Hi, villager! Don't mind the sharp axe staring you in the face! -beam-\"", "token_idx_2": 15, "text_start_2": 82, "text_end_2": 90, "date_2": "2020-04", "text_1_tokenized": ["I", "CANT", "believe", "I", "really", "got", "turned", "into", "a", "cute", "animal", "crossing", "villager", "with", "a", "cute", "swirly", "tail", "and", "flowery", "clothes", "i'm", "emo", "...", "\u2026", "\u2026", "\u2026", "..."], "text_2_tokenized": ["I", "need", "to", "stop", "running", "around", "#AnimalCrossingNewHorizions", "with", "my", "axe", "out", "...", "\"", "Hi", ",", "villager", "!", "Don't", "mind", "the", "sharp", "axe", "staring", "you", "in", "the", "face", "!", "-", "beam", "-", "\""]}
-{"id": "1466-villager", "word": "villager", "label_binary": 0, "text_1": "LRT that reminds me, almost every Soulsborne had a bucket head from Solaire to Valtr. We didnt get a cool basket head character in Sekiro except for the frightened villager and O'rin of the Water. I hope basket head Sekiro cosplay becomes a thing.", "token_idx_1": 31, "text_start_1": 164, "text_end_1": 172, "date_1": "2019-04", "text_2": "is kid cat a good villager", "token_idx_2": 5, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-04", "text_1_tokenized": ["LRT", "that", "reminds", "me", ",", "almost", "every", "Soulsborne", "had", "a", "bucket", "head", "from", "Solaire", "to", "Valtr", ".", "We", "didnt", "get", "a", "cool", "basket", "head", "character", "in", "Sekiro", "except", "for", "the", "frightened", "villager", "and", "O'rin", "of", "the", "Water", ".", "I", "hope", "basket", "head", "Sekiro", "cosplay", "becomes", "a", "thing", "."], "text_2_tokenized": ["is", "kid", "cat", "a", "good", "villager"]}
-{"id": "1467-villager", "word": "villager", "label_binary": 1, "text_1": "i remember someone telling me i would be a good animal crossing villager and i kinda just strive off of that now", "token_idx_1": 12, "text_start_1": 64, "text_end_1": 72, "date_1": "2019-04", "text_2": "Sherb is quickly becoming my new favorite animal crossing villager \ud83e\udd7a\ud83d\udc95", "token_idx_2": 9, "text_start_2": 58, "text_end_2": 66, "date_2": "2020-04", "text_1_tokenized": ["i", "remember", "someone", "telling", "me", "i", "would", "be", "a", "good", "animal", "crossing", "villager", "and", "i", "kinda", "just", "strive", "off", "of", "that", "now"], "text_2_tokenized": ["Sherb", "is", "quickly", "becoming", "my", "new", "favorite", "animal", "crossing", "villager", "\ud83e\udd7a", "\ud83d\udc95"]}
-{"id": "1468-villager", "word": "villager", "label_binary": 0, "text_1": "It really bothers me there's no villager skin that has the hat with horns", "token_idx_1": 6, "text_start_1": 32, "text_end_1": 40, "date_1": "2019-04", "text_2": "I want the ugly frog villager to move off my island but can't bring myself to be mean to her :(", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 29, "date_2": "2020-04", "text_1_tokenized": ["It", "really", "bothers", "me", "there's", "no", "villager", "skin", "that", "has", "the", "hat", "with", "horns"], "text_2_tokenized": ["I", "want", "the", "ugly", "frog", "villager", "to", "move", "off", "my", "island", "but", "can't", "bring", "myself", "to", "be", "mean", "to", "her", ":("]}
-{"id": "1469-villager", "word": "villager", "label_binary": 1, "text_1": "In my search for AC content today I came across a padded Punchy image by @SlyChestnut with a comment from @superbeanbat64 claiming that he's the best villager but NO NO, Bob is clearly better.", "token_idx_1": 26, "text_start_1": 150, "text_end_1": 158, "date_1": "2019-04", "text_2": "I'm pissed.. a decent villager left my island, I wasn't able to find a replacement. Now an ugly villager moving in.. ugh", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 30, "date_2": "2020-04", "text_1_tokenized": ["In", "my", "search", "for", "AC", "content", "today", "I", "came", "across", "a", "padded", "Punchy", "image", "by", "@SlyChestnut", "with", "a", "comment", "from", "@superbeanbat64", "claiming", "that", "he's", "the", "best", "villager", "but", "NO", "NO", ",", "Bob", "is", "clearly", "better", "."], "text_2_tokenized": ["I'm", "pissed", "..", "a", "decent", "villager", "left", "my", "island", ",", "I", "wasn't", "able", "to", "find", "a", "replacement", ".", "Now", "an", "ugly", "villager", "moving", "in", "..", "ugh"]}
-{"id": "1470-villager", "word": "villager", "label_binary": 1, "text_1": "It's 2 am and all I can think about are the new villager textures", "token_idx_1": 12, "text_start_1": 48, "text_end_1": 56, "date_1": "2019-04", "text_2": "@ all the cool people who followed me back hello there!! Who is your favorite villager?", "token_idx_2": 17, "text_start_2": 78, "text_end_2": 86, "date_2": "2020-04", "text_1_tokenized": ["It's", "2", "am", "and", "all", "I", "can", "think", "about", "are", "the", "new", "villager", "textures"], "text_2_tokenized": ["@", "all", "the", "cool", "people", "who", "followed", "me", "back", "hello", "there", "!", "!", "Who", "is", "your", "favorite", "villager", "?"]}
-{"id": "1471-villager", "word": "villager", "label_binary": 0, "text_1": "So... The villager mu, people are like \"oh hes gotta play defensively\" or \"hes gotta stop throwing out unsafe pkfires\", but yall don't get the fucking pain the mu is. Ness tries to jump gets slingshot, and what does ness need to do in neutral to win.....JUMP. And trust me ive....", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 18, "date_1": "2019-04", "text_2": "what jock villager should i get? \ud83d\udc49\ud83c\udffb\ud83d\udc48\ud83c\udffb trying to get as many types as possible bc if i have to follow my heart i would only pick peppy/normal !!! but i cannot find a jock that suit my aesthetic :///", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 18, "date_2": "2020-04", "text_1_tokenized": ["So", "...", "The", "villager", "mu", ",", "people", "are", "like", "\"", "oh", "hes", "gotta", "play", "defensively", "\"", "or", "\"", "hes", "gotta", "stop", "throwing", "out", "unsafe", "pkfires", "\"", ",", "but", "yall", "don't", "get", "the", "fucking", "pain", "the", "mu", "is", ".", "Ness", "tries", "to", "jump", "gets", "slingshot", ",", "and", "what", "does", "ness", "need", "to", "do", "in", "neutral", "to", "win", "...", "JUMP", ".", "And", "trust", "me", "ive", "..."], "text_2_tokenized": ["what", "jock", "villager", "should", "i", "get", "?", "\ud83d\udc49\ud83c\udffb", "\ud83d\udc48\ud83c\udffb", "trying", "to", "get", "as", "many", "types", "as", "possible", "bc", "if", "i", "have", "to", "follow", "my", "heart", "i", "would", "only", "pick", "peppy", "/", "normal", "!", "!", "!", "but", "i", "cannot", "find", "a", "jock", "that", "suit", "my", "aesthetic", ":/", "/", "/"]}
-{"id": "1472-villager", "word": "villager", "label_binary": 1, "text_1": "Can someone tell me what animal crossing villager I remind you of l", "token_idx_1": 7, "text_start_1": 41, "text_end_1": 49, "date_1": "2019-04", "text_2": "What an eventful day my animals crossed, I married a villager, and Tom nook and redd are going to adopt a baby. I would say I beat animal crossing today. \u2764\ufe0f\ud83e\udd97\ud83d\udcab\ud83c\udf08\ud83c\udf2a\ud83c\udf2a", "token_idx_2": 11, "text_start_2": 53, "text_end_2": 61, "date_2": "2020-04", "text_1_tokenized": ["Can", "someone", "tell", "me", "what", "animal", "crossing", "villager", "I", "remind", "you", "of", "l"], "text_2_tokenized": ["What", "an", "eventful", "day", "my", "animals", "crossed", ",", "I", "married", "a", "villager", ",", "and", "Tom", "nook", "and", "redd", "are", "going", "to", "adopt", "a", "baby", ".", "I", "would", "say", "I", "beat", "animal", "crossing", "today", ".", "\u2764", "\ufe0f", "\ud83e\udd97", "\ud83d\udcab", "\ud83c\udf08", "\ud83c\udf2a", "\ud83c\udf2a"]}
-{"id": "1769-turnip", "word": "turnip", "label_binary": 0, "text_1": "My Nana's greeting over forgetting turnip for Sunday dinner and I'm sitting greeting about the game the day", "token_idx_1": 5, "text_start_1": 35, "text_end_1": 41, "date_1": "2019-05", "text_2": "Okay my turnip prices are 560 \ud83d\ude0e all I want if the iron garden table and chair diy PLS #acnhturnipprices #ACNHturnips", "token_idx_2": 2, "text_start_2": 8, "text_end_2": 14, "date_2": "2020-05", "text_1_tokenized": ["My", "Nana's", "greeting", "over", "forgetting", "turnip", "for", "Sunday", "dinner", "and", "I'm", "sitting", "greeting", "about", "the", "game", "the", "day"], "text_2_tokenized": ["Okay", "my", "turnip", "prices", "are", "560", "\ud83d\ude0e", "all", "I", "want", "if", "the", "iron", "garden", "table", "and", "chair", "diy", "PLS", "#acnhturnipprices", "#ACNHturnips"]}
-{"id": "1770-turnip", "word": "turnip", "label_binary": 0, "text_1": "thank god for good directions.. and turnip greens \ud83d\ude43", "token_idx_1": 7, "text_start_1": 36, "text_end_1": 42, "date_1": "2019-05", "text_2": "i mean today's sunday but idk any time skippers out there got like 500+ turnip buying", "token_idx_2": 15, "text_start_2": 72, "text_end_2": 78, "date_2": "2020-05", "text_1_tokenized": ["thank", "god", "for", "good", "directions", "..", "and", "turnip", "greens", "\ud83d\ude43"], "text_2_tokenized": ["i", "mean", "today's", "sunday", "but", "idk", "any", "time", "skippers", "out", "there", "got", "like", "500", "+", "turnip", "buying"]}
-{"id": "1771-turnip", "word": "turnip", "label_binary": 0, "text_1": "I hope the back throw and turnip nerfs aren't as terrible as it looks", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 32, "date_1": "2019-05", "text_2": "You crazy if you want 10NMT for 500 Bells per turnip", "token_idx_2": 10, "text_start_2": 46, "text_end_2": 52, "date_2": "2020-05", "text_1_tokenized": ["I", "hope", "the", "back", "throw", "and", "turnip", "nerfs", "aren't", "as", "terrible", "as", "it", "looks"], "text_2_tokenized": ["You", "crazy", "if", "you", "want", "10NMT", "for", "500", "Bells", "per", "turnip"]}
-{"id": "1772-turnip", "word": "turnip", "label_binary": 0, "text_1": "That Peter Dutton... He's cerainly a turnip for the books.", "token_idx_1": 7, "text_start_1": 37, "text_end_1": 43, "date_1": "2019-05", "text_2": "I am hyperventilating. Per my acnh turnip calculator, I should be looking at some pretty big numbers tomorrow! We are officially rolling the dice. Stay tuned! #ScaredMoneyDontMakeNoMoneh", "token_idx_2": 7, "text_start_2": 35, "text_end_2": 41, "date_2": "2020-05", "text_1_tokenized": ["That", "Peter", "Dutton", "...", "He's", "cerainly", "a", "turnip", "for", "the", "books", "."], "text_2_tokenized": ["I", "am", "hyperventilating", ".", "Per", "my", "acnh", "turnip", "calculator", ",", "I", "should", "be", "looking", "at", "some", "pretty", "big", "numbers", "tomorrow", "!", "We", "are", "officially", "rolling", "the", "dice", ".", "Stay", "tuned", "!", "#ScaredMoneyDontMakeNoMoneh"]}
-{"id": "1773-turnip", "word": "turnip", "label_binary": 0, "text_1": "I ain't gone lie...my turnip greens was so fye last night I ate a bowl this morning \ud83e\udd74", "token_idx_1": 6, "text_start_1": 22, "text_end_1": 28, "date_1": "2019-05", "text_2": "ay my turnip prices are at 467 if anyone wants to come over #acnh #acnhturnip :)", "token_idx_2": 2, "text_start_2": 6, "text_end_2": 12, "date_2": "2020-05", "text_1_tokenized": ["I", "ain't", "gone", "lie", "...", "my", "turnip", "greens", "was", "so", "fye", "last", "night", "I", "ate", "a", "bowl", "this", "morning", "\ud83e\udd74"], "text_2_tokenized": ["ay", "my", "turnip", "prices", "are", "at", "467", "if", "anyone", "wants", "to", "come", "over", "#acnh", "#acnhturnip", ":)"]}
-{"id": "1774-turnip", "word": "turnip", "label_binary": 0, "text_1": "it's the turnip enby", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 15, "date_1": "2019-05", "text_2": "Anyone have turnip prices above 150 \ud83d\ude05", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 18, "date_2": "2020-05", "text_1_tokenized": ["it's", "the", "turnip", "enby"], "text_2_tokenized": ["Anyone", "have", "turnip", "prices", "above", "150", "\ud83d\ude05"]}
-{"id": "1775-turnip", "word": "turnip", "label_binary": 0, "text_1": "Jamie rly left his tall glass of custard up in winterfell to go die by brick fall wit beggy mrs turnip head. Smh", "token_idx_1": 20, "text_start_1": 96, "text_end_1": 102, "date_1": "2019-05", "text_2": "Nigga hosting a turnip exchange tiring as hell", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 22, "date_2": "2020-05", "text_1_tokenized": ["Jamie", "rly", "left", "his", "tall", "glass", "of", "custard", "up", "in", "winterfell", "to", "go", "die", "by", "brick", "fall", "wit", "beggy", "mrs", "turnip", "head", ".", "Smh"], "text_2_tokenized": ["Nigga", "hosting", "a", "turnip", "exchange", "tiring", "as", "hell"]}
-{"id": "1776-turnip", "word": "turnip", "label_binary": 0, "text_1": "All hot messes are nasty Beiber is a turnip Therefore our world is mirage of our imagination", "token_idx_1": 8, "text_start_1": 37, "text_end_1": 43, "date_1": "2019-05", "text_2": "Ok fam who got the good turnip prices", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 30, "date_2": "2020-05", "text_1_tokenized": ["All", "hot", "messes", "are", "nasty", "Beiber", "is", "a", "turnip", "Therefore", "our", "world", "is", "mirage", "of", "our", "imagination"], "text_2_tokenized": ["Ok", "fam", "who", "got", "the", "good", "turnip", "prices"]}
-{"id": "1777-turnip", "word": "turnip", "label_binary": 0, "text_1": "Actually going to miss my grandma saying \u201cdo you think I fell off of a turnip truck?\u201d", "token_idx_1": 16, "text_start_1": 71, "text_end_1": 77, "date_1": "2019-05", "text_2": "There's a possibility that I'll have a high turnip price on Wednesday according to my prediction app, so do I just wait and see or do I just try to sell them asap???", "token_idx_2": 8, "text_start_2": 44, "text_end_2": 50, "date_2": "2020-05", "text_1_tokenized": ["Actually", "going", "to", "miss", "my", "grandma", "saying", "\u201c", "do", "you", "think", "I", "fell", "off", "of", "a", "turnip", "truck", "?", "\u201d"], "text_2_tokenized": ["There's", "a", "possibility", "that", "I'll", "have", "a", "high", "turnip", "price", "on", "Wednesday", "according", "to", "my", "prediction", "app", ",", "so", "do", "I", "just", "wait", "and", "see", "or", "do", "I", "just", "try", "to", "sell", "them", "asap", "?", "?", "?"]}
-{"id": "1778-turnip", "word": "turnip", "label_binary": 0, "text_1": "raw it's like a spicy turnip but i cooked it in broth for my udon and its so tender and sweet omg", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 28, "date_1": "2019-05", "text_2": "I got turnip prices at 589 for my peeps who play.", "token_idx_2": 2, "text_start_2": 6, "text_end_2": 12, "date_2": "2020-05", "text_1_tokenized": ["raw", "it's", "like", "a", "spicy", "turnip", "but", "i", "cooked", "it", "in", "broth", "for", "my", "udon", "and", "its", "so", "tender", "and", "sweet", "omg"], "text_2_tokenized": ["I", "got", "turnip", "prices", "at", "589", "for", "my", "peeps", "who", "play", "."]}
-{"id": "1779-turnip", "word": "turnip", "label_binary": 0, "text_1": "Braves &amp; Diamondbacks: Second time trading 1-run wins with each other in consecutive games. Actually did for a 3-game series Jul 22-24 2005 (ARI 6-5, ATL 3-2, ARI 3-2). Yeah, this one's a turnip. Three to go.", "token_idx_1": 45, "text_start_1": 192, "text_end_1": 198, "date_1": "2019-05", "text_2": "Who has good turnip prices.... help", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 19, "date_2": "2020-05", "text_1_tokenized": ["Braves", "&", "Diamondbacks", ":", "Second", "time", "trading", "1", "-", "run", "wins", "with", "each", "other", "in", "consecutive", "games", ".", "Actually", "did", "for", "a", "3", "-", "game", "series", "Jul", "22-24", "2005", "(", "ARI", "6-5", ",", "ATL", "3-2", ",", "ARI", "3-2", ")", ".", "Yeah", ",", "this", "one's", "a", "turnip", ".", "Three", "to", "go", "."], "text_2_tokenized": ["Who", "has", "good", "turnip", "prices", "...", "help"]}
-{"id": "1780-turnip", "word": "turnip", "label_binary": 0, "text_1": "Only reason I use twitter is to shit on avil tbh. dudes a turnip", "token_idx_1": 14, "text_start_1": 58, "text_end_1": 64, "date_1": "2019-05", "text_2": "does anyone watch their acnh turnip prices and is anyone's good this week?? I have tons to sell and I will give u some \ud83e\udd7a", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 35, "date_2": "2020-05", "text_1_tokenized": ["Only", "reason", "I", "use", "twitter", "is", "to", "shit", "on", "avil", "tbh", ".", "dudes", "a", "turnip"], "text_2_tokenized": ["does", "anyone", "watch", "their", "acnh", "turnip", "prices", "and", "is", "anyone's", "good", "this", "week", "?", "?", "I", "have", "tons", "to", "sell", "and", "I", "will", "give", "u", "some", "\ud83e\udd7a"]}
-{"id": "1781-turnip", "word": "turnip", "label_binary": 0, "text_1": "I must look like I just fell off the turnip truck. Every bot on Twitter send me a DM request message.", "token_idx_1": 9, "text_start_1": 37, "text_end_1": 43, "date_1": "2019-05", "text_2": "ummm who else didn't know that turnip prices changed during the day", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 37, "date_2": "2020-05", "text_1_tokenized": ["I", "must", "look", "like", "I", "just", "fell", "off", "the", "turnip", "truck", ".", "Every", "bot", "on", "Twitter", "send", "me", "a", "DM", "request", "message", "."], "text_2_tokenized": ["ummm", "who", "else", "didn't", "know", "that", "turnip", "prices", "changed", "during", "the", "day"]}
-{"id": "1782-turnip", "word": "turnip", "label_binary": 0, "text_1": "Love too grill a turnip and fling it at @GabrielNeil's Achilles tendons.", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 23, "date_1": "2019-05", "text_2": "I've got great turnip prices! 481 bells! Looking for gold, star piece, rare hybrid, or an interesting item. Dm for code! No Waiting! #turnipprices #AnimalCrossingNewHorizon #turnipexchange", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 21, "date_2": "2020-05", "text_1_tokenized": ["Love", "too", "grill", "a", "turnip", "and", "fling", "it", "at", "@GabrielNeil", "'", "s", "Achilles", "tendons", "."], "text_2_tokenized": ["I've", "got", "great", "turnip", "prices", "!", "481", "bells", "!", "Looking", "for", "gold", ",", "star", "piece", ",", "rare", "hybrid", ",", "or", "an", "interesting", "item", ".", "Dm", "for", "code", "!", "No", "Waiting", "!", "#turnipprices", "#AnimalCrossingNewHorizon", "#turnipexchange"]}
-{"id": "1783-turnip", "word": "turnip", "label_binary": 0, "text_1": "once I learn to combo back air off of turnip it'll be over for you hoes", "token_idx_1": 9, "text_start_1": 38, "text_end_1": 44, "date_1": "2019-05", "text_2": "lmao trying again with the tag. anyone have decent turnip prices today? #acnh #ACNHturnips", "token_idx_2": 10, "text_start_2": 51, "text_end_2": 57, "date_2": "2020-05", "text_1_tokenized": ["once", "I", "learn", "to", "combo", "back", "air", "off", "of", "turnip", "it'll", "be", "over", "for", "you", "hoes"], "text_2_tokenized": ["lmao", "trying", "again", "with", "the", "tag", ".", "anyone", "have", "decent", "turnip", "prices", "today", "?", "#acnh", "#ACNHturnips"]}
-{"id": "1784-turnip", "word": "turnip", "label_binary": 0, "text_1": "Little cousin bday this weekend, DEFINITELY turnip weekend in the H lmao fuck going to Austin", "token_idx_1": 7, "text_start_1": 44, "text_end_1": 50, "date_1": "2019-05", "text_2": "Worst realization I've had the urge to find good turnip prices", "token_idx_2": 9, "text_start_2": 49, "text_end_2": 55, "date_2": "2020-05", "text_1_tokenized": ["Little", "cousin", "bday", "this", "weekend", ",", "DEFINITELY", "turnip", "weekend", "in", "the", "H", "lmao", "fuck", "going", "to", "Austin"], "text_2_tokenized": ["Worst", "realization", "I've", "had", "the", "urge", "to", "find", "good", "turnip", "prices"]}
-{"id": "1785-turnip", "word": "turnip", "label_binary": 0, "text_1": "literally there are no clothes designed to fit me, a turnip", "token_idx_1": 11, "text_start_1": 53, "text_end_1": 59, "date_1": "2019-05", "text_2": "My turnip prices are bullshit", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["literally", "there", "are", "no", "clothes", "designed", "to", "fit", "me", ",", "a", "turnip"], "text_2_tokenized": ["My", "turnip", "prices", "are", "bullshit"]}
-{"id": "1786-turnip", "word": "turnip", "label_binary": 0, "text_1": "Sunday May 26. It's a beautiful day in Port Austin. We will be renting kayaks today from 8-2. Everyone off the water by 5pm. We will not be letting anyone under 18 our on the turnip rock trail, but our broken rocks trail will still be available for families with children.", "token_idx_1": 39, "text_start_1": 175, "text_end_1": 181, "date_1": "2019-05", "text_2": "guys my turnip prices are 1 bell please hit dms if want do cod", "token_idx_2": 2, "text_start_2": 8, "text_end_2": 14, "date_2": "2020-05", "text_1_tokenized": ["Sunday", "May", "26", ".", "It's", "a", "beautiful", "day", "in", "Port", "Austin", ".", "We", "will", "be", "renting", "kayaks", "today", "from", "8-2", ".", "Everyone", "off", "the", "water", "by", "5pm", ".", "We", "will", "not", "be", "letting", "anyone", "under", "18", "our", "on", "the", "turnip", "rock", "trail", ",", "but", "our", "broken", "rocks", "trail", "will", "still", "be", "available", "for", "families", "with", "children", "."], "text_2_tokenized": ["guys", "my", "turnip", "prices", "are", "1", "bell", "please", "hit", "dms", "if", "want", "do", "cod"]}
-{"id": "1787-turnip", "word": "turnip", "label_binary": 0, "text_1": "I hope it was a turnip that he shot. #unbreakable", "token_idx_1": 5, "text_start_1": 16, "text_end_1": 22, "date_1": "2019-05", "text_2": "does anyone have high turnip prices they're only asking 57 on my island", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 28, "date_2": "2020-05", "text_1_tokenized": ["I", "hope", "it", "was", "a", "turnip", "that", "he", "shot", ".", "#unbreakable"], "text_2_tokenized": ["does", "anyone", "have", "high", "turnip", "prices", "they're", "only", "asking", "57", "on", "my", "island"]}
-{"id": "1788-turnip", "word": "turnip", "label_binary": 0, "text_1": "What's the expression?? Squeezing blood from a turnip? Democrats have perfected this with the faux Russian investigation. #Tucker", "token_idx_1": 9, "text_start_1": 47, "text_end_1": 53, "date_1": "2019-05", "text_2": "Now. Time to sit here and fully void my large intestines then go see what the Able Sister and the Nooklings have for sale (and check that turnip price)", "token_idx_2": 29, "text_start_2": 138, "text_end_2": 144, "date_2": "2020-05", "text_1_tokenized": ["What's", "the", "expression", "?", "?", "Squeezing", "blood", "from", "a", "turnip", "?", "Democrats", "have", "perfected", "this", "with", "the", "faux", "Russian", "investigation", ".", "#Tucker"], "text_2_tokenized": ["Now", ".", "Time", "to", "sit", "here", "and", "fully", "void", "my", "large", "intestines", "then", "go", "see", "what", "the", "Able", "Sister", "and", "the", "Nooklings", "have", "for", "sale", "(", "and", "check", "that", "turnip", "price", ")"]}
-{"id": "1789-turnip", "word": "turnip", "label_binary": 0, "text_1": "#onthisday 1882 \"finished ploughing down the Dung on the turnip land\" Duncan MacFarlane #rurallife #canadianhistory", "token_idx_1": 10, "text_start_1": 57, "text_end_1": 63, "date_1": "2019-05", "text_2": "Ugh why do I always have shitty turnip prices \ud83d\ude24", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 38, "date_2": "2020-05", "text_1_tokenized": ["#onthisday", "1882", "\"", "finished", "ploughing", "down", "the", "Dung", "on", "the", "turnip", "land", "\"", "Duncan", "MacFarlane", "#rurallife", "#canadianhistory"], "text_2_tokenized": ["Ugh", "why", "do", "I", "always", "have", "shitty", "turnip", "prices", "\ud83d\ude24"]}
-{"id": "1790-turnip", "word": "turnip", "label_binary": 0, "text_1": "The extracted Moscato from Greece reminds the tongue of goji berry and turnip.", "token_idx_1": 12, "text_start_1": 71, "text_end_1": 77, "date_1": "2019-05", "text_2": "My game fell asleep, new code is drr1h. 435 a turnip I'm not really paying attention, but I am the one in the blue frog hat #turnipprices #TurnipPrice", "token_idx_2": 13, "text_start_2": 46, "text_end_2": 52, "date_2": "2020-05", "text_1_tokenized": ["The", "extracted", "Moscato", "from", "Greece", "reminds", "the", "tongue", "of", "goji", "berry", "and", "turnip", "."], "text_2_tokenized": ["My", "game", "fell", "asleep", ",", "new", "code", "is", "drr", "1h", ".", "435", "a", "turnip", "I'm", "not", "really", "paying", "attention", ",", "but", "I", "am", "the", "one", "in", "the", "blue", "frog", "hat", "#turnipprices", "#TurnipPrice"]}
-{"id": "1791-turnip", "word": "turnip", "label_binary": 0, "text_1": "Fried turnip greens, bacon, black pepper, and crushed hot pepper seeds. You can take the boy outta the southwest side of Warren but you can't take the SW side of Warren outta the boy !!! Fabulous !!!", "token_idx_1": 1, "text_start_1": 6, "text_end_1": 12, "date_1": "2019-05", "text_2": "My turnip price didn't get over 100 bells this week honestly I give up with the stalk market #ACNH #AnimalCrossingNewHorizons", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Fried", "turnip", "greens", ",", "bacon", ",", "black", "pepper", ",", "and", "crushed", "hot", "pepper", "seeds", ".", "You", "can", "take", "the", "boy", "outta", "the", "southwest", "side", "of", "Warren", "but", "you", "can't", "take", "the", "SW", "side", "of", "Warren", "outta", "the", "boy", "!", "!", "!", "Fabulous", "!", "!", "!"], "text_2_tokenized": ["My", "turnip", "price", "didn't", "get", "over", "100", "bells", "this", "week", "honestly", "I", "give", "up", "with", "the", "stalk", "market", "#ACNH", "#AnimalCrossingNewHorizons"]}
-{"id": "1792-turnip", "word": "turnip", "label_binary": 0, "text_1": "Kizaru the brain of the entire Russian turnip", "token_idx_1": 7, "text_start_1": 39, "text_end_1": 45, "date_1": "2019-05", "text_2": "my turnip prices are 553 bells. however, you must money match me in smash for $5 or more for the dodo code. make your choice #AnimalCrossing #AnimalCrossingturnips", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Kizaru", "the", "brain", "of", "the", "entire", "Russian", "turnip"], "text_2_tokenized": ["my", "turnip", "prices", "are", "553", "bells", ".", "however", ",", "you", "must", "money", "match", "me", "in", "smash", "for", "$", "5", "or", "more", "for", "the", "dodo", "code", ".", "make", "your", "choice", "#AnimalCrossing", "#AnimalCrossingturnips"]}
-{"id": "1793-turnip", "word": "turnip", "label_binary": 0, "text_1": "First Istanbul taste experience: sal gam suyu, a turnip drink that tastes like a cross between bloody mary mix and pickle juice. I actually like it but probably won't order it again. Not a relaxing beverage.", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 55, "date_1": "2019-05", "text_2": "I can't sleep because I'm literally sick with worry that the turnip prices are going to fuck me again this week \ud83e\udd22 #acnh #AnimalCrossingNewHorizons", "token_idx_2": 11, "text_start_2": 61, "text_end_2": 67, "date_2": "2020-05", "text_1_tokenized": ["First", "Istanbul", "taste", "experience", ":", "sal", "gam", "suyu", ",", "a", "turnip", "drink", "that", "tastes", "like", "a", "cross", "between", "bloody", "mary", "mix", "and", "pickle", "juice", ".", "I", "actually", "like", "it", "but", "probably", "won't", "order", "it", "again", ".", "Not", "a", "relaxing", "beverage", "."], "text_2_tokenized": ["I", "can't", "sleep", "because", "I'm", "literally", "sick", "with", "worry", "that", "the", "turnip", "prices", "are", "going", "to", "fuck", "me", "again", "this", "week", "\ud83e\udd22", "#acnh", "#AnimalCrossingNewHorizons"]}
-{"id": "1794-turnip", "word": "turnip", "label_binary": 0, "text_1": "there's a community garden round the corner from mum's new place in philly im boutta go ham actually, turnip***", "token_idx_1": 19, "text_start_1": 102, "text_end_1": 108, "date_1": "2019-05", "text_2": "does anybody have a good turnip price??", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 31, "date_2": "2020-05", "text_1_tokenized": ["there's", "a", "community", "garden", "round", "the", "corner", "from", "mum's", "new", "place", "in", "philly", "im", "boutta", "go", "ham", "actually", ",", "turnip", "*", "*", "*"], "text_2_tokenized": ["does", "anybody", "have", "a", "good", "turnip", "price", "?", "?"]}
-{"id": "1795-turnip", "word": "turnip", "label_binary": 0, "text_1": "Old russian proverb - never walk out on a turnip demanding blood. Vox strike? Typically you strike when companies are strong and have an upward financial trajectory. Is that the case here?", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 48, "date_1": "2019-05", "text_2": "Any good turnip prices? I don't want to waste all these turnips \ud83d\ude29\ud83d\ude29 #TurnipPrice #AnimalCrossingNewHorizons", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 15, "date_2": "2020-05", "text_1_tokenized": ["Old", "russian", "proverb", "-", "never", "walk", "out", "on", "a", "turnip", "demanding", "blood", ".", "Vox", "strike", "?", "Typically", "you", "strike", "when", "companies", "are", "strong", "and", "have", "an", "upward", "financial", "trajectory", ".", "Is", "that", "the", "case", "here", "?"], "text_2_tokenized": ["Any", "good", "turnip", "prices", "?", "I", "don't", "want", "to", "waste", "all", "these", "turnips", "\ud83d\ude29", "\ud83d\ude29", "#TurnipPrice", "#AnimalCrossingNewHorizons"]}
-{"id": "1796-turnip", "word": "turnip", "label_binary": 0, "text_1": "Tha king Geoffreys a little turnip mate", "token_idx_1": 5, "text_start_1": 28, "text_end_1": 34, "date_1": "2019-05", "text_2": "Hosting a turnip exchange.... now I'm just waiting for people \ud83d\ude39", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 16, "date_2": "2020-05", "text_1_tokenized": ["Tha", "king", "Geoffreys", "a", "little", "turnip", "mate"], "text_2_tokenized": ["Hosting", "a", "turnip", "exchange", "...", "now", "I'm", "just", "waiting", "for", "people", "\ud83d\ude39"]}
-{"id": "1797-turnip", "word": "turnip", "label_binary": 0, "text_1": "Just picked up our first CSA share for the summer. Anyone know any turnip recipes?", "token_idx_1": 14, "text_start_1": 67, "text_end_1": 73, "date_1": "2019-05", "text_2": "My island is ugly and my turnip prices are shit, you guys should come visit!!! Dodo code: 0BX7L", "token_idx_2": 6, "text_start_2": 25, "text_end_2": 31, "date_2": "2020-05", "text_1_tokenized": ["Just", "picked", "up", "our", "first", "CSA", "share", "for", "the", "summer", ".", "Anyone", "know", "any", "turnip", "recipes", "?"], "text_2_tokenized": ["My", "island", "is", "ugly", "and", "my", "turnip", "prices", "are", "shit", ",", "you", "guys", "should", "come", "visit", "!", "!", "!", "Dodo", "code", ":", "0BX7L"]}
-{"id": "1799-turnip", "word": "turnip", "label_binary": 0, "text_1": "To the turnip who decided to drop both his chocolate wrapper and then crisp packet out of his car window in Stockport just, hope karma will give you a polite nudge later on today you absolute plum", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 13, "date_1": "2019-05", "text_2": "Anyone have any turnip prices above 100? Just looking to lose not a lot of money here lol", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 22, "date_2": "2020-05", "text_1_tokenized": ["To", "the", "turnip", "who", "decided", "to", "drop", "both", "his", "chocolate", "wrapper", "and", "then", "crisp", "packet", "out", "of", "his", "car", "window", "in", "Stockport", "just", ",", "hope", "karma", "will", "give", "you", "a", "polite", "nudge", "later", "on", "today", "you", "absolute", "plum"], "text_2_tokenized": ["Anyone", "have", "any", "turnip", "prices", "above", "100", "?", "Just", "looking", "to", "lose", "not", "a", "lot", "of", "money", "here", "lol"]}
-{"id": "1800-turnip", "word": "turnip", "label_binary": 1, "text_1": "My grandmom is showing me how she makes her turnip greens and bitch I can't wait.", "token_idx_1": 9, "text_start_1": 44, "text_end_1": 50, "date_1": "2019-05", "text_2": "New math: if the donald \u201clooks like Elvis\u201d then Elvis must look like an angry turnip. So who is the angry turnip?", "token_idx_2": 18, "text_start_2": 78, "text_end_2": 84, "date_2": "2020-05", "text_1_tokenized": ["My", "grandmom", "is", "showing", "me", "how", "she", "makes", "her", "turnip", "greens", "and", "bitch", "I", "can't", "wait", "."], "text_2_tokenized": ["New", "math", ":", "if", "the", "donald", "\u201c", "looks", "like", "Elvis", "\u201d", "then", "Elvis", "must", "look", "like", "an", "angry", "turnip", ".", "So", "who", "is", "the", "angry", "turnip", "?"]}
-{"id": "1801-turnip", "word": "turnip", "label_binary": 0, "text_1": "I decided too late to stop live tweeting during the show. Y'all will suck the blood out of a turnip", "token_idx_1": 20, "text_start_1": 93, "text_end_1": 99, "date_1": "2019-05", "text_2": "If my Sunday morning turnip hustle in Animal Crossing has taught me any life lessons, it's that I would share my wealth if I were hella rich. Like, yo, you want help with your town infrastructure? SURE I'll build your bridge &amp; incline, you wanna pay off your house? Here's $1m...", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-05", "text_1_tokenized": ["I", "decided", "too", "late", "to", "stop", "live", "tweeting", "during", "the", "show", ".", "Y'all", "will", "suck", "the", "blood", "out", "of", "a", "turnip"], "text_2_tokenized": ["If", "my", "Sunday", "morning", "turnip", "hustle", "in", "Animal", "Crossing", "has", "taught", "me", "any", "life", "lessons", ",", "it's", "that", "I", "would", "share", "my", "wealth", "if", "I", "were", "hella", "rich", ".", "Like", ",", "yo", ",", "you", "want", "help", "with", "your", "town", "infrastructure", "?", "SURE", "I'll", "build", "your", "bridge", "&", "incline", ",", "you", "wanna", "pay", "off", "your", "house", "?", "Here's", "$", "1m", "..."]}
-{"id": "1802-turnip", "word": "turnip", "label_binary": 0, "text_1": "My hubby, a vet, said when turnip got elected..we need to give him a chance. What a difference 2 years have made. Now he calls him a dumbass. I tried to tell him!!!!", "token_idx_1": 8, "text_start_1": 27, "text_end_1": 33, "date_1": "2019-05", "text_2": "my turnip prices are 135 I think that's the best I've seen. It's always like 59\ud83d\ude15", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["My", "hubby", ",", "a", "vet", ",", "said", "when", "turnip", "got", "elected", "..", "we", "need", "to", "give", "him", "a", "chance", ".", "What", "a", "difference", "2", "years", "have", "made", ".", "Now", "he", "calls", "him", "a", "dumbass", ".", "I", "tried", "to", "tell", "him", "!", "!", "!"], "text_2_tokenized": ["my", "turnip", "prices", "are", "135", "I", "think", "that's", "the", "best", "I've", "seen", ".", "It's", "always", "like", "59", "\ud83d\ude15"]}
-{"id": "1803-turnip", "word": "turnip", "label_binary": 0, "text_1": "America, could you not have send one if your \"best\" instead of the turnip you inflected on us. We have been fighting against is the reintroduction of a \"BORDER\" and one of the first things out of the tangerine clowns mouth was \"A WALL\". His level of stupidity grows by the day.", "token_idx_1": 16, "text_start_1": 67, "text_end_1": 73, "date_1": "2019-05", "text_2": "YOOOOOOO the acnh app that i have had an update &amp; there's a turnip price tracker now I LOVE!!!", "token_idx_2": 13, "text_start_2": 64, "text_end_2": 70, "date_2": "2020-05", "text_1_tokenized": ["America", ",", "could", "you", "not", "have", "send", "one", "if", "your", "\"", "best", "\"", "instead", "of", "the", "turnip", "you", "inflected", "on", "us", ".", "We", "have", "been", "fighting", "against", "is", "the", "reintroduction", "of", "a", "\"", "BORDER", "\"", "and", "one", "of", "the", "first", "things", "out", "of", "the", "tangerine", "clowns", "mouth", "was", "\"", "A", "WALL", "\"", ".", "His", "level", "of", "stupidity", "grows", "by", "the", "day", "."], "text_2_tokenized": ["YOOOOOOO", "the", "acnh", "app", "that", "i", "have", "had", "an", "update", "&", "there's", "a", "turnip", "price", "tracker", "now", "I", "LOVE", "!", "!", "!"]}
-{"id": "1804-turnip", "word": "turnip", "label_binary": 0, "text_1": "Can anyone Smash Ultimate savvy confirm that the only difference between Peach and Daisy is turnip knockback? As in Daisy can do every combo Peach can?", "token_idx_1": 15, "text_start_1": 92, "text_end_1": 98, "date_1": "2019-05", "text_2": "someone let me know if they have high turnip prices this week \ud83d\udc49\ud83c\udffb\ud83d\udc48\ud83c\udffb", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 44, "date_2": "2020-05", "text_1_tokenized": ["Can", "anyone", "Smash", "Ultimate", "savvy", "confirm", "that", "the", "only", "difference", "between", "Peach", "and", "Daisy", "is", "turnip", "knockback", "?", "As", "in", "Daisy", "can", "do", "every", "combo", "Peach", "can", "?"], "text_2_tokenized": ["someone", "let", "me", "know", "if", "they", "have", "high", "turnip", "prices", "this", "week", "\ud83d\udc49\ud83c\udffb", "\ud83d\udc48\ud83c\udffb"]}
-{"id": "1805-turnip", "word": "turnip", "label_binary": 0, "text_1": "Last night I had a dream @SultryDave was eating chili in my house and my roommates were offering him weird vegan options like an uncooked turnip and saltines?", "token_idx_1": 25, "text_start_1": 138, "text_end_1": 144, "date_1": "2019-05", "text_2": "anyone got good turnip prices \ud83e\udd7a\ud83e\udd32\ud83c\udffb", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 22, "date_2": "2020-05", "text_1_tokenized": ["Last", "night", "I", "had", "a", "dream", "@SultryDave", "was", "eating", "chili", "in", "my", "house", "and", "my", "roommates", "were", "offering", "him", "weird", "vegan", "options", "like", "an", "uncooked", "turnip", "and", "saltines", "?"], "text_2_tokenized": ["anyone", "got", "good", "turnip", "prices", "\ud83e\udd7a", "\ud83e\udd32\ud83c\udffb"]}
-{"id": "1806-turnip", "word": "turnip", "label_binary": 0, "text_1": "From the horses mouth...The tango turnip wants the NHS.", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 40, "date_1": "2019-05", "text_2": "Apparently I'm supposed to get a spike in turnip prices on my island tomorrow morning so if it's crazy high I'll DM my Dodo code to mutuals only", "token_idx_2": 8, "text_start_2": 42, "text_end_2": 48, "date_2": "2020-05", "text_1_tokenized": ["From", "the", "horses", "mouth", "...", "The", "tango", "turnip", "wants", "the", "NHS", "."], "text_2_tokenized": ["Apparently", "I'm", "supposed", "to", "get", "a", "spike", "in", "turnip", "prices", "on", "my", "island", "tomorrow", "morning", "so", "if", "it's", "crazy", "high", "I'll", "DM", "my", "Dodo", "code", "to", "mutuals", "only"]}
-{"id": "1807-turnip", "word": "turnip", "label_binary": 0, "text_1": "Tartarus slayed a monster called Gorgon while riding a seal while also eating a vegetable called a turnip", "token_idx_1": 17, "text_start_1": 99, "text_end_1": 105, "date_1": "2019-05", "text_2": "petition for my turnip prices to stop being so damn pathetic", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 22, "date_2": "2020-05", "text_1_tokenized": ["Tartarus", "slayed", "a", "monster", "called", "Gorgon", "while", "riding", "a", "seal", "while", "also", "eating", "a", "vegetable", "called", "a", "turnip"], "text_2_tokenized": ["petition", "for", "my", "turnip", "prices", "to", "stop", "being", "so", "damn", "pathetic"]}
-{"id": "1808-turnip", "word": "turnip", "label_binary": 0, "text_1": "My turmeric chicken thighs, steamed broccoli, and roasted turnip root to the rescue! Those fries will still haunt my dreams tonight.", "token_idx_1": 10, "text_start_1": 58, "text_end_1": 64, "date_1": "2019-05", "text_2": "Does anyone have good turnip prices right now", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 28, "date_2": "2020-05", "text_1_tokenized": ["My", "turmeric", "chicken", "thighs", ",", "steamed", "broccoli", ",", "and", "roasted", "turnip", "root", "to", "the", "rescue", "!", "Those", "fries", "will", "still", "haunt", "my", "dreams", "tonight", "."], "text_2_tokenized": ["Does", "anyone", "have", "good", "turnip", "prices", "right", "now"]}
-{"id": "1809-turnip", "word": "turnip", "label_binary": 0, "text_1": "Amber has the personality of a turnip\ud83d\ude2a #LoveIsIand", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 37, "date_1": "2019-05", "text_2": "#ACNH Does anyone have turnip prices over 106 bells? My market is on a decrease (currently at 70) and I bought them for 106 and I'd like to make even a tiny profit if possible", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 29, "date_2": "2020-05", "text_1_tokenized": ["Amber", "has", "the", "personality", "of", "a", "turnip", "\ud83d\ude2a", "#LoveIsIand"], "text_2_tokenized": ["#ACNH", "Does", "anyone", "have", "turnip", "prices", "over", "106", "bells", "?", "My", "market", "is", "on", "a", "decrease", "(", "currently", "at", "70", ")", "and", "I", "bought", "them", "for", "106", "and", "I'd", "like", "to", "make", "even", "a", "tiny", "profit", "if", "possible"]}
-{"id": "1810-turnip", "word": "turnip", "label_binary": 0, "text_1": "I wonder if #DesperateDonald has ever heard that you can't get blood out of turnip...Mexico is our friend and he blackballed them into deal. what a shitty leader. #ImpeachmentInquiryNow #PrisionIsGoodToo", "token_idx_1": 14, "text_start_1": 76, "text_end_1": 82, "date_1": "2019-05", "text_2": "Broke: you gotta wake up before noon so you can get back into the cycle of work and school Woke: you gotta wake up before noon so you can check your turnip prices", "token_idx_2": 33, "text_start_2": 149, "text_end_2": 155, "date_2": "2020-05", "text_1_tokenized": ["I", "wonder", "if", "#DesperateDonald", "has", "ever", "heard", "that", "you", "can't", "get", "blood", "out", "of", "turnip", "...", "Mexico", "is", "our", "friend", "and", "he", "blackballed", "them", "into", "deal", ".", "what", "a", "shitty", "leader", ".", "#ImpeachmentInquiryNow", "#PrisionIsGoodToo"], "text_2_tokenized": ["Broke", ":", "you", "gotta", "wake", "up", "before", "noon", "so", "you", "can", "get", "back", "into", "the", "cycle", "of", "work", "and", "school", "Woke", ":", "you", "gotta", "wake", "up", "before", "noon", "so", "you", "can", "check", "your", "turnip", "prices"]}
-{"id": "1811-turnip", "word": "turnip", "label_binary": 0, "text_1": "Nancypoo, has the attention span of a turnip...........LOL", "token_idx_1": 8, "text_start_1": 38, "text_end_1": 44, "date_1": "2019-05", "text_2": "my turnip prices are poopy does anyone have good prices \ud83e\udd7a legit anything above 90 and ill fly there rn i swear", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Nancypoo", ",", "has", "the", "attention", "span", "of", "a", "turnip", "...", "LOL"], "text_2_tokenized": ["my", "turnip", "prices", "are", "poopy", "does", "anyone", "have", "good", "prices", "\ud83e\udd7a", "legit", "anything", "above", "90", "and", "ill", "fly", "there", "rn", "i", "swear"]}
-{"id": "1812-turnip", "word": "turnip", "label_binary": 0, "text_1": "The orange
turnip runs to the press like they're his parents lmao thats one let me clear my side snitching ass man child.", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 17, "date_1": "2019-05", "text_2": "so uhhh what's everyone's turnip prices? \ud83d\udc40", "token_idx_2": 4, "text_start_2": 26, "text_end_2": 32, "date_2": "2020-05", "text_1_tokenized": ["The", "orange", "turnip", "runs", "to", "the", "press", "like", "they're", "his", "parents", "lmao", "thats", "one", "let", "me", "clear", "my", "side", "snitching", "ass", "man", "child", "."], "text_2_tokenized": ["so", "uhhh", "what's", "everyone's", "turnip", "prices", "?", "\ud83d\udc40"]} -{"id": "1813-turnip", "word": "turnip", "label_binary": 0, "text_1": "Put all your turnip futures on Grayling being our next PM. Karma hasn't finished fucking with the UK yet. #c4news", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 19, "date_1": "2019-05", "text_2": "anyone's turnip prices 500-600+?", "token_idx_2": 1, "text_start_2": 9, "text_end_2": 15, "date_2": "2020-05", "text_1_tokenized": ["Put", "all", "your", "turnip", "futures", "on", "Grayling", "being", "our", "next", "PM", ".", "Karma", "hasn't", "finished", "fucking", "with", "the", "UK", "yet", ".", "#c4news"], "text_2_tokenized": ["anyone's", "turnip", "prices", "500-600+", "?"]} -{"id": "1814-turnip", "word": "turnip", "label_binary": 0, "text_1": "Have you ever seen anyone eat a turnip?", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 38, "date_1": "2019-05", "text_2": "Why are my turnip prices always trash? Currently 130 bells per turnip, but I paid 110! #AnimalCrossingNewHorizon #AnimalCrossing #NintendoSwich", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 17, "date_2": "2020-05", "text_1_tokenized": ["Have", "you", "ever", "seen", "anyone", "eat", "a", "turnip", "?"], "text_2_tokenized": ["Why", "are", "my", "turnip", "prices", "always", "trash", "?", "Currently", "130", "bells", "per", "turnip", ",", "but", "I", "paid", "110", "!", "#AnimalCrossingNewHorizon", "#AnimalCrossing", "#NintendoSwich"]} -{"id": "1815-turnip", "word": "turnip", "label_binary": 0, "text_1": "See you bear careless turnip snare, Rampages pitch color red, In the father, the.", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 28, "date_1": "2019-05", "text_2": "Anyone have good turnip prices today? 
Mine are so bad lol #AnimalCrossing", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 23, "date_2": "2020-05", "text_1_tokenized": ["See", "you", "bear", "careless", "turnip", "snare", ",", "Rampages", "pitch", "color", "red", ",", "In", "the", "father", ",", "the", "."], "text_2_tokenized": ["Anyone", "have", "good", "turnip", "prices", "today", "?", "Mine", "are", "so", "bad", "lol", "#AnimalCrossing"]} -{"id": "1816-turnip", "word": "turnip", "label_binary": 0, "text_1": "im sad im not at miami rolling loud only bc my turnip friends i met at lst year bay area rolling loud after parties are out there and they be on stage and VIP af w it \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_1": 11, "text_start_1": 47, "text_end_1": 53, "date_1": "2019-05", "text_2": "Don't forget to check your turnip prices and also that Robert Gomez is a rapist/pedophile!", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 33, "date_2": "2020-05", "text_1_tokenized": ["im", "sad", "im", "not", "at", "miami", "rolling", "loud", "only", "bc", "my", "turnip", "friends", "i", "met", "at", "lst", "year", "bay", "area", "rolling", "loud", "after", "parties", "are", "out", "there", "and", "they", "be", "on", "stage", "and", "VIP", "af", "w", "it", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["Don't", "forget", "to", "check", "your", "turnip", "prices", "and", "also", "that", "Robert", "Gomez", "is", "a", "rapist", "/", "pedophile", "!"]} -{"id": "1817-turnip", "word": "turnip", "label_binary": 0, "text_1": "So let me get this straight? You can't do president stuff because of the annoyance of these investigations into your crimes but you sure as hell can play golf and live on Twitter for hours a day with no prob? Do you even know the model of that turnip truck I need to jump off?", "token_idx_1": 50, "text_start_1": 244, "text_end_1": 250, "date_1": "2019-05", "text_2": "556 turnip price hit me up for the next 2 hours if you want to sell. =w= #ACNH", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 10, "date_2": "2020-05", "text_1_tokenized": ["So", "let", "me", "get", "this", "straight", "?", "You", "can't", "do", "president", "stuff", "because", "of", "the", "annoyance", "of", "these", "investigations", "into", "your", "crimes", "but", "you", "sure", "as", "hell", "can", "play", "golf", "and", "live", "on", "Twitter", "for", "hours", "a", "day", "with", "no", "prob", "?", "Do", "you", "even", "know", "the", "model", "of", "that", "turnip", "truck", "I", "need", "to", "jump", "off", "?"], "text_2_tokenized": ["556", "turnip", "price", "hit", "me", "up", "for", "the", "next", "2", "hours", "if", "you", "want", "to", "sell", ".", "=", "w", "=", "#ACNH"]} -{"id": "1818-turnip", "word": "turnip", "label_binary": 0, "text_1": "Ye'd best start believin' in white turnip stories, Ms Turner. Yer in one.", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 41, "date_1": "2019-05", "text_2": "If anyone has a high turnip price on Animal Crossing I will literally send ass or bells to come sell them please my family is drowning in turnips the wife left me. 
Isabelle the kids miss you.", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-05", "text_1_tokenized": ["Ye'd", "best", "start", "believin", "'", "in", "white", "turnip", "stories", ",", "Ms", "Turner", ".", "Yer", "in", "one", "."], "text_2_tokenized": ["If", "anyone", "has", "a", "high", "turnip", "price", "on", "Animal", "Crossing", "I", "will", "literally", "send", "ass", "or", "bells", "to", "come", "sell", "them", "please", "my", "family", "is", "drowning", "in", "turnips", "the", "wife", "left", "me", ".", "Isabelle", "the", "kids", "miss", "you", "."]} -{"id": "1819-turnip", "word": "turnip", "label_binary": 0, "text_1": "Nearly rammed into @Tony_Robinson on my way out of the Men's Room. This is seriously surreal. (I wish I were carrying a turnip.) #GoodOmens #London #BlackAdderforever", "token_idx_1": 25, "text_start_1": 120, "text_end_1": 126, "date_1": "2019-05", "text_2": "Reopening my island in 7 min for turnip sales.", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 39, "date_2": "2020-05", "text_1_tokenized": ["Nearly", "rammed", "into", "@Tony_Robinson", "on", "my", "way", "out", "of", "the", "Men's", "Room", ".", "This", "is", "seriously", "surreal", ".", "(", "I", "wish", "I", "were", "carrying", "a", "turnip", ".", ")", "#GoodOmens", "#London", "#BlackAdderforever"], "text_2_tokenized": ["Reopening", "my", "island", "in", "7", "min", "for", "turnip", "sales", "."]} -{"id": "1820-turnip", "word": "turnip", "label_binary": 0, "text_1": "I throw them in the coin collector at the toll booth. Of course I use black electrical tape to alter my license plate number first. I didn't just fall off the turnip truck yesterday. #OtherUsesForHashtagPoints", "token_idx_1": 33, "text_start_1": 159, "text_end_1": 165, "date_1": "2019-05", "text_2": "hi animal crossing twitter! my turnip prices are 637 today! if you wanna sell your turnips here pls check the comments of this tweet for instructions on entering my island!", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 37, "date_2": "2020-05", "text_1_tokenized": ["I", "throw", "them", "in", "the", "coin", "collector", "at", "the", "toll", "booth", ".", "Of", "course", "I", "use", "black", "electrical", "tape", "to", "alter", "my", "license", "plate", "number", "first", ".", "I", "didn't", "just", "fall", "off", "the", "turnip", "truck", "yesterday", ".", "#OtherUsesForHashtagPoints"], "text_2_tokenized": ["hi", "animal", "crossing", "twitter", "!", "my", "turnip", "prices", "are", "637", "today", "!", "if", "you", "wanna", "sell", "your", "turnips", "here", "pls", "check", "the", "comments", "of", "this", "tweet", "for", "instructions", "on", "entering", "my", "island", "!"]} -{"id": "1821-turnip", "word": "turnip", "label_binary": 0, "text_1": "I wonder if turnip fries are any good.", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 18, "date_1": "2019-05", "text_2": "Who has good turnip prices??? I need to sell by today \ud83d\ude29 #AnimalCrossing #ACNH", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 19, "date_2": "2020-05", "text_1_tokenized": ["I", "wonder", "if", "turnip", "fries", "are", "any", "good", "."], "text_2_tokenized": ["Who", "has", "good", "turnip", "prices", "?", "?", "?", "I", "need", "to", "sell", "by", "today", "\ud83d\ude29", "#AnimalCrossing", "#ACNH"]} -{"id": "1822-turnip", "word": "turnip", "label_binary": 0, "text_1": "The most powerful nation on earth is \u201cled\u201d by a septuagenarian with the emotional intelligence of a toddler and the IQ of a turnip. 
#WhyAliensWontVisit", "token_idx_1": 25, "text_start_1": 124, "text_end_1": 130, "date_1": "2019-05", "text_2": "Anyone out there got turnip prices over 100 bells?", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-05", "text_1_tokenized": ["The", "most", "powerful", "nation", "on", "earth", "is", "\u201c", "led", "\u201d", "by", "a", "septuagenarian", "with", "the", "emotional", "intelligence", "of", "a", "toddler", "and", "the", "IQ", "of", "a", "turnip", ".", "#WhyAliensWontVisit"], "text_2_tokenized": ["Anyone", "out", "there", "got", "turnip", "prices", "over", "100", "bells", "?"]} -{"id": "1823-turnip", "word": "turnip", "label_binary": 0, "text_1": "Here are my three little girls cinnamon turnip and noodles just for you Grandma I just took these pictures last night don't they look cute and thank you Grandma it helps out anything you can do love you thank you", "token_idx_1": 7, "text_start_1": 40, "text_end_1": 46, "date_1": "2019-05", "text_2": "My turnip price is 501!! I time traveled if you need the dodo code DM me. I don't need anything, but tips are appreciated! \ud83d\udc97", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Here", "are", "my", "three", "little", "girls", "cinnamon", "turnip", "and", "noodles", "just", "for", "you", "Grandma", "I", "just", "took", "these", "pictures", "last", "night", "don't", "they", "look", "cute", "and", "thank", "you", "Grandma", "it", "helps", "out", "anything", "you", "can", "do", "love", "you", "thank", "you"], "text_2_tokenized": ["My", "turnip", "price", "is", "501", "!", "!", "I", "time", "traveled", "if", "you", "need", "the", "dodo", "code", "DM", "me", ".", "I", "don't", "need", "anything", ",", "but", "tips", "are", "appreciated", "!", "\ud83d\udc97"]} -{"id": "1824-turnip", "word": "turnip", "label_binary": 0, "text_1": "I was looking through this stack of little notebooks I have bc I know one of them is the journal I kept in Germany and the last page says \u201cI wish I could be friends with bangtan\u201d alright 2015 turnip calm down", "token_idx_1": 41, "text_start_1": 192, "text_end_1": 198, "date_1": "2019-05", "text_2": "does anyone *actually* have good turnip prices today i know it's a reach since it's only monday but i wanna sell them asap", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 39, "date_2": "2020-05", "text_1_tokenized": ["I", "was", "looking", "through", "this", "stack", "of", "little", "notebooks", "I", "have", "bc", "I", "know", "one", "of", "them", "is", "the", "journal", "I", "kept", "in", "Germany", "and", "the", "last", "page", "says", "\u201c", "I", "wish", "I", "could", "be", "friends", "with", "bangtan", "\u201d", "alright", "2015", "turnip", "calm", "down"], "text_2_tokenized": ["does", "anyone", "*", "actually", "*", "have", "good", "turnip", "prices", "today", "i", "know", "it's", "a", "reach", "since", "it's", "only", "monday", "but", "i", "wanna", "sell", "them", "asap"]} -{"id": "1825-turnip", "word": "turnip", "label_binary": 0, "text_1": "RT @LS_Nutrition: Aiming for more Vitamin #K in your #diet this May? 
Try some turnip greens, spinach or broccoli.", "token_idx_1": 17, "text_start_1": 78, "text_end_1": 84, "date_1": "2019-05", "text_2": "24 bells per turnip are u kidding me", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 19, "date_2": "2020-05", "text_1_tokenized": ["RT", "@LS_Nutrition", ":", "Aiming", "for", "more", "Vitamin", "#", "K", "in", "your", "#diet", "this", "May", "?", "Try", "some", "turnip", "greens", ",", "spinach", "or", "broccoli", "."], "text_2_tokenized": ["24", "bells", "per", "turnip", "are", "u", "kidding", "me"]} -{"id": "1826-turnip", "word": "turnip", "label_binary": 0, "text_1": "This dynamic between Nicole and her mom is so hard to watch. Robalee would have an easier time talking sense into a turnip. #90DayFianceHappilyEverAfter", "token_idx_1": 23, "text_start_1": 116, "text_end_1": 122, "date_1": "2019-05", "text_2": "Does anyone have any good turnip prices? I keep getting really bad rates.", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 32, "date_2": "2020-05", "text_1_tokenized": ["This", "dynamic", "between", "Nicole", "and", "her", "mom", "is", "so", "hard", "to", "watch", ".", "Robalee", "would", "have", "an", "easier", "time", "talking", "sense", "into", "a", "turnip", ".", "#90DayFianceHappilyEverAfter"], "text_2_tokenized": ["Does", "anyone", "have", "any", "good", "turnip", "prices", "?", "I", "keep", "getting", "really", "bad", "rates", "."]} -{"id": "1827-turnip", "word": "turnip", "label_binary": 0, "text_1": "Bouta start a couple of microgrows but also tryna start a hydroponic turnip", "token_idx_1": 12, "text_start_1": 69, "text_end_1": 75, "date_1": "2019-05", "text_2": "if anyone has high turnip prices 2day pls let me sell mine on ur island", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 25, "date_2": "2020-05", "text_1_tokenized": ["Bouta", "start", "a", "couple", "of", "microgrows", "but", "also", "tryna", "start", "a", "hydroponic", "turnip"], "text_2_tokenized": ["if", "anyone", "has", "high", "turnip", "prices", "2day", "pls", "let", "me", "sell", "mine", "on", "ur", "island"]} -{"id": "1828-turnip", "word": "turnip", "label_binary": 0, "text_1": "The Queen was seen wearing a turnip hat. 
God save us all.", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 35, "date_1": "2019-05", "text_2": "if anyone has good turnip prices pls let me know bc i don't want to wait in line for turnip exchange i have shit to do ok", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 25, "date_2": "2020-05", "text_1_tokenized": ["The", "Queen", "was", "seen", "wearing", "a", "turnip", "hat", ".", "God", "save", "us", "all", "."], "text_2_tokenized": ["if", "anyone", "has", "good", "turnip", "prices", "pls", "let", "me", "know", "bc", "i", "don't", "want", "to", "wait", "in", "line", "for", "turnip", "exchange", "i", "have", "shit", "to", "do", "ok"]} -{"id": "1829-turnip", "word": "turnip", "label_binary": 0, "text_1": "Y'all know today was serious sad bc I had to make chicken fried steak & gravy & turnip greens.", "token_idx_1": 17, "text_start_1": 88, "text_end_1": 94, "date_1": "2019-05", "text_2": "Anyone have a good turnip price??", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 25, "date_2": "2020-05", "text_1_tokenized": ["Y'all", "know", "today", "was", "serious", "sad", "bc", "I", "had", "to", "make", "chicken", "fried", "steak", "&", "gravy", "&", "turnip", "greens", "."], "text_2_tokenized": ["Anyone", "have", "a", "good", "turnip", "price", "?", "?"]} -{"id": "1830-turnip", "word": "turnip", "label_binary": 0, "text_1": "Bruh I bought some onions from this wheelbarrow aboki guy 3 weeks ago and I'm still using one ! and I cook everyday . Remember that Russian folklore \"The Enormous turnip\" ?? Think that", "token_idx_1": 31, "text_start_1": 163, "text_end_1": 169, "date_1": "2019-05", "text_2": "when your turnip price is 95 bells but you just spent all your bells on a bridge", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 16, "date_2": "2020-05", "text_1_tokenized": ["Bruh", "I", "bought", "some", "onions", "from", "this", "wheelbarrow", "aboki", "guy", "3", "weeks", "ago", "and", "I'm", "still", "using", "one", "!", "and", "I", "cook", "everyday", ".", "Remember", "that", "Russian", "folklore", "\"", "The", "Enormous", "turnip", "\"", "?", "?", "Think", "that"], "text_2_tokenized": ["when", "your", "turnip", "price", "is", "95", "bells", "but", "you", "just", "spent", "all", "your", "bells", "on", "a", "bridge"]} -{"id": "1831-turnip", "word": "turnip", "label_binary": 0, "text_1": "Every single man who's jumped in My DMs like \u201cI ain't wanna be your sub but you're hot wanna experience a REAL ALPHA MALE with a BIG DICK\u201d has had the sex appeal of a mouldy turnip.", "token_idx_1": 38, "text_start_1": 174, "text_end_1": 180, "date_1": "2019-05", "text_2": "If anyone has great turnip prices I can use and would let me come to their Island, can you notify me? Thx! :)", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 26, "date_2": "2020-05", "text_1_tokenized": ["Every", "single", "man", "who's", "jumped", "in", "My", "DMs", "like", "\u201c", "I", "ain't", "wanna", "be", "your", "sub", "but", "you're", "hot", "wanna", "experience", "a", "REAL", "ALPHA", "MALE", "with", "a", "BIG", "DICK", "\u201d", "has", "had", "the", "sex", "appeal", "of", "a", "mouldy", "turnip", "."], "text_2_tokenized": ["If", "anyone", "has", "great", "turnip", "prices", "I", "can", "use", "and", "would", "let", "me", "come", "to", "their", "Island", ",", "can", "you", "notify", "me", "?", "Thx", "!", ":)"]} -{"id": "1832-turnip", "word": "turnip", "label_binary": 0, "text_1": "Listen.. my grandma made the best turnip greens I've ever tasted in my life. 
Like I'm tempted to drive to Goldsboro after work to get some more.", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 40, "date_1": "2019-05", "text_2": "Who got good turnip price? #ACNHturnips", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 19, "date_2": "2020-05", "text_1_tokenized": ["Listen", "..", "my", "grandma", "made", "the", "best", "turnip", "greens", "I've", "ever", "tasted", "in", "my", "life", ".", "Like", "I'm", "tempted", "to", "drive", "to", "Goldsboro", "after", "work", "to", "get", "some", "more", "."], "text_2_tokenized": ["Who", "got", "good", "turnip", "price", "?", "#ACNHturnips"]} -{"id": "1833-turnip", "word": "turnip", "label_binary": 0, "text_1": "Justin Amash a republican just said few congress persons have read the Mueller report. WTF are we paying you for congress? Read the damn report and impeach trump. America didn't just fall off the turnip truck. Just do it. We are over all the lies and criminality.", "token_idx_1": 37, "text_start_1": 196, "text_end_1": 202, "date_1": "2019-05", "text_2": "I kinda want to make a Discord or something just for fat people who play ACNH. I only want to share my resources/good turnip prices with fellow fats, tbh.", "token_idx_2": 26, "text_start_2": 118, "text_end_2": 124, "date_2": "2020-05", "text_1_tokenized": ["Justin", "Amash", "a", "republican", "just", "said", "few", "congress", "persons", "have", "read", "the", "Mueller", "report", ".", "WTF", "are", "we", "paying", "you", "for", "congress", "?", "Read", "the", "damn", "report", "and", "impeach", "trump", ".", "America", "didn't", "just", "fall", "off", "the", "turnip", "truck", ".", "Just", "do", "it", ".", "We", "are", "over", "all", "the", "lies", "and", "criminality", "."], "text_2_tokenized": ["I", "kinda", "want", "to", "make", "a", "Discord", "or", "something", "just", "for", "fat", "people", "who", "play", "ACNH", ".", "I", "only", "want", "to", "share", "my", "resources", "/", "good", "turnip", "prices", "with", "fellow", "fats", ",", "tbh", "."]} -{"id": "1834-turnip", "word": "turnip", "label_binary": 0, "text_1": "Someone asked me for my pen settings and I don't remember who it was. Ive been having a lot of fun using the default turnip pen (Clip studio) instead of my customized line art pen actually. 
I just use any pen to sketch since my sketches aren't really that detailed or anything", "token_idx_1": 25, "text_start_1": 117, "text_end_1": 123, "date_1": "2019-05", "text_2": "Hey does anyone have a good turnip price in animal crossing rn?", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 34, "date_2": "2020-05", "text_1_tokenized": ["Someone", "asked", "me", "for", "my", "pen", "settings", "and", "I", "don't", "remember", "who", "it", "was", ".", "Ive", "been", "having", "a", "lot", "of", "fun", "using", "the", "default", "turnip", "pen", "(", "Clip", "studio", ")", "instead", "of", "my", "customized", "line", "art", "pen", "actually", ".", "I", "just", "use", "any", "pen", "to", "sketch", "since", "my", "sketches", "aren't", "really", "that", "detailed", "or", "anything"], "text_2_tokenized": ["Hey", "does", "anyone", "have", "a", "good", "turnip", "price", "in", "animal", "crossing", "rn", "?"]} -{"id": "1835-turnip", "word": "turnip", "label_binary": 0, "text_1": "i like sam but oh my lord if leon hit that turnip combo...", "token_idx_1": 11, "text_start_1": 43, "text_end_1": 49, "date_1": "2019-05", "text_2": "bitches b like \u201cmy turnip prices are at 703 bells dm this person for the code\u201d\ud83d\ude2d\ud83d\ude2dCAN YALL NOT TELL THATS FAKE-", "token_idx_2": 5, "text_start_2": 19, "text_end_2": 25, "date_2": "2020-05", "text_1_tokenized": ["i", "like", "sam", "but", "oh", "my", "lord", "if", "leon", "hit", "that", "turnip", "combo", "..."], "text_2_tokenized": ["bitches", "b", "like", "\u201c", "my", "turnip", "prices", "are", "at", "703", "bells", "dm", "this", "person", "for", "the", "code", "\u201d", "\ud83d\ude2d", "\ud83d\ude2d", "CAN", "YALL", "NOT", "TELL", "THATS", "FAKE", "-"]} -{"id": "1836-turnip", "word": "turnip", "label_binary": 0, "text_1": "If you're the type of person to post on social media all about you changing and working on yourself because you're a dickhead and then not work on it then you're a flat out attention seeking little turnip", "token_idx_1": 37, "text_start_1": 198, "text_end_1": 204, "date_1": "2019-05", "text_2": "Imagine there's two farmers on an island. Well, not farmers, more like fishermen/lepidopterists/turnip merchants. One of them gets to be the Resident Representative because they are a child and logged in to the Switch first. Who collects the iron for a shop? 
The other \"farmer\"!", "token_idx_2": 19, "text_start_2": 96, "text_end_2": 102, "date_2": "2020-05", "text_1_tokenized": ["If", "you're", "the", "type", "of", "person", "to", "post", "on", "social", "media", "all", "about", "you", "changing", "and", "working", "on", "yourself", "because", "you're", "a", "dickhead", "and", "then", "not", "work", "on", "it", "then", "you're", "a", "flat", "out", "attention", "seeking", "little", "turnip"], "text_2_tokenized": ["Imagine", "there's", "two", "farmers", "on", "an", "island", ".", "Well", ",", "not", "farmers", ",", "more", "like", "fishermen", "/", "lepidopterists", "/", "turnip", "merchants", ".", "One", "of", "them", "gets", "to", "be", "the", "Resident", "Representative", "because", "they", "are", "a", "child", "and", "logged", "in", "to", "the", "Switch", "first", ".", "Who", "collects", "the", "iron", "for", "a", "shop", "?", "The", "other", "\"", "farmer", "\"", "!"]} -{"id": "1837-turnip", "word": "turnip", "label_binary": 0, "text_1": "I just saw a billboard advertising canned turnip greens.", "token_idx_1": 7, "text_start_1": 42, "text_end_1": 48, "date_1": "2019-05", "text_2": "What price range to people consider good turnip prices?", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 47, "date_2": "2020-05", "text_1_tokenized": ["I", "just", "saw", "a", "billboard", "advertising", "canned", "turnip", "greens", "."], "text_2_tokenized": ["What", "price", "range", "to", "people", "consider", "good", "turnip", "prices", "?"]} -{"id": "1838-turnip", "word": "turnip", "label_binary": 0, "text_1": "i wanna eat some turnip cake ...", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 23, "date_1": "2019-05", "text_2": "I really was graced with a declining turnip pattern the weekend I buy a shit ton of them. love that for me.", "token_idx_2": 7, "text_start_2": 37, "text_end_2": 43, "date_2": "2020-05", "text_1_tokenized": ["i", "wanna", "eat", "some", "turnip", "cake", "..."], "text_2_tokenized": ["I", "really", "was", "graced", "with", "a", "declining", "turnip", "pattern", "the", "weekend", "I", "buy", "a", "shit", "ton", "of", "them", ".", "love", "that", "for", "me", "."]} -{"id": "1839-turnip", "word": "turnip", "label_binary": 0, "text_1": "My mom went off on these turnip greens ... \ud83d\ude0b\ud83d\ude0b", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 31, "date_1": "2019-05", "text_2": "My turnip prices are 627, someone call @elijahwood lmao #acnh #TurnipPrice ,", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["My", "mom", "went", "off", "on", "these", "turnip", "greens", "...", "\ud83d\ude0b", "\ud83d\ude0b"], "text_2_tokenized": ["My", "turnip", "prices", "are", "627", ",", "someone", "call", "@elijahwood", "lmao", "#acnh", "#TurnipPrice", ","]} -{"id": "1840-turnip", "word": "turnip", "label_binary": 0, "text_1": "paddocks sown dry with oats for feed at 2 to 3 leaf with say 60 to 70% medic in inter row rest turnip/capeweed not a huge amount of herbicide options? Broadstrike or spraygraze? bugger all ryegrass/grass", "token_idx_1": 23, "text_start_1": 95, "text_end_1": 101, "date_1": "2019-05", "text_2": "What's with all these fake people posting about turnip prices. If anyone has a good price hit me up pls! 
#ACNHturnips", "token_idx_2": 8, "text_start_2": 48, "text_end_2": 54, "date_2": "2020-05", "text_1_tokenized": ["paddocks", "sown", "dry", "with", "oats", "for", "feed", "at", "2", "to", "3", "leaf", "with", "say", "60", "to", "70", "%", "medic", "in", "inter", "row", "rest", "turnip", "/", "capeweed", "not", "a", "huge", "amount", "of", "herbicide", "options", "?", "Broadstrike", "or", "spraygraze", "?", "bugger", "all", "ryegrass", "/", "grass"], "text_2_tokenized": ["What's", "with", "all", "these", "fake", "people", "posting", "about", "turnip", "prices", ".", "If", "anyone", "has", "a", "good", "price", "hit", "me", "up", "pls", "!", "#ACNHturnips"]} -{"id": "1841-turnip", "word": "turnip", "label_binary": 0, "text_1": "\u2018i didnt just fall of the turnip cart' is confirmed as a weird ohio thing! @kateschapira", "token_idx_1": 7, "text_start_1": 26, "text_end_1": 32, "date_1": "2019-05", "text_2": "our turnip prices are currently 468 bells, who wants to come? #AnimalCrossingNewHorizions", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 10, "date_2": "2020-05", "text_1_tokenized": ["\u2018", "i", "didnt", "just", "fall", "of", "the", "turnip", "cart", "'", "is", "confirmed", "as", "a", "weird", "ohio", "thing", "!", "@kateschapira"], "text_2_tokenized": ["our", "turnip", "prices", "are", "currently", "468", "bells", ",", "who", "wants", "to", "come", "?", "#AnimalCrossingNewHorizions"]} -{"id": "1842-turnip", "word": "turnip", "label_binary": 1, "text_1": "I love how Tories can label Diane Abbott thick for getting her numbers mixed up once, years ago, and be abor to contemplate Dominic Raab as PM. A man who doesn't know why Dover is an important port. You might as well put a mouldy turnip in charge of the country.", "token_idx_1": 49, "text_start_1": 230, "text_end_1": 236, "date_1": "2019-05", "text_2": "get lost you orange turnip.", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 26, "date_2": "2020-05", "text_1_tokenized": ["I", "love", "how", "Tories", "can", "label", "Diane", "Abbott", "thick", "for", "getting", "her", "numbers", "mixed", "up", "once", ",", "years", "ago", ",", "and", "be", "abor", "to", "contemplate", "Dominic", "Raab", "as", "PM", ".", "A", "man", "who", "doesn't", "know", "why", "Dover", "is", "an", "important", "port", ".", "You", "might", "as", "well", "put", "a", "mouldy", "turnip", "in", "charge", "of", "the", "country", "."], "text_2_tokenized": ["get", "lost", "you", "orange", "turnip", "."]} -{"id": "1843-turnip", "word": "turnip", "label_binary": 0, "text_1": "I smell these turnip greens cooking and I am stoked. \ud83d\ude0b", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 20, "date_1": "2019-05", "text_2": "I need someone with good turnip prices \ud83d\ude29 I gotta unload some product ita almost Sunday. HELP !!!!!!!!", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 31, "date_2": "2020-05", "text_1_tokenized": ["I", "smell", "these", "turnip", "greens", "cooking", "and", "I", "am", "stoked", ".", "\ud83d\ude0b"], "text_2_tokenized": ["I", "need", "someone", "with", "good", "turnip", "prices", "\ud83d\ude29", "I", "gotta", "unload", "some", "product", "ita", "almost", "Sunday", ".", "HELP", "!", "!", "!"]} -{"id": "1844-turnip", "word": "turnip", "label_binary": 0, "text_1": "Braise feijoa and turnip. 
Serve in a spool of wire.", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 24, "date_1": "2019-05", "text_2": "Anyone have high turnip prices in Animal Crossing?", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 23, "date_2": "2020-05", "text_1_tokenized": ["Braise", "feijoa", "and", "turnip", ".", "Serve", "in", "a", "spool", "of", "wire", "."], "text_2_tokenized": ["Anyone", "have", "high", "turnip", "prices", "in", "Animal", "Crossing", "?"]} -{"id": "1845-turnip", "word": "turnip", "label_binary": 0, "text_1": "You can't squeeze blood out of a turnip.", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 39, "date_1": "2019-05", "text_2": "Got some bad news so I'm logging out for the night. If you need me or have good turnip prices, I'll be on priv.", "token_idx_2": 19, "text_start_2": 80, "text_end_2": 86, "date_2": "2020-05", "text_1_tokenized": ["You", "can't", "squeeze", "blood", "out", "of", "a", "turnip", "."], "text_2_tokenized": ["Got", "some", "bad", "news", "so", "I'm", "logging", "out", "for", "the", "night", ".", "If", "you", "need", "me", "or", "have", "good", "turnip", "prices", ",", "I'll", "be", "on", "priv", "."]} -{"id": "1846-turnip", "word": "turnip", "label_binary": 0, "text_1": "So the moral of this show is that a white man, any white man, even this beige turnip, is better than a woman with brains and ambition #TheFinalEpisode", "token_idx_1": 19, "text_start_1": 78, "text_end_1": 84, "date_1": "2019-05", "text_2": "Does anyone have decent turnip prices? Literally anything 100 or above so I could atleast make some profit? Pls help \ud83d\udc49\ud83c\udffb\ud83d\udc48\ud83c\udffb", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 30, "date_2": "2020-05", "text_1_tokenized": ["So", "the", "moral", "of", "this", "show", "is", "that", "a", "white", "man", ",", "any", "white", "man", ",", "even", "this", "beige", "turnip", ",", "is", "better", "than", "a", "woman", "with", "brains", "and", "ambition", "#TheFinalEpisode"], "text_2_tokenized": ["Does", "anyone", "have", "decent", "turnip", "prices", "?", "Literally", "anything", "100", "or", "above", "so", "I", "could", "atleast", "make", "some", "profit", "?", "Pls", "help", "\ud83d\udc49\ud83c\udffb", "\ud83d\udc48\ud83c\udffb"]} -{"id": "1847-turnip", "word": "turnip", "label_binary": 0, "text_1": "honestly it's so weird how 8th graders today look like mini supermodels but I literally looked like a turnip", "token_idx_1": 18, "text_start_1": 102, "text_end_1": 108, "date_1": "2019-05", "text_2": "If anyone needs to do last minute turnip selling, my prices are 480. x", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 40, "date_2": "2020-05", "text_1_tokenized": ["honestly", "it's", "so", "weird", "how", "8th", "graders", "today", "look", "like", "mini", "supermodels", "but", "I", "literally", "looked", "like", "a", "turnip"], "text_2_tokenized": ["If", "anyone", "needs", "to", "do", "last", "minute", "turnip", "selling", ",", "my", "prices", "are", "480", ".", "x"]} -{"id": "1848-turnip", "word": "turnip", "label_binary": 0, "text_1": "funny how Peach turnip is called RNG as though there's a downside for the Peach player: worst-case scenario is the Peach gets the normal turnip and best case your opponent just dies whenever. IMO there should be a chance to pull turnips that have lower stats than default turnips", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 22, "date_1": "2019-05", "text_2": "Does anyone else feel like they are living this quarantine one turnip day to the next? 
Because same", "token_idx_2": 11, "text_start_2": 63, "text_end_2": 69, "date_2": "2020-05", "text_1_tokenized": ["funny", "how", "Peach", "turnip", "is", "called", "RNG", "as", "though", "there's", "a", "downside", "for", "the", "Peach", "player", ":", "worst-case", "scenario", "is", "the", "Peach", "gets", "the", "normal", "turnip", "and", "best", "case", "your", "opponent", "just", "dies", "whenever", ".", "IMO", "there", "should", "be", "a", "chance", "to", "pull", "turnips", "that", "have", "lower", "stats", "than", "default", "turnips"], "text_2_tokenized": ["Does", "anyone", "else", "feel", "like", "they", "are", "living", "this", "quarantine", "one", "turnip", "day", "to", "the", "next", "?", "Because", "same"]} -{"id": "1849-turnip", "word": "turnip", "label_binary": 0, "text_1": "Today, I'm serving up some dysfunctional, content, devious turnip realness.", "token_idx_1": 11, "text_start_1": 59, "text_end_1": 65, "date_1": "2019-05", "text_2": "My turnip prices have been shittt this week", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Today", ",", "I'm", "serving", "up", "some", "dysfunctional", ",", "content", ",", "devious", "turnip", "realness", "."], "text_2_tokenized": ["My", "turnip", "prices", "have", "been", "shittt", "this", "week"]} -{"id": "1850-turnip", "word": "turnip", "label_binary": 0, "text_1": "Chopped lettuce, Vidalia onion, red turnip and cucumber salad topped with jerked chicken breast and a Vidalia vignette....lunch is served.", "token_idx_1": 7, "text_start_1": 36, "text_end_1": 42, "date_1": "2019-05", "text_2": "i havent bought a single turnip yet bcz i sleep till noon and never see daisy mae \ud83d\udc94", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 31, "date_2": "2020-05", "text_1_tokenized": ["Chopped", "lettuce", ",", "Vidalia", "onion", ",", "red", "turnip", "and", "cucumber", "salad", "topped", "with", "jerked", "chicken", "breast", "and", "a", "Vidalia", "vignette", "...", "lunch", "is", "served", "."], "text_2_tokenized": ["i", "havent", "bought", "a", "single", "turnip", "yet", "bcz", "i", "sleep", "till", "noon", "and", "never", "see", "daisy", "mae", "\ud83d\udc94"]} -{"id": "1851-turnip", "word": "turnip", "label_binary": 0, "text_1": "Bruh I made chicken, turnip greens, squash, corn, and sweet potatoes for dinner. Now I'm over here stuffed and shit\ud83d\ude44\ud83d\ude29", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 27, "date_1": "2019-05", "text_2": "worth a shot i'm desperate. someone who's turnip prices are over 100 let me come over to sell mine. \ud83d\ude2d #acnh #AnimalCrossingNewHorizons #animalcrossingisland", "token_idx_2": 8, "text_start_2": 42, "text_end_2": 48, "date_2": "2020-05", "text_1_tokenized": ["Bruh", "I", "made", "chicken", ",", "turnip", "greens", ",", "squash", ",", "corn", ",", "and", "sweet", "potatoes", "for", "dinner", ".", "Now", "I'm", "over", "here", "stuffed", "and", "shit", "\ud83d\ude44", "\ud83d\ude29"], "text_2_tokenized": ["worth", "a", "shot", "i'm", "desperate", ".", "someone", "who's", "turnip", "prices", "are", "over", "100", "let", "me", "come", "over", "to", "sell", "mine", ".", "\ud83d\ude2d", "#acnh", "#AnimalCrossingNewHorizons", "#animalcrossingisland"]} -{"id": "1852-turnip", "word": "turnip", "label_binary": 0, "text_1": "Now my mama talking about she wants some turnip greens and dumplings tomorrow....I'm already slaving over this grill with my titties out now you want me to move to the stove tomorrow? 
\ud83d\ude24\ud83d\ude21 No ma'am!", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 47, "date_1": "2019-05", "text_2": "Midweek turnip price check - any friends have good prices?", "token_idx_2": 1, "text_start_2": 8, "text_end_2": 14, "date_2": "2020-05", "text_1_tokenized": ["Now", "my", "mama", "talking", "about", "she", "wants", "some", "turnip", "greens", "and", "dumplings", "tomorrow", "...", "I'm", "already", "slaving", "over", "this", "grill", "with", "my", "titties", "out", "now", "you", "want", "me", "to", "move", "to", "the", "stove", "tomorrow", "?", "\ud83d\ude24", "\ud83d\ude21", "No", "ma'am", "!"], "text_2_tokenized": ["Midweek", "turnip", "price", "check", "-", "any", "friends", "have", "good", "prices", "?"]} -{"id": "1853-turnip", "word": "turnip", "label_binary": 0, "text_1": "Someone: \"Did you just fall off the turnip truck?\" Me, wiping turnips out of my hair: pfft. No.", "token_idx_1": 9, "text_start_1": 36, "text_end_1": 42, "date_1": "2019-05", "text_2": "My turnip price is 144 is that good?", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Someone", ":", "\"", "Did", "you", "just", "fall", "off", "the", "turnip", "truck", "?", "\"", "Me", ",", "wiping", "turnips", "out", "of", "my", "hair", ":", "pfft", ".", "No", "."], "text_2_tokenized": ["My", "turnip", "price", "is", "144", "is", "that", "good", "?"]} -{"id": "1854-turnip", "word": "turnip", "label_binary": 0, "text_1": "I didn't just fall off the turnip truck. (I'm not naive/inexperienced.) @robcesternino @tbirdcooper", "token_idx_1": 6, "text_start_1": 27, "text_end_1": 33, "date_1": "2019-05", "text_2": "Anyone gonna have good turnip prices throughout the rest the week? I think mine is decreasing :/", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 29, "date_2": "2020-05", "text_1_tokenized": ["I", "didn't", "just", "fall", "off", "the", "turnip", "truck", ".", "(", "I'm", "not", "naive", "/", "inexperienced", ".", ")", "@robcesternino", "@tbirdcooper"], "text_2_tokenized": ["Anyone", "gonna", "have", "good", "turnip", "prices", "throughout", "the", "rest", "the", "week", "?", "I", "think", "mine", "is", "decreasing", ":/"]} -{"id": "1855-turnip", "word": "turnip", "label_binary": 0, "text_1": "a chronograph is a turnip: lugubrious, yet splenic", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 25, "date_1": "2019-05", "text_2": "Does anyone have turnip prices higher than 103 bells? pls it just going to get lower in my island ;-;", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 23, "date_2": "2020-05", "text_1_tokenized": ["a", "chronograph", "is", "a", "turnip", ":", "lugubrious", ",", "yet", "splenic"], "text_2_tokenized": ["Does", "anyone", "have", "turnip", "prices", "higher", "than", "103", "bells", "?", "pls", "it", "just", "going", "to", "get", "lower", "in", "my", "island", ";", "-", ";"]} -{"id": "1856-turnip", "word": "turnip", "label_binary": 0, "text_1": "Good to see you fleetingly just now @gregjames . Your escalator based rendition of \ud83c\udf37 or turnip brought me some much needed LOLs. 
Enjoy your trip \ud83d\udc11", "token_idx_1": 16, "text_start_1": 88, "text_end_1": 94, "date_1": "2019-05", "text_2": "My turnip prices are 533 rn be my girlfriend and I'll dm you the dodo code", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["Good", "to", "see", "you", "fleetingly", "just", "now", "@gregjames", ".", "Your", "escalator", "based", "rendition", "of", "\ud83c\udf37", "or", "turnip", "brought", "me", "some", "much", "needed", "LOLs", ".", "Enjoy", "your", "trip", "\ud83d\udc11"], "text_2_tokenized": ["My", "turnip", "prices", "are", "533", "rn", "be", "my", "girlfriend", "and", "I'll", "dm", "you", "the", "dodo", "code"]} -{"id": "1857-turnip", "word": "turnip", "label_binary": 0, "text_1": "|| My aesthetic is comparing Richard's social skills to the most random shit... so far we have Rabid Badger and Disgruntled turnip.", "token_idx_1": 23, "text_start_1": 124, "text_end_1": 130, "date_1": "2019-05", "text_2": "all my friends getting viral tweets over nothing and i'm over here lying about my turnip prices for attention because i'm bored WHY can't the universe just work for me", "token_idx_2": 15, "text_start_2": 82, "text_end_2": 88, "date_2": "2020-05", "text_1_tokenized": ["|", "|", "My", "aesthetic", "is", "comparing", "Richard's", "social", "skills", "to", "the", "most", "random", "shit", "...", "so", "far", "we", "have", "Rabid", "Badger", "and", "Disgruntled", "turnip", "."], "text_2_tokenized": ["all", "my", "friends", "getting", "viral", "tweets", "over", "nothing", "and", "i'm", "over", "here", "lying", "about", "my", "turnip", "prices", "for", "attention", "because", "i'm", "bored", "WHY", "can't", "the", "universe", "just", "work", "for", "me"]} -{"id": "1858-turnip", "word": "turnip", "label_binary": 1, "text_1": "ive never had a turnip ever", "token_idx_1": 4, "text_start_1": 16, "text_end_1": 22, "date_1": "2019-05", "text_2": "Watched a wonderful documentary chronicling the history and legacy of Halas & Batchelor. Still outraged at all who doubted Joy, a woman responsible for the foundation of an entire industry. Also, I could hear Vera Linnecar slander Charley's upside-down turnip head for hours.", "token_idx_2": 43, "text_start_2": 257, "text_end_2": 263, "date_2": "2020-05", "text_1_tokenized": ["ive", "never", "had", "a", "turnip", "ever"], "text_2_tokenized": ["Watched", "a", "wonderful", "documentary", "chronicling", "the", "history", "and", "legacy", "of", "Halas", "&", "Batchelor", ".", "Still", "outraged", "at", "all", "who", "doubted", "Joy", ",", "a", "woman", "responsible", "for", "the", "foundation", "of", "an", "entire", "industry", ".", "Also", ",", "I", "could", "hear", "Vera", "Linnecar", "slander", "Charley's", "upside-down", "turnip", "head", "for", "hours", "."]} -{"id": "1859-turnip", "word": "turnip", "label_binary": 0, "text_1": "Meanwhile, Groupie messaged me as my shift was beginning: \u201cToday's task is to find a turnip.\u201d It turned out to be quite a challenge indeed\u2026 #ZaphAndGroupiesSurrealLife", "token_idx_1": 18, "text_start_1": 85, "text_end_1": 91, "date_1": "2019-05", "text_2": "434 turnip prices, who wants? 
\ud83d\udc40 #TurnipExchange #ACNH", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 10, "date_2": "2020-05", "text_1_tokenized": ["Meanwhile", ",", "Groupie", "messaged", "me", "as", "my", "shift", "was", "beginning", ":", "\u201c", "Today's", "task", "is", "to", "find", "a", "turnip", ".", "\u201d", "It", "turned", "out", "to", "be", "quite", "a", "challenge", "indeed", "\u2026", "#ZaphAndGroupiesSurrealLife"], "text_2_tokenized": ["434", "turnip", "prices", ",", "who", "wants", "?", "\ud83d\udc40", "#TurnipExchange", "#ACNH"]} -{"id": "1860-turnip", "word": "turnip", "label_binary": 0, "text_1": "I very much wish people hadn't voted for a 4chan shitlord troll with the mental acuity of a turnip to squat in the WH. Maybe it's just me.", "token_idx_1": 18, "text_start_1": 92, "text_end_1": 98, "date_1": "2019-05", "text_2": "Nvm on that turnip tweet apparently my predictions were wrong cuz I forgot to add a value from yesterday. They're at 186 rn tho if anyone wants to sell here", "token_idx_2": 3, "text_start_2": 12, "text_end_2": 18, "date_2": "2020-05", "text_1_tokenized": ["I", "very", "much", "wish", "people", "hadn't", "voted", "for", "a", "4chan", "shitlord", "troll", "with", "the", "mental", "acuity", "of", "a", "turnip", "to", "squat", "in", "the", "WH", ".", "Maybe", "it's", "just", "me", "."], "text_2_tokenized": ["Nvm", "on", "that", "turnip", "tweet", "apparently", "my", "predictions", "were", "wrong", "cuz", "I", "forgot", "to", "add", "a", "value", "from", "yesterday", ".", "They're", "at", "186", "rn", "tho", "if", "anyone", "wants", "to", "sell", "here"]} -{"id": "1861-turnip", "word": "turnip", "label_binary": 0, "text_1": "I think it's pretty stupid that Manafort will have to go to Rikers for what he did, like seriously? He's going to have to spend 4 yrs in solitary confinement for his own well being? It's bullsh*t. Seems some ppl are still trying to get blood out of a turnip.", "token_idx_1": 55, "text_start_1": 251, "text_end_1": 257, "date_1": "2019-05", "text_2": "im still staying off twitter but if anyone has ok turnip prices pls help me i'm having a problem", "token_idx_2": 10, "text_start_2": 50, "text_end_2": 56, "date_2": "2020-05", "text_1_tokenized": ["I", "think", "it's", "pretty", "stupid", "that", "Manafort", "will", "have", "to", "go", "to", "Rikers", "for", "what", "he", "did", ",", "like", "seriously", "?", "He's", "going", "to", "have", "to", "spend", "4", "yrs", "in", "solitary", "confinement", "for", "his", "own", "well", "being", "?", "It's", "bullsh", "*", "t", ".", "Seems", "some", "ppl", "are", "still", "trying", "to", "get", "blood", "out", "of", "a", "turnip", "."], "text_2_tokenized": ["im", "still", "staying", "off", "twitter", "but", "if", "anyone", "has", "ok", "turnip", "prices", "pls", "help", "me", "i'm", "having", "a", "problem"]} -{"id": "1862-turnip", "word": "turnip", "label_binary": 0, "text_1": "That moron trump calls Mayor Pete Alfred E Neumann. First proving how young and vibrant he is he channels a 196O icon. Second, Pete us a Harvard grad and veteran; trump has the mind of a turnip. 
He should STFU", "token_idx_1": 40, "text_start_1": 187, "text_end_1": 193, "date_1": "2019-05", "text_2": "My turnip prices are 513 bells,,, but I didn't buy turnips this week \ud83e\udd7a\ud83d\ude2d#AnimalCrossingNewHorizons #AnimalCrossingturnips", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 9, "date_2": "2020-05", "text_1_tokenized": ["That", "moron", "trump", "calls", "Mayor", "Pete", "Alfred", "E", "Neumann", ".", "First", "proving", "how", "young", "and", "vibrant", "he", "is", "he", "channels", "a", "196O", "icon", ".", "Second", ",", "Pete", "us", "a", "Harvard", "grad", "and", "veteran", ";", "trump", "has", "the", "mind", "of", "a", "turnip", ".", "He", "should", "STFU"], "text_2_tokenized": ["My", "turnip", "prices", "are", "513", "bells", ",", ",", ",", "but", "I", "didn't", "buy", "turnips", "this", "week", "\ud83e\udd7a", "\ud83d\ude2d", "#AnimalCrossingNewHorizons", "#AnimalCrossingturnips"]}
-{"id": "1863-turnip", "word": "turnip", "label_binary": 0, "text_1": "To celebrate Charmy's birthday, I ate all-you-can-eat hot pot with @ctrl_zeru and @lightofthedeep ! We also had turnip hairstyles hehe \u2728\ud83c\udf7d", "token_idx_1": 18, "text_start_1": 112, "text_end_1": 118, "date_1": "2019-05", "text_2": "Crap! I forgot I was queueing for something at turnip exchange. I'm so sorry!", "token_idx_2": 10, "text_start_2": 47, "text_end_2": 53, "date_2": "2020-05", "text_1_tokenized": ["To", "celebrate", "Charmy's", "birthday", ",", "I", "ate", "all-you-can-eat", "hot", "pot", "with", "@ctrl_zeru", "and", "@lightofthedeep", "!", "We", "also", "had", "turnip", "hairstyles", "hehe", "\u2728", "\ud83c\udf7d"], "text_2_tokenized": ["Crap", "!", "I", "forgot", "I", "was", "queueing", "for", "something", "at", "turnip", "exchange", ".", "I'm", "so", "sorry", "!"]}
-{"id": "1864-turnip", "word": "turnip", "label_binary": 0, "text_1": "lrt is me trying to keep turnip from my food", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 31, "date_1": "2019-05", "text_2": "Blocking every single account I see posting 'turnip prices at X on my friends island!'", "token_idx_2": 8, "text_start_2": 45, "text_end_2": 51, "date_2": "2020-05", "text_1_tokenized": ["lrt", "is", "me", "trying", "to", "keep", "turnip", "from", "my", "food"], "text_2_tokenized": ["Blocking", "every", "single", "account", "I", "see", "posting", "'", "turnip", "prices", "at", "X", "on", "my", "friends", "island", "!", "'"]}
-{"id": "1865-turnip", "word": "turnip", "label_binary": 0, "text_1": "Ain't nothing like a good Sunday dinner .. shit would've hit different with some turnip greens tho", "token_idx_1": 14, "text_start_1": 81, "text_end_1": 87, "date_1": "2019-05", "text_2": "I don't even want super fucking good turnip prices, I'm good with fucking 300 bells for fucks sake.", "token_idx_2": 7, "text_start_2": 37, "text_end_2": 43, "date_2": "2020-05", "text_1_tokenized": ["Ain't", "nothing", "like", "a", "good", "Sunday", "dinner", "..", "shit", "would've", "hit", "different", "with", "some", "turnip", "greens", "tho"], "text_2_tokenized": ["I", "don't", "even", "want", "super", "fucking", "good", "turnip", "prices", ",", "I'm", "good", "with", "fucking", "300", "bells", "for", "fucks", "sake", "."]}
-{"id": "1866-turnip", "word": "turnip", "label_binary": 0, "text_1": "I just want my Granny to cook me some turnip greens and kale\u2014 mixed with a lot of stems. \n\ud83d\udc45", "token_idx_1": 9, "text_start_1": 38, "text_end_1": 44, "date_1": "2019-05", "text_2": "Oh my god this turnip thing in Animal Crossing is annoying", "token_idx_2": 4, "text_start_2": 15, "text_end_2": 21, "date_2": "2020-05", "text_1_tokenized": ["I", "just", "want", "my", "Granny", "to", "cook", "me", "some", "turnip", "greens", "and", "kale", "\u2014", "mixed", "with", "a", "lot", "of", "stems", ".", "\ud83d\udc45"], "text_2_tokenized": ["Oh", "my", "god", "this", "turnip", "thing", "in", "Animal", "Crossing", "is", "annoying"]}
-{"id": "1867-turnip", "word": "turnip", "label_binary": 1, "text_1": "i really feel like a turnip dealer", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 27, "date_1": "2019-05", "text_2": "so I got robbed doing turnip exchanges.. I didn't think they could pick up stuff off the ground lol. Be careful out there everyone. #turnipprices #turnip #turnipexchange", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 28, "date_2": "2020-05", "text_1_tokenized": ["i", "really", "feel", "like", "a", "turnip", "dealer"], "text_2_tokenized": ["so", "I", "got", "robbed", "doing", "turnip", "exchanges", "..", "I", "didn't", "think", "they", "could", "pick", "up", "stuff", "off", "the", "ground", "lol", ".", "Be", "careful", "out", "there", "everyone", ".", "#turnipprices", "#turnip", "#turnipexchange"]}
-{"id": "1868-turnip", "word": "turnip", "label_binary": 0, "text_1": "Evil fuckery villifying natural health issues, women, free choice (when that is truly present as well), is just as crazy as kissing a royal turnip's fetid asshole when it has the root rot!", "token_idx_1": 29, "text_start_1": 140, "text_end_1": 148, "date_1": "2019-05", "text_2": "Anyone not have trash turnip prices?", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 28, "date_2": "2020-05", "text_1_tokenized": ["Evil", "fuckery", "villifying", "natural", "health", "issues", ",", "women", ",", "free", "choice", "(", "when", "that", "is", "truly", "present", "as", "well", ")", ",", "is", "just", "as", "crazy", "as", "kissing", "a", "royal", "turnip's", "fetid", "asshole", "when", "it", "has", "the", "root", "rot", "!"], "text_2_tokenized": ["Anyone", "not", "have", "trash", "turnip", "prices", "?"]}
-{"id": "1076-bunker", "word": "bunker", "label_binary": 0, "text_1": "Shock's starting off sloppy, and Reign is making few but key mistakes. Everyone's been sleeping during the break and I don't blame them, I'd sleep all day too if I knew stage 3 was gonna be goats featuring bunker.", "token_idx_1": 41, "text_start_1": 206, "text_end_1": 212, "date_1": "2019-06", "text_2": "New YouTube video will be live tomorrow we open the bunker easteregg on gunfight!! \nAlso I ran into my first stream snipers \ud83e\udd21 and they challenged us to a snipers only tune in to see how that went", "token_idx_2": 10, "text_start_2": 52, "text_end_2": 58, "date_2": "2020-06", "text_1_tokenized": ["Shock's", "starting", "off", "sloppy", ",", "and", "Reign", "is", "making", "few", "but", "key", "mistakes", ".", "Everyone's", "been", "sleeping", "during", "the", "break", "and", "I", "don't", "blame", "them", ",", "I'd", "sleep", "all", "day", "too", "if", "I", "knew", "stage", "3", "was", "gonna", "be", "goats", "featuring", "bunker", "."], "text_2_tokenized": ["New", "YouTube", "video", "will", "be", "live", "tomorrow", "we", "open", "the", "bunker", "easteregg", "on", "gunfight", "!", "!", "Also", "I", "ran", "into", "my", "first", "stream", "snipers", "\ud83e\udd21", "and", "they", "challenged", "us", "to", "a", "snipers", "only", "tune", "in", "to", "see", "how", "that", "went"]}
-{"id": "1077-bunker", "word": "bunker", "label_binary": 0, "text_1": "And in more sh*t news for #oil and demand: Singapore bunker fuel oil sales in May down 9% from a year ago, for the seventh consecutive month. At least it's not as bad as April. #OOTT", "token_idx_1": 13, "text_start_1": 53, "text_end_1": 59, "date_1": "2019-06", "text_2": "Anyone got a beat on Trump? Is he drugged out of his mind, cowering in the bunker, or has he had a medical episode?", "token_idx_2": 18, "text_start_2": 75, "text_end_2": 81, "date_2": "2020-06", "text_1_tokenized": ["And", "in", "more", "sh", "*", "t", "news", "for", "#oil", "and", "demand", ":", "Singapore", "bunker", "fuel", "oil", "sales", "in", "May", "down", "9", "%", "from", "a", "year", "ago", ",", "for", "the", "seventh", "consecutive", "month", ".", "At", "least", "it's", "not", "as", "bad", "as", "April", ".", "#OOTT"], "text_2_tokenized": ["Anyone", "got", "a", "beat", "on", "Trump", "?", "Is", "he", "drugged", "out", "of", "his", "mind", ",", "cowering", "in", "the", "bunker", ",", "or", "has", "he", "had", "a", "medical", "episode", "?"]}
-{"id": "1078-bunker", "word": "bunker", "label_binary": 0, "text_1": "#orign this is bullshit refs are f#cken shit but bunker is worse.. Joke", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 55, "date_1": "2019-06", "text_2": "still support Prez and will vote for him. begging him to come out of his mental bunker and act decisively. @realDonaldTrump please lead us", "token_idx_2": 17, "text_start_2": 80, "text_end_2": 86, "date_2": "2020-06", "text_1_tokenized": ["#orign", "this", "is", "bullshit", "refs", "are", "f", "#cken", "shit", "but", "bunker", "is", "worse", "..", "Joke"], "text_2_tokenized": ["still", "support", "Prez", "and", "will", "vote", "for", "him", ".", "begging", "him", "to", "come", "out", "of", "his", "mental", "bunker", "and", "act", "decisively", ".", "@realDonaldTrump", "please", "lead", "us"]}
-{"id": "1079-bunker", "word": "bunker", "label_binary": 1, "text_1": "if Hilary Swank tried to get into my apocalypse bunker looking like that I would let the robots have her, no offense", "token_idx_1": 9, "text_start_1": 48, "text_end_1": 54, "date_1": "2019-06", "text_2": "#Trump out of the bunker and addressing the nation then taking a stroll to St. John's to pay a visit to the church that met destruction of its own during the #protests. What a guy! \ud83d\udc4d\ud83c\uddfa\ud83c\uddf8 Love our president! \n\ud83e\udd70 #MAGA2!", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 24, "date_2": "2020-06", "text_1_tokenized": ["if", "Hilary", "Swank", "tried", "to", "get", "into", "my", "apocalypse", "bunker", "looking", "like", "that", "I", "would", "let", "the", "robots", "have", "her", ",", "no", "offense"], "text_2_tokenized": ["#Trump", "out", "of", "the", "bunker", "and", "addressing", "the", "nation", "then", "taking", "a", "stroll", "to", "St", ".", "John's", "to", "pay", "a", "visit", "to", "the", "church", "that", "met", "destruction", "of", "its", "own", "during", "the", "#protests", ".", "What", "a", "guy", "!", "\ud83d\udc4d", "\ud83c\uddfa", "\ud83c\uddf8", "Love", "our", "president", "!", "\ud83e\udd70", "#MAGA2", "!"]}
-{"id": "1080-bunker", "word": "bunker", "label_binary": 1, "text_1": "Once again, I love my Mama. She listens to my theories and sometimes is even on board with them. Tonight we discussed materials and locations for an apocalyptic bunker.", "token_idx_1": 31, "text_start_1": 161, "text_end_1": 167, "date_1": "2019-06", "text_2": "Wonder if Khan has a bunker paid for by us.", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-06", "text_1_tokenized": ["Once", "again", ",", "I", "love", "my", "Mama", ".", "She", "listens", "to", "my", "theories", "and", "sometimes", "is", "even", "on", "board", "with", "them", ".", "Tonight", "we", "discussed", "materials", "and", "locations", "for", "an", "apocalyptic", "bunker", "."], "text_2_tokenized": ["Wonder", "if", "Khan", "has", "a", "bunker", "paid", "for", "by", "us", "."]}
-{"id": "1081-bunker", "word": "bunker", "label_binary": 0, "text_1": "So, two holes have cost Tiger a chance to be at the top of the leaderboard. A double from the fairway on a par 5 yesterday, and a wedge out of a bunker that stayed in there just now. When he crashes, it is carnage.", "token_idx_1": 35, "text_start_1": 145, "text_end_1": 151, "date_1": "2019-06", "text_2": "Biden could win the election and do nothing throughout his four years. He could crawl in the White House bunker and never speak to anyone. He could sign the occasional law but otherwise, not even exist. AND IT WOULD STILL BE BETTER THAN FOUR YEARS OF TRUMP.", "token_idx_2": 20, "text_start_2": 105, "text_end_2": 111, "date_2": "2020-06", "text_1_tokenized": ["So", ",", "two", "holes", "have", "cost", "Tiger", "a", "chance", "to", "be", "at", "the", "top", "of", "the", "leaderboard", ".", "A", "double", "from", "the", "fairway", "on", "a", "par", "5", "yesterday", ",", "and", "a", "wedge", "out", "of", "a", "bunker", "that", "stayed", "in", "there", "just", "now", ".", "When", "he", "crashes", ",", "it", "is", "carnage", "."], "text_2_tokenized": ["Biden", "could", "win", "the", "election", "and", "do", "nothing", "throughout", "his", "four", "years", ".", "He", "could", "crawl", "in", "the", "White", "House", "bunker", "and", "never", "speak", "to", "anyone", ".", "He", "could", "sign", "the", "occasional", "law", "but", "otherwise", ",", "not", "even", "exist", ".", "AND", "IT", "WOULD", "STILL", "BE", "BETTER", "THAN", "FOUR", "YEARS", "OF", "TRUMP", "."]}
-{"id": "1082-bunker", "word": "bunker", "label_binary": 0, "text_1": "Golf is a game of nerves, can be very frustrating, golfers' mind is made of steel, check out Poulter attempt on the edge of that bunker, not to mention Captain America and his wedge, ....on his knee!! \n@PGATOUR @usopengolf", "token_idx_1": 29, "text_start_1": 129, "text_end_1": 135, "date_1": "2019-06", "text_2": "I keep seeing people call @POTUS bunker boy. Biden has LITERALLY been in hiding since this all started! So wouldnt that make @BidenWarRoom (best I can do since I'm blocked \ud83e\udd23\ud83e\udd23\ud83e\udd23) the #BunkerBoy The hypocrites on the left legitimately never cease to amaze me. Pt1.", "token_idx_2": 6, "text_start_2": 33, "text_end_2": 39, "date_2": "2020-06", "text_1_tokenized": ["Golf", "is", "a", "game", "of", "nerves", ",", "can", "be", "very", "frustrating", ",", "golfers", "'", "mind", "is", "made", "of", "steel", ",", "check", "out", "Poulter", "attempt", "on", "the", "edge", "of", "that", "bunker", ",", "not", "to", "mention", "Captain", "America", "and", "his", "wedge", ",", "...", "on", "his", "knee", "!", "!", "@PGATOUR", "@usopengolf"], "text_2_tokenized": ["I", "keep", "seeing", "people", "call", "@POTUS", "bunker", "boy", ".", "Biden", "has", "LITERALLY", "been", "in", "hiding", "since", "this", "all", "started", "!", "So", "wouldnt", "that", "make", "@BidenWarRoom", "(", "best", "I", "can", "do", "since", "I'm", "blocked", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23", ")", "the", "#BunkerBoy", "The", "hypocrites", "on", "the", "left", "legitimately", "never", "cease", "to", "amaze", "me", ".", "Pt1", "."]}
-{"id": "1083-bunker", "word": "bunker", "label_binary": 1, "text_1": "Sam: You're bi Dean: Obviously I'm not bi Sam: But you're married to Cas, an Angel in a male vessel, and you fuck him on every surface available in the bunker making us all traumatized for life daily Dean, making finger guns: You got me there", "token_idx_1": 35, "text_start_1": 152, "text_end_1": 158, "date_1": "2019-06", "text_2": "Told @GiraffesGoQuack that trump went to the bunker to 'inspect' it, and she started choking on her food. \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_2": 7, "text_start_2": 45, "text_end_2": 51, "date_2": "2020-06", "text_1_tokenized": ["Sam", ":", "You're", "bi", "Dean", ":", "Obviously", "I'm", "not", "bi", "Sam", ":", "But", "you're", "married", "to", "Cas", ",", "an", "Angel", "in", "a", "male", "vessel", ",", "and", "you", "fuck", "him", "on", "every", "surface", "available", "in", "the", "bunker", "making", "us", "all", "traumatized", "for", "life", "daily", "Dean", ",", "making", "finger", "guns", ":", "You", "got", "me", "there"], "text_2_tokenized": ["Told", "@GiraffesGoQuack", "that", "trump", "went", "to", "the", "bunker", "to", "'", "inspect", "'", "it", ",", "and", "she", "started", "choking", "on", "her", "food", ".", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"]}
-{"id": "1084-bunker", "word": "bunker", "label_binary": 1, "text_1": "I'm only trying to get rich so I can build a bunker the size of my house", "token_idx_1": 11, "text_start_1": 45, "text_end_1": 51, "date_1": "2019-06", "text_2": "Just don't get how China can build dedicated covid hospitals in 10 days yet the US has been in quarantine for 4 months and our hospitals are literally swamped and have been since the outbreak... \nit's like do y'all not get it or you too busy hiding in a bunker with ur $$$?", "token_idx_2": 50, "text_start_2": 253, "text_end_2": 259, "date_2": "2020-06", "text_1_tokenized": ["I'm", "only", "trying", "to", "get", "rich", "so", "I", "can", "build", "a", "bunker", "the", "size", "of", "my", "house"], "text_2_tokenized": ["Just", "don't", "get", "how", "China", "can", "build", "dedicated", "covid", "hospitals", "in", "10", "days", "yet", "the", "US", "has", "been", "in", "quarantine", "for", "4", "months", "and", "our", "hospitals", "are", "literally", "swamped", "and", "have", "been", "since", "the", "outbreak", "...", "it's", "like", "do", "y'all", "not", "get", "it", "or", "you", "too", "busy", "hiding", "in", "a", "bunker", "with", "ur", "$", "$", "$", "?"]}
-{"id": "1085-bunker", "word": "bunker", "label_binary": 0, "text_1": "//just fought the bunker and it dropped the bitch first try!", "token_idx_1": 5, "text_start_1": 18, "text_end_1": 24, "date_1": "2019-06", "text_2": "March on the WH America, the RAT will run to his bunker.", "token_idx_2": 12, "text_start_2": 49, "text_end_2": 55, "date_2": "2020-06", "text_1_tokenized": ["/", "/", "just", "fought", "the", "bunker", "and", "it", "dropped", "the", "bitch", "first", "try", "!"], "text_2_tokenized": ["March", "on", "the", "WH", "America", ",", "the", "RAT", "will", "run", "to", "his", "bunker", "."]}
-{"id": "1091-bunker", "word": "bunker", "label_binary": 0, "text_1": "Rory can't hole a bunker shot for 59. Bum.", "token_idx_1": 4, "text_start_1": 18, "text_end_1": 24, "date_1": "2019-06", "text_2": "trump is for real just a tenant waiting to be evicted from the White House and his bunker", "token_idx_2": 17, "text_start_2": 83, "text_end_2": 89, "date_2": "2020-06", "text_1_tokenized": ["Rory", "can't", "hole", "a", "bunker", "shot", "for", "59", ".", "Bum", "."], "text_2_tokenized": ["trump", "is", "for", "real", "just", "a", "tenant", "waiting", "to", "be", "evicted", "from", "the", "White", "House", "and", "his", "bunker"]}
-{"id": "1092-bunker", "word": "bunker", "label_binary": 0, "text_1": "I'm actually not that keen on the idea of 2-2-2. I think even in the current patch people were still developing (and still are) very variable tactics regarding different comps. We are seeing bunker/goats/dps and even dive.", "token_idx_1": 38, "text_start_1": 191, "text_end_1": 197, "date_1": "2019-06", "text_2": "This bunker gear work out is gonna hit different", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 11, "date_2": "2020-06", "text_1_tokenized": ["I'm", "actually", "not", "that", "keen", "on", "the", "idea", "of", "2-2-", "2", ".", "I", "think", "even", "in", "the", "current", "patch", "people", "were", "still", "developing", "(", "and", "still", "are", ")", "very", "variable", "tactics", "regarding", "different", "comps", ".", "We", "are", "seeing", "bunker", "/", "goats", "/", "dps", "and", "even", "dive", "."], "text_2_tokenized": ["This", "bunker", "gear", "work", "out", "is", "gonna", "hit", "different"]}
-{"id": "1093-bunker", "word": "bunker", "label_binary": 0, "text_1": "Complaining about the Times Corbyn splash for \u201cpoliticising the Civil Service\u201d misses the key point - there is a hell of a lot of insider info from the \u201cbunker\u201d on p8-9. \u201cThe only newspaper delivered to the leader's office is the Morning Star\u201d. \n\ud83d\udea9", "token_idx_1": 31, "text_start_1": 153, "text_end_1": 159, "date_1": "2019-06", "text_2": "So much for all the \"He's hiding in his bunker\" tweets about Trump. He just walked out through a crowd out in the open. There it is.", "token_idx_2": 10, "text_start_2": 40, "text_end_2": 46, "date_2": "2020-06", "text_1_tokenized": ["Complaining", "about", "the", "Times", "Corbyn", "splash", "for", "\u201c", "politicising", "the", "Civil", "Service", "\u201d", "misses", "the", "key", "point", "-", "there", "is", "a", "hell", "of", "a", "lot", "of", "insider", "info", "from", "the", "\u201c", "bunker", "\u201d", "on", "p8", "-", "9", ".", "\u201c", "The", "only", "newspaper", "delivered", "to", "the", "leader's", "office", "is", "the", "Morning", "Star", "\u201d", ".", "\ud83d\udea9"], "text_2_tokenized": ["So", "much", "for", "all", "the", "\"", "He's", "hiding", "in", "his", "bunker", "\"", "tweets", "about", "Trump", ".", "He", "just", "walked", "out", "through", "a", "crowd", "out", "in", "the", "open", ".", "There", "it", "is", "."]}
-{"id": "1094-bunker", "word": "bunker", "label_binary": 0, "text_1": "going into the bunker on level 24?? madness luv", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 21, "date_1": "2019-06", "text_2": "#WeakIn4Words tweeted from underground bunker", "token_idx_2": 4, "text_start_2": 39, "text_end_2": 45, "date_2": "2020-06", "text_1_tokenized": ["going", "into", "the", "bunker", "on", "level", "24", "?", "?", "madness", "luv"], "text_2_tokenized": ["#WeakIn4Words", "tweeted", "from", "underground", "bunker"]}
-{"id": "1095-bunker", "word": "bunker", "label_binary": 1, "text_1": "2.26am and I've finally cracked @Ansible Tower and @VMware Identity Manager integration! Snapshots and backups taken, offline copies of VMs now in a fire- and bulletproof safe buried in a bunker. Now I'm going to bed! \ud83d\udca4\ud83d\udca4\ud83d\udca4 #vIDM #EUC #VDI", "token_idx_1": 34, "text_start_1": 188, "text_end_1": 194, "date_1": "2019-06", "text_2": "New protest chant: Donald Trump is not your Friend-He's in his bunker, lock him in", "token_idx_2": 12, "text_start_2": 63, "text_end_2": 69, "date_2": "2020-06", "text_1_tokenized": ["2.26", "am", "and", "I've", "finally", "cracked", "@Ansible", "Tower", "and", "@VMware", "Identity", "Manager", "integration", "!", "Snapshots", "and", "backups", "taken", ",", "offline", "copies", "of", "VMs", "now", "in", "a", "fire", "-", "and", "bulletproof", "safe", "buried", "in", "a", "bunker", ".", "Now", "I'm", "going", "to", "bed", "!", "\ud83d\udca4", "\ud83d\udca4", "\ud83d\udca4", "#vIDM", "#EUC", "#VDI"], "text_2_tokenized": ["New", "protest", "chant", ":", "Donald", "Trump", "is", "not", "your", "Friend-He's", "in", "his", "bunker", ",", "lock", "him", "in"]}
-{"id": "1096-bunker", "word": "bunker", "label_binary": 0, "text_1": "Will set pile bunker as emote, rather than gestures... I cant make it strike only once as gestures... .- . Saddeded", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 20, "date_1": "2019-06", "text_2": "Lil Donny boy.. aka @realDonaldTrump you can leave the bunker now.. we won't hurt you! But can we get Melania? Wanna chat about her bully movement..", "token_idx_2": 10, "text_start_2": 55, "text_end_2": 61, "date_2": "2020-06", "text_1_tokenized": ["Will", "set", "pile", "bunker", "as", "emote", ",", "rather", "than", "gestures", "...", "I", "cant", "make", "it", "strike", "only", "once", "as", "gestures", "... \n.", "-", ".", "Saddeded"], "text_2_tokenized": ["Lil", "Donny", "boy", "..", "aka", "@realDonaldTrump", "you", "can", "leave", "the", "bunker", "now", "..", "we", "won't", "hurt", "you", "!", "But", "can", "we", "get", "Melania", "?", "Wanna", "chat", "about", "her", "bully", "movement", ".."]}
-{"id": "1097-bunker", "word": "bunker", "label_binary": 1, "text_1": "I'll be hiding in a bunker with d'Eon and Scheherazade for the entirety of Agartha, thank you very much!", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 26, "date_1": "2019-06", "text_2": "Trump wanting to dismantle Obamacare during a pandemic reminds me of his idol Adolph Hitler in his final days in his bunker who ordered nazis to sabotage German infrastructure since he believed Germans are cowards and don't deserve him. Expect it to get way worse with Trump", "token_idx_2": 21, "text_start_2": 117, "text_end_2": 123, "date_2": "2020-06", "text_1_tokenized": ["I'll", "be", "hiding", "in", "a", "bunker", "with", "d'Eon", "and", "Scheherazade", "for", "the", "entirety", "of", "Agartha", ",", "thank", "you", "very", "much", "!"], "text_2_tokenized": ["Trump", "wanting", "to", "dismantle", "Obamacare", "during", "a", "pandemic", "reminds", "me", "of", "his", "idol", "Adolph", "Hitler", "in", "his", "final", "days", "in", "his", "bunker", "who", "ordered", "nazis", "to", "sabotage", "German", "infrastructure", "since", "he", "believed", "Germans", "are", "cowards", "and", "don't", "deserve", "him", ".", "Expect", "it", "to", "get", "way", "worse", "with", "Trump"]}
-{"id": "1098-bunker", "word": "bunker", "label_binary": 1, "text_1": "Ok, so if they can't find Tim Thomas in his bunker and @TomBrady is busy, who else would you like to see wave the flag for the @NHLBruins #game7 #StanleyCupFinal ? Who else you got?", "token_idx_1": 11, "text_start_1": 44, "text_end_1": 50, "date_1": "2019-06", "text_2": "don't mind me i'm just chilling in trump's bunker", "token_idx_2": 8, "text_start_2": 43, "text_end_2": 49, "date_2": "2020-06", "text_1_tokenized": ["Ok", ",", "so", "if", "they", "can't", "find", "Tim", "Thomas", "in", "his", "bunker", "and", "@TomBrady", "is", "busy", ",", "who", "else", "would", "you", "like", "to", "see", "wave", "the", "flag", "for", "the", "@NHLBruins", "#game7", "#StanleyCupFinal", "?", "Who", "else", "you", "got", "?"], "text_2_tokenized": ["don't", "mind", "me", "i'm", "just", "chilling", "in", "trump's", "bunker"]}
-{"id": "1099-bunker", "word": "bunker", "label_binary": 0, "text_1": "Can we all agree Spieth didn't hit the rake? \nClearly hit the lip of the bunker.", "token_idx_1": 16, "text_start_1": 72, "text_end_1": 78, "date_1": "2019-06", "text_2": "The level of pettiness from Americans classing this as Obama day and flooding twitter with pics of him on Trumps birthday is the type of stuff I live for to be honest because you just know he's Seethinnnnngg in his bunker \ud83d\udc4f\ud83c\udffb\ud83d\ude0a #FuckTrump", "token_idx_2": 40, "text_start_2": 215, "text_end_2": 221, "date_2": "2020-06", "text_1_tokenized": ["Can", "we", "all", "agree", "Spieth", "didn't", "hit", "the", "rake", "?", "Clearly", "hit", "the", "lip", "of", "the", "bunker", "."], "text_2_tokenized": ["The", "level", "of", "pettiness", "from", "Americans", "classing", "this", "as", "Obama", "day", "and", "flooding", "twitter", "with", "pics", "of", "him", "on", "Trumps", "birthday", "is", "the", "type", "of", "stuff", "I", "live", "for", "to", "be", "honest", "because", "you", "just", "know", "he's", "Seethinnnnngg", "in", "his", "bunker", "\ud83d\udc4f\ud83c\udffb", "\ud83d\ude0a", "#FuckTrump"]}
-{"id": "1100-bunker", "word": "bunker", "label_binary": 0, "text_1": "So excited about moving into a basement so i can be true c.h.u.d. and also prepare for future bunker.", "token_idx_1": 25, "text_start_1": 94, "text_end_1": 100, "date_1": "2019-06", "text_2": "Where's the overflow bunker baby ?", "token_idx_2": 3, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-06", "text_1_tokenized": ["So", "excited", "about", "moving", "into", "a", "basement", "so", "i", "can", "be", "true", "c", ".", "h", ".", "u", ".", "d", ".", "and", "also", "prepare", "for", "future", "bunker", "."], "text_2_tokenized": ["Where's", "the", "overflow", "bunker", "baby", "?"]}
-{"id": "1101-bunker", "word": "bunker", "label_binary": 0, "text_1": "I love @PaulAzinger analysis...that said, I can't remember in my @PGA days telling a student to hit a bunker shot loaded w integrity and spin... #NewMantra", "token_idx_1": 21, "text_start_1": 102, "text_end_1": 108, "date_1": "2019-06", "text_2": "Building himself an outside bunker", "token_idx_2": 4, "text_start_2": 28, "text_end_2": 34, "date_2": "2020-06", "text_1_tokenized": ["I", "love", "@PaulAzinger", "analysis", "...", "that", "said", ",", "I", "can't", "remember", "in", "my", "@PGA", "days", "telling", "a", "student", "to", "hit", "a", "bunker", "shot", "loaded", "w", "integrity", "and", "spin", "...", "#NewMantra"], "text_2_tokenized": ["Building", "himself", "an", "outside", "bunker"]}
-{"id": "1102-bunker", "word": "bunker", "label_binary": 0, "text_1": "59 probably not going to happen for Rory. Has to hole a tough bunker shot at 18 to make history.", "token_idx_1": 14, "text_start_1": 62, "text_end_1": 68, "date_1": "2019-06", "text_2": "I'm walking a spiritual path, but I'm no Buddha. I have a voice and I'm gonna use it for truth and justice - otherwise inner peace is just hiding in a bunker. -Dr. \nG", "token_idx_2": 33, "text_start_2": 151, "text_end_2": 157, "date_2": "2020-06", "text_1_tokenized": ["59", "probably", "not", "going", "to", "happen", "for", "Rory", ".", "Has", "to", "hole", "a", "tough", "bunker", "shot", "at", "18", "to", "make", "history", "."], "text_2_tokenized": ["I'm", "walking", "a", "spiritual", "path", ",", "but", "I'm", "no", "Buddha", ".", "I", "have", "a", "voice", "and", "I'm", "gonna", "use", "it", "for", "truth", "and", "justice", "-", "otherwise", "inner", "peace", "is", "just", "hiding", "in", "a", "bunker", ".", "-", "Dr", ".", "G"]}
-{"id": "1103-bunker", "word": "bunker", "label_binary": 1, "text_1": "Deadly fire above secret nuclear bunker under Maryland house leads to prison sentence An eccentric computer hacker was sentenced Monday to nine years in prison for the fiery death of a man who was helping him secretly dig tunnels for a nuclear bunker beneath a Maryland home. \u2026", "token_idx_1": 5, "text_start_1": 33, "text_end_1": 39, "date_1": "2019-06", "text_2": "lil Aaron just called trump a bunker boy and I actually laughed out loud", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 36, "date_2": "2020-06", "text_1_tokenized": ["Deadly", "fire", "above", "secret", "nuclear", "bunker", "under", "Maryland", "house", "leads", "to", "prison", "sentence", "An", "eccentric", "computer", "hacker", "was", "sentenced", "Monday", "to", "nine", "years", "in", "prison", "for", "the", "fiery", "death", "of", "a", "man", "who", "was", "helping", "him", "secretly", "dig", "tunnels", "for", "a", "nuclear", "bunker", "beneath", "a", "Maryland", "home", ".", "\u2026"], "text_2_tokenized": ["lil", "Aaron", "just", "called", "trump", "a", "bunker", "boy", "and", "I", "actually", "laughed", "out", "loud"]}
-{"id": "1104-bunker", "word": "bunker", "label_binary": 0, "text_1": "All the straps in this bitch ain't no trap this a bunker", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 56, "date_1": "2019-06", "text_2": "I keep hearing tRUMP's desire to dominate the streets! Their US citizen's streets, he can go back to his bunker bitch!", "token_idx_2": 21, "text_start_2": 105, "text_end_2": 111, "date_2": "2020-06", "text_1_tokenized": ["All", "the", "straps", "in", "this", "bitch", "ain't", "no", "trap", "this", "a", "bunker"], "text_2_tokenized": ["I", "keep", "hearing", "tRUMP's", "desire", "to", "dominate", "the", "streets", "!", "Their", "US", "citizen's", "streets", ",", "he", "can", "go", "back", "to", "his", "bunker", "bitch", "!"]}
-{"id": "1105-bunker", "word": "bunker", "label_binary": 0, "text_1": "Troy Merritt just birdied from a fairway bunker on 18 to split top 20 bets on Ben An. \nFull win at 5.0 if TM doesn't birdie that hole.", "token_idx_1": 7, "text_start_1": 41, "text_end_1": 47, "date_1": "2019-06", "text_2": "Isn't this all feeling a little bit like #Parasite with our punisher hiding in a bunker deep down in the bada bing?", "token_idx_2": 15, "text_start_2": 81, "text_end_2": 87, "date_2": "2020-06", "text_1_tokenized": ["Troy", "Merritt", "just", "birdied", "from", "a", "fairway", "bunker", "on", "18", "to", "split", "top", "20", "bets", "on", "Ben", "An", ".", "Full", "win", "at", "5.0", "if", "TM", "doesn't", "birdie", "that", "hole", "."], "text_2_tokenized": ["Isn't", "this", "all", "feeling", "a", "little", "bit", "like", "#Parasite", "with", "our", "punisher", "hiding", "in", "a", "bunker", "deep", "down", "in", "the", "bada", "bing", "?"]}
-{"id": "1106-bunker", "word": "bunker", "label_binary": 0, "text_1": "So it seems likely that on this birthday I'll be drinking Guinness alone? Has to be better than that Palinka stuff which had me in a Cold War bunker club under a Central European castle, with a bunch of strangers no?", "token_idx_1": 29, "text_start_1": 142, "text_end_1": 148, "date_1": "2019-06", "text_2": "He wasn't \"inspecting the bunker\" he was \"emulating Archie Bunker\".", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 32, "date_2": "2020-06", "text_1_tokenized": ["So", "it", "seems", "likely", "that", "on", "this", "birthday", "I'll", "be", "drinking", "Guinness", "alone", "?", "Has", "to", "be", "better", "than", "that", "Palinka", "stuff", "which", "had", "me", "in", "a", "Cold", "War", "bunker", "club", "under", "a", "Central", "European", "castle", ",", "with", "a", "bunch", "of", "strangers", "no", "?"], "text_2_tokenized": ["He", "wasn't", "\"", "inspecting", "the", "bunker", "\"", "he", "was", "\"", "emulating", "Archie", "Bunker", "\"", "."]}
-{"id": "1107-bunker", "word": "bunker", "label_binary": 1, "text_1": "Kimmy: The only thing that matters is that for 15 years, that man right there wouldn't let us out of the bunker. And that is the definition of kidnapping! Reverend: And of my Verizon contract. Lawyer: Objection! You're paying for the network! Compare coverage! \n--I CANNOT\ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_1": 23, "text_start_1": 105, "text_end_1": 111, "date_1": "2019-06", "text_2": "If you don't think that our Fuhrer in Chief will be hiding in his bunker while the rest of us die then you need to wake the fuck up", "token_idx_2": 14, "text_start_2": 66, "text_end_2": 72, "date_2": "2020-06", "text_1_tokenized": ["Kimmy", ":", "The", "only", "thing", "that", "matters", "is", "that", "for", "15", "years", ",", "that", "man", "right", "there", "wouldn't", "let", "us", "out", "of", "the", "bunker", ".", "And", "that", "is", "the", "definition", "of", "kidnapping", "!", "Reverend", ":", "And", "of", "my", "Verizon", "contract", ".", "Lawyer", ":", "Objection", "!", "You're", "paying", "for", "the", "network", "!", "Compare", "coverage", "!", "-", "-", "I", "CANNOT", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["If", "you", "don't", "think", "that", "our", "Fuhrer", "in", "Chief", "will", "be", "hiding", "in", "his", "bunker", "while", "the", "rest", "of", "us", "die", "then", "you", "need", "to", "wake", "the", "fuck", "up"]}
-{"id": "1108-bunker", "word": "bunker", "label_binary": 1, "text_1": "accumulating a vast wealth so i can buy and remodel an old ww2 bunker to live in", "token_idx_1": 13, "text_start_1": 63, "text_end_1": 69, "date_1": "2019-06", "text_2": "Donnie must be pouting in his bunker today", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 36, "date_2": "2020-06", "text_1_tokenized": ["accumulating", "a", "vast", "wealth", "so", "i", "can", "buy", "and", "remodel", "an", "old", "ww2", "bunker", "to", "live", "in"], "text_2_tokenized": ["Donnie", "must", "be", "pouting", "in", "his", "bunker", "today"]}
-{"id": "1109-bunker", "word": "bunker", "label_binary": 1, "text_1": "So, I'm confused. Trump said he made a deal with Mexico. Does that mean the invasion is over? Can I come out of the bunker now?", "token_idx_1": 28, "text_start_1": 116, "text_end_1": 122, "date_1": "2019-06", "text_2": "Haaaah! Finally! I've been waiting for somebody to come get this fool. Iran has issued an arrest warrant for @realDonaldTrump. You can find him in his bunker at the @WhiteHouse. \n#LawAndOrder", "token_idx_2": 30, "text_start_2": 151, "text_end_2": 157, "date_2": "2020-06", "text_1_tokenized": ["So", ",", "I'm", "confused", ".", "Trump", "said", "he", "made", "a", "deal", "with", "Mexico", ".", "Does", "that", "mean", "the", "invasion", "is", "over", "?", "Can", "I", "come", "out", "of", "the", "bunker", "now", "?"], "text_2_tokenized": ["Haaaah", "!", "Finally", "!", "I've", "been", "waiting", "for", "somebody", "to", "come", "get", "this", "fool", ".", "Iran", "has", "issued", "an", "arrest", "warrant", "for", "@realDonaldTrump", ".", "You", "can", "find", "him", "in", "his", "bunker", "at", "the", "@WhiteHouse", ".", "#LawAndOrder"]}
-{"id": "1110-bunker", "word": "bunker", "label_binary": 0, "text_1": "The bunker is using channel 9s app to review the tries \ud83d\ude02", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 10, "date_1": "2019-06", "text_2": "If the Democratic Party had a spine, @JoeBiden would exit his bunker and start a long march to the White House to challenge the Insurrection Act and call Trump's bluff.", "token_idx_2": 12, "text_start_2": 62, "text_end_2": 68, "date_2": "2020-06", "text_1_tokenized": ["The", "bunker", "is", "using", "channel", "9s", "app", "to", "review", "the", "tries", "\ud83d\ude02"], "text_2_tokenized": ["If", "the", "Democratic", "Party", "had", "a", "spine", ",", "@JoeBiden", "would", "exit", "his", "bunker", "and", "start", "a", "long", "march", "to", "the", "White", "House", "to", "challenge", "the", "Insurrection", "Act", "and", "call", "Trump's", "bluff", "."]}
-{"id": "1111-bunker", "word": "bunker", "label_binary": 0, "text_1": "Viktor Hovland just short-hopped it out of a bunker and back into the rough, but it's cool because he's wearing an Oklahoma State Golf shirt", "token_idx_1": 8, "text_start_1": 45, "text_end_1": 51, "date_1": "2019-06", "text_2": "This movie got me thinking COVID-19 was created in Trump's bunker.", "token_idx_2": 12, "text_start_2": 59, "text_end_2": 65, "date_2": "2020-06", "text_1_tokenized": ["Viktor", "Hovland", "just", "short-hopped", "it", "out", "of", "a", "bunker", "and", "back", "into", "the", "rough", ",", "but", "it's", "cool", "because", "he's", "wearing", "an", "Oklahoma", "State", "Golf", "shirt"], "text_2_tokenized": ["This", "movie", "got", "me", "thinking", "COVID", "-", "19", "was", "created", "in", "Trump's", "bunker", "."]}
-{"id": "1112-bunker", "word": "bunker", "label_binary": 1, "text_1": "Is that bunker still a thing? I'd like to enquire", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 14, "date_1": "2019-06", "text_2": "Is he hiding in the bunker again? #Bunkerbaby #JusticeForGeorgeFloyd #JusticeforBreonnaTaylor", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 26, "date_2": "2020-06", "text_1_tokenized": ["Is", "that", "bunker", "still", "a", "thing", "?", "I'd", "like", "to", "enquire"], "text_2_tokenized": ["Is", "he", "hiding", "in", "the", "bunker", "again", "?", "#Bunkerbaby", "#JusticeForGeorgeFloyd", "#JusticeforBreonnaTaylor"]}
-{"id": "1113-bunker", "word": "bunker", "label_binary": 0, "text_1": "a2: if i were on the bunker with the logic breakout i would've stopped it 9s: my commander was there and she got like 20 viruses in like 2 minutes lol not worth it!!!! a2: rip to your commander but im different", "token_idx_1": 7, "text_start_1": 21, "text_end_1": 27, "date_1": "2019-06", "text_2": "Protesters. Go to @BrianKempGA residence. \nMake him head to his bunker.", "token_idx_2": 12, "text_start_2": 63, "text_end_2": 69, "date_2": "2020-06", "text_1_tokenized": ["a2", ":", "if", "i", "were", "on", "the", "bunker", "with", "the", "logic", "breakout", "i", "would've", "stopped", "it", "9s", ":", "my", "commander", "was", "there", "and", "she", "got", "like", "20", "viruses", "in", "like", "2", "minutes", "lol", "not", "worth", "it", "!", "!", "!", "a2", ":", "rip", "to", "your", "commander", "but", "im", "different"], "text_2_tokenized": ["Protesters", ".", "Go", "to", "@BrianKempGA", "residence", ".", "Make", "him", "head", "to", "his", "bunker", "."]}
-{"id": "1114-bunker", "word": "bunker", "label_binary": 1, "text_1": "Gerrard Williams is right and he should say it. Hitler escaped the bunker.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 73, "date_1": "2019-06", "text_2": "Everybody expressing their concern over how we know shitler won't leave his bunker peacefully in January is like dangling red meat for me right now. Rawr, I'm hungry. \ud83d\ude33", "token_idx_2": 12, "text_start_2": 76, "text_end_2": 82, "date_2": "2020-06", "text_1_tokenized": ["Gerrard", "Williams", "is", "right", "and", "he", "should", "say", "it", ".", "Hitler", "escaped", "the", "bunker", "."], "text_2_tokenized": ["Everybody", "expressing", "their", "concern", "over", "how", "we", "know", "shitler", "won't", "leave", "his", "bunker", "peacefully", "in", "January", "is", "like", "dangling", "red", "meat", "for", "me", "right", "now", ".", "Rawr", ",", "I'm", "hungry", ".", "\ud83d\ude33"]}
-{"id": "1115-bunker", "word": "bunker", "label_binary": 0, "text_1": "Phil misses it left into the bunker on his first hole, No. 10. That was his miss this morning on the practice range. #USOpen", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 35, "date_1": "2019-06", "text_2": "some of my older family members keep telling to tell my generation to vote like we aren't already to kill bunker baby, like homie TELL YOUR GENERATION TO VOTE OUT THAT FAT CHEETO TOO! damn!", "token_idx_2": 20, "text_start_2": 106, "text_end_2": 112, "date_2": "2020-06", "text_1_tokenized": ["Phil", "misses", "it", "left", "into", "the", "bunker", "on", "his", "first", "hole", ",", "No", ".", "10", ".", "That", "was", "his", "miss", "this", "morning", "on", "the", "practice", "range", ".", "#USOpen"], "text_2_tokenized": ["some", "of", "my", "older", "family", "members", "keep", "telling", "to", "tell", "my", "generation", "to", "vote", "like", "we", "aren't", "already", "to", "kill", "bunker", "baby", ",", "like", "homie", "TELL", "YOUR", "GENERATION", "TO", "VOTE", "OUT", "THAT", "FAT", "CHEETO", "TOO", "!", "damn", "!"]}
-{"id": "1117-bunker", "word": "bunker", "label_binary": 1, "text_1": "cus im vampire better hide in a bunker", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 38, "date_1": "2019-06", "text_2": "Do we know when Trump will be speaking at George Floyd's funeral? Or will it just be Biden? \n(Note sarcasm, as he wont come out of the bunker)", "token_idx_2": 31, "text_start_2": 134, "text_end_2": 140, "date_2": "2020-06", "text_1_tokenized": ["cus", "im", "vampire", "better", "hide", "in", "a", "bunker"], "text_2_tokenized": ["Do", "we", "know", "when", "Trump", "will", "be", "speaking", "at", "George", "Floyd's", "funeral", "?", "Or", "will", "it", "just", "be", "Biden", "?", "(", "Note", "sarcasm", ",", "as", "he", "wont", "come", "out", "of", "the", "bunker", ")"]}
-{"id": "1118-bunker", "word": "bunker", "label_binary": 1, "text_1": "When they turn the country into Gilead, Lil Nas X will be brought out once a year to perform Old Town Road then shoved back into an underground bunker.", "token_idx_1": 29, "text_start_1": 144, "text_end_1": 150, "date_1": "2019-06", "text_2": "not sis said \u201cgo back to your bunker\u201d to trump \ud83d\udc80\ud83d\udc80\ud83d\udc80 I'm weak asf", "token_idx_2": 8, "text_start_2": 30, "text_end_2": 36, "date_2": "2020-06", "text_1_tokenized": ["When", "they", "turn", "the", "country", "into", "Gilead", ",", "Lil", "Nas", "X", "will", "be", "brought", "out", "once", "a", "year", "to", "perform", "Old", "Town", "Road", "then", "shoved", "back", "into", "an", "underground", "bunker", "."], "text_2_tokenized": ["not", "sis", "said", "\u201c", "go", "back", "to", "your", "bunker", "\u201d", "to", "trump", "\ud83d\udc80", "\ud83d\udc80", "\ud83d\udc80", "I'm", "weak", "asf"]}
-{"id": "1119-bunker", "word": "bunker", "label_binary": 0, "text_1": "Sgt pep and the blue moonies forever in need of buying yer own glue you bellend where's me bunker as you were LG x", "token_idx_1": 18, "text_start_1": 91, "text_end_1": 97, "date_1": "2019-06", "text_2": "Is Boris shacked up in the bunker with Trump? Where's the leadership? He needs to condemn Trump's handling of the protests immediately!", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 33, "date_2": "2020-06", "text_1_tokenized": ["Sgt", "pep", "and", "the", "blue", "moonies", "forever", "in", "need", "of", "buying", "yer", "own", "glue", "you", "bellend", "where's", "me", "bunker", "as", "you", "were", "LG", "x"], "text_2_tokenized": ["Is", "Boris", "shacked", "up", "in", "the", "bunker", "with", "Trump", "?", "Where's", "the", "leadership", "?", "He", "needs", "to", "condemn", "Trump's", "handling", "of", "the", "protests", "immediately", "!"]}
-{"id": "1120-bunker", "word": "bunker", "label_binary": 0, "text_1": "You know its getting bad when someone asks me what type of cheese I dislike and I instantly say bunker on paris. Said person went ???", "token_idx_1": 19, "text_start_1": 96, "text_end_1": 102, "date_1": "2019-06", "text_2": "Do you think trump is wearing a tinfoil hat in the bunker?", "token_idx_2": 11, "text_start_2": 51, "text_end_2": 57, "date_2": "2020-06", "text_1_tokenized": ["You", "know", "its", "getting", "bad", "when", "someone", "asks", "me", "what", "type", "of", "cheese", "I", "dislike", "and", "I", "instantly", "say", "bunker", "on", "paris", ".", "Said", "person", "went", "?", "?", "?"], "text_2_tokenized": ["Do", "you", "think", "trump", "is", "wearing", "a", "tinfoil", "hat", "in", "the", "bunker", "?"]}
-{"id": "1121-bunker", "word": "bunker", "label_binary": 0, "text_1": "So. They successfully broke Archie out of juvie. \nWe still got that whole stab thing to deal with but he's safely in the bunker", "token_idx_1": 25, "text_start_1": 120, "text_end_1": 126, "date_1": "2019-06", "text_2": "So...we're gonna call him bunker boy/ bunker bitch forever now...right?", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 32, "date_2": "2020-06", "text_1_tokenized": ["So", ".", "They", "successfully", "broke", "Archie", "out", "of", "juvie", ".", "We", "still", "got", "that", "whole", "stab", "thing", "to", "deal", "with", "but", "he's", "safely", "in", "the", "bunker"], "text_2_tokenized": ["So", "...", "we're", "gonna", "call", "him", "bunker", "boy", "/", "bunker", "bitch", "forever", "now", "...", "right", "?"]}
-{"id": "1122-bunker", "word": "bunker", "label_binary": 0, "text_1": "Into the bunker we go, blues. Cheers.", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 15, "date_1": "2019-06", "text_2": "HEY, where the fuck has melania been for the past few days? did they leave her in the bunker?", "token_idx_2": 20, "text_start_2": 86, "text_end_2": 92, "date_2": "2020-06", "text_1_tokenized": ["Into", "the", "bunker", "we", "go", ",", "blues", ".", "Cheers", "."], "text_2_tokenized": ["HEY", ",", "where", "the", "fuck", "has", "melania", "been", "for", "the", "past", "few", "days", "?", "did", "they", "leave", "her", "in", "the", "bunker", "?"]}
-{"id": "1123-bunker", "word": "bunker", "label_binary": 0, "text_1": "Imagine playing 5 stack bunker in QP, couldn't be me! \ud83e\udd74\ud83e\udd21\ud83e\udd74\ud83e\udd21", "token_idx_1": 4, "text_start_1": 24, "text_end_1": 30, "date_1": "2019-06", "text_2": "found one more organisation to write to tomorrow, 'they can tipe their own cases their is no more freedom' to think they trained a bunker up to fight a woman, and the entire time they would had been lost", "token_idx_2": 27, "text_start_2": 131, "text_end_2": 137, "date_2": "2020-06", "text_1_tokenized": ["Imagine", "playing", "5", "stack", "bunker", "in", "QP", ",", "couldn't", "be", "me", "!", "\ud83e\udd74", "\ud83e\udd21", "\ud83e\udd74", "\ud83e\udd21"], "text_2_tokenized": ["found", "one", "more", "organisation", "to", "write", "to", "tomorrow", ",", "'", "they", "can", "tipe", "their", "own", "cases", "their", "is", "no", "more", "freedom", "'", "to", "think", "they", "trained", "a", "bunker", "up", "to", "fight", "a", "woman", ",", "and", "the", "entire", "time", "they", "would", "had", "been", "lost"]}
-{"id": "1124-bunker", "word": "bunker", "label_binary": 0, "text_1": "thinkin about crowley's gay panic sand bunker", "token_idx_1": 6, "text_start_1": 39, "text_end_1": 45, "date_1": "2019-06", "text_2": "The whole WH is in a bunker and not tethered to reality #Trump", "token_idx_2": 6, "text_start_2": 21, "text_end_2": 27, "date_2": "2020-06", "text_1_tokenized": ["thinkin", "about", "crowley's", "gay", "panic", "sand", "bunker"], "text_2_tokenized": ["The", "whole", "WH", "is", "in", "a", "bunker", "and", "not", "tethered", "to", "reality", "#Trump"]}
-{"id": "1125-bunker", "word": "bunker", "label_binary": 1, "text_1": "#ReasonsYouStayOnTwitter: Getting to follow @austinkleon and @nathanjurgenson. It also serves as my \"emergency bunker\" when @peachdotcool is down. \nHeck, anytime any other site or app is down, I come here to ask, \"Is it just me?\"", "token_idx_1": 16, "text_start_1": 111, "text_end_1": 117, "date_1": "2019-06", "text_2": "#InMyBunker My mighty bunker The lion sleeps tonight", "token_idx_2": 3, "text_start_2": 22, "text_end_2": 28, "date_2": "2020-06", "text_1_tokenized": ["#ReasonsYouStayOnTwitter", ":", "Getting", "to", "follow", "@austinkleon", "and", "@nathanjurgenson", ".", "It", "also", "serves", "as", "my", "\"", "emergency", "bunker", "\"", "when", "@peachdotcool", "is", "down", ".", "Heck", ",", "anytime", "any", "other", "site", "or", "app", "is", "down", ",", "I", "come", "here", "to", "ask", ",", "\"", "Is", "it", "just", "me", "?", "\""], "text_2_tokenized": ["#InMyBunker", "My", "mighty", "bunker", "The", "lion", "sleeps", "tonight"]}
-{"id": "1126-bunker", "word": "bunker", "label_binary": 0, "text_1": "D-Day. 22.12. Elements of the British 1st Airborne secure the building on Long Lane, placing the partygoers under arrest and defending the CW2 command bunker until relief forces from Toledo beach under General Patton arrive in central London in the early hours of the morning.", "token_idx_1": 27, "text_start_1": 151, "text_end_1": 157, "date_1": "2019-06", "text_2": "Hablando de Rasputin, termine mi warmind bunker en animal crossing :3", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 47, "date_2": "2020-06", "text_1_tokenized": ["D-Day", ".", "22.12", ".", "Elements", "of", "the", "British", "1st", "Airborne", "secure", "the", "building", "on", "Long", "Lane", ",", "placing", "the", "partygoers", "under", "arrest", "and", "defending", "the", "CW2", "command", "bunker", "until", "relief", "forces", "from", "Toledo", "beach", "under", "General", "Patton", "arrive", "in", "central", "London", "in", "the", "early", "hours", "of", "the", "morning", "."], "text_2_tokenized": ["Hablando", "de", "Rasputin", ",", "termine", "mi", "warmind", "bunker", "en", "animal", "crossing", ":", "3"]}
-{"id": "1127-bunker", "word": "bunker", "label_binary": 1, "text_1": "Kelly started her daily regime. 40 minutes on the treadmill. 30 minutes of weights. Shower. Get dressed. Eat. Hydrate. Clean her rifle. Change the filter on her mask. Check her bio suit for rips. Get geared up. Open the bunker door. Seal the entrance. Search for Bobby. \n#vss365a", "token_idx_1": 50, "text_start_1": 220, "text_end_1": 226, "date_1": "2019-06", "text_2": "Trump letting people know that he's essentially ordering war against US citizens that just want justice and giving authority to shoot them and then hiding in a bunker shows he's an absolute fucking idiotic coward.", "token_idx_2": 27, "text_start_2": 160, "text_end_2": 166, "date_2": "2020-06", "text_1_tokenized": ["Kelly", "started", "her", "daily", "regime", ".", "40", "minutes", "on", "the", "treadmill", ".", "30", "minutes", "of", "weights", ".", "Shower", ".", "Get", "dressed", ".", "Eat", ".", "Hydrate", ".", "Clean", "her", "rifle", ".", "Change", "the", "filter", "on", "her", "mask", ".", "Check", "her", "bio", "suit", "for", "rips", ".", "Get", "geared", "up", ".", "Open", "the", "bunker", "door", ".", "Seal", "the", "entrance", ".", "Search", "for", "Bobby", ".", "#vss365a"], "text_2_tokenized": ["Trump", "letting", "people", "know", "that", "he's", "essentially", "ordering", "war", "against", "US", "citizens", "that", "just", "want", "justice", "and", "giving", "authority", "to", "shoot", "them", "and", "then", "hiding", "in", "a", "bunker", "shows", "he's", "an", "absolute", "fucking", "idiotic", "coward", "."]}
-{"id": "1128-bunker", "word": "bunker", "label_binary": 0, "text_1": "Rory with the flop into the bunker from the fairway, very relatable.", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 34, "date_1": "2019-06", "text_2": "They need to show some aerial shots of the DC protest. If they don't, Dotard McSlurry will come out from hiding in his bunker and deny it happened. #DCProtests #BlackLivesMatter #TrumpOwnsEveryDeath", "token_idx_2": 25, "text_start_2": 119, "text_end_2": 125, "date_2": "2020-06", "text_1_tokenized": ["Rory", "with", "the", "flop", "into", "the", "bunker", "from", "the", "fairway", ",", "very", "relatable", "."], "text_2_tokenized": ["They", "need", "to", "show", "some", "aerial", "shots", "of", "the", "DC", "protest", ".", "If", "they", "don't", ",", "Dotard", "McSlurry", "will", "come", "out", "from", "hiding", "in", "his", "bunker", "and", "deny", "it", "happened", ".", "#DCProtests", "#BlackLivesMatter", "#TrumpOwnsEveryDeath"]}
-{"id": "1129-bunker", "word": "bunker", "label_binary": 1, "text_1": "Wowee, staying off Twitter to avoid Pok\u00e9mon SwSh spoilers is mighty difficult. I dodged lots of bullets today but havem't fully seen any new Pok\u00e9mon or characters yet. Anyway, I'm scuttling back to my bunker till I feel safe and the hype dies down.", "token_idx_1": 38, "text_start_1": 201, "text_end_1": 207, "date_1": "2019-06", "text_2": "Day #6 In the bunker. I've eaten all the Cheetos, I'm also starting to suspect that Pence is really and old roll of bologna with a hair piece on. \n-Love Donny-", "token_idx_2": 5, "text_start_2": 14, "text_end_2": 20, "date_2": "2020-06", "text_1_tokenized": ["Wowee", ",", "staying", "off", "Twitter", "to", "avoid", "Pok\u00e9mon", "SwSh", "spoilers", "is", "mighty", "difficult", ".", "I", "dodged", "lots", "of", "bullets", "today", "but", "havem't", "fully", "seen", "any", "new", "Pok\u00e9mon", "or", "characters", "yet", ".", "Anyway", ",", "I'm", "scuttling", "back", "to", "my", "bunker", "till", "I", "feel", "safe", "and", "the", "hype", "dies", "down", "."], "text_2_tokenized": ["Day", "#", "6", "In", "the", "bunker", ".", "I've", "eaten", "all", "the", "Cheetos", ",", "I'm", "also", "starting", "to", "suspect", "that", "Pence", "is", "really", "and", "old", "roll", "of", "bologna", "with", "a", "hair", "piece", "on", ".", "-", "Love", "Donny", "-"]}
-{"id": "1130-bunker", "word": "bunker", "label_binary": 0, "text_1": "if we really get 2-2-2 lock, i hope y'all are excited for bunker vs emp+nanoblade every single map :)", "token_idx_1": 14, "text_start_1": 58, "text_end_1": 64, "date_1": "2019-06", "text_2": "In response a Trump tweet: Gov. Inslee tweeted: \"A man who is totally incapable of governing should stay out of Washington state's business. \u201cStoop\u201d tweeting.\"Seattle Mayor Jenny Durkan replied on Twitter with \"Make us all safe. Go back to your bunker. #BlackLivesMatter\"", "token_idx_2": 52, "text_start_2": 245, "text_end_2": 251, "date_2": "2020-06", "text_1_tokenized": ["if", "we", "really", "get", "2-2-", "2", "lock", ",", "i", "hope", "y'all", "are", "excited", "for", "bunker", "vs", "emp", "+", "nanoblade", "every", "single", "map", ":)"], "text_2_tokenized": ["In", "response", "a", "Trump", "tweet", ":", "Gov", ".", "Inslee", "tweeted", ":", "\"", "A", "man", "who", "is", "totally", "incapable", "of", "governing", "should", "stay", "out", "of", "Washington", "state's", "business", ".", "\u201c", "Stoop", "\u201d", "tweeting", ".", "\"", "Seattle", "Mayor", "Jenny", "Durkan", "replied", "on", "Twitter", "with", "\"", "Make", "us", "all", "safe", ".", "Go", "back", "to", "your", "bunker", ".", "#BlackLivesMatter", "\""]}
-{"id": "1131-bunker", "word": "bunker", "label_binary": 0, "text_1": "All this bloody rain has put a serious damper on things. The bunker is leaking.\ud83d\ude2d #Timeless #SaveTimeless", "token_idx_1": 13, "text_start_1": 61, "text_end_1": 67, "date_1": "2019-06", "text_2": "i forgot to say \ud83e\udd70 happy birthday bunker baby!! love u sm have the worst day dickless rapist \ud83d\ude0a\u270c\ufe0f @realDonaldTrump", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 39, "date_2": "2020-06", "text_1_tokenized": ["All", "this", "bloody", "rain", "has", "put", "a", "serious", "damper", "on", "things", ".", "The", "bunker", "is", "leaking", ".", "\ud83d\ude2d", "#Timeless", "#SaveTimeless"], "text_2_tokenized": ["i", "forgot", "to", "say", "\ud83e\udd70", "happy", "birthday", "bunker", "baby", "!", "!", "love", "u", "sm", "have", "the", "worst", "day", "dickless", "rapist", "\ud83d\ude0a", "\u270c", "\ufe0f", "@realDonaldTrump"]}
-{"id": "1132-bunker", "word": "bunker", "label_binary": 0, "text_1": "Around 5.00 am, Major Pluskat observes the fleet approaching from the observation bunker at Longues-Sur-Mer. #DDay #DDay75 #Longuessurmer", "token_idx_1": 13, "text_start_1": 82, "text_end_1": 88, "date_1": "2019-06", "text_2": "He's bringing in the military and will still hide in his bunker? \n#CowardinChief", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 63, "date_2": "2020-06", "text_1_tokenized": ["Around", "5.00", "am", ",", "Major", "Pluskat", "observes", "the", "fleet", "approaching", "from", "the", "observation", "bunker", "at", "Longues-Sur-Mer", ".", "#DDay", "#DDay75", "#Longuessurmer"], "text_2_tokenized": ["He's", "bringing", "in", "the", "military", "and", "will", "still", "hide", "in", "his", "bunker", "?", "#CowardinChief"]}
-{"id": "1133-bunker", "word": "bunker", "label_binary": 1, "text_1": "Black Mirror episode idea: Build an A.I. with a 5 year power source. Keep it off until you put it in a concrete bunker. Leave the bunker and turn it on. Then we wait and see what kind of thoughts an A.I. develops over a five year period of total darkness and isolation.", "token_idx_1": 28, "text_start_1": 112, "text_end_1": 118, "date_1": "2019-06", "text_2": "Where is Pence hiding? Does he have a designated bunker, too?", "token_idx_2": 10, "text_start_2": 49, "text_end_2": 55, "date_2": "2020-06", "text_1_tokenized": ["Black", "Mirror", "episode", "idea", ":", "Build", "an", "A", ".", "I", ".", "with", "a", "5", "year", "power", "source", ".", "Keep", "it", "off", "until", "you", "put", "it", "in", "a", "concrete", "bunker", ".", "Leave", "the", "bunker", "and", "turn", "it", "on", ".", "Then", "we", "wait", "and", "see", "what", "kind", "of", "thoughts", "an", "A", ".", "I", ".", "develops", "over", "a", "five", "year", "period", "of", "total", "darkness", "and", "isolation", "."], "text_2_tokenized": ["Where", "is", "Pence", "hiding", "?", "Does", "he", "have", "a", "designated", "bunker", ",", "too", "?"]}
-{"id": "1134-bunker", "word": "bunker", "label_binary": 0, "text_1": "OK, that's the dogs one bunker decision in their favour for the year. A try every day of the week #NRLBulldogsDragons", "token_idx_1": 6, "text_start_1": 24, "text_end_1": 30, "date_1": "2019-06", "text_2": "... it's like an underground bunker underneath pieces of land that I own-", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 35, "date_2": "2020-06", "text_1_tokenized": ["OK", ",", "that's", "the", "dogs", "one", "bunker", "decision", "in", "their", "favour", "for", "the", "year", ".", "A", "try", "every", "day", "of", "the", "week", "#NRLBulldogsDragons"], "text_2_tokenized": ["...", "it's", "like", "an", "underground", "bunker", "underneath", "pieces", "of", "land", "that", "I", "own", "-"]}
-{"id": "1135-bunker", "word": "bunker", "label_binary": 0, "text_1": "playing vs bunker for 2 hours PEPEGA", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 17, "date_1": "2019-06", "text_2": "Said in the voice of Eddie Lawrence: You can't walk down a ramp without taking baby steps?? You hide in the bunker at the first sign of a protest? You can't take a sip of water without both hands? Your daughter's married to a criminal? \nYour wife can't stand the sight of you?", "token_idx_2": 24, "text_start_2": 108, "text_end_2": 114, "date_2": "2020-06", "text_1_tokenized": ["playing", "vs", "bunker", "for", "2", "hours", "PEPEGA"], "text_2_tokenized": ["Said", "in", "the", "voice", "of", "Eddie", "Lawrence", ":", "You", "can't", "walk", "down", "a", "ramp", "without", "taking", "baby", "steps", "?", "?", "You", "hide", "in", "the", "bunker", "at", "the", "first", "sign", "of", "a", "protest", "?", "You", "can't", "take", "a", "sip", "of", "water", "without", "both", "hands", "?", "Your", "daughter's", "married", "to", "a", "criminal", "?", "Your", "wife", "can't", "stand", "the", "sight", "of", "you", "?"]}
-{"id": "1136-bunker", "word": "bunker", "label_binary": 0, "text_1": "No bunker cam? I was looking forward to seeing everyone \ud83d\ude22 #f2b", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 9, "date_1": "2019-06", "text_2": "Protesters gathering in front of the White House and #CowardTrump's new fence this evening. Guess who will be \u201cinspecting\u201d his bunker again tonight?", "token_idx_2": 23, "text_start_2": 127, "text_end_2": 133, "date_2": "2020-06", "text_1_tokenized": ["No", "bunker", "cam", "?", "I", "was", "looking", "forward", "to", "seeing", "everyone", "\ud83d\ude22", "#f2b"], "text_2_tokenized": ["Protesters", "gathering", "in", "front", "of", "the", "White", "House", "and", "#CowardTrump's", "new", "fence", "this", "evening", ".", "Guess", "who", "will", "be", "\u201c", "inspecting", "\u201d", "his", "bunker", "again", "tonight", "?"]}
-{"id": "1137-bunker", "word": "bunker", "label_binary": 0, "text_1": "Watching @Dalance23 putt it from the green into the bunker on #6 today was so enjoyable.. \ud83d\ude02\ud83d\ude02 #doyouworkhere", "token_idx_1": 9, "text_start_1": 52, "text_end_1": 58, "date_1": "2019-06", "text_2": "So given the chinks & The Bud buds are now threatening the world with Nukes..has anyone got a bunker I can accommodate for a while \ud83d\ude4f", "token_idx_2": 20, "text_start_2": 98, "text_end_2": 104, "date_2": "2020-06", "text_1_tokenized": ["Watching", "@Dalance23", "putt", "it", "from", "the", "green", "into", "the", "bunker", "on", "#", "6", "today", "was", "so", "enjoyable", "..", "\ud83d\ude02", "\ud83d\ude02", "#doyouworkhere"], "text_2_tokenized": ["So", "given", "the", "chinks", "&", "The", "Bud", "buds", "are", "now", "threatening", "the", "world", "with", "Nukes", "..", "has", "anyone", "got", "a", "bunker", "I", "can", "accommodate", "for", "a", "while", "\ud83d\ude4f"]}
-{"id": "1138-bunker", "word": "bunker", "label_binary": 0, "text_1": "Michael Greller caused Spieth to hit into the right green side bunker on 6. Hope Michael can help his man get it up and down for birdie.", "token_idx_1": 11, "text_start_1": 63, "text_end_1": 69, "date_1": "2019-06", "text_2": "Does the secret service have two members with covid after Tulsa and tons in quarantine thanks to bunker bitch tantrums? \nok, #JoeByeDon", "token_idx_2": 17, "text_start_2": 97, "text_end_2": 103, "date_2": "2020-06", "text_1_tokenized": ["Michael", "Greller", "caused", "Spieth", "to", "hit", "into", "the", "right", "green", "side", "bunker", "on", "6", ".", "Hope", "Michael", "can", "help", "his", "man", "get", "it", "up", "and", "down", "for", "birdie", "."], "text_2_tokenized": ["Does", "the", "secret", "service", "have", "two", "members", "with", "covid", "after", "Tulsa", "and", "tons", "in", "quarantine", "thanks", "to", "bunker", "bitch", "tantrums", "?", "ok", ",", "#JoeByeDon"]}
-{"id": "1139-bunker", "word": "bunker", "label_binary": 0, "text_1": "Unreal shot there from @GaryWoodland. These guys feel no pressure seemingly. @BKoepka in the bunker against the lip. Yikes", "token_idx_1": 16, "text_start_1": 93, "text_end_1": 99, "date_1": "2019-06", "text_2": "My fav bug/glitch for season 5 apex is toothless ride or die/bunker buster gibby. Actually helps w my sanity between games lmfao.", "token_idx_2": 15, "text_start_2": 61, "text_end_2": 67, "date_2": "2020-06", "text_1_tokenized": ["Unreal", "shot", "there", "from", "@GaryWoodland", ".", "These", "guys", "feel", "no", "pressure", "seemingly", ".", "@BKoepka", "in", "the", "bunker", "against", "the", "lip", ".", "Yikes"], "text_2_tokenized": ["My", "fav", "bug", "/", "glitch", "for", "season", "5", "apex", "is", "toothless", "ride", "or", "die", "/", "bunker", "buster", "gibby", ".", "Actually", "helps", "w", "my", "sanity", "between", "games", "lmfao", "."]}
-{"id": "1140-bunker", "word": "bunker", "label_binary": 0, "text_1": "\"Overwatch 2 is dumb\" if it means no more goats, bunker, or any cheese comps. Better matchmaking. Not be a dead game. Actual good players a chance into OWL. Sign me the fuck up blizzard hopefully learned a lot from Overwatch and have a lot to work with for a successor.", "token_idx_1": 13, "text_start_1": 49, "text_end_1": 55, "date_1": "2019-06", "text_2": "A world leader hiding in a bunker. That usually ends with him getting shot by the military or shooting himself. I am ok with either option.", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 33, "date_2": "2020-06", "text_1_tokenized": ["\"", "Overwatch", "2", "is", "dumb", "\"", "if", "it", "means", "no", "more", "goats", ",", "bunker", ",", "or", "any", "cheese", "comps", ".", "Better", "matchmaking", ".", "Not", "be", "a", "dead", "game", ".", "Actual", "good", "players", "a", "chance", "into", "OWL", ".", "Sign", "me", "the", "fuck", "up", "blizzard", "hopefully", "learned", "a", "lot", "from", "Overwatch", "and", "have", "a", "lot", "to", "work", "with", "for", "a", "successor", "."], "text_2_tokenized": ["A", "world", "leader", "hiding", "in", "a", "bunker", ".", "That", "usually", "ends", "with", "him", "getting", "shot", "by", "the", "military", "or", "shooting", "himself", ".", "I", "am", "ok", "with", "either", "option", "."]}
-{"id": "1141-bunker", "word": "bunker", "label_binary": 0, "text_1": "......Um. Narwhal won. And I'm confused. Well. Time to hide in the Potato bunker and deliberate the morality and philosophy between order and chaos.", "token_idx_1": 18, "text_start_1": 74, "text_end_1": 80, "date_1": "2019-06", "text_2": "Why didn't DJT play golf this weekend? He was afraid of hitting it into the bunker! \n@IAmJohnAles @jimmykimmel", "token_idx_2": 16, "text_start_2": 76, "text_end_2": 82, "date_2": "2020-06", "text_1_tokenized": ["...", "Um", ".", "Narwhal", "won", ".", "And", "I'm", "confused", ".", "Well", ".", "Time", "to", "hide", "in", "the", "Potato", "bunker", "and", "deliberate", "the", "morality", "and", "philosophy", "between", "order", "and", "chaos", "."], "text_2_tokenized": ["Why", "didn't", "DJT", "play", "golf", "this", "weekend", "?", "He", "was", "afraid", "of", "hitting", "it", "into", "the", "bunker", "!", "@IAmJohnAles", "@jimmykimmel"]}
-{"id": "1142-bunker", "word": "bunker", "label_binary": 0, "text_1": "Omg... @Outlaws running the mobile bunker was the Best. Thing. Ever. It may only work once, but boy did it work. #OWL2019", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 41, "date_1": "2019-06", "text_2": "That's what I was afraid of. Not to mention the shape of that bunker I'm seeing where? Okay", "token_idx_2": 14, "text_start_2": 62, "text_end_2": 68, "date_2": "2020-06", "text_1_tokenized": ["Omg", "...", "@Outlaws", "running", "the", "mobile", "bunker", "was", "the", "Best", ".", "Thing", ".", "Ever", ".", "It", "may", "only", "work", "once", ",", "but", "boy", "did", "it", "work", ".", "#OWL2019"], "text_2_tokenized": ["That's", "what", "I", "was", "afraid", "of", ".", "Not", "to", "mention", "the", "shape", "of", "that", "bunker", "I'm", "seeing", "where", "?", "Okay"]}
-{"id": "1143-bunker", "word": "bunker", "label_binary": 1, "text_1": "my top three picks for a house are a backyard for a dog, a garage for wood-working and enough space between neighbors that I can build a secret bunker for when the world ends", "token_idx_1": 29, "text_start_1": 144, "text_end_1": 150, "date_1": "2019-06", "text_2": "Wondering how #BunkerBoy @realDonaldTrump is doing today...probably crying into a #BigMac in his bunker because he couldn't even fill a 19k cap arena in one of the most red states. Great momentum, but it has to carry to 11/3! They're going to try & limit how to vote. But do it!", "token_idx_2": 15, "text_start_2": 97, "text_end_2": 103, "date_2": "2020-06", "text_1_tokenized": ["my", "top", "three", "picks", "for", "a", "house", "are", "a", "backyard", "for", "a", "dog", ",", "a", "garage", "for", "wood-working", "and", "enough", "space", "between", "neighbors", "that", "I", "can", "build", "a", "secret", "bunker", "for", "when", "the", "world", "ends"], "text_2_tokenized": ["Wondering", "how", "#BunkerBoy", "@realDonaldTrump", "is", "doing", "today", "...", "probably", "crying", "into", "a", "#BigMac", "in", "his", "bunker", "because", "he", "couldn't", "even", "fill", "a", "19k", "cap", "arena", "in", "one", "of", "the", "most", "red", "states", ".", "Great", "momentum", ",", "but", "it", "has", "to", "carry", "to", "11/3", "!", "They're", "going", "to", "try", "&", "limit", "how", "to", "vote", ".", "But", "do", "it", "!"]}
-{"id": "1144-bunker", "word": "bunker", "label_binary": 0, "text_1": "Why does the @AFL want a bunker? The @NRL has one and still mess up decisions.", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 31, "date_1": "2019-06", "text_2": "Know why Trump went down to inspect the bunker? He knows those cops are gonna get away with it and see no jail time. He knows. And he knows it's gonna kick off like never before. \nAnd he's a coward, so he's gonna leg it to his bunker and hide, because he's a fucking coward.", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 46, "date_2": "2020-06", "text_1_tokenized": ["Why", "does", "the", "@AFL", "want", "a", "bunker", "?", "The", "@NRL", "has", "one", "and", "still", "mess", "up", "decisions", "."], "text_2_tokenized": ["Know", "why", "Trump", "went", "down", "to", "inspect", "the", "bunker", "?", "He", "knows", "those", "cops", "are", "gonna", "get", "away", "with", "it", "and", "see", "no", "jail", "time", ".", "He", "knows", ".", "And", "he", "knows", "it's", "gonna", "kick", "off", "like", "never", "before", ".", "And", "he's", "a", "coward", ",", "so", "he's", "gonna", "leg", "it", "to", "his", "bunker", "and", "hide", ",", "because", "he's", "a", "fucking", "coward", "."]}
-{"id": "1145-bunker", "word": "bunker", "label_binary": 1, "text_1": "In the middle of WWIII, would Trump be arguing with Cher in his bunker at 4:00 a.m. while in a crisis meeting about damage reports? #BetteMidler", "token_idx_1": 14, "text_start_1": 64, "text_end_1": 70, "date_1": "2019-06", "text_2": "Ok donnie what Terry McCauluff meant if someone has to be in the bunker running this gs we prefer if Joe Biden is there being in charge of this country", "token_idx_2": 13, "text_start_2": 65, "text_end_2": 71, "date_2": "2020-06", "text_1_tokenized": ["In", "the", "middle", "of", "WWIII", ",", "would", "Trump", "be", "arguing", "with", "Cher", "in", "his", "bunker", "at", "4:00", "a", ".", "m", ".", "while", "in", "a", "crisis", "meeting", "about", "damage", "reports", "?", "#BetteMidler"], "text_2_tokenized": ["Ok", "donnie", "what", "Terry", "McCauluff", "meant", "if", "someone", "has", "to", "be", "in", "the", "bunker", "running", "this", "gs", "we", "prefer", "if", "Joe", "Biden", "is", "there", "being", "in", "charge", "of", "this", "country"]}
-{"id": "1146-bunker", "word": "bunker", "label_binary": 1, "text_1": "Finally got this jackass! He was holed up in his bunker under his combat ship. I made sure to blast it away with some C6 explosives! Nice work Jiffy and Aisha. Now we get our money!", "token_idx_1": 11, "text_start_1": 49, "text_end_1": 55, "date_1": "2019-06", "text_2": "#TraitorTrump \u201cthere have not been many attacks on us\u201d troops. This is his excuse for letting American troops die while he does nothing.
On a related note, IT did go down to a bunker when he got scared of unarmed protesters and then built new security fences around the WH.", "token_idx_2": 38, "text_start_2": 176, "text_end_2": 182, "date_2": "2020-06", "text_1_tokenized": ["Finally", "got", "this", "jackass", "!", "He", "was", "holed", "up", "in", "his", "bunker", "under", "his", "combat", "ship", ".", "I", "made", "sure", "to", "blast", "it", "away", "with", "some", "C6", "explosives", "!", "Nice", "work", "Jiffy", "and", "Aisha", ".", "Now", "we", "get", "our", "money", "!"], "text_2_tokenized": ["#TraitorTrump", "\u201c", "there", "have", "not", "been", "many", "attacks", "on", "us", "\u201d", "troops", ".", "This", "is", "his", "excuse", "for", "letting", "American", "troops", "die", "while", "he", "does", "nothing", ".", "On", "a", "related", "note", ",", "IT", "did", "go", "down", "to", "a", "bunker", "when", "he", "got", "scared", "of", "unarmed", "protesters", "and", "then", "built", "new", "security", "fences", "around", "the", "WH", "."]} -{"id": "1147-bunker", "word": "bunker", "label_binary": 0, "text_1": "\u201cThe Rain\u201d A brother and sister duo are left in a bunker by their father to avoid the poisonous rain that killed majority of the population...until they're forced out years later. I like my post apocalypses with zombies, but killer rain will do for now. Season 1 > Season 2.", "token_idx_1": 13, "text_start_1": 50, "text_end_1": 56, "date_1": "2019-06", "text_2": "Pence sound like the OG bunker boy in April 45. \u201cThe final victory is near.\u201d", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 30, "date_2": "2020-06", "text_1_tokenized": ["\u201c", "The", "Rain", "\u201d", "A", "brother", "and", "sister", "duo", "are", "left", "in", "a", "bunker", "by", "their", "father", "to", "avoid", "the", "poisonous", "rain", "that", "killed", "majority", "of", "the", "population", "...", "until", "they're", "forced", "out", "years", "later", ".", "I", "like", "my", "post", "apocalypses", "with", "zombies", ",", "but", "killer", "rain", "will", "do", "for", "now", ".", "Season", "1", ">", "Season", "2", "."], "text_2_tokenized": ["Pence", "sound", "like", "the", "OG", "bunker", "boy", "in", "April", "45", ".", "\u201c", "The", "final", "victory", "is", "near", ".", "\u201d"]} -{"id": "1148-bunker", "word": "bunker", "label_binary": 0, "text_1": "\"Before Dirk, the last time I retired a German he was standing right next to Eva Braun in a bunker.\" Jesse The Usher", "token_idx_1": 21, "text_start_1": 92, "text_end_1": 98, "date_1": "2019-06", "text_2": "I'm gonna put this out there...but I have a strong belief that Trump is going to try and make a run of it. 
Gonna seek shelter, and I'm not talking bunker kids...any thoughts?", "token_idx_2": 34, "text_start_2": 147, "text_end_2": 153, "date_2": "2020-06", "text_1_tokenized": ["\"", "Before", "Dirk", ",", "the", "last", "time", "I", "retired", "a", "German", "he", "was", "standing", "right", "next", "to", "Eva", "Braun", "in", "a", "bunker", ".", "\"", "Jesse", "The", "Usher"], "text_2_tokenized": ["I'm", "gonna", "put", "this", "out", "there", "...", "but", "I", "have", "a", "strong", "belief", "that", "Trump", "is", "going", "to", "try", "and", "make", "a", "run", "of", "it", ".", "Gonna", "seek", "shelter", ",", "and", "I'm", "not", "talking", "bunker", "kids", "...", "any", "thoughts", "?"]} -{"id": "1149-bunker", "word": "bunker", "label_binary": 0, "text_1": "Great bunker shot to 2 feet\ud83d\udc4f and it should be a par save for Thor", "token_idx_1": 1, "text_start_1": 6, "text_end_1": 12, "date_1": "2019-06", "text_2": "Back to the bunker now. I suppose", "token_idx_2": 3, "text_start_2": 12, "text_end_2": 18, "date_2": "2020-06", "text_1_tokenized": ["Great", "bunker", "shot", "to", "2", "feet", "\ud83d\udc4f", "and", "it", "should", "be", "a", "par", "save", "for", "Thor"], "text_2_tokenized": ["Back", "to", "the", "bunker", "now", ".", "I", "suppose"]} -{"id": "1150-bunker", "word": "bunker", "label_binary": 1, "text_1": "You enter the bunker. It appears strange. You find some residue nearby.", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 20, "date_1": "2019-06", "text_2": "#realDonaldTrimp The reason reporters were thrown out of the White House was because Trump was given a \u201ctime out\u201d in the bunker for having a temper tantrum before the rally and he didn't want reporters to mock him!", "token_idx_2": 23, "text_start_2": 121, "text_end_2": 127, "date_2": "2020-06", "text_1_tokenized": ["You", "enter", "the", "bunker", ".", "It", "appears", "strange", ".", "You", "find", "some", "residue", "nearby", "."], "text_2_tokenized": ["#realDonaldTrimp", "The", "reason", "reporters", "were", "thrown", "out", "of", "the", "White", "House", "was", "because", "Trump", "was", "given", "a", "\u201c", "time", "out", "\u201d", "in", "the", "bunker", "for", "having", "a", "temper", "tantrum", "before", "the", "rally", "and", "he", "didn't", "want", "reporters", "to", "mock", "him", "!"]} -{"id": "1151-bunker", "word": "bunker", "label_binary": 0, "text_1": "So if the AFL institute a central bunker it will be a mess when two score reviews happen at the same time. There are after two games on a the same time.", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 40, "date_1": "2019-06", "text_2": "*emerging from my bunker, chapbook draft in hand* \"Better to walk through the dust than have it fall upon you...\" grubhub guy: \"huh?\"", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 24, "date_2": "2020-06", "text_1_tokenized": ["So", "if", "the", "AFL", "institute", "a", "central", "bunker", "it", "will", "be", "a", "mess", "when", "two", "score", "reviews", "happen", "at", "the", "same", "time", ".", "There", "are", "after", "two", "games", "on", "a", "the", "same", "time", "."], "text_2_tokenized": ["*", "emerging", "from", "my", "bunker", ",", "chapbook", "draft", "in", "hand", "*", "\"", "Better", "to", "walk", "through", "the", "dust", "than", "have", "it", "fall", "upon", "you", "...", "\"", "grubhub", "guy", ":", "\"", "huh", "?", "\""]} -{"id": "1152-bunker", "word": "bunker", "label_binary": 1, "text_1": "Watched I Am Mother last night. 
It was basically a long Jeremy Bentham lesson but in a post-apocalyptic bunker with robots, which is ideally how all philosophy should be taught. 4 out of 5 \u2013 would learn about utilitarianism from robots again.", "token_idx_1": 19, "text_start_1": 104, "text_end_1": 110, "date_1": "2019-06", "text_2": "The Tulsa show....The country is speaking now, not your cowardly republican elected enablers. We the people have said YOU ARE GOING DOWN & WE are NOT supporting you !! Return to your bunker!", "token_idx_2": 37, "text_start_2": 187, "text_end_2": 193, "date_2": "2020-06", "text_1_tokenized": ["Watched", "I", "Am", "Mother", "last", "night", ".", "It", "was", "basically", "a", "long", "Jeremy", "Bentham", "lesson", "but", "in", "a", "post-apocalyptic", "bunker", "with", "robots", ",", "which", "is", "ideally", "how", "all", "philosophy", "should", "be", "taught", ".", "4", "out", "of", "5", "\u2013", "would", "learn", "about", "utilitarianism", "from", "robots", "again", "."], "text_2_tokenized": ["The", "Tulsa", "show", "...", "The", "country", "is", "speaking", "now", ",", "not", "your", "cowardly", "republican", "elected", "enablers", ".", "We", "the", "people", "have", "said", "YOU", "ARE", "GOING", "DOWN", "&", "WE", "are", "NOT", "supporting", "you", "!", "!", "Return", "to", "your", "bunker", "!"]} -{"id": "1153-bunker", "word": "bunker", "label_binary": 0, "text_1": "Once last stand at the bunker fire. Machine gun and pitchfork at breast. #AtomTan #TheClash", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 29, "date_1": "2019-06", "text_2": "A man who finds conspiracy theories in ABSOLUTELY EVERYTHING, expects us to believe protesters cleared from a park minutes before he exited his bunker for a walk ... was a total coincidence. Why not just start with \u201cI think you all are so f**king stupid ...\u201d", "token_idx_2": 24, "text_start_2": 144, "text_end_2": 150, "date_2": "2020-06", "text_1_tokenized": ["Once", "last", "stand", "at", "the", "bunker", "fire", ".", "Machine", "gun", "and", "pitchfork", "at", "breast", ".", "#AtomTan", "#TheClash"], "text_2_tokenized": ["A", "man", "who", "finds", "conspiracy", "theories", "in", "ABSOLUTELY", "EVERYTHING", ",", "expects", "us", "to", "believe", "protesters", "cleared", "from", "a", "park", "minutes", "before", "he", "exited", "his", "bunker", "for", "a", "walk", "...", "was", "a", "total", "coincidence", ".", "Why", "not", "just", "start", "with", "\u201c", "I", "think", "you", "all", "are", "so", "f", "*", "*", "king", "stupid", "...", "\u201d"]} -{"id": "1154-bunker", "word": "bunker", "label_binary": 1, "text_1": "In the #75DDay anniversary, I'm reminded of a story my gran would nearly always tell me... 
she was in the RAF and was on a double shift in the communications bunker the night of 5th June going over to the 6th and being given a 2 minute break by the officer she was working with", "token_idx_1": 32, "text_start_1": 158, "text_end_1": 164, "date_1": "2019-06", "text_2": "Trump's been very quiet today - is the Wi-Fi down in the bunker?", "token_idx_2": 12, "text_start_2": 57, "text_end_2": 63, "date_2": "2020-06", "text_1_tokenized": ["In", "the", "#75DDay", "anniversary", ",", "I'm", "reminded", "of", "a", "story", "my", "gran", "would", "nearly", "always", "tell", "me", "...", "she", "was", "in", "the", "RAF", "and", "was", "on", "a", "double", "shift", "in", "the", "communications", "bunker", "the", "night", "of", "5th", "June", "going", "over", "to", "the", "6th", "and", "being", "given", "a", "2", "minute", "break", "by", "the", "officer", "she", "was", "working", "with"], "text_2_tokenized": ["Trump's", "been", "very", "quiet", "today", "-", "is", "the", "Wi-Fi", "down", "in", "the", "bunker", "?"]} -{"id": "1155-bunker", "word": "bunker", "label_binary": 1, "text_1": "she should've stayed in the bunker quite frankly", "token_idx_1": 5, "text_start_1": 28, "text_end_1": 34, "date_1": "2019-06", "text_2": "Fox News is a lot like Trump just \u201cinspecting\u201d the bunker. Both are bullshit.", "token_idx_2": 12, "text_start_2": 51, "text_end_2": 57, "date_2": "2020-06", "text_1_tokenized": ["she", "should've", "stayed", "in", "the", "bunker", "quite", "frankly"], "text_2_tokenized": ["Fox", "News", "is", "a", "lot", "like", "Trump", "just", "\u201c", "inspecting", "\u201d", "the", "bunker", ".", "Both", "are", "bullshit", "."]} -{"id": "1156-bunker", "word": "bunker", "label_binary": 0, "text_1": "The 2 best shots @GaryWoodland hit during final round @usopengolf 2 maintain positive momentum 2nd shot from right rough 11th hole ended up on fridge of green flop shot from behind back right bunker on par 3 12th. He averted disaster kept lead by hitting great shots both times.", "token_idx_1": 33, "text_start_1": 192, "text_end_1": 198, "date_1": "2019-06", "text_2": "Trump's Vitamin D levels are gonna get hella low if he stays in that bunker. People who get Covid & have low Vitamin D levels are associated with much poorer outcomes. 
I'm just stating the facts, don't shoot the messenger, just unfollow me if these facts trigger your punk ass idc", "token_idx_2": 14, "text_start_2": 69, "text_end_2": 75, "date_2": "2020-06", "text_1_tokenized": ["The", "2", "best", "shots", "@GaryWoodland", "hit", "during", "final", "round", "@usopengolf", "2", "maintain", "positive", "momentum", "2nd", "shot", "from", "right", "rough", "11th", "hole", "ended", "up", "on", "fridge", "of", "green", "flop", "shot", "from", "behind", "back", "right", "bunker", "on", "par", "3", "12th", ".", "He", "averted", "disaster", "kept", "lead", "by", "hitting", "great", "shots", "both", "times", "."], "text_2_tokenized": ["Trump's", "Vitamin", "D", "levels", "are", "gonna", "get", "hella", "low", "if", "he", "stays", "in", "that", "bunker", ".", "People", "who", "get", "Covid", "&", "have", "low", "Vitamin", "D", "levels", "are", "associated", "with", "much", "poorer", "outcomes", ".", "I'm", "just", "stating", "the", "facts", ",", "don't", "shoot", "the", "messenger", ",", "just", "unfollow", "me", "if", "these", "facts", "trigger", "your", "punk", "ass", "idc"]} -{"id": "1157-bunker", "word": "bunker", "label_binary": 0, "text_1": "Genuinely laugh at people who message you for not playing the way they do on #ApexLegends saying you shouldn't camp. Well it's better than running into a fight at bunker on your own with 3 teams having a fight. #Nonce", "token_idx_1": 30, "text_start_1": 163, "text_end_1": 169, "date_1": "2019-06", "text_2": "They moved into the underground Vorbunker, part of Hitler's underground bunker complex, on 22 April 1945. Hitler committed suicide on 30 April. In accordance with Hitler's will, Goebbels succeeded him as Chancellor of Germany; he served one day in this post.", "token_idx_2": 11, "text_start_2": 35, "text_end_2": 41, "date_2": "2020-06", "text_1_tokenized": ["Genuinely", "laugh", "at", "people", "who", "message", "you", "for", "not", "playing", "the", "way", "they", "do", "on", "#ApexLegends", "saying", "you", "shouldn't", "camp", ".", "Well", "it's", "better", "than", "running", "into", "a", "fight", "at", "bunker", "on", "your", "own", "with", "3", "teams", "having", "a", "fight", ".", "#Nonce"], "text_2_tokenized": ["They", "moved", "into", "the", "underground", "Vorbunker", ",", "part", "of", "Hitler's", "underground", "bunker", "complex", ",", "on", "22", "April", "1945", ".", "Hitler", "committed", "suicide", "on", "30", "April", ".", "In", "accordance", "with", "Hitler's", "will", ",", "Goebbels", "succeeded", "him", "as", "Chancellor", "of", "Germany", ";", "he", "served", "one", "day", "in", "this", "post", "."]} -{"id": "1158-bunker", "word": "bunker", "label_binary": 0, "text_1": "DONE - Added pre-load routines back for random location spawns when leaving bunker", "token_idx_1": 12, "text_start_1": 76, "text_end_1": 82, "date_1": "2019-06", "text_2": "I bet Trump, his NatSec and WH PR team are a holed up in the White House bunker tonight hiding out as the Russia Bounty stories tonight publicly put paid to all their lies.", "token_idx_2": 18, "text_start_2": 73, "text_end_2": 79, "date_2": "2020-06", "text_1_tokenized": ["DONE", "-", "Added", "pre-load", "routines", "back", "for", "random", "location", "spawns", "when", "leaving", "bunker"], "text_2_tokenized": ["I", "bet", "Trump", ",", "his", "NatSec", "and", "WH", "PR", "team", "are", "a", "holed", "up", "in", "the", "White", "House", "bunker", "tonight", "hiding", "out", "as", "the", "Russia", "Bounty", "stories", "tonight", "publicly", "put", "paid", "to", "all", "their", "lies", "."]}
Firm foostepts marched into the fortified bunker passing through multiple doors and eventually making her way into the base +]", "token_idx_1": 34, "text_start_1": 165, "text_end_1": 171, "date_1": "2019-06", "text_2": "\"We're surveilling Fort Trumpter but we can't see anyone.\" \"Did you check the underground bunker?\"", "token_idx_2": 18, "text_start_2": 90, "text_end_2": 96, "date_2": "2020-06", "text_1_tokenized": ["\u2800", "\u2800", "\u2800", "The", "nearby", "S", ".", "F", "base", "in", "sector", "four", "which", "was", "under", "the", "command", "of", "Scarcrew", "after", "M16", "paved", "the", "way", "for", "their", "advance", ".", "Firm", "foostepts", "marched", "into", "the", "fortified", "bunker", "passing", "through", "multiple", "doors", "and", "eventually", "making", "her", "way", "into", "the", "base", "+", "]"], "text_2_tokenized": ["\"", "We're", "surveilling", "Fort", "Trumpter", "but", "we", "can't", "see", "anyone", ".", "\"", "\"", "Did", "you", "check", "the", "underground", "bunker", "?", "\""]} -{"id": "1163-bunker", "word": "bunker", "label_binary": 1, "text_1": "Did Marianne Williamson emerge from a bunker 3 years ago and discover TruthOut?", "token_idx_1": 6, "text_start_1": 38, "text_end_1": 44, "date_1": "2019-06", "text_2": "Im about to start making a bunker just in case", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 33, "date_2": "2020-06", "text_1_tokenized": ["Did", "Marianne", "Williamson", "emerge", "from", "a", "bunker", "3", "years", "ago", "and", "discover", "TruthOut", "?"], "text_2_tokenized": ["Im", "about", "to", "start", "making", "a", "bunker", "just", "in", "case"]} -{"id": "1164-bunker", "word": "bunker", "label_binary": 1, "text_1": "ion like streaming services cuz they all whacky i like dvds. i got everything \ud83d\ude0e when there's a zombie apocalypse & we ain't got no Netflix, i got all the dvds so i can watch spiderverse whenever i want in my bunker", "token_idx_1": 43, "text_start_1": 212, "text_end_1": 218, "date_1": "2019-06", "text_2": "When China never invaded our land neither build any bunker on our territory. Our soldiers were killed from falling of the cliff after killing 45 Chinese soldiers. Then what's the point of #BoycottChina Little bit of help here @PMOIndia ? 
#PoliticsLive #IndoChinaFaceoff", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 58, "date_2": "2020-06", "text_1_tokenized": ["ion", "like", "streaming", "services", "cuz", "they", "all", "whacky", "i", "like", "dvds", ".", "i", "got", "everything", "\ud83d\ude0e", "when", "there's", "a", "zombie", "apocalypse", "&", "we", "ain't", "got", "no", "Netflix", ",", "i", "got", "all", "the", "dvds", "so", "i", "can", "watch", "spiderverse", "whenever", "i", "want", "in", "my", "bunker"], "text_2_tokenized": ["When", "China", "never", "invaded", "our", "land", "neither", "build", "any", "bunker", "on", "our", "territory", ".", "Our", "soldiers", "were", "killed", "from", "falling", "of", "the", "cliff", "after", "killing", "45", "Chinese", "soldiers", ".", "Then", "what's", "the", "point", "of", "#BoycottChina", "Little", "bit", "of", "help", "here", "@PMOIndia", "?", "#PoliticsLive", "#IndoChinaFaceoff"]} -{"id": "1165-bunker", "word": "bunker", "label_binary": 1, "text_1": "RECALL: BIDEN told media if something happened, find him in the secret, undisclosed, secure underground bunker beneath VP house?", "token_idx_1": 19, "text_start_1": 104, "text_end_1": 110, "date_1": "2019-06", "text_2": "America needs a leader to show the people that masks work and how important they are. America does not need a boss who tells people to wear a mask while not wearing one himself and hiding away in his bunker. #LeadByExample", "token_idx_2": 40, "text_start_2": 200, "text_end_2": 206, "date_2": "2020-06", "text_1_tokenized": ["RECALL", ":", "BIDEN", "told", "media", "if", "something", "happened", ",", "find", "him", "in", "the", "secret", ",", "undisclosed", ",", "secure", "underground", "bunker", "beneath", "VP", "house", "?"], "text_2_tokenized": ["America", "needs", "a", "leader", "to", "show", "the", "people", "that", "masks", "work", "and", "how", "important", "they", "are", ".", "America", "does", "not", "need", "a", "boss", "who", "tells", "people", "to", "wear", "a", "mask", "while", "not", "wearing", "one", "himself", "and", "hiding", "away", "in", "his", "bunker", ".", "#LeadByExample"]} -{"id": "1166-bunker", "word": "bunker", "label_binary": 1, "text_1": "I went Normandy in 1999. It was a moving place. Standing near the bunker of Pointe du Hoc, and looking down that cliff, you understand what an effort it was to take that position. I'll never forget, and thankful I never had to experience it 1st hand. #DDay75 \ud83c\uddfa\ud83c\uddf8\ud83c\uddec\ud83c\udde7\ud83c\udde8\ud83c\udde6\ud83c\uddeb\ud83c\uddf7", "token_idx_1": 15, "text_start_1": 66, "text_end_1": 72, "date_1": "2019-06", "text_2": "Yet sleeping Joe was out among protesters while you don the con hid in bunker, then added a taller fence around the Dark House. 
Be nice if just once you stopped spewing your hatred.", "token_idx_2": 14, "text_start_2": 71, "text_end_2": 77, "date_2": "2020-06", "text_1_tokenized": ["I", "went", "Normandy", "in", "1999", ".", "It", "was", "a", "moving", "place", ".", "Standing", "near", "the", "bunker", "of", "Pointe", "du", "Hoc", ",", "and", "looking", "down", "that", "cliff", ",", "you", "understand", "what", "an", "effort", "it", "was", "to", "take", "that", "position", ".", "I'll", "never", "forget", ",", "and", "thankful", "I", "never", "had", "to", "experience", "it", "1st", "hand", ".", "#DDay75", "\ud83c\uddfa", "\ud83c\uddf8", "\ud83c\uddec", "\ud83c\udde7", "\ud83c\udde8", "\ud83c\udde6", "\ud83c\uddeb", "\ud83c\uddf7"], "text_2_tokenized": ["Yet", "sleeping", "Joe", "was", "out", "among", "protesters", "while", "you", "don", "the", "con", "hid", "in", "bunker", ",", "then", "added", "a", "taller", "fence", "around", "the", "Dark", "House", ".", "Be", "nice", "if", "just", "once", "you", "stopped", "spewing", "your", "hatred", "."]} -{"id": "1167-bunker", "word": "bunker", "label_binary": 0, "text_1": "Cheats! damn bunker at it again #cruvhig", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 19, "date_1": "2019-06", "text_2": ". @realDonaldTrump Lots of people came to say hi to you tonight! How's the bunker? #PresidentBunkerInspector", "token_idx_2": 15, "text_start_2": 75, "text_end_2": 81, "date_2": "2020-06", "text_1_tokenized": ["Cheats", "!", "damn", "bunker", "at", "it", "again", "#cruvhig"], "text_2_tokenized": [".", "@realDonaldTrump", "Lots", "of", "people", "came", "to", "say", "hi", "to", "you", "tonight", "!", "How's", "the", "bunker", "?", "#PresidentBunkerInspector"]} -{"id": "1168-bunker", "word": "bunker", "label_binary": 1, "text_1": "*curls up on the couch in the bunker's main room to wait for the morning. She plugs in her earbuds and listens to music*", "token_idx_1": 8, "text_start_1": 30, "text_end_1": 38, "date_1": "2019-06", "text_2": "Secret Service Agent: Mr. President, protestors have breached the first barricade. Trump: You know that bunker we have downstairs? What sort of condition is it in? You know, on a completely unrelated note. SSA: I'm--not sure, sir. Trump: I should inspect it. Now. Like now.", "token_idx_2": 20, "text_start_2": 104, "text_end_2": 110, "date_2": "2020-06", "text_1_tokenized": ["*", "curls", "up", "on", "the", "couch", "in", "the", "bunker's", "main", "room", "to", "wait", "for", "the", "morning", ".", "She", "plugs", "in", "her", "earbuds", "and", "listens", "to", "music", "*"], "text_2_tokenized": ["Secret", "Service", "Agent", ":", "Mr", ".", "President", ",", "protestors", "have", "breached", "the", "first", "barricade", ".", "Trump", ":", "You", "know", "that", "bunker", "we", "have", "downstairs", "?", "What", "sort", "of", "condition", "is", "it", "in", "?", "You", "know", ",", "on", "a", "completely", "unrelated", "note", ".", "SSA", ":", "I'm--not", "sure", ",", "sir", ".", "Trump", ":", "I", "should", "inspect", "it", ".", "Now", ".", "Like", "now", "."]} -{"id": "1169-bunker", "word": "bunker", "label_binary": 1, "text_1": "Like Hitler in his bunker ignoring the Red Army at Munich's gate, will Trump choose arsenic and resign! Art of the Deal (poll numbers) evitable fact force Trump to leave on his terms? 
Or continue with his bunker mentality exhibited in last nights rally then escape by body double!", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 25, "date_1": "2019-06", "text_2": "So was Trump actually in the bunker inspecting his new digs, stocking it with Diet Coke and cake for when he refuses to leave the WH in January 2021?", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 35, "date_2": "2020-06", "text_1_tokenized": ["Like", "Hitler", "in", "his", "bunker", "ignoring", "the", "Red", "Army", "at", "Munich's", "gate", ",", "will", "Trump", "choose", "arsenic", "and", "resign", "!", "Art", "of", "the", "Deal", "(", "poll", "numbers", ")", "evitable", "fact", "force", "Trump", "to", "leave", "on", "his", "terms", "?", "Or", "continue", "with", "his", "bunker", "mentality", "exhibited", "in", "last", "nights", "rally", "then", "escape", "by", "body", "double", "!"], "text_2_tokenized": ["So", "was", "Trump", "actually", "in", "the", "bunker", "inspecting", "his", "new", "digs", ",", "stocking", "it", "with", "Diet", "Coke", "and", "cake", "for", "when", "he", "refuses", "to", "leave", "the", "WH", "in", "January", "2021", "?"]} -{"id": "1170-bunker", "word": "bunker", "label_binary": 0, "text_1": "Gonna be in the twitter bunker when Eva drops on netflix. Catch me posting mecha that arent weird and skinny and whose pilots almost never refuse to get into them.", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 30, "date_1": "2019-06", "text_2": "Do we still have a President after last night? Or maybe he's just back in the bunker. #bunkerboy #FyreBrad", "token_idx_2": 17, "text_start_2": 78, "text_end_2": 84, "date_2": "2020-06", "text_1_tokenized": ["Gonna", "be", "in", "the", "twitter", "bunker", "when", "Eva", "drops", "on", "netflix", ".", "Catch", "me", "posting", "mecha", "that", "arent", "weird", "and", "skinny", "and", "whose", "pilots", "almost", "never", "refuse", "to", "get", "into", "them", "."], "text_2_tokenized": ["Do", "we", "still", "have", "a", "President", "after", "last", "night", "?", "Or", "maybe", "he's", "just", "back", "in", "the", "bunker", ".", "#bunkerboy", "#FyreBrad"]} -{"id": "1171-bunker", "word": "bunker", "label_binary": 0, "text_1": "i want to see that bunker shot of jordans infinite times this weekend like just have it on a loop", "token_idx_1": 5, "text_start_1": 19, "text_end_1": 25, "date_1": "2019-06", "text_2": "*walks out of fortified steel bunker he hid in all night surrounded by highly trained security personnel* \u2018Yall some pussies get it together'", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 36, "date_2": "2020-06", "text_1_tokenized": ["i", "want", "to", "see", "that", "bunker", "shot", "of", "jordans", "infinite", "times", "this", "weekend", "like", "just", "have", "it", "on", "a", "loop"], "text_2_tokenized": ["*", "walks", "out", "of", "fortified", "steel", "bunker", "he", "hid", "in", "all", "night", "surrounded", "by", "highly", "trained", "security", "personnel", "*", "\u2018", "Yall", "some", "pussies", "get", "it", "together", "'"]} -{"id": "1172-bunker", "word": "bunker", "label_binary": 0, "text_1": "i only rly keep going FEAR @ the nge discourse because when i originally watched it, i only saw the discourse referenced second hand so to have to experience it in real time is like... 
ready my bunker, captain.", "token_idx_1": 39, "text_start_1": 194, "text_end_1": 200, "date_1": "2019-06", "text_2": "Washington gets a crowd a big one and is against the dude hiding in the bunker", "token_idx_2": 15, "text_start_2": 72, "text_end_2": 78, "date_2": "2020-06", "text_1_tokenized": ["i", "only", "rly", "keep", "going", "FEAR", "@", "the", "nge", "discourse", "because", "when", "i", "originally", "watched", "it", ",", "i", "only", "saw", "the", "discourse", "referenced", "second", "hand", "so", "to", "have", "to", "experience", "it", "in", "real", "time", "is", "like", "...", "ready", "my", "bunker", ",", "captain", "."], "text_2_tokenized": ["Washington", "gets", "a", "crowd", "a", "big", "one", "and", "is", "against", "the", "dude", "hiding", "in", "the", "bunker"]} -{"id": "1173-bunker", "word": "bunker", "label_binary": 1, "text_1": "BBC still refusing to reply to my complaint about @bbcnickrobinson's lie about Jackie Walker. Looks to me like they're in their bunker. Come out and play, BBC. Let's hear your insultingly childish excuse for this one. Don't be shy.", "token_idx_1": 24, "text_start_1": 128, "text_end_1": 134, "date_1": "2019-06", "text_2": "Why is Trump still playing President when he was impeached, polling dropping, Hid in the bunker, Bolton's book tells all, Not completed the wall in 3 1/2 years. Badly managed the Virus and stockpile, Cut CDC, children in cages & called military on citizens.", "token_idx_2": 17, "text_start_2": 89, "text_end_2": 95, "date_2": "2020-06", "text_1_tokenized": ["BBC", "still", "refusing", "to", "reply", "to", "my", "complaint", "about", "@bbcnickrobinson", "'", "s", "lie", "about", "Jackie", "Walker", ".", "Looks", "to", "me", "like", "they're", "in", "their", "bunker", ".", "Come", "out", "and", "play", ",", "BBC", ".", "Let's", "hear", "your", "insultingly", "childish", "excuse", "for", "this", "one", ".", "Don't", "be", "shy", "."], "text_2_tokenized": ["Why", "is", "Trump", "still", "playing", "President", "when", "he", "was", "impeached", ",", "polling", "dropping", ",", "Hid", "in", "the", "bunker", ",", "Bolton's", "book", "tells", "all", ",", "Not", "completed", "the", "wall", "in", "3", "1/2", "years", ".", "Badly", "managed", "the", "Virus", "and", "stockpile", ",", "Cut", "CDC", ",", "children", "in", "cages", "&", "called", "military", "on", "citizens", "."]} -{"id": "0182-mask", "word": "mask", "label_binary": 0, "text_1": "Why do all my guy friends that I wanna give amazing hugs to have to wear white shirts when I wear a fuckin mask of makeup \ud83d\ude21", "token_idx_1": 23, "text_start_1": 107, "text_end_1": 111, "date_1": "2019-07", "text_2": "The woman who refused to wear a face mask in Starbucks is considering suing to get half of the barista's $100,000. @littlecaesars ...what do you think a jury will award for one of your store managers brutally attacking an elderly man for returning a frozen pizza? 
Retweet Please", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 41, "date_2": "2020-07", "text_1_tokenized": ["Why", "do", "all", "my", "guy", "friends", "that", "I", "wanna", "give", "amazing", "hugs", "to", "have", "to", "wear", "white", "shirts", "when", "I", "wear", "a", "fuckin", "mask", "of", "makeup", "\ud83d\ude21"], "text_2_tokenized": ["The", "woman", "who", "refused", "to", "wear", "a", "face", "mask", "in", "Starbucks", "is", "considering", "suing", "to", "get", "half", "of", "the", "barista's", "$", "100,000", ".", "@littlecaesars", "...", "what", "do", "you", "think", "a", "jury", "will", "award", "for", "one", "of", "your", "store", "managers", "brutally", "attacking", "an", "elderly", "man", "for", "returning", "a", "frozen", "pizza", "?", "Retweet", "Please"]} -{"id": "0183-mask", "word": "mask", "label_binary": 0, "text_1": "In an ideal world I could hook tiny lil fog machines up behind the eyes of my Zacharie mask", "token_idx_1": 18, "text_start_1": 87, "text_end_1": 91, "date_1": "2019-07", "text_2": "We made it all the way to Tennessee before it was a mask off party. Mississippi and Alabama surprised me!", "token_idx_2": 12, "text_start_2": 52, "text_end_2": 56, "date_2": "2020-07", "text_1_tokenized": ["In", "an", "ideal", "world", "I", "could", "hook", "tiny", "lil", "fog", "machines", "up", "behind", "the", "eyes", "of", "my", "Zacharie", "mask"], "text_2_tokenized": ["We", "made", "it", "all", "the", "way", "to", "Tennessee", "before", "it", "was", "a", "mask", "off", "party", ".", "Mississippi", "and", "Alabama", "surprised", "me", "!"]} -{"id": "0184-mask", "word": "mask", "label_binary": 0, "text_1": "Nothing like chilling in bed eating chocolate, having a face mask on and watching love island \ud83d\ude0d", "token_idx_1": 11, "text_start_1": 61, "text_end_1": 65, "date_1": "2019-07", "text_2": "Covid has me even more picky when it comes to take-out. Just left the third restaurant without any food. If they letting customers in the lobby w/o a mask and people up on each other, I'm good. Their kitchen is probably nasty too. I rather eat water ice and go to bed.", "token_idx_2": 32, "text_start_2": 150, "text_end_2": 154, "date_2": "2020-07", "text_1_tokenized": ["Nothing", "like", "chilling", "in", "bed", "eating", "chocolate", ",", "having", "a", "face", "mask", "on", "and", "watching", "love", "island", "\ud83d\ude0d"], "text_2_tokenized": ["Covid", "has", "me", "even", "more", "picky", "when", "it", "comes", "to", "take-out", ".", "Just", "left", "the", "third", "restaurant", "without", "any", "food", ".", "If", "they", "letting", "customers", "in", "the", "lobby", "w", "/", "o", "a", "mask", "and", "people", "up", "on", "each", "other", ",", "I'm", "good", ".", "Their", "kitchen", "is", "probably", "nasty", "too", ".", "I", "rather", "eat", "water", "ice", "and", "go", "to", "bed", "."]} -{"id": "0185-mask", "word": "mask", "label_binary": 0, "text_1": "\u201cI used to save hoes with a mask and a cape. 
Now I'm like, Nah, love, I'm good, go away\"", "token_idx_1": 8, "text_start_1": 28, "text_end_1": 32, "date_1": "2019-07", "text_2": "Idk what's worse, my lipgloss sticking to my mask or my glasses constantly getting foggy if my mask slightly moves...", "token_idx_2": 9, "text_start_2": 45, "text_end_2": 49, "date_2": "2020-07", "text_1_tokenized": ["\u201c", "I", "used", "to", "save", "hoes", "with", "a", "mask", "and", "a", "cape", ".", "Now", "I'm", "like", ",", "Nah", ",", "love", ",", "I'm", "good", ",", "go", "away", "\""], "text_2_tokenized": ["Idk", "what's", "worse", ",", "my", "lipgloss", "sticking", "to", "my", "mask", "or", "my", "glasses", "constantly", "getting", "foggy", "if", "my", "mask", "slightly", "moves", "..."]} -{"id": "0186-mask", "word": "mask", "label_binary": 0, "text_1": "yoongi wearing the cat mask for the entire run episode lengthened my life span", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 27, "date_1": "2019-07", "text_2": "yeah so what if you're cute,, if u don't wear mask outside i don't trust \u00fc", "token_idx_2": 12, "text_start_2": 46, "text_end_2": 50, "date_2": "2020-07", "text_1_tokenized": ["yoongi", "wearing", "the", "cat", "mask", "for", "the", "entire", "run", "episode", "lengthened", "my", "life", "span"], "text_2_tokenized": ["yeah", "so", "what", "if", "you're", "cute", ",", ",", "if", "u", "don't", "wear", "mask", "outside", "i", "don't", "trust", "\u00fc"]} -{"id": "0187-mask", "word": "mask", "label_binary": 0, "text_1": "The border agent I got at Logan Airport is a huge Zelda fan apparently. Hi there @CBP from the cat mask guy!", "token_idx_1": 21, "text_start_1": 99, "text_end_1": 103, "date_1": "2019-07", "text_2": "At my job, a customer isn't served their food (it's a buffet) unless they have their mask on.", "token_idx_2": 19, "text_start_2": 85, "text_end_2": 89, "date_2": "2020-07", "text_1_tokenized": ["The", "border", "agent", "I", "got", "at", "Logan", "Airport", "is", "a", "huge", "Zelda", "fan", "apparently", ".", "Hi", "there", "@CBP", "from", "the", "cat", "mask", "guy", "!"], "text_2_tokenized": ["At", "my", "job", ",", "a", "customer", "isn't", "served", "their", "food", "(", "it's", "a", "buffet", ")", "unless", "they", "have", "their", "mask", "on", "."]} -{"id": "0188-mask", "word": "mask", "label_binary": 0, "text_1": "Today's self-care list - been to IKEA, had fun and bought useful things - organised my clothes - separated stuff for sale - drank smoothie - put on an essence mask - doing my nails - watching the last ep of Good Omens - doctor and hairstylist appointment next week \ud83e\udd1e", "token_idx_1": 31, "text_start_1": 159, "text_end_1": 163, "date_1": "2019-07", "text_2": "what do you mean im wearing a mask? 
its a part of my face someone gorilla glued it on", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 34, "date_2": "2020-07", "text_1_tokenized": ["Today's", "self-care", "list", "-", "been", "to", "IKEA", ",", "had", "fun", "and", "bought", "useful", "things", "-", "organised", "my", "clothes", "-", "separated", "stuff", "for", "sale", "-", "drank", "smoothie", "-", "put", "on", "an", "essence", "mask", "-", "doing", "my", "nails", "-", "watching", "the", "last", "ep", "of", "Good", "Omens", "-", "doctor", "and", "hairstylist", "appointment", "next", "week", "\ud83e\udd1e"], "text_2_tokenized": ["what", "do", "you", "mean", "im", "wearing", "a", "mask", "?", "its", "a", "part", "of", "my", "face", "someone", "gorilla", "glued", "it", "on"]} -{"id": "0189-mask", "word": "mask", "label_binary": 0, "text_1": "Naebody replied to my tweet asking for fake eyelashes recommendations but here I am again asking about something else lol but anyone know a good hair mask for damaged/coloured hair??? \ud83d\udc87\u200d\u2640\ufe0f\ud83d\udc87\u200d\u2640\ufe0f", "token_idx_1": 26, "text_start_1": 150, "text_end_1": 154, "date_1": "2019-07", "text_2": "I'm breaking out so bad and I feel like it's cause I have to wear this mask at work \ud83d\ude2d", "token_idx_2": 16, "text_start_2": 71, "text_end_2": 75, "date_2": "2020-07", "text_1_tokenized": ["Naebody", "replied", "to", "my", "tweet", "asking", "for", "fake", "eyelashes", "recommendations", "but", "here", "I", "am", "again", "asking", "about", "something", "else", "lol", "but", "anyone", "know", "a", "good", "hair", "mask", "for", "damaged", "/", "coloured", "hair", "?", "?", "?", "\ud83d\udc87\u200d\u2640", "\ufe0f", "\ud83d\udc87\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["I'm", "breaking", "out", "so", "bad", "and", "I", "feel", "like", "it's", "cause", "I", "have", "to", "wear", "this", "mask", "at", "work", "\ud83d\ude2d"]} -{"id": "0190-mask", "word": "mask", "label_binary": 0, "text_1": "When someone sees you perform, likes wrestling mask to fringe this way.", "token_idx_1": 8, "text_start_1": 47, "text_end_1": 51, "date_1": "2019-07", "text_2": "Especially Dedrick... Too keep him in a mask all day HELL NO.. For one he aint going to keep it on and for 2 i cant even wear mine for 8hrs straight so I know these kids aint \ud83d\ude12\ud83d\ude12", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 44, "date_2": "2020-07", "text_1_tokenized": ["When", "someone", "sees", "you", "perform", ",", "likes", "wrestling", "mask", "to", "fringe", "this", "way", "."], "text_2_tokenized": ["Especially", "Dedrick", "...", "Too", "keep", "him", "in", "a", "mask", "all", "day", "HELL", "NO", "..", "For", "one", "he", "aint", "going", "to", "keep", "it", "on", "and", "for", "2", "i", "cant", "even", "wear", "mine", "for", "8hrs", "straight", "so", "I", "know", "these", "kids", "aint", "\ud83d\ude12", "\ud83d\ude12"]} -{"id": "0191-mask", "word": "mask", "label_binary": 0, "text_1": "Sometimes self care is slapping on a face mask whilst watch Avatar: the legend of Ang with yer tits out.", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 46, "date_1": "2019-07", "text_2": "You can't be PRO life AND it is what it is. You can't be ALL lives matter AND anti mask. You can't have the ALL American dream AND reign supreme. 
You cannot bow to Trump AND to God; for no one can serve two masters #ForGodORForGreatness #TrumpAndChristianityDontMix", "token_idx_2": 20, "text_start_2": 83, "text_end_2": 87, "date_2": "2020-07", "text_1_tokenized": ["Sometimes", "self", "care", "is", "slapping", "on", "a", "face", "mask", "whilst", "watch", "Avatar", ":", "the", "legend", "of", "Ang", "with", "yer", "tits", "out", "."], "text_2_tokenized": ["You", "can't", "be", "PRO", "life", "AND", "it", "is", "what", "it", "is", ".", "You", "can't", "be", "ALL", "lives", "matter", "AND", "anti", "mask", ".", "You", "can't", "have", "the", "ALL", "American", "dream", "AND", "reign", "supreme", ".", "You", "cannot", "bow", "to", "Trump", "AND", "to", "God", ";", "for", "no", "one", "can", "serve", "two", "masters", "#ForGodORForGreatness", "#TrumpAndChristianityDontMix"]} -{"id": "0192-mask", "word": "mask", "label_binary": 1, "text_1": "Madden y'all have to stop putting the game out this early you can just tell it's not complete yet. I'm tired of these fucking fumbles and face mask calls\ud83d\ude21", "token_idx_1": 28, "text_start_1": 143, "text_end_1": 147, "date_1": "2019-07", "text_2": "Governor initiated a mask mandate today and people are freaking out! Why, why are people so damn pissed off about this? I don't get it.", "token_idx_2": 3, "text_start_2": 21, "text_end_2": 25, "date_2": "2020-07", "text_1_tokenized": ["Madden", "y'all", "have", "to", "stop", "putting", "the", "game", "out", "this", "early", "you", "can", "just", "tell", "it's", "not", "complete", "yet", ".", "I'm", "tired", "of", "these", "fucking", "fumbles", "and", "face", "mask", "calls", "\ud83d\ude21"], "text_2_tokenized": ["Governor", "initiated", "a", "mask", "mandate", "today", "and", "people", "are", "freaking", "out", "!", "Why", ",", "why", "are", "people", "so", "damn", "pissed", "off", "about", "this", "?", "I", "don't", "get", "it", "."]} -{"id": "0193-mask", "word": "mask", "label_binary": 0, "text_1": "k i'm mad i got that BTS mask and i didnt even know", "token_idx_1": 7, "text_start_1": 25, "text_end_1": 29, "date_1": "2019-07", "text_2": "It's so hard for me to get my needed dihydrogen monoxide when I wear a mask", "token_idx_2": 15, "text_start_2": 71, "text_end_2": 75, "date_2": "2020-07", "text_1_tokenized": ["k", "i'm", "mad", "i", "got", "that", "BTS", "mask", "and", "i", "didnt", "even", "know"], "text_2_tokenized": ["It's", "so", "hard", "for", "me", "to", "get", "my", "needed", "dihydrogen", "monoxide", "when", "I", "wear", "a", "mask"]} -{"id": "0194-mask", "word": "mask", "label_binary": 1, "text_1": "No one: Literally no one: Madden 20 Ref: \"Personal Foul, Face mask, on the defense\"", "token_idx_1": 16, "text_start_1": 62, "text_end_1": 66, "date_1": "2019-07", "text_2": "I don't understand why people insist on coming into public settings like grocery stores without wearing a mask?! Then are shocked when you get denied entry... 
somebody pls explain it cause ion get it!", "token_idx_2": 17, "text_start_2": 106, "text_end_2": 110, "date_2": "2020-07", "text_1_tokenized": ["No", "one", ":", "Literally", "no", "one", ":", "Madden", "20", "Ref", ":", "\"", "Personal", "Foul", ",", "Face", "mask", ",", "on", "the", "defense", "\""], "text_2_tokenized": ["I", "don't", "understand", "why", "people", "insist", "on", "coming", "into", "public", "settings", "like", "grocery", "stores", "without", "wearing", "a", "mask", "?", "!", "Then", "are", "shocked", "when", "you", "get", "denied", "entry", "...", "somebody", "pls", "explain", "it", "cause", "ion", "get", "it", "!"]} -{"id": "0195-mask", "word": "mask", "label_binary": 0, "text_1": "I finally got some more essentials \ud83d\ude4c\ud83c\udffe I'm so excited to finally try the clay mask!", "token_idx_1": 15, "text_start_1": 77, "text_end_1": 81, "date_1": "2019-07", "text_2": "I'm sold. Wearing a mask is a good idea. People my age should probably wear a mask whether there is a pandemic or not.", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 24, "date_2": "2020-07", "text_1_tokenized": ["I", "finally", "got", "some", "more", "essentials", "\ud83d\ude4c\ud83c\udffe", "I'm", "so", "excited", "to", "finally", "try", "the", "clay", "mask", "!"], "text_2_tokenized": ["I'm", "sold", ".", "Wearing", "a", "mask", "is", "a", "good", "idea", ".", "People", "my", "age", "should", "probably", "wear", "a", "mask", "whether", "there", "is", "a", "pandemic", "or", "not", "."]} -{"id": "0196-mask", "word": "mask", "label_binary": 0, "text_1": "You know TFW u had a good cry and the next morning ur eyes feel so puffy but they dont really look it, yet u still think people can tell you've been crying so you are all self conscious? I hate that. I want to go home and put on an ice mask. Anyway, how is everyone's Tuesday? \ud83e\udd14", "token_idx_1": 55, "text_start_1": 236, "text_end_1": 240, "date_1": "2019-07", "text_2": "I applied one of those long-lasting temporary tattoos last night and somehow, in my sleep, transferred it to my face... how does this happen to me. HOW. I guess it's a good thing I have to wear a mask now??", "token_idx_2": 43, "text_start_2": 196, "text_end_2": 200, "date_2": "2020-07", "text_1_tokenized": ["You", "know", "TFW", "u", "had", "a", "good", "cry", "and", "the", "next", "morning", "ur", "eyes", "feel", "so", "puffy", "but", "they", "dont", "really", "look", "it", ",", "yet", "u", "still", "think", "people", "can", "tell", "you've", "been", "crying", "so", "you", "are", "all", "self", "conscious", "?", "I", "hate", "that", ".", "I", "want", "to", "go", "home", "and", "put", "on", "an", "ice", "mask", ".", "Anyway", ",", "how", "is", "everyone's", "Tuesday", "?", "\ud83e\udd14"], "text_2_tokenized": ["I", "applied", "one", "of", "those", "long-lasting", "temporary", "tattoos", "last", "night", "and", "somehow", ",", "in", "my", "sleep", ",", "transferred", "it", "to", "my", "face", "...", "how", "does", "this", "happen", "to", "me", ".", "HOW", ".", "I", "guess", "it's", "a", "good", "thing", "I", "have", "to", "wear", "a", "mask", "now", "?", "?"]} -{"id": "0197-mask", "word": "mask", "label_binary": 0, "text_1": "The trap under the mask was manipulated rope that hangs around my neck is clamped to the maximum\u3010VERMIN\u3011", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 23, "date_1": "2019-07", "text_2": "*goes into Starbucks Me: why are you wearing a surgical mask? 
Starbucks worker: I'm not it's a coughy filter...", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 60, "date_2": "2020-07", "text_1_tokenized": ["The", "trap", "under", "the", "mask", "was", "manipulated", "rope", "that", "hangs", "around", "my", "neck", "is", "clamped", "to", "the", "maximum", "\u3010", "VERMIN", "\u3011"], "text_2_tokenized": ["*", "goes", "into", "Starbucks", "Me", ":", "why", "are", "you", "wearing", "a", "surgical", "mask", "?", "Starbucks", "worker", ":", "I'm", "not", "it's", "a", "coughy", "filter", "..."]} -{"id": "0198-mask", "word": "mask", "label_binary": 0, "text_1": ".@cvspharmacy Tried redeeming the H(app)iness Event free Beauty 360 mask today and the system kept rejecting the ones I kept choosing. It only said the sun ones were excluded no other info given. Tried a sheet mask & a clay mask. Neither worked. Happy I'm not. \ud83d\ude10", "token_idx_1": 14, "text_start_1": 68, "text_end_1": 72, "date_1": "2019-07", "text_2": "I putting on the mask shit can't fucking breathe", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 21, "date_2": "2020-07", "text_1_tokenized": [".", "@cvspharmacy", "Tried", "redeeming", "the", "H", "(", "app", ")", "iness", "Event", "free", "Beauty", "360", "mask", "today", "and", "the", "system", "kept", "rejecting", "the", "ones", "I", "kept", "choosing", ".", "It", "only", "said", "the", "sun", "ones", "were", "excluded", "no", "other", "info", "given", ".", "Tried", "a", "sheet", "mask", "&", "a", "clay", "mask", ".", "Neither", "worked", ".", "Happy", "I'm", "not", ".", "\ud83d\ude10"], "text_2_tokenized": ["I", "putting", "on", "the", "mask", "shit", "can't", "fucking", "breathe"]} -{"id": "0199-mask", "word": "mask", "label_binary": 0, "text_1": "Need a good face mask for super sensitive skin", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 21, "date_1": "2019-07", "text_2": "wait no i want a louis tomlinson mask", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 37, "date_2": "2020-07", "text_1_tokenized": ["Need", "a", "good", "face", "mask", "for", "super", "sensitive", "skin"], "text_2_tokenized": ["wait", "no", "i", "want", "a", "louis", "tomlinson", "mask"]} -{"id": "0200-mask", "word": "mask", "label_binary": 0, "text_1": "This girl is playing in her face too lmao. I might have taken my mask off too soon", "token_idx_1": 15, "text_start_1": 65, "text_end_1": 69, "date_1": "2019-07", "text_2": "On a football board, one guy was blaming China for no football. Trump has mishandled this virus and CONTINUES to not promote mask wearing and social distancing. He needs a villain and for now, China works just fine.", "token_idx_2": 24, "text_start_2": 125, "text_end_2": 129, "date_2": "2020-07", "text_1_tokenized": ["This", "girl", "is", "playing", "in", "her", "face", "too", "lmao", ".", "I", "might", "have", "taken", "my", "mask", "off", "too", "soon"], "text_2_tokenized": ["On", "a", "football", "board", ",", "one", "guy", "was", "blaming", "China", "for", "no", "football", ".", "Trump", "has", "mishandled", "this", "virus", "and", "CONTINUES", "to", "not", "promote", "mask", "wearing", "and", "social", "distancing", ".", "He", "needs", "a", "villain", "and", "for", "now", ",", "China", "works", "just", "fine", "."]} -{"id": "0201-mask", "word": "mask", "label_binary": 0, "text_1": "It's called a \"mask.\" You wear it over your face to have fun.", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 19, "date_1": "2019-07", "text_2": "Can people please wear a mask and so silky distance themselves? 
I would really love to go visit my brother and parents? And would love to travel interstate sometime in the future.", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["It's", "called", "a", "\"", "mask", ".", "\"", "You", "wear", "it", "over", "your", "face", "to", "have", "fun", "."], "text_2_tokenized": ["Can", "people", "please", "wear", "a", "mask", "and", "so", "silky", "distance", "themselves", "?", "I", "would", "really", "love", "to", "go", "visit", "my", "brother", "and", "parents", "?", "And", "would", "love", "to", "travel", "interstate", "sometime", "in", "the", "future", "."]} -{"id": "0202-mask", "word": "mask", "label_binary": 0, "text_1": "Sid Wilson's mask is insane. Super fucking creepy, I love it!", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 17, "date_1": "2019-07", "text_2": "Hair mask, face mask, cleaning my room, & clearing my head >>>> \ud83d\ude0c", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 9, "date_2": "2020-07", "text_1_tokenized": ["Sid", "Wilson's", "mask", "is", "insane", ".", "Super", "fucking", "creepy", ",", "I", "love", "it", "!"], "text_2_tokenized": ["Hair", "mask", ",", "face", "mask", ",", "cleaning", "my", "room", ",", "&", "clearing", "my", "head", ">", ">", ">", "\ud83d\ude0c"]} -{"id": "0203-mask", "word": "mask", "label_binary": 0, "text_1": "Eternal Legends armor packs featuring Royal Cyborg/Arcann armor set (body, mask) Immortal Emperor/Valkorion armor set Sinister heiress/Vaylin kotfe armor set ImmortalEmpress/Vaylin kotet armor set #swtor #swtorfamily", "token_idx_1": 14, "text_start_1": 75, "text_end_1": 79, "date_1": "2019-07", "text_2": "I think it's really funny how so many people make up whatever they can to not wear a mask but doing that will just make the masks last longer.", "token_idx_2": 18, "text_start_2": 85, "text_end_2": 89, "date_2": "2020-07", "text_1_tokenized": ["Eternal", "Legends", "armor", "packs", "featuring", "Royal", "Cyborg", "/", "Arcann", "armor", "set", "(", "body", ",", "mask", ")", "Immortal", "Emperor", "/", "Valkorion", "armor", "set", "Sinister", "heiress", "/", "Vaylin", "kotfe", "armor", "set", "ImmortalEmpress", "/", "Vaylin", "kotet", "armor", "set", "#swtor", "#swtorfamily"], "text_2_tokenized": ["I", "think", "it's", "really", "funny", "how", "so", "many", "people", "make", "up", "whatever", "they", "can", "to", "not", "wear", "a", "mask", "but", "doing", "that", "will", "just", "make", "the", "masks", "last", "longer", "."]} -{"id": "0204-mask", "word": "mask", "label_binary": 0, "text_1": "I'm showered, I've painted my nails and now I'm doing a face mask and self care. I'm: happy.", "token_idx_1": 13, "text_start_1": 61, "text_end_1": 65, "date_1": "2019-07", "text_2": "do i feel like a clown for putting on a full face of makeup every day, knowing my mask is going to take it all off? yes. am i going to stop doing it? 
no.", "token_idx_2": 19, "text_start_2": 82, "text_end_2": 86, "date_2": "2020-07", "text_1_tokenized": ["I'm", "showered", ",", "I've", "painted", "my", "nails", "and", "now", "I'm", "doing", "a", "face", "mask", "and", "self", "care", ".", "I'm", ":", "happy", "."], "text_2_tokenized": ["do", "i", "feel", "like", "a", "clown", "for", "putting", "on", "a", "full", "face", "of", "makeup", "every", "day", ",", "knowing", "my", "mask", "is", "going", "to", "take", "it", "all", "off", "?", "yes", ".", "am", "i", "going", "to", "stop", "doing", "it", "?", "no", "."]} -{"id": "0205-mask", "word": "mask", "label_binary": 1, "text_1": "Just learned that Andrew Lloyd Webber was so distraught in \u201886 about Chernobyl that he dressed his phantom at half mask.", "token_idx_1": 21, "text_start_1": 115, "text_end_1": 119, "date_1": "2019-07", "text_2": "The scary guy in strong boon soon is indeed very scary. That mask \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_2": 13, "text_start_2": 61, "text_end_2": 65, "date_2": "2020-07", "text_1_tokenized": ["Just", "learned", "that", "Andrew", "Lloyd", "Webber", "was", "so", "distraught", "in", "\u2018", "86", "about", "Chernobyl", "that", "he", "dressed", "his", "phantom", "at", "half", "mask", "."], "text_2_tokenized": ["The", "scary", "guy", "in", "strong", "boon", "soon", "is", "indeed", "very", "scary", ".", "That", "mask", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "0206-mask", "word": "mask", "label_binary": 1, "text_1": "Don't know anything more on brand than doing a face mask drunk at 10pm", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 56, "date_1": "2019-07", "text_2": "Nothing feels better than freshly washed hair and a face mask! Prepping your body and kind is just as important for a new week", "token_idx_2": 10, "text_start_2": 57, "text_end_2": 61, "date_2": "2020-07", "text_1_tokenized": ["Don't", "know", "anything", "more", "on", "brand", "than", "doing", "a", "face", "mask", "drunk", "at", "10pm"], "text_2_tokenized": ["Nothing", "feels", "better", "than", "freshly", "washed", "hair", "and", "a", "face", "mask", "!", "Prepping", "your", "body", "and", "kind", "is", "just", "as", "important", "for", "a", "new", "week"]} -{"id": "0207-mask", "word": "mask", "label_binary": 0, "text_1": "Super heated I don't have a face mask for today. 
\ud83d\ude44", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 37, "date_1": "2019-07", "text_2": "so now that y'alls president is suggesting you wear a mask, will you finally just do it without crying?", "token_idx_2": 10, "text_start_2": 54, "text_end_2": 58, "date_2": "2020-07", "text_1_tokenized": ["Super", "heated", "I", "don't", "have", "a", "face", "mask", "for", "today", ".", "\ud83d\ude44"], "text_2_tokenized": ["so", "now", "that", "y'alls", "president", "is", "suggesting", "you", "wear", "a", "mask", ",", "will", "you", "finally", "just", "do", "it", "without", "crying", "?"]} -{"id": "0209-mask", "word": "mask", "label_binary": 0, "text_1": "I'm actually seen the Tanaka mask", "token_idx_1": 5, "text_start_1": 29, "text_end_1": 33, "date_1": "2019-07", "text_2": "Why are people in the NBA bubble wearing a mask???!", "token_idx_2": 9, "text_start_2": 43, "text_end_2": 47, "date_2": "2020-07", "text_1_tokenized": ["I'm", "actually", "seen", "the", "Tanaka", "mask"], "text_2_tokenized": ["Why", "are", "people", "in", "the", "NBA", "bubble", "wearing", "a", "mask", "?", "?", "?", "!"]} -{"id": "0210-mask", "word": "mask", "label_binary": 0, "text_1": "I want to reconcile the violence in your heart I want to recognise your beauty is not just a mask", "token_idx_1": 19, "text_start_1": 93, "text_end_1": 97, "date_1": "2019-07", "text_2": "Wearing a mask is hurting my ears a wee bit lately, but it doesn't hurt them as much as listening to the sound of my lungs audibly crackle as they struggle for air if I catch Rona #wearamask", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 14, "date_2": "2020-07", "text_1_tokenized": ["I", "want", "to", "reconcile", "the", "violence", "in", "your", "heart", "I", "want", "to", "recognise", "your", "beauty", "is", "not", "just", "a", "mask"], "text_2_tokenized": ["Wearing", "a", "mask", "is", "hurting", "my", "ears", "a", "wee", "bit", "lately", ",", "but", "it", "doesn't", "hurt", "them", "as", "much", "as", "listening", "to", "the", "sound", "of", "my", "lungs", "audibly", "crackle", "as", "they", "struggle", "for", "air", "if", "I", "catch", "Rona", "#wearamask"]} -{"id": "0211-mask", "word": "mask", "label_binary": 0, "text_1": "So what was that mask about at the end of the Cassandra trailer? #SOULCALIBURVI", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 21, "date_1": "2019-07", "text_2": "I am still shaking my head daily at everyone trying to reopen everything. The data just isn't there to support a safe way to reopen right now. If we ever want this thing to end, we are going to have to buckle down and do what needs to be done. Also, wear a mask.", "token_idx_2": 58, "text_start_2": 257, "text_end_2": 261, "date_2": "2020-07", "text_1_tokenized": ["So", "what", "was", "that", "mask", "about", "at", "the", "end", "of", "the", "Cassandra", "trailer", "?", "#SOULCALIBURVI"], "text_2_tokenized": ["I", "am", "still", "shaking", "my", "head", "daily", "at", "everyone", "trying", "to", "reopen", "everything", ".", "The", "data", "just", "isn't", "there", "to", "support", "a", "safe", "way", "to", "reopen", "right", "now", ".", "If", "we", "ever", "want", "this", "thing", "to", "end", ",", "we", "are", "going", "to", "have", "to", "buckle", "down", "and", "do", "what", "needs", "to", "be", "done", ".", "Also", ",", "wear", "a", "mask", "."]} -{"id": "0212-mask", "word": "mask", "label_binary": 0, "text_1": "After throwing shade at @aightsoboom . 
I'm really about to join face mask gang", "token_idx_1": 12, "text_start_1": 69, "text_end_1": 73, "date_1": "2019-07", "text_2": "My dumb ass be pulling my mask down to hear\ud83d\ude02\ud83e\udd26\ud83c\udffe\u200d\u2642\ufe0f", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 30, "date_2": "2020-07", "text_1_tokenized": ["After", "throwing", "shade", "at", "@aightsoboom", ".", "I'm", "really", "about", "to", "join", "face", "mask", "gang"], "text_2_tokenized": ["My", "dumb", "ass", "be", "pulling", "my", "mask", "down", "to", "hear", "\ud83d\ude02", "\ud83e\udd26\ud83c\udffe\u200d\u2642", "\ufe0f"]} -{"id": "0213-mask", "word": "mask", "label_binary": 0, "text_1": "Definitely hitting a mask when I get from this pool", "token_idx_1": 3, "text_start_1": 21, "text_end_1": 25, "date_1": "2019-07", "text_2": "Disposable surgical nose mask available at cool price. Call on #0242649436 for yours", "token_idx_2": 3, "text_start_2": 25, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["Definitely", "hitting", "a", "mask", "when", "I", "get", "from", "this", "pool"], "text_2_tokenized": ["Disposable", "surgical", "nose", "mask", "available", "at", "cool", "price", ".", "Call", "on", "#0242649436", "for", "yours"]} -{"id": "0214-mask", "word": "mask", "label_binary": 0, "text_1": "When I was about 10-12 years old, I liked going to the museum of religion and atheism. And I loved everything there. There was a whole hall devoted to the history of children's crusades, and in general, the Middle Ages with a torture rack, Spanish boot, and iron mask.", "token_idx_1": 55, "text_start_1": 263, "text_end_1": 267, "date_1": "2019-07", "text_2": "Used this new face mask and it made me break out so bad. I wanna kms", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 23, "date_2": "2020-07", "text_1_tokenized": ["When", "I", "was", "about", "10-12", "years", "old", ",", "I", "liked", "going", "to", "the", "museum", "of", "religion", "and", "atheism", ".", "And", "I", "loved", "everything", "there", ".", "There", "was", "a", "whole", "hall", "devoted", "to", "the", "history", "of", "children's", "crusades", ",", "and", "in", "general", ",", "the", "Middle", "Ages", "with", "a", "torture", "rack", ",", "Spanish", "boot", ",", "and", "iron", "mask", "."], "text_2_tokenized": ["Used", "this", "new", "face", "mask", "and", "it", "made", "me", "break", "out", "so", "bad", ".", "I", "wanna", "kms"]} -{"id": "0215-mask", "word": "mask", "label_binary": 1, "text_1": "Don't ask how the mask contains the nose.", "token_idx_1": 4, "text_start_1": 18, "text_end_1": 22, "date_1": "2019-07", "text_2": "I find it mad disrespectful when someone who's not wearing a mask just cover their face with their hand or shirt... like cmon bruh", "token_idx_2": 11, "text_start_2": 61, "text_end_2": 65, "date_2": "2020-07", "text_1_tokenized": ["Don't", "ask", "how", "the", "mask", "contains", "the", "nose", "."], "text_2_tokenized": ["I", "find", "it", "mad", "disrespectful", "when", "someone", "who's", "not", "wearing", "a", "mask", "just", "cover", "their", "face", "with", "their", "hand", "or", "shirt", "...", "like", "cmon", "bruh"]} -{"id": "0216-mask", "word": "mask", "label_binary": 0, "text_1": "Was 100% certain I was gonna spend however much money it costed for a Bray Wyatt \u201cThe Fiend\u201d mask, but after seeing how shitty those replicas look, i'll pass. 
Extremely disappointed, was gonna wear the shit outta that mask.", "token_idx_1": 21, "text_start_1": 93, "text_end_1": 97, "date_1": "2019-07", "text_2": "Telling me to smile while I'm wearing a mask and you aren't? Very funny. Good one sir.", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 44, "date_2": "2020-07", "text_1_tokenized": ["Was", "100", "%", "certain", "I", "was", "gonna", "spend", "however", "much", "money", "it", "costed", "for", "a", "Bray", "Wyatt", "\u201c", "The", "Fiend", "\u201d", "mask", ",", "but", "after", "seeing", "how", "shitty", "those", "replicas", "look", ",", "i'll", "pass", ".", "Extremely", "disappointed", ",", "was", "gonna", "wear", "the", "shit", "outta", "that", "mask", "."], "text_2_tokenized": ["Telling", "me", "to", "smile", "while", "I'm", "wearing", "a", "mask", "and", "you", "aren't", "?", "Very", "funny", ".", "Good", "one", "sir", "."]} -{"id": "0217-mask", "word": "mask", "label_binary": 0, "text_1": "This @PixiBeautyPH overnight mask i used left my skin feeling soooo good \ud83e\udd70", "token_idx_1": 3, "text_start_1": 29, "text_end_1": 33, "date_1": "2019-07", "text_2": "People are asking others to show respect for the dead, specifically Herman Cain. Sure. But it's not wrong to ask how many people were effected because of his refusal to wear a mask. By refusing to wear a mask, he put others in danger. Show some respect first. Wear a mask.", "token_idx_2": 35, "text_start_2": 176, "text_end_2": 180, "date_2": "2020-07", "text_1_tokenized": ["This", "@PixiBeautyPH", "overnight", "mask", "i", "used", "left", "my", "skin", "feeling", "soooo", "good", "\ud83e\udd70"], "text_2_tokenized": ["People", "are", "asking", "others", "to", "show", "respect", "for", "the", "dead", ",", "specifically", "Herman", "Cain", ".", "Sure", ".", "But", "it's", "not", "wrong", "to", "ask", "how", "many", "people", "were", "effected", "because", "of", "his", "refusal", "to", "wear", "a", "mask", ".", "By", "refusing", "to", "wear", "a", "mask", ",", "he", "put", "others", "in", "danger", ".", "Show", "some", "respect", "first", ".", "Wear", "a", "mask", "."]} -{"id": "0218-mask", "word": "mask", "label_binary": 0, "text_1": "Giving away free anonymous mask face scan dm me for a chance to win take a picture and put on notifications!", "token_idx_1": 4, "text_start_1": 27, "text_end_1": 31, "date_1": "2019-07", "text_2": "Poking holes in your face mask is like poking holes in your condom. It's not gonna stop microscopic organisms from entering another person's body", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 30, "date_2": "2020-07", "text_1_tokenized": ["Giving", "away", "free", "anonymous", "mask", "face", "scan", "dm", "me", "for", "a", "chance", "to", "win", "take", "a", "picture", "and", "put", "on", "notifications", "!"], "text_2_tokenized": ["Poking", "holes", "in", "your", "face", "mask", "is", "like", "poking", "holes", "in", "your", "condom", ".", "It's", "not", "gonna", "stop", "microscopic", "organisms", "from", "entering", "another", "person's", "body"]} -{"id": "0219-mask", "word": "mask", "label_binary": 0, "text_1": "Ladies... 
Ulta has a foot peeling mask that works just as good as Baby Feet and they are 6.99.", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 38, "date_1": "2019-07", "text_2": "You know old ppl mad when they say \u201cor nothing\u201d ....she was like I seen You on yo page ...up there with a bunch of boys , no mask , no shirt OR NOTHIN!", "token_idx_2": 32, "text_start_2": 125, "text_end_2": 129, "date_2": "2020-07", "text_1_tokenized": ["Ladies", "...", "Ulta", "has", "a", "foot", "peeling", "mask", "that", "works", "just", "as", "good", "as", "Baby", "Feet", "and", "they", "are", "6.99", "."], "text_2_tokenized": ["You", "know", "old", "ppl", "mad", "when", "they", "say", "\u201c", "or", "nothing", "\u201d", "...", "she", "was", "like", "I", "seen", "You", "on", "yo", "page", "...", "up", "there", "with", "a", "bunch", "of", "boys", ",", "no", "mask", ",", "no", "shirt", "OR", "NOTHIN", "!"]} -{"id": "0220-mask", "word": "mask", "label_binary": 0, "text_1": "Chilling in bed with a green tea and face mask on safe in the knowledge I'm never gonna let a guy mug me off like these love island scruffs", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 46, "date_1": "2019-07", "text_2": "Judge, after first open court post-quarantine: There were unexpected difficulties Me: Like what?! J: As it turns out, if the DA is wearing a mask, & the defendant is wearing a mask, & the defense attorney is wearing a mask & *I'M* wearing a mask...no one knows who's talking", "token_idx_2": 31, "text_start_2": 141, "text_end_2": 145, "date_2": "2020-07", "text_1_tokenized": ["Chilling", "in", "bed", "with", "a", "green", "tea", "and", "face", "mask", "on", "safe", "in", "the", "knowledge", "I'm", "never", "gonna", "let", "a", "guy", "mug", "me", "off", "like", "these", "love", "island", "scruffs"], "text_2_tokenized": ["Judge", ",", "after", "first", "open", "court", "post-quarantine", ":", "There", "were", "unexpected", "difficulties", "Me", ":", "Like", "what", "?", "!", "J", ":", "As", "it", "turns", "out", ",", "if", "the", "DA", "is", "wearing", "a", "mask", ",", "&", "the", "defendant", "is", "wearing", "a", "mask", ",", "&", "the", "defense", "attorney", "is", "wearing", "a", "mask", "&", "*", "I'M", "*", "wearing", "a", "mask", "...", "no", "one", "knows", "who's", "talking"]} -{"id": "0221-mask", "word": "mask", "label_binary": 0, "text_1": "I been very productive today made breakfast washed clothes and folded them and made dinner. 
Also gave myself a face and lip mask.", "token_idx_1": 23, "text_start_1": 124, "text_end_1": 128, "date_1": "2019-07", "text_2": "It's just so hard, waking up everyday putting a mask with the smile on your face when you don't even feel okay.", "token_idx_2": 10, "text_start_2": 48, "text_end_2": 52, "date_2": "2020-07", "text_1_tokenized": ["I", "been", "very", "productive", "today", "made", "breakfast", "washed", "clothes", "and", "folded", "them", "and", "made", "dinner", ".", "Also", "gave", "myself", "a", "face", "and", "lip", "mask", "."], "text_2_tokenized": ["It's", "just", "so", "hard", ",", "waking", "up", "everyday", "putting", "a", "mask", "with", "the", "smile", "on", "your", "face", "when", "you", "don't", "even", "feel", "okay", "."]} -{"id": "0222-mask", "word": "mask", "label_binary": 0, "text_1": "You guys have been so consistent in posting that ponds face mask had so many plans of buying it today but life happened \ud83d\ude29", "token_idx_1": 11, "text_start_1": 60, "text_end_1": 64, "date_1": "2019-07", "text_2": "If more person pulls down there mask to talk to me I may actually have to choke bitch", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 36, "date_2": "2020-07", "text_1_tokenized": ["You", "guys", "have", "been", "so", "consistent", "in", "posting", "that", "ponds", "face", "mask", "had", "so", "many", "plans", "of", "buying", "it", "today", "but", "life", "happened", "\ud83d\ude29"], "text_2_tokenized": ["If", "more", "person", "pulls", "down", "there", "mask", "to", "talk", "to", "me", "I", "may", "actually", "have", "to", "choke", "bitch"]} -{"id": "0223-mask", "word": "mask", "label_binary": 1, "text_1": "In case you were wondering facial devotion still worked with a face mask on", "token_idx_1": 12, "text_start_1": 68, "text_end_1": 72, "date_1": "2019-07", "text_2": "With these mask at work customers are forever confusing me and Reyna lmao", "token_idx_2": 2, "text_start_2": 11, "text_end_2": 15, "date_2": "2020-07", "text_1_tokenized": ["In", "case", "you", "were", "wondering", "facial", "devotion", "still", "worked", "with", "a", "face", "mask", "on"], "text_2_tokenized": ["With", "these", "mask", "at", "work", "customers", "are", "forever", "confusing", "me", "and", "Reyna", "lmao"]} -{"id": "0224-mask", "word": "mask", "label_binary": 0, "text_1": "My skin feels like a baby after a good sheet mask\ud83d\udc96", "token_idx_1": 10, "text_start_1": 45, "text_end_1": 49, "date_1": "2019-07", "text_2": "very cool when an entire family wears a mask but their chilsren dont. 
you do realize your children are JUST as susceptible of contracting covid right???", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 44, "date_2": "2020-07", "text_1_tokenized": ["My", "skin", "feels", "like", "a", "baby", "after", "a", "good", "sheet", "mask", "\ud83d\udc96"], "text_2_tokenized": ["very", "cool", "when", "an", "entire", "family", "wears", "a", "mask", "but", "their", "chilsren", "dont", ".", "you", "do", "realize", "your", "children", "are", "JUST", "as", "susceptible", "of", "contracting", "covid", "right", "?", "?", "?"]} -{"id": "0225-mask", "word": "mask", "label_binary": 0, "text_1": "// trying to do a face mask whilst watching these plane crash episodes is just impossible \ud83d\ude02\ud83d\ude2d\ud83d\ude2d", "token_idx_1": 7, "text_start_1": 23, "text_end_1": 27, "date_1": "2019-07", "text_2": "The people who don't wear a mask <<<says <<<< they're a>>> Asshole<<<Period \ud83d\udde3\ud83d\ude37 trying to b, someone their not\ud83d\udc12 Standing up for only a >>>FEW \ud83d\ude48\u2620\ud83d\ude49\u2620\ud83d\udc18\ud83d\udc4e looking bad\ud83d\udc4e\ud83d\udde3\ud83d\ude37\ud83d\udde3\ud83c\uddfa\ud83c\uddf2\ud83d\ude37\ud83d\ude37\ud83d\ude37\ud83c\uddfa\ud83c\uddf2\ud83d\udc4d\u2665\ufe0f\ud83d\ude37\u2665\ufe0f", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 32, "date_2": "2020-07", "text_1_tokenized": ["/", "/", "trying", "to", "do", "a", "face", "mask", "whilst", "watching", "these", "plane", "crash", "episodes", "is", "just", "impossible", "\ud83d\ude02", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["The", "people", "who", "don't", "wear", "a", "mask", "<", "<", "<", "says", "<", "<", "<", "they're", "a", ">", ">", ">", "Asshole", "<", "<", "<", "Period", "\ud83d\udde3", "\ud83d\ude37", "trying", "to", "b", ",", "someone", "their", "not", "\ud83d\udc12", "Standing", "up", "for", "only", "a", ">", ">", ">", "FEW", "\ud83d\ude48", "\u2620", "\ud83d\ude49", "\u2620", "\ud83d\udc18", "\ud83d\udc4e", "looking", "bad", "\ud83d\udc4e", "\ud83d\udde3", "\ud83d\ude37", "\ud83d\udde3", "\ud83c\uddfa", "\ud83c\uddf2", "\ud83d\ude37", "\ud83d\ude37", "\ud83d\ude37", "\ud83c\uddfa", "\ud83c\uddf2", "\ud83d\udc4d", "\u2665", "\ufe0f", "\ud83d\ude37", "\u2665", "\ufe0f"]} -{"id": "0226-mask", "word": "mask", "label_binary": 0, "text_1": "In real need of a face mask, wine and chocolate night \ud83e\udd14", "token_idx_1": 6, "text_start_1": 23, "text_end_1": 27, "date_1": "2019-07", "text_2": "Waterloo region's mask bylaw went into effect this week (yes it took this long) and my experience from grocery shopping is that the jump in compliance is huge. Maybe 50-60% before, over 95% now. 
It's completely normalized.", "token_idx_2": 2, "text_start_2": 18, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["In", "real", "need", "of", "a", "face", "mask", ",", "wine", "and", "chocolate", "night", "\ud83e\udd14"], "text_2_tokenized": ["Waterloo", "region's", "mask", "bylaw", "went", "into", "effect", "this", "week", "(", "yes", "it", "took", "this", "long", ")", "and", "my", "experience", "from", "grocery", "shopping", "is", "that", "the", "jump", "in", "compliance", "is", "huge", ".", "Maybe", "50-60", "%", "before", ",", "over", "95", "%", "now", ".", "It's", "completely", "normalized", "."]} -{"id": "0227-mask", "word": "mask", "label_binary": 0, "text_1": "what's behind that mask?", "token_idx_1": 3, "text_start_1": 19, "text_end_1": 23, "date_1": "2019-07", "text_2": "Omg wear your mask you guys I'm scared", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["what's", "behind", "that", "mask", "?"], "text_2_tokenized": ["Omg", "wear", "your", "mask", "you", "guys", "I'm", "scared"]} -{"id": "0228-mask", "word": "mask", "label_binary": 0, "text_1": "self care is me taking a shower putting on a face mask and eating mint chocolate ice cream", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 54, "date_1": "2019-07", "text_2": "Ok but the man told me I look like a criminal with my mask on", "token_idx_2": 13, "text_start_2": 54, "text_end_2": 58, "date_2": "2020-07", "text_1_tokenized": ["self", "care", "is", "me", "taking", "a", "shower", "putting", "on", "a", "face", "mask", "and", "eating", "mint", "chocolate", "ice", "cream"], "text_2_tokenized": ["Ok", "but", "the", "man", "told", "me", "I", "look", "like", "a", "criminal", "with", "my", "mask", "on"]} -{"id": "0229-mask", "word": "mask", "label_binary": 0, "text_1": "i'm trying a foot peel mask, lets hope my toes don't fall off \ud83e\udd20", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 27, "date_1": "2019-07", "text_2": "me, not exerting the effort to emote because i'm wearing a mask: \ud83e\uddff\ud83e\uddff", "token_idx_2": 12, "text_start_2": 59, "text_end_2": 63, "date_2": "2020-07", "text_1_tokenized": ["i'm", "trying", "a", "foot", "peel", "mask", ",", "lets", "hope", "my", "toes", "don't", "fall", "off", "\ud83e\udd20"], "text_2_tokenized": ["me", ",", "not", "exerting", "the", "effort", "to", "emote", "because", "i'm", "wearing", "a", "mask", ":", "\ud83e\uddff", "\ud83e\uddff"]} -{"id": "0230-mask", "word": "mask", "label_binary": 0, "text_1": "found a facial mask that i'm in love with \ud83d\ude4c\ud83c\udffd", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 19, "date_1": "2019-07", "text_2": "wish the bitches in my state would wear a fucking mask outside, i just want to have a musical this year PLEASE all i'm asking is that you wear a tiny fucking piece of fabric", "token_idx_2": 10, "text_start_2": 50, "text_end_2": 54, "date_2": "2020-07", "text_1_tokenized": ["found", "a", "facial", "mask", "that", "i'm", "in", "love", "with", "\ud83d\ude4c\ud83c\udffd"], "text_2_tokenized": ["wish", "the", "bitches", "in", "my", "state", "would", "wear", "a", "fucking", "mask", "outside", ",", "i", "just", "want", "to", "have", "a", "musical", "this", "year", "PLEASE", "all", "i'm", "asking", "is", "that", "you", "wear", "a", "tiny", "fucking", "piece", "of", "fabric"]} -{"id": "0231-mask", "word": "mask", "label_binary": 0, "text_1": "I'm fairly convinced that my upstairs neighbors got some 50 Shades of Grey shenanigans going on. 
I'm just sitting here minding my own business, but I'm hearing random words like sit, permission, mask, strap on, harness, etc...", "token_idx_1": 36, "text_start_1": 195, "text_end_1": 199, "date_1": "2019-07", "text_2": "Y'all aren't allowed to go out places and not wear a mask and then complain about them canceling sports. You're part of the problem.", "token_idx_2": 11, "text_start_2": 53, "text_end_2": 57, "date_2": "2020-07", "text_1_tokenized": ["I'm", "fairly", "convinced", "that", "my", "upstairs", "neighbors", "got", "some", "50", "Shades", "of", "Grey", "shenanigans", "going", "on", ".", "I'm", "just", "sitting", "here", "minding", "my", "own", "business", ",", "but", "I'm", "hearing", "random", "words", "like", "sit", ",", "permission", ",", "mask", ",", "strap", "on", ",", "harness", ",", "etc", "..."], "text_2_tokenized": ["Y'all", "aren't", "allowed", "to", "go", "out", "places", "and", "not", "wear", "a", "mask", "and", "then", "complain", "about", "them", "canceling", "sports", ".", "You're", "part", "of", "the", "problem", "."]} -{"id": "0232-mask", "word": "mask", "label_binary": 0, "text_1": "Drake nice name drop mask n cape l im escape break the mold n bank", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 25, "date_1": "2019-07", "text_2": "RT @BridgetPhetasy: @RexChapman Wait. So her justification for wearing that mask is that this is what happens if you vote for Biden? Americans are proving over and over again to be the dumbest fucking people on Earth.", "token_idx_2": 12, "text_start_2": 76, "text_end_2": 80, "date_2": "2020-07", "text_1_tokenized": ["Drake", "nice", "name", "drop", "mask", "n", "cape", "l", "im", "escape", "break", "the", "mold", "n", "bank"], "text_2_tokenized": ["RT", "@BridgetPhetasy", ":", "@RexChapman", "Wait", ".", "So", "her", "justification", "for", "wearing", "that", "mask", "is", "that", "this", "is", "what", "happens", "if", "you", "vote", "for", "Biden", "?", "Americans", "are", "proving", "over", "and", "over", "again", "to", "be", "the", "dumbest", "fucking", "people", "on", "Earth", "."]} -{"id": "0233-mask", "word": "mask", "label_binary": 0, "text_1": "I want to be unfiltered and honest online, but then I realize how sad desperate and lonely I'd look but then again I'm wearing a mask too so lol", "token_idx_1": 26, "text_start_1": 129, "text_end_1": 133, "date_1": "2019-07", "text_2": "Wear a mask. Scientists say the Coronavirus is not going to go away until Everyone starts wearing a mask properly when you must go out. And social distance. Please", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 11, "date_2": "2020-07", "text_1_tokenized": ["I", "want", "to", "be", "unfiltered", "and", "honest", "online", ",", "but", "then", "I", "realize", "how", "sad", "desperate", "and", "lonely", "I'd", "look", "but", "then", "again", "I'm", "wearing", "a", "mask", "too", "so", "lol"], "text_2_tokenized": ["Wear", "a", "mask", ".", "Scientists", "say", "the", "Coronavirus", "is", "not", "going", "to", "go", "away", "until", "Everyone", "starts", "wearing", "a", "mask", "properly", "when", "you", "must", "go", "out", ".", "And", "social", "distance", ".", "Please"]} -{"id": "0234-mask", "word": "mask", "label_binary": 0, "text_1": "I get Rufaro on my skin care routine and she be out here telling she's got a face mask on, hate to see it", "token_idx_1": 18, "text_start_1": 82, "text_end_1": 86, "date_1": "2019-07", "text_2": "Far-Right: YOU WON'T INFRINGE ON MY RIGHTS!!! Everyone else: Where's your right to get me sick? Far-Right:... 
Everyone else: Exactly, we don't like it either, just wear the damn mask, and STFU! #COVID #COVIDIOT #howarewestillhavingthisconversation", "token_idx_2": 40, "text_start_2": 178, "text_end_2": 182, "date_2": "2020-07", "text_1_tokenized": ["I", "get", "Rufaro", "on", "my", "skin", "care", "routine", "and", "she", "be", "out", "here", "telling", "she's", "got", "a", "face", "mask", "on", ",", "hate", "to", "see", "it"], "text_2_tokenized": ["Far-Right", ":", "YOU", "WON'T", "INFRINGE", "ON", "MY", "RIGHTS", "!", "!", "!", "Everyone", "else", ":", "Where's", "your", "right", "to", "get", "me", "sick", "?", "Far-Right", ":", "...", "Everyone", "else", ":", "Exactly", ",", "we", "don't", "like", "it", "either", ",", "just", "wear", "the", "damn", "mask", ",", "and", "STFU", "!", "#COVID", "#COVIDIOT", "#howarewestillhavingthisconversation"]} -{"id": "0235-mask", "word": "mask", "label_binary": 1, "text_1": "gonna post even though it's obvious, just didn't see anything when i searched: new Guilty Gear character has \"\u5fc3\" (read as kokoro/shin, meaning 'heart' or 'mind') on their jacket and face mask. additional Japanese flourish, along with the dual katana and samurai ponytail.", "token_idx_1": 44, "text_start_1": 187, "text_end_1": 191, "date_1": "2019-07", "text_2": "is there anything grosser than burping in your mask?", "token_idx_2": 8, "text_start_2": 47, "text_end_2": 51, "date_2": "2020-07", "text_1_tokenized": ["gonna", "post", "even", "though", "it's", "obvious", ",", "just", "didn't", "see", "anything", "when", "i", "searched", ":", "new", "Guilty", "Gear", "character", "has", "\"", "\u5fc3", "\"", "(", "read", "as", "kokoro", "/", "shin", ",", "meaning", "'", "heart", "'", "or", "'", "mind", "'", ")", "on", "their", "jacket", "and", "face", "mask", ".", "additional", "Japanese", "flourish", ",", "along", "with", "the", "dual", "katana", "and", "samurai", "ponytail", "."], "text_2_tokenized": ["is", "there", "anything", "grosser", "than", "burping", "in", "your", "mask", "?"]} -{"id": "0236-mask", "word": "mask", "label_binary": 1, "text_1": "Is YJ Static not concerned with secret identity? He's all over social without a mask #YoungJusticeOutsiders #weareoutsiders #DCUYoungJustice", "token_idx_1": 15, "text_start_1": 80, "text_end_1": 84, "date_1": "2019-07", "text_2": "It be the walk back to the car for me when I leave my damn mask.", "token_idx_2": 15, "text_start_2": 59, "text_end_2": 63, "date_2": "2020-07", "text_1_tokenized": ["Is", "YJ", "Static", "not", "concerned", "with", "secret", "identity", "?", "He's", "all", "over", "social", "without", "a", "mask", "#YoungJusticeOutsiders", "#weareoutsiders", "#DCUYoungJustice"], "text_2_tokenized": ["It", "be", "the", "walk", "back", "to", "the", "car", "for", "me", "when", "I", "leave", "my", "damn", "mask", "."]} -{"id": "0237-mask", "word": "mask", "label_binary": 0, "text_1": "Why does my oldest only ever want to snuggle with me when I sit down for a very specific and short amount of time? E.g. 
While doing a face mask.", "token_idx_1": 33, "text_start_1": 139, "text_end_1": 143, "date_1": "2019-07", "text_2": "I'm tired of getting make up on a mask.", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 38, "date_2": "2020-07", "text_1_tokenized": ["Why", "does", "my", "oldest", "only", "ever", "want", "to", "snuggle", "with", "me", "when", "I", "sit", "down", "for", "a", "very", "specific", "and", "short", "amount", "of", "time", "?", "E", ".", "g", ".", "While", "doing", "a", "face", "mask", "."], "text_2_tokenized": ["I'm", "tired", "of", "getting", "make", "up", "on", "a", "mask", "."]} -{"id": "0238-mask", "word": "mask", "label_binary": 0, "text_1": "\u2014- Weekly\u2728 \u2022 Exfoliate \u2022 Use a face mask \u2022 Use pore or fave strips", "token_idx_1": 10, "text_start_1": 36, "text_end_1": 40, "date_1": "2019-07", "text_2": "Baffles me they way people come at Tom Wolf for trying to keep you safe. Bro you want your grandparents dying because you are too PrOuD to wear a mask. If he was a Republican you ding dongs wouldn't have an issue wearing one huh", "token_idx_2": 30, "text_start_2": 146, "text_end_2": 150, "date_2": "2020-07", "text_1_tokenized": ["\u2014", "-", "Weekly", "\u2728", "\u2022", "Exfoliate", "\u2022", "Use", "a", "face", "mask", "\u2022", "Use", "pore", "or", "fave", "strips"], "text_2_tokenized": ["Baffles", "me", "they", "way", "people", "come", "at", "Tom", "Wolf", "for", "trying", "to", "keep", "you", "safe", ".", "Bro", "you", "want", "your", "grandparents", "dying", "because", "you", "are", "too", "PrOuD", "to", "wear", "a", "mask", ".", "If", "he", "was", "a", "Republican", "you", "ding", "dongs", "wouldn't", "have", "an", "issue", "wearing", "one", "huh"]} -{"id": "0239-mask", "word": "mask", "label_binary": 0, "text_1": "There isn't a lot I'll miss about NYC. However, I fucking love my Bushwick hood and damn, am I going to miss running to the corner fruit stand in leopard print PJs and a face mask.", "token_idx_1": 38, "text_start_1": 175, "text_end_1": 179, "date_1": "2019-07", "text_2": "I got high and walked to 7/11 for munchies and as soon I left the store I realized I forgot to put on my mask. RIP me, don't do drugs kids", "token_idx_2": 24, "text_start_2": 105, "text_end_2": 109, "date_2": "2020-07", "text_1_tokenized": ["There", "isn't", "a", "lot", "I'll", "miss", "about", "NYC", ".", "However", ",", "I", "fucking", "love", "my", "Bushwick", "hood", "and", "damn", ",", "am", "I", "going", "to", "miss", "running", "to", "the", "corner", "fruit", "stand", "in", "leopard", "print", "PJs", "and", "a", "face", "mask", "."], "text_2_tokenized": ["I", "got", "high", "and", "walked", "to", "7/11", "for", "munchies", "and", "as", "soon", "I", "left", "the", "store", "I", "realized", "I", "forgot", "to", "put", "on", "my", "mask", ".", "RIP", "me", ",", "don't", "do", "drugs", "kids"]} -{"id": "0240-mask", "word": "mask", "label_binary": 0, "text_1": "Favorite face mask? 
I'm liking this honeycomb one", "token_idx_1": 2, "text_start_1": 14, "text_end_1": 18, "date_1": "2019-07", "text_2": "i've seen so many people take their mask off to sneeze into the open air like???", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["Favorite", "face", "mask", "?", "I'm", "liking", "this", "honeycomb", "one"], "text_2_tokenized": ["i've", "seen", "so", "many", "people", "take", "their", "mask", "off", "to", "sneeze", "into", "the", "open", "air", "like", "?", "?", "?"]} -{"id": "0241-mask", "word": "mask", "label_binary": 0, "text_1": "One thing I've always disliked about After Effects is how long mask take to generate. Use the rotobrush, & you need to triple check it for procedural errors. Pen tool, & it's a tedious half hour.", "token_idx_1": 11, "text_start_1": 63, "text_end_1": 67, "date_1": "2019-07", "text_2": "Are you wearing a mask because you genuinely think it's beneficial or because it's the new fad?", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["One", "thing", "I've", "always", "disliked", "about", "After", "Effects", "is", "how", "long", "mask", "take", "to", "generate", ".", "Use", "the", "rotobrush", ",", "&", "you", "need", "to", "triple", "check", "it", "for", "procedural", "errors", ".", "Pen", "tool", ",", "&", "it's", "a", "tedious", "half", "hour", "."], "text_2_tokenized": ["Are", "you", "wearing", "a", "mask", "because", "you", "genuinely", "think", "it's", "beneficial", "or", "because", "it's", "the", "new", "fad", "?"]} -{"id": "0242-mask", "word": "mask", "label_binary": 1, "text_1": "I say it's hotter than the Devil with a fever wearing a leather and a beater in a sweater while he eatin' a jalape\u00f1o fajita with a bookbag, full of heat pads, with a skully on and a ski mask with the heat on", "token_idx_1": 42, "text_start_1": 186, "text_end_1": 190, "date_1": "2019-07", "text_2": "Love Tito Francona, but does he know his mask is supposed to cover his nose?! \ud83e\udd37\ud83c\udffb\u200d\u2642\ufe0f\ud83d\ude37", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 45, "date_2": "2020-07", "text_1_tokenized": ["I", "say", "it's", "hotter", "than", "the", "Devil", "with", "a", "fever", "wearing", "a", "leather", "and", "a", "beater", "in", "a", "sweater", "while", "he", "eatin", "'", "a", "jalape\u00f1o", "fajita", "with", "a", "bookbag", ",", "full", "of", "heat", "pads", ",", "with", "a", "skully", "on", "and", "a", "ski", "mask", "with", "the", "heat", "on"], "text_2_tokenized": ["Love", "Tito", "Francona", ",", "but", "does", "he", "know", "his", "mask", "is", "supposed", "to", "cover", "his", "nose", "?", "!", "\ud83e\udd37\ud83c\udffb\u200d\u2642", "\ufe0f", "\ud83d\ude37"]} -{"id": "0243-mask", "word": "mask", "label_binary": 0, "text_1": "I just went through a deep clean, put a mask on, lotioned everywhere, cleaned my pores. Blud i feel like a new man.", "token_idx_1": 10, "text_start_1": 40, "text_end_1": 44, "date_1": "2019-07", "text_2": "A yt lady at kaiser really wore her mask inside but the minute she got into the pharmacy she took it off, took a loud phone call, and stood less than 2ft away from the people in line. 
Can you tell I live in California?", "token_idx_2": 8, "text_start_2": 36, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["I", "just", "went", "through", "a", "deep", "clean", ",", "put", "a", "mask", "on", ",", "lotioned", "everywhere", ",", "cleaned", "my", "pores", ".", "Blud", "i", "feel", "like", "a", "new", "man", "."], "text_2_tokenized": ["A", "yt", "lady", "at", "kaiser", "really", "wore", "her", "mask", "inside", "but", "the", "minute", "she", "got", "into", "the", "pharmacy", "she", "took", "it", "off", ",", "took", "a", "loud", "phone", "call", ",", "and", "stood", "less", "than", "2ft", "away", "from", "the", "people", "in", "line", ".", "Can", "you", "tell", "I", "live", "in", "California", "?"]} -{"id": "0244-mask", "word": "mask", "label_binary": 0, "text_1": "Oh that SPIFF face mask is a must.", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 23, "date_1": "2019-07", "text_2": "This is the first time I've been close (2 meters) to someone wearing a mask. She might as well not be. I can see in side it.", "token_idx_2": 16, "text_start_2": 71, "text_end_2": 75, "date_2": "2020-07", "text_1_tokenized": ["Oh", "that", "SPIFF", "face", "mask", "is", "a", "must", "."], "text_2_tokenized": ["This", "is", "the", "first", "time", "I've", "been", "close", "(", "2", "meters", ")", "to", "someone", "wearing", "a", "mask", ".", "She", "might", "as", "well", "not", "be", ".", "I", "can", "see", "in", "side", "it", "."]} -{"id": "0245-mask", "word": "mask", "label_binary": 1, "text_1": "I'm very curious what it would be like to be fucked with the CPAP mask on. Will the extra oxygen give me more stamina? Am I just looking for an excuse to indulge medical play?", "token_idx_1": 14, "text_start_1": 66, "text_end_1": 70, "date_1": "2019-07", "text_2": "Anyone voluntarily wearing a mask at this point is a Chinese operative nuff said", "token_idx_2": 4, "text_start_2": 29, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["I'm", "very", "curious", "what", "it", "would", "be", "like", "to", "be", "fucked", "with", "the", "CPAP", "mask", "on", ".", "Will", "the", "extra", "oxygen", "give", "me", "more", "stamina", "?", "Am", "I", "just", "looking", "for", "an", "excuse", "to", "indulge", "medical", "play", "?"], "text_2_tokenized": ["Anyone", "voluntarily", "wearing", "a", "mask", "at", "this", "point", "is", "a", "Chinese", "operative", "nuff", "said"]} -{"id": "0246-mask", "word": "mask", "label_binary": 0, "text_1": "I look so scary with a clay mask on fkfkkf", "token_idx_1": 7, "text_start_1": 28, "text_end_1": 32, "date_1": "2019-07", "text_2": "been thinking about getting a ski mask cus i like the look of them but with mesh covering the eye and mouth holes..or maybe a fishnet material..? 
idk if they make those so i might have to diy but..it's been on my mind a bit..", "token_idx_2": 6, "text_start_2": 34, "text_end_2": 38, "date_2": "2020-07", "text_1_tokenized": ["I", "look", "so", "scary", "with", "a", "clay", "mask", "on", "fkfkkf"], "text_2_tokenized": ["been", "thinking", "about", "getting", "a", "ski", "mask", "cus", "i", "like", "the", "look", "of", "them", "but", "with", "mesh", "covering", "the", "eye", "and", "mouth", "holes", "..", "or", "maybe", "a", "fishnet", "material", "..", "?", "idk", "if", "they", "make", "those", "so", "i", "might", "have", "to", "diy", "but", "..", "it's", "been", "on", "my", "mind", "a", "bit", ".."]} -{"id": "0247-mask", "word": "mask", "label_binary": 1, "text_1": "my new dentist is the hottest dentist i've ever seen in my life??? up until now i've only had dentists who remind me of stereotypical asian dads???? when he took off his mask i almost choked and instantly regretted all the times i didn't floss", "token_idx_1": 38, "text_start_1": 170, "text_end_1": 174, "date_1": "2019-07", "text_2": "Prayers to @bosnianbeast27 and his family. Sending him love and support. Please wear a mask everyone.", "token_idx_2": 16, "text_start_2": 87, "text_end_2": 91, "date_2": "2020-07", "text_1_tokenized": ["my", "new", "dentist", "is", "the", "hottest", "dentist", "i've", "ever", "seen", "in", "my", "life", "?", "?", "?", "up", "until", "now", "i've", "only", "had", "dentists", "who", "remind", "me", "of", "stereotypical", "asian", "dads", "?", "?", "?", "when", "he", "took", "off", "his", "mask", "i", "almost", "choked", "and", "instantly", "regretted", "all", "the", "times", "i", "didn't", "floss"], "text_2_tokenized": ["Prayers", "to", "@bosnianbeast27", "and", "his", "family", ".", "Sending", "him", "love", "and", "support", ".", "Please", "wear", "a", "mask", "everyone", "."]} -{"id": "0248-mask", "word": "mask", "label_binary": 1, "text_1": "\"In the event of an emergency, put your oxygen mask on first before helping others\" It sounds like the most dickheaded selfish instructions when you're used to going out on a limb for others, but if that ain't the wisest advice that not only applies to planes but to life.", "token_idx_1": 11, "text_start_1": 47, "text_end_1": 51, "date_1": "2019-07", "text_2": "Someone told me that my ears got bigger :( bitch I wear a face mask 8 hours a day", "token_idx_2": 14, "text_start_2": 63, "text_end_2": 67, "date_2": "2020-07", "text_1_tokenized": ["\"", "In", "the", "event", "of", "an", "emergency", ",", "put", "your", "oxygen", "mask", "on", "first", "before", "helping", "others", "\"", "It", "sounds", "like", "the", "most", "dickheaded", "selfish", "instructions", "when", "you're", "used", "to", "going", "out", "on", "a", "limb", "for", "others", ",", "but", "if", "that", "ain't", "the", "wisest", "advice", "that", "not", "only", "applies", "to", "planes", "but", "to", "life", "."], "text_2_tokenized": ["Someone", "told", "me", "that", "my", "ears", "got", "bigger", ":(", "bitch", "I", "wear", "a", "face", "mask", "8", "hours", "a", "day"]} -{"id": "0249-mask", "word": "mask", "label_binary": 1, "text_1": "i'm frank lucas in chinchilla with a ski mask like mick foley.", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 45, "date_1": "2019-07", "text_2": "Imagine not choosing tipsy when it's up against busy boy and wwstom \ud83e\udd74. 
Now imagine debating on wearing a fucking mask for like 30 minutes", "token_idx_2": 21, "text_start_2": 113, "text_end_2": 117, "date_2": "2020-07", "text_1_tokenized": ["i'm", "frank", "lucas", "in", "chinchilla", "with", "a", "ski", "mask", "like", "mick", "foley", "."], "text_2_tokenized": ["Imagine", "not", "choosing", "tipsy", "when", "it's", "up", "against", "busy", "boy", "and", "wwstom", "\ud83e\udd74", ".", "Now", "imagine", "debating", "on", "wearing", "a", "fucking", "mask", "for", "like", "30", "minutes"]} -{"id": "0250-mask", "word": "mask", "label_binary": 1, "text_1": "I wore your mom like her CPAP mask.", "token_idx_1": 7, "text_start_1": 30, "text_end_1": 34, "date_1": "2019-07", "text_2": "Just had a Zoom board meeting. The head and secty were at the office. Head wasn't wearing a mask, secty was. At end of call, I asked head where his mask was. Picked it up from desk. I said \"don't help if you aren't wearing it. Thanks, scty for caring!\"", "token_idx_2": 20, "text_start_2": 92, "text_end_2": 96, "date_2": "2020-07", "text_1_tokenized": ["I", "wore", "your", "mom", "like", "her", "CPAP", "mask", "."], "text_2_tokenized": ["Just", "had", "a", "Zoom", "board", "meeting", ".", "The", "head", "and", "secty", "were", "at", "the", "office", ".", "Head", "wasn't", "wearing", "a", "mask", ",", "secty", "was", ".", "At", "end", "of", "call", ",", "I", "asked", "head", "where", "his", "mask", "was", ".", "Picked", "it", "up", "from", "desk", ".", "I", "said", "\"", "don't", "help", "if", "you", "aren't", "wearing", "it", ".", "Thanks", ",", "scty", "for", "caring", "!", "\""]} -{"id": "0251-mask", "word": "mask", "label_binary": 0, "text_1": "My mom on some weird shit today. \ud83e\udd14 but I'm doing a face mask so I can't really investigate", "token_idx_1": 14, "text_start_1": 56, "text_end_1": 60, "date_1": "2019-07", "text_2": "For the first time in my life I'm ok with having a big nose because it holds up my mask. \ud83d\ude37", "token_idx_2": 19, "text_start_2": 83, "text_end_2": 87, "date_2": "2020-07", "text_1_tokenized": ["My", "mom", "on", "some", "weird", "shit", "today", ".", "\ud83e\udd14", "but", "I'm", "doing", "a", "face", "mask", "so", "I", "can't", "really", "investigate"], "text_2_tokenized": ["For", "the", "first", "time", "in", "my", "life", "I'm", "ok", "with", "having", "a", "big", "nose", "because", "it", "holds", "up", "my", "mask", ".", "\ud83d\ude37"]} -{"id": "0252-mask", "word": "mask", "label_binary": 1, "text_1": "All I'm saying is, what if when Boris Johnson becomes prime minister (all of the choices are shit) it would be great if he rips off his mask, and it was Sacha Baron Cohen underneath the whole time?", "token_idx_1": 30, "text_start_1": 136, "text_end_1": 140, "date_1": "2019-07", "text_2": "I will wear a mask when I feel I need it much like I carry a loaded firearm when I feel I need it. 
Personal choice", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["All", "I'm", "saying", "is", ",", "what", "if", "when", "Boris", "Johnson", "becomes", "prime", "minister", "(", "all", "of", "the", "choices", "are", "shit", ")", "it", "would", "be", "great", "if", "he", "rips", "off", "his", "mask", ",", "and", "it", "was", "Sacha", "Baron", "Cohen", "underneath", "the", "whole", "time", "?"], "text_2_tokenized": ["I", "will", "wear", "a", "mask", "when", "I", "feel", "I", "need", "it", "much", "like", "I", "carry", "a", "loaded", "firearm", "when", "I", "feel", "I", "need", "it", ".", "Personal", "choice"]} -{"id": "0253-mask", "word": "mask", "label_binary": 0, "text_1": "is it okay if i sleep with a face mask on because i am too lazy to get up and wash my face rn", "token_idx_1": 9, "text_start_1": 34, "text_end_1": 38, "date_1": "2019-07", "text_2": "You wanna know what your horoscope for the day is? Wear your fcking mask", "token_idx_2": 14, "text_start_2": 68, "text_end_2": 72, "date_2": "2020-07", "text_1_tokenized": ["is", "it", "okay", "if", "i", "sleep", "with", "a", "face", "mask", "on", "because", "i", "am", "too", "lazy", "to", "get", "up", "and", "wash", "my", "face", "rn"], "text_2_tokenized": ["You", "wanna", "know", "what", "your", "horoscope", "for", "the", "day", "is", "?", "Wear", "your", "fcking", "mask"]} -{"id": "0254-mask", "word": "mask", "label_binary": 0, "text_1": "y'all wanna see a horrendous vid of me trying to do eyeshadow and eyeliner then taking it off just to put a mask on all while intoxicated??", "token_idx_1": 22, "text_start_1": 108, "text_end_1": 112, "date_1": "2019-07", "text_2": "oh.......my god........i was just at the orthodontist and the doctor was talking to the assistants about how they would have to throw jim in jail to get him to wear a mask everywhere.....he literally said \"theyre not even good for you, youre supposed to breathe in all those", "token_idx_2": 35, "text_start_2": 167, "text_end_2": 171, "date_2": "2020-07", "text_1_tokenized": ["y'all", "wanna", "see", "a", "horrendous", "vid", "of", "me", "trying", "to", "do", "eyeshadow", "and", "eyeliner", "then", "taking", "it", "off", "just", "to", "put", "a", "mask", "on", "all", "while", "intoxicated", "?", "?"], "text_2_tokenized": ["oh", "...", "my", "god", "...", "i", "was", "just", "at", "the", "orthodontist", "and", "the", "doctor", "was", "talking", "to", "the", "assistants", "about", "how", "they", "would", "have", "to", "throw", "jim", "in", "jail", "to", "get", "him", "to", "wear", "a", "mask", "everywhere", "...", "he", "literally", "said", "\"", "theyre", "not", "even", "good", "for", "you", ",", "youre", "supposed", "to", "breathe", "in", "all", "those"]} -{"id": "0255-mask", "word": "mask", "label_binary": 1, "text_1": "Need a face mask for Hard summer man I hate getting all the dirt up in my nose and shit smh", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 16, "date_1": "2019-07", "text_2": "Someone called me an asshole because I asked them where their mask was \ud83e\udd7a people are great \ud83d\ude39", "token_idx_2": 11, "text_start_2": 62, "text_end_2": 66, "date_2": "2020-07", "text_1_tokenized": ["Need", "a", "face", "mask", "for", "Hard", "summer", "man", "I", "hate", "getting", "all", "the", "dirt", "up", "in", "my", "nose", "and", "shit", "smh"], "text_2_tokenized": ["Someone", "called", "me", "an", "asshole", "because", "I", "asked", "them", "where", "their", "mask", "was", "\ud83e\udd7a", "people", 
"are", "great", "\ud83d\ude39"]} -{"id": "0256-mask", "word": "mask", "label_binary": 0, "text_1": "this clay mask stings sksksksks feels like my face is on FIRE", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 14, "date_1": "2019-07", "text_2": "Shady I can see it \ud83d\udc40!!! Shame on you take that mask off and be the real you!!!! You know who you are...", "token_idx_2": 14, "text_start_2": 47, "text_end_2": 51, "date_2": "2020-07", "text_1_tokenized": ["this", "clay", "mask", "stings", "sksksksks", "feels", "like", "my", "face", "is", "on", "FIRE"], "text_2_tokenized": ["Shady", "I", "can", "see", "it", "\ud83d\udc40", "!", "!", "!", "Shame", "on", "you", "take", "that", "mask", "off", "and", "be", "the", "real", "you", "!", "!", "!", "You", "know", "who", "you", "are", "..."]} -{"id": "0257-mask", "word": "mask", "label_binary": 0, "text_1": "Oh hey, they're doing a new ROCKETEER show. Too bad they changed the (super cool) mask and I do think a little girl would like gold just as much as pink, but it does look pretty swell.", "token_idx_1": 19, "text_start_1": 82, "text_end_1": 86, "date_1": "2019-07", "text_2": "So...DeWine TOOK OFF his mask to talk. Also, unless I actually see that pitch go from DeWine to Votto, I don't buy it.... #Ohio #COVID19 @Reds", "token_idx_2": 6, "text_start_2": 25, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["Oh", "hey", ",", "they're", "doing", "a", "new", "ROCKETEER", "show", ".", "Too", "bad", "they", "changed", "the", "(", "super", "cool", ")", "mask", "and", "I", "do", "think", "a", "little", "girl", "would", "like", "gold", "just", "as", "much", "as", "pink", ",", "but", "it", "does", "look", "pretty", "swell", "."], "text_2_tokenized": ["So", "...", "DeWine", "TOOK", "OFF", "his", "mask", "to", "talk", ".", "Also", ",", "unless", "I", "actually", "see", "that", "pitch", "go", "from", "DeWine", "to", "Votto", ",", "I", "don't", "buy", "it", "...", "#Ohio", "#COVID19", "@Reds"]} -{"id": "0258-mask", "word": "mask", "label_binary": 1, "text_1": "\u201cJade he cheated on me I'm buying a ski mask and popping all his tires\u201d LMAO energy", "token_idx_1": 10, "text_start_1": 40, "text_end_1": 44, "date_1": "2019-07", "text_2": "I'm FANGIRLING behind my mask right now,,,,", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["\u201c", "Jade", "he", "cheated", "on", "me", "I'm", "buying", "a", "ski", "mask", "and", "popping", "all", "his", "tires", "\u201d", "LMAO", "energy"], "text_2_tokenized": ["I'm", "FANGIRLING", "behind", "my", "mask", "right", "now", ",", ",", ","]} -{"id": "0259-mask", "word": "mask", "label_binary": 0, "text_1": "\u201cShawty got the face mask, the red wine. I see the vibeeee\u201d \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 25, "date_1": "2019-07", "text_2": "Dead just heard this lady and the deli say she can't hear sometimes w a mask on..... 
Lady just say you misheard him \ud83e\udd22", "token_idx_2": 15, "text_start_2": 72, "text_end_2": 76, "date_2": "2020-07", "text_1_tokenized": ["\u201c", "Shawty", "got", "the", "face", "mask", ",", "the", "red", "wine", ".", "I", "see", "the", "vibeeee", "\u201d", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["Dead", "just", "heard", "this", "lady", "and", "the", "deli", "say", "she", "can't", "hear", "sometimes", "w", "a", "mask", "on", "...", "Lady", "just", "say", "you", "misheard", "him", "\ud83e\udd22"]} -{"id": "0260-mask", "word": "mask", "label_binary": 0, "text_1": "I started watching that secret obsession while I was doing a sheet mask and honest leigh....", "token_idx_1": 12, "text_start_1": 67, "text_end_1": 71, "date_1": "2019-07", "text_2": "Autumn hate when I pull her mask down.. she pull it right back up. I'm sick she think it's normal", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 32, "date_2": "2020-07", "text_1_tokenized": ["I", "started", "watching", "that", "secret", "obsession", "while", "I", "was", "doing", "a", "sheet", "mask", "and", "honest", "leigh", "..."], "text_2_tokenized": ["Autumn", "hate", "when", "I", "pull", "her", "mask", "down", "..", "she", "pull", "it", "right", "back", "up", ".", "I'm", "sick", "she", "think", "it's", "normal"]} -{"id": "0261-mask", "word": "mask", "label_binary": 1, "text_1": "I found the same mask @TomHatesGod has. At the fair.", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 21, "date_1": "2019-07", "text_2": "Oh you go for walks/runs without a mask? Back to eating out frequently? Back at your gym? You're mad things aren't going back to \u201cnormal\u201d? You're all like \u201cfuck rona\u201d? NO. NOT FUCK RONA. FUCK YOU. YOU ARE THE PROBLEM.", "token_idx_2": 9, "text_start_2": 35, "text_end_2": 39, "date_2": "2020-07", "text_1_tokenized": ["I", "found", "the", "same", "mask", "@TomHatesGod", "has", ".", "At", "the", "fair", "."], "text_2_tokenized": ["Oh", "you", "go", "for", "walks", "/", "runs", "without", "a", "mask", "?", "Back", "to", "eating", "out", "frequently", "?", "Back", "at", "your", "gym", "?", "You're", "mad", "things", "aren't", "going", "back", "to", "\u201c", "normal", "\u201d", "?", "You're", "all", "like", "\u201c", "fuck", "rona", "\u201d", "?", "NO", ".", "NOT", "FUCK", "RONA", ".", "FUCK", "YOU", ".", "YOU", "ARE", "THE", "PROBLEM", "."]} -{"id": "0262-mask", "word": "mask", "label_binary": 0, "text_1": "Why is is so hard to find this aztec clay mask in Warner Robins omg", "token_idx_1": 10, "text_start_1": 42, "text_end_1": 46, "date_1": "2019-07", "text_2": "Not wearing a mask is like frying bacon naked. Your dick might be fine, but why the hell would you risk that? #WearAMask", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["Why", "is", "is", "so", "hard", "to", "find", "this", "aztec", "clay", "mask", "in", "Warner", "Robins", "omg"], "text_2_tokenized": ["Not", "wearing", "a", "mask", "is", "like", "frying", "bacon", "naked", ".", "Your", "dick", "might", "be", "fine", ",", "but", "why", "the", "hell", "would", "you", "risk", "that", "?", "#WearAMask"]} -{"id": "0263-mask", "word": "mask", "label_binary": 0, "text_1": "I been saying I wanted the deluxe pedicure.. I wanted the massage, mask and everything. I hate when my time is wasted... 
\ud83d\ude2d\ud83d\udc94\ud83d\udc94", "token_idx_1": 14, "text_start_1": 67, "text_end_1": 71, "date_1": "2019-07", "text_2": "You know that look that you give someone who's shopping without a mask on? I wish we'd all give that same look at people we know who don't store their guns properly and those who abuse their spouses or children. #BeSmart #LivesOverGuns We at @MomsDemand are here to help.", "token_idx_2": 12, "text_start_2": 66, "text_end_2": 70, "date_2": "2020-07", "text_1_tokenized": ["I", "been", "saying", "I", "wanted", "the", "deluxe", "pedicure", "..", "I", "wanted", "the", "massage", ",", "mask", "and", "everything", ".", "I", "hate", "when", "my", "time", "is", "wasted", "...", "\ud83d\ude2d", "\ud83d\udc94", "\ud83d\udc94"], "text_2_tokenized": ["You", "know", "that", "look", "that", "you", "give", "someone", "who's", "shopping", "without", "a", "mask", "on", "?", "I", "wish", "we'd", "all", "give", "that", "same", "look", "at", "people", "we", "know", "who", "don't", "store", "their", "guns", "properly", "and", "those", "who", "abuse", "their", "spouses", "or", "children", ".", "#BeSmart", "#LivesOverGuns", "We", "at", "@MomsDemand", "are", "here", "to", "help", "."]} -{"id": "0264-mask", "word": "mask", "label_binary": 0, "text_1": "Tell me HOW Austin Ames couldn't figure out that it was Sam wearing the mask in a A Cinderella Story... it's not like she had the Batman mask on", "token_idx_1": 14, "text_start_1": 72, "text_end_1": 76, "date_1": "2019-07", "text_2": "Covid killed my uncle this past week. I'm tired of people acting like it's a joke or complaining about wearing a mask, the bare fucking minimum. Some of you are pathetic.", "token_idx_2": 22, "text_start_2": 113, "text_end_2": 117, "date_2": "2020-07", "text_1_tokenized": ["Tell", "me", "HOW", "Austin", "Ames", "couldn't", "figure", "out", "that", "it", "was", "Sam", "wearing", "the", "mask", "in", "a", "A", "Cinderella", "Story", "...", "it's", "not", "like", "she", "had", "the", "Batman", "mask", "on"], "text_2_tokenized": ["Covid", "killed", "my", "uncle", "this", "past", "week", ".", "I'm", "tired", "of", "people", "acting", "like", "it's", "a", "joke", "or", "complaining", "about", "wearing", "a", "mask", ",", "the", "bare", "fucking", "minimum", ".", "Some", "of", "you", "are", "pathetic", "."]} -{"id": "0265-mask", "word": "mask", "label_binary": 1, "text_1": "seems like hiichan is the only one that's not wearing a mask. 
hiichan really takes care of how she looks at the airport.", "token_idx_1": 11, "text_start_1": 56, "text_end_1": 60, "date_1": "2019-07", "text_2": "Teaching back to back high intensity indoor cycling classes while wearing a mask \ud83d\ude37 = an asthma attack waiting to happen \ud83e\udd74", "token_idx_2": 12, "text_start_2": 76, "text_end_2": 80, "date_2": "2020-07", "text_1_tokenized": ["seems", "like", "hiichan", "is", "the", "only", "one", "that's", "not", "wearing", "a", "mask", ".", "hiichan", "really", "takes", "care", "of", "how", "she", "looks", "at", "the", "airport", "."], "text_2_tokenized": ["Teaching", "back", "to", "back", "high", "intensity", "indoor", "cycling", "classes", "while", "wearing", "a", "mask", "\ud83d\ude37", "=", "an", "asthma", "attack", "waiting", "to", "happen", "\ud83e\udd74"]} -{"id": "0266-mask", "word": "mask", "label_binary": 0, "text_1": "I'm going to hit the gym, go home and shower, whiten my teeth and do a face mask \ud83d\udc86\u200d\u2640\ufe0f", "token_idx_1": 19, "text_start_1": 76, "text_end_1": 80, "date_1": "2019-07", "text_2": "Wearing a mask is like showering in a wetsuit.", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 14, "date_2": "2020-07", "text_1_tokenized": ["I'm", "going", "to", "hit", "the", "gym", ",", "go", "home", "and", "shower", ",", "whiten", "my", "teeth", "and", "do", "a", "face", "mask", "\ud83d\udc86\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["Wearing", "a", "mask", "is", "like", "showering", "in", "a", "wetsuit", "."]} -{"id": "0267-mask", "word": "mask", "label_binary": 0, "text_1": "I'm putting on an overnight face mask because I care about my pie and phlegm filled body \u270c\ud83c\udffc", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 37, "date_1": "2019-07", "text_2": "Imagine being in Kansas and getting covid because you got swept up into a tornado along with some guy who refuses to wear a mask", "token_idx_2": 24, "text_start_2": 124, "text_end_2": 128, "date_2": "2020-07", "text_1_tokenized": ["I'm", "putting", "on", "an", "overnight", "face", "mask", "because", "I", "care", "about", "my", "pie", "and", "phlegm", "filled", "body", "\u270c\ud83c\udffc"], "text_2_tokenized": ["Imagine", "being", "in", "Kansas", "and", "getting", "covid", "because", "you", "got", "swept", "up", "into", "a", "tornado", "along", "with", "some", "guy", "who", "refuses", "to", "wear", "a", "mask"]} -{"id": "0268-mask", "word": "mask", "label_binary": 1, "text_1": "not the ski mask \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d #Bringitback23", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 16, "date_1": "2019-07", "text_2": "Saw one of my supervisors without his mask on today and babyyyyyyy", "token_idx_2": 7, "text_start_2": 38, "text_end_2": 42, "date_2": "2020-07", "text_1_tokenized": ["not", "the", "ski", "mask", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "#Bringitback23"], "text_2_tokenized": ["Saw", "one", "of", "my", "supervisors", "without", "his", "mask", "on", "today", "and", "babyyyyyyy"]} -{"id": "0269-mask", "word": "mask", "label_binary": 1, "text_1": "Everybody is wearing a mask here. 
Very few genuine ones.", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 27, "date_1": "2019-07", "text_2": "If you wear a mask while driving alone, you're probably not voting for the best candidates to save our country.", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["Everybody", "is", "wearing", "a", "mask", "here", ".", "Very", "few", "genuine", "ones", "."], "text_2_tokenized": ["If", "you", "wear", "a", "mask", "while", "driving", "alone", ",", "you're", "probably", "not", "voting", "for", "the", "best", "candidates", "to", "save", "our", "country", "."]} -{"id": "0270-mask", "word": "mask", "label_binary": 1, "text_1": "\u00a34.50 to get out of the coach park at @manairport after 5 minutes stay. Even Dick Turpin wore a mask!", "token_idx_1": 21, "text_start_1": 96, "text_end_1": 100, "date_1": "2019-07", "text_2": "Hey Mark Lepper is wearing a mask \ud83d\ude37(@thelepper), thank you for following me", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["\u00a3", "4.50", "to", "get", "out", "of", "the", "coach", "park", "at", "@manairport", "after", "5", "minutes", "stay", ".", "Even", "Dick", "Turpin", "wore", "a", "mask", "!"], "text_2_tokenized": ["Hey", "Mark", "Lepper", "is", "wearing", "a", "mask", "\ud83d\ude37", "(", "@thelepper", ")", ",", "thank", "you", "for", "following", "me"]} -{"id": "0271-mask", "word": "mask", "label_binary": 0, "text_1": "currently watching vlogs in the shower while I let my face mask sit lol happy wednesday world, do something nice for yourself today", "token_idx_1": 11, "text_start_1": 59, "text_end_1": 63, "date_1": "2019-07", "text_2": "The Judge's chambers are open for business...just wear a mask #5straight", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 61, "date_2": "2020-07", "text_1_tokenized": ["currently", "watching", "vlogs", "in", "the", "shower", "while", "I", "let", "my", "face", "mask", "sit", "lol", "happy", "wednesday", "world", ",", "do", "something", "nice", "for", "yourself", "today"], "text_2_tokenized": ["The", "Judge's", "chambers", "are", "open", "for", "business", "...", "just", "wear", "a", "mask", "#5straight"]} -{"id": "0272-mask", "word": "mask", "label_binary": 1, "text_1": "ski mask the slump god is a legend", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 8, "date_1": "2019-07", "text_2": "Pregnancy is literally in the air cause ain't no way this many people done/finna pop out some babies this year. 
I'm glad I wear a mask everytime i go out\ud83d\ude37", "token_idx_2": 28, "text_start_2": 130, "text_end_2": 134, "date_2": "2020-07", "text_1_tokenized": ["ski", "mask", "the", "slump", "god", "is", "a", "legend"], "text_2_tokenized": ["Pregnancy", "is", "literally", "in", "the", "air", "cause", "ain't", "no", "way", "this", "many", "people", "done", "/", "finna", "pop", "out", "some", "babies", "this", "year", ".", "I'm", "glad", "I", "wear", "a", "mask", "everytime", "i", "go", "out", "\ud83d\ude37"]} -{"id": "0273-mask", "word": "mask", "label_binary": 0, "text_1": "a couple significant-ish things I've done differently: - didn't tell matt (as ashley) about emily & mike 'flirting' - scared jessica (as mike) with the mask so she fell into the stream (to see what would happen more than anything else)", "token_idx_1": 32, "text_start_1": 156, "text_end_1": 160, "date_1": "2019-07", "text_2": "Wearing a mask is basicly a \"I care about others\" sign", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 14, "date_2": "2020-07", "text_1_tokenized": ["a", "couple", "significant-ish", "things", "I've", "done", "differently", ":", "-", "didn't", "tell", "matt", "(", "as", "ashley", ")", "about", "emily", "&", "mike", "'", "flirting", "'", "-", "scared", "jessica", "(", "as", "mike", ")", "with", "the", "mask", "so", "she", "fell", "into", "the", "stream", "(", "to", "see", "what", "would", "happen", "more", "than", "anything", "else", ")"], "text_2_tokenized": ["Wearing", "a", "mask", "is", "basicly", "a", "\"", "I", "care", "about", "others", "\"", "sign"]} -{"id": "0274-mask", "word": "mask", "label_binary": 0, "text_1": "OK but legit can fellow Witcher fans tell me what scene that was where Yen is wearing that mask?", "token_idx_1": 18, "text_start_1": 91, "text_end_1": 95, "date_1": "2019-07", "text_2": "Well whaddya know, dude walks in with a tr*mp hat and is a total douche. No mask. Immediately starts complaining about things lol \ud83d\udc80\ud83d\udd2b", "token_idx_2": 20, "text_start_2": 76, "text_end_2": 80, "date_2": "2020-07", "text_1_tokenized": ["OK", "but", "legit", "can", "fellow", "Witcher", "fans", "tell", "me", "what", "scene", "that", "was", "where", "Yen", "is", "wearing", "that", "mask", "?"], "text_2_tokenized": ["Well", "whaddya", "know", ",", "dude", "walks", "in", "with", "a", "tr", "*", "mp", "hat", "and", "is", "a", "total", "douche", ".", "No", "mask", ".", "Immediately", "starts", "complaining", "about", "things", "lol", "\ud83d\udc80", "\ud83d\udd2b"]} -{"id": "0275-mask", "word": "mask", "label_binary": 0, "text_1": "I wish I could use my phone to take a picture of my phone but I can't. I got a clay mask in the speakers on the bottom and I'm weak but also sorry Huawei", "token_idx_1": 22, "text_start_1": 84, "text_end_1": 88, "date_1": "2019-07", "text_2": "I need to stop reading the comments on Facebook. People in Wisconsin are stupid. 
Just wear the freaking mask.", "token_idx_2": 20, "text_start_2": 104, "text_end_2": 108, "date_2": "2020-07", "text_1_tokenized": ["I", "wish", "I", "could", "use", "my", "phone", "to", "take", "a", "picture", "of", "my", "phone", "but", "I", "can't", ".", "I", "got", "a", "clay", "mask", "in", "the", "speakers", "on", "the", "bottom", "and", "I'm", "weak", "but", "also", "sorry", "Huawei"], "text_2_tokenized": ["I", "need", "to", "stop", "reading", "the", "comments", "on", "Facebook", ".", "People", "in", "Wisconsin", "are", "stupid", ".", "Just", "wear", "the", "freaking", "mask", "."]} -{"id": "0276-mask", "word": "mask", "label_binary": 0, "text_1": "all of this selfcare bs is absolutely right, today I used a clay mask, did my nails, and I'am not feeling like an absolute trash, just a little", "token_idx_1": 14, "text_start_1": 65, "text_end_1": 69, "date_1": "2019-07", "text_2": "OMG! CNN just had a moron telling people that bed sheets make a great mask and they are better than an N95 mask with a valve! Are people this stupid?", "token_idx_2": 15, "text_start_2": 70, "text_end_2": 74, "date_2": "2020-07", "text_1_tokenized": ["all", "of", "this", "selfcare", "bs", "is", "absolutely", "right", ",", "today", "I", "used", "a", "clay", "mask", ",", "did", "my", "nails", ",", "and", "I'am", "not", "feeling", "like", "an", "absolute", "trash", ",", "just", "a", "little"], "text_2_tokenized": ["OMG", "!", "CNN", "just", "had", "a", "moron", "telling", "people", "that", "bed", "sheets", "make", "a", "great", "mask", "and", "they", "are", "better", "than", "an", "N95", "mask", "with", "a", "valve", "!", "Are", "people", "this", "stupid", "?"]} -{"id": "0277-mask", "word": "mask", "label_binary": 0, "text_1": "i literally paused surfing the web to wash off my mask moisturize and change from pjs to clothes and went back to my bed to surf again", "token_idx_1": 10, "text_start_1": 50, "text_end_1": 54, "date_1": "2019-07", "text_2": "everytime i see a coworker without their mask on im like oh wow thats not what I thought the lower half of their face would look like", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 45, "date_2": "2020-07", "text_1_tokenized": ["i", "literally", "paused", "surfing", "the", "web", "to", "wash", "off", "my", "mask", "moisturize", "and", "change", "from", "pjs", "to", "clothes", "and", "went", "back", "to", "my", "bed", "to", "surf", "again"], "text_2_tokenized": ["everytime", "i", "see", "a", "coworker", "without", "their", "mask", "on", "im", "like", "oh", "wow", "thats", "not", "what", "I", "thought", "the", "lower", "half", "of", "their", "face", "would", "look", "like"]} -{"id": "0278-mask", "word": "mask", "label_binary": 0, "text_1": "I think I am about to thread my brows tonight then do an aloe face mask \ud83e\uddd6\ud83c\udffe\u200d\u2640\ufe0f", "token_idx_1": 15, "text_start_1": 67, "text_end_1": 71, "date_1": "2019-07", "text_2": "If you don't listen to us, we'll set up checkpoints to enforce mask requirements. If you don't listen to us, we'll turn off your electricity and water. If you don't listen to us, we'll shut down your business. 
If you don't listen to us, your children may never get educated.", "token_idx_2": 13, "text_start_2": 63, "text_end_2": 67, "date_2": "2020-07", "text_1_tokenized": ["I", "think", "I", "am", "about", "to", "thread", "my", "brows", "tonight", "then", "do", "an", "aloe", "face", "mask", "\ud83e\uddd6\ud83c\udffe\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["If", "you", "don't", "listen", "to", "us", ",", "we'll", "set", "up", "checkpoints", "to", "enforce", "mask", "requirements", ".", "If", "you", "don't", "listen", "to", "us", ",", "we'll", "turn", "off", "your", "electricity", "and", "water", ".", "If", "you", "don't", "listen", "to", "us", ",", "we'll", "shut", "down", "your", "business", ".", "If", "you", "don't", "listen", "to", "us", ",", "your", "children", "may", "never", "get", "educated", "."]} -{"id": "0279-mask", "word": "mask", "label_binary": 0, "text_1": "The shit I gotta deal with thank god it's over I love my life I'm grateful and ready to groom myself and shower and face mask and life", "token_idx_1": 25, "text_start_1": 121, "text_end_1": 125, "date_1": "2019-07", "text_2": "What the what. Trunk show did well. Now she wants a wholesale order. And I won't a raffle prize for the great Toronto yarn hop??? I guess that's to make up for this weird shit on my scalp and losing my first self made mask today, huh?", "token_idx_2": 50, "text_start_2": 218, "text_end_2": 222, "date_2": "2020-07", "text_1_tokenized": ["The", "shit", "I", "gotta", "deal", "with", "thank", "god", "it's", "over", "I", "love", "my", "life", "I'm", "grateful", "and", "ready", "to", "groom", "myself", "and", "shower", "and", "face", "mask", "and", "life"], "text_2_tokenized": ["What", "the", "what", ".", "Trunk", "show", "did", "well", ".", "Now", "she", "wants", "a", "wholesale", "order", ".", "And", "I", "won't", "a", "raffle", "prize", "for", "the", "great", "Toronto", "yarn", "hop", "?", "?", "?", "I", "guess", "that's", "to", "make", "up", "for", "this", "weird", "shit", "on", "my", "scalp", "and", "losing", "my", "first", "self", "made", "mask", "today", ",", "huh", "?"]} -{"id": "0280-mask", "word": "mask", "label_binary": 0, "text_1": "birthday has been good so far!! i had lunch and hung out with my family. now im chilling in my pjs with a face mask, getting ready to go out with my friends tonight!!!", "token_idx_1": 27, "text_start_1": 111, "text_end_1": 115, "date_1": "2019-07", "text_2": "Also i have a reflective Real Madrid mask coming out soon", "token_idx_2": 7, "text_start_2": 37, "text_end_2": 41, "date_2": "2020-07", "text_1_tokenized": ["birthday", "has", "been", "good", "so", "far", "!", "!", "i", "had", "lunch", "and", "hung", "out", "with", "my", "family", ".", "now", "im", "chilling", "in", "my", "pjs", "with", "a", "face", "mask", ",", "getting", "ready", "to", "go", "out", "with", "my", "friends", "tonight", "!", "!", "!"], "text_2_tokenized": ["Also", "i", "have", "a", "reflective", "Real", "Madrid", "mask", "coming", "out", "soon"]} -{"id": "2466-teargas", "word": "teargas", "label_binary": 1, "text_1": "#AmKenyan A journalist was hit directly by a teargas canister. I was standing next to him and we had to carry him out of the street. Police are claiming he got injured picking the canister up. Total lies!! #SwitchOffKPLC #STOPTheseTHIEVES", "token_idx_1": 8, "text_start_1": 45, "text_end_1": 52, "date_1": "2019-07", "text_2": "Intense anger on the streets of #Beirut tonight. 
@RedCrossLebanon has so far confirmed 63 people have been taken to hospital for treatment, 175 treated on the scene. Live bullets and teargas being used by security forces according to domestic media channels.", "token_idx_2": 33, "text_start_2": 183, "text_end_2": 190, "date_2": "2020-07", "text_1_tokenized": ["#AmKenyan", "A", "journalist", "was", "hit", "directly", "by", "a", "teargas", "canister", ".", "I", "was", "standing", "next", "to", "him", "and", "we", "had", "to", "carry", "him", "out", "of", "the", "street", ".", "Police", "are", "claiming", "he", "got", "injured", "picking", "the", "canister", "up", ".", "Total", "lies", "!", "!", "#SwitchOffKPLC", "#STOPTheseTHIEVES"], "text_2_tokenized": ["Intense", "anger", "on", "the", "streets", "of", "#Beirut", "tonight", ".", "@RedCrossLebanon", "has", "so", "far", "confirmed", "63", "people", "have", "been", "taken", "to", "hospital", "for", "treatment", ",", "175", "treated", "on", "the", "scene", ".", "Live", "bullets", "and", "teargas", "being", "used", "by", "security", "forces", "according", "to", "domestic", "media", "channels", "."]} -{"id": "2467-teargas", "word": "teargas", "label_binary": 1, "text_1": "Why did the police deploy teargas in the first place?", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-07", "text_2": "Is there anyone out there that thinks the mayor of Portland joining a riot and getting teargas is a little bit strange? Doesn't that go against his oath of office? Condoning rioters is one thing but to join them I think is another thing!!!!", "token_idx_2": 16, "text_start_2": 87, "text_end_2": 94, "date_2": "2020-07", "text_1_tokenized": ["Why", "did", "the", "police", "deploy", "teargas", "in", "the", "first", "place", "?"], "text_2_tokenized": ["Is", "there", "anyone", "out", "there", "that", "thinks", "the", "mayor", "of", "Portland", "joining", "a", "riot", "and", "getting", "teargas", "is", "a", "little", "bit", "strange", "?", "Doesn't", "that", "go", "against", "his", "oath", "of", "office", "?", "Condoning", "rioters", "is", "one", "thing", "but", "to", "join", "them", "I", "think", "is", "another", "thing", "!", "!", "!"]} -{"id": "2468-teargas", "word": "teargas", "label_binary": 1, "text_1": "Just saw several MTR staff unloading several boxes of 3M masks through Exit B at Central station as the police fired teargas into Kwai Fong station last week. 
#antiELAB", "token_idx_1": 21, "text_start_1": 117, "text_end_1": 124, "date_1": "2019-07", "text_2": "How many people actually feel safer that cops are firing rubber bullets and teargas at groups like \"Wall of Moms\"?", "token_idx_2": 13, "text_start_2": 76, "text_end_2": 83, "date_2": "2020-07", "text_1_tokenized": ["Just", "saw", "several", "MTR", "staff", "unloading", "several", "boxes", "of", "3M", "masks", "through", "Exit", "B", "at", "Central", "station", "as", "the", "police", "fired", "teargas", "into", "Kwai", "Fong", "station", "last", "week", ".", "#antiELAB"], "text_2_tokenized": ["How", "many", "people", "actually", "feel", "safer", "that", "cops", "are", "firing", "rubber", "bullets", "and", "teargas", "at", "groups", "like", "\"", "Wall", "of", "Moms", "\"", "?"]} -{"id": "2469-teargas", "word": "teargas", "label_binary": 1, "text_1": "Thank God,I'm not part of the fake #revolutionnow , I'm just a passerby today at national stadium Lagos, omo see teargas everywhere , I check some of our social media activist , I no see any of their brake light , me too pick race with plenty tear on my eyez .", "token_idx_1": 23, "text_start_1": 113, "text_end_1": 120, "date_1": "2019-07", "text_2": "#Teargas #BarrHearing \u201cIs it ever appropriate to use teargas on peaceful protestors?\u201d Thank you Rep. Cicilline - Rhode Island", "token_idx_2": 9, "text_start_2": 53, "text_end_2": 60, "date_2": "2020-07", "text_1_tokenized": ["Thank", "God", ",", "I'm", "not", "part", "of", "the", "fake", "#revolutionnow", ",", "I'm", "just", "a", "passerby", "today", "at", "national", "stadium", "Lagos", ",", "omo", "see", "teargas", "everywhere", ",", "I", "check", "some", "of", "our", "social", "media", "activist", ",", "I", "no", "see", "any", "of", "their", "brake", "light", ",", "me", "too", "pick", "race", "with", "plenty", "tear", "on", "my", "eyez", "."], "text_2_tokenized": ["#Teargas", "#BarrHearing", "\u201c", "Is", "it", "ever", "appropriate", "to", "use", "teargas", "on", "peaceful", "protestors", "?", "\u201d", "Thank", "you", "Rep", ".", "Cicilline", "-", "Rhode", "Island"]} -{"id": "2470-teargas", "word": "teargas", "label_binary": 1, "text_1": "RT cheriechancy: 50 days into #HongKongProtests, hundreds of teargas fired; bill not withdrawn; no other demands have been answered; viole\u2026", "token_idx_1": 10, "text_start_1": 61, "text_end_1": 68, "date_1": "2019-07", "text_2": "Dancing with teargas in my eyes... 
#MAGASongs", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-07", "text_1_tokenized": ["RT", "cheriechancy", ":", "50", "days", "into", "#HongKongProtests", ",", "hundreds", "of", "teargas", "fired", ";", "bill", "not", "withdrawn", ";", "no", "other", "demands", "have", "been", "answered", ";", "viole", "\u2026"], "text_2_tokenized": ["Dancing", "with", "teargas", "in", "my", "eyes", "...", "#MAGASongs"]} -{"id": "2471-teargas", "word": "teargas", "label_binary": 1, "text_1": "#RevolutionProtest squads, are we going out today?, please when una return, I will wait for gist, out kill una dia, there's no peace for the wicked, that's y teargas will be una friend as una step out to distrust our president @MBuhari , if u support d revolution, tears follow u", "token_idx_1": 35, "text_start_1": 158, "text_end_1": 165, "date_1": "2019-07", "text_2": "Taking two chops while having teargas residue on urself then going for a walk in the sun, then have and a pilsner that tastes like water then getting on to Twitter and flexing ur fingers in anticipation of this tweet>>>>>>>>>>>>", "token_idx_2": 5, "text_start_2": 30, "text_end_2": 37, "date_2": "2020-07", "text_1_tokenized": ["#RevolutionProtest", "squads", ",", "are", "we", "going", "out", "today", "?", ",", "please", "when", "una", "return", ",", "I", "will", "wait", "for", "gist", ",", "out", "kill", "una", "dia", ",", "there's", "no", "peace", "for", "the", "wicked", ",", "that's", "y", "teargas", "will", "be", "una", "friend", "as", "una", "step", "out", "to", "distrust", "our", "president", "@MBuhari", ",", "if", "u", "support", "d", "revolution", ",", "tears", "follow", "u"], "text_2_tokenized": ["Taking", "two", "chops", "while", "having", "teargas", "residue", "on", "urself", "then", "going", "for", "a", "walk", "in", "the", "sun", ",", "then", "have", "and", "a", "pilsner", "that", "tastes", "like", "water", "then", "getting", "on", "to", "Twitter", "and", "flexing", "ur", "fingers", "in", "anticipation", "of", "this", "tweet", ">", ">", ">"]} -{"id": "2472-teargas", "word": "teargas", "label_binary": 1, "text_1": "A foolish leader once bought guns, teargas and riot gear for his stupid dogs after noticing his people's discontent. He could have bought the medication and the essentials his people needed but like i said he was a foolish leader #16August #Zimbabwe #FreeZimbabweMarch", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-07", "text_2": "Nice that @tedwheeler got a dose of teargas last night. Too bad he wasn't out there 55 days ago.", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 43, "date_2": "2020-07", "text_1_tokenized": ["A", "foolish", "leader", "once", "bought", "guns", ",", "teargas", "and", "riot", "gear", "for", "his", "stupid", "dogs", "after", "noticing", "his", "people's", "discontent", ".", "He", "could", "have", "bought", "the", "medication", "and", "the", "essentials", "his", "people", "needed", "but", "like", "i", "said", "he", "was", "a", "foolish", "leader", "#16August", "#Zimbabwe", "#FreeZimbabweMarch"], "text_2_tokenized": ["Nice", "that", "@tedwheeler", "got", "a", "dose", "of", "teargas", "last", "night", ".", "Too", "bad", "he", "wasn't", "out", "there", "55", "days", "ago", "."]} -{"id": "2473-teargas", "word": "teargas", "label_binary": 1, "text_1": "The fuck, teargas and shiiiiiit.. 
Poly is demonstrating right now\ud83d\ude05\ud83d\ude05", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 17, "date_1": "2019-07", "text_2": "My teenage son created a 2020 Bingo board months ago of \"crazy things that will happen\". He actually got to tick off \"teargas tornado\" this week.", "token_idx_2": 26, "text_start_2": 118, "text_end_2": 125, "date_2": "2020-07", "text_1_tokenized": ["The", "fuck", ",", "teargas", "and", "shiiiiiit", "..", "Poly", "is", "demonstrating", "right", "now", "\ud83d\ude05", "\ud83d\ude05"], "text_2_tokenized": ["My", "teenage", "son", "created", "a", "2020", "Bingo", "board", "months", "ago", "of", "\"", "crazy", "things", "that", "will", "happen", "\"", ".", "He", "actually", "got", "to", "tick", "off", "\"", "teargas", "tornado", "\"", "this", "week", "."]} -{"id": "2474-teargas", "word": "teargas", "label_binary": 1, "text_1": "Exceptional night: short interval between each round of teargas; many many rounds firing; teargas felt on many streets & children at resto affected; cops seized Taiwanese journo press card; a photog hit by teargas spat blood+rushed up old building #HongKong #HongKongProtests", "token_idx_1": 9, "text_start_1": 56, "text_end_1": 63, "date_1": "2019-07", "text_2": "Assault is now the common contraceptor of liberal democracy, be you standing in line to vote or protesting domestic invasion forces. Rubber bullets, teargas and those who deploy them don't incarnate from the ether. They are funded by your Congress and deployed by your Executive.", "token_idx_2": 26, "text_start_2": 149, "text_end_2": 156, "date_2": "2020-07", "text_1_tokenized": ["Exceptional", "night", ":", "short", "interval", "between", "each", "round", "of", "teargas", ";", "many", "many", "rounds", "firing", ";", "teargas", "felt", "on", "many", "streets", "&", "children", "at", "resto", "affected", ";", "cops", "seized", "Taiwanese", "journo", "press", "card", ";", "a", "photog", "hit", "by", "teargas", "spat", "blood", "+", "rushed", "up", "old", "building", "#HongKong", "#HongKongProtests"], "text_2_tokenized": ["Assault", "is", "now", "the", "common", "contraceptor", "of", "liberal", "democracy", ",", "be", "you", "standing", "in", "line", "to", "vote", "or", "protesting", "domestic", "invasion", "forces", ".", "Rubber", "bullets", ",", "teargas", "and", "those", "who", "deploy", "them", "don't", "incarnate", "from", "the", "ether", ".", "They", "are", "funded", "by", "your", "Congress", "and", "deployed", "by", "your", "Executive", "."]} -{"id": "2475-teargas", "word": "teargas", "label_binary": 1, "text_1": "Just walked through this eerily silent peaceful protest at TST. Were told to leave soon please because teargas is scheduled at 9pm. Apparently counter demos going on on the island. I don't know what I can say to these brave young people. #HongKongProtests", "token_idx_1": 18, "text_start_1": 103, "text_end_1": 110, "date_1": "2019-07", "text_2": "I should be there in a yellow dress defending my childrens right not to live like this. But Im scared of teargas and the streets are filled. I had Covid in the Spring and I still have days where I cant breathe. If we dont end this in November it might not end. 
#PortlandMoms", "token_idx_2": 22, "text_start_2": 105, "text_end_2": 112, "date_2": "2020-07", "text_1_tokenized": ["Just", "walked", "through", "this", "eerily", "silent", "peaceful", "protest", "at", "TST", ".", "Were", "told", "to", "leave", "soon", "please", "because", "teargas", "is", "scheduled", "at", "9pm", ".", "Apparently", "counter", "demos", "going", "on", "on", "the", "island", ".", "I", "don't", "know", "what", "I", "can", "say", "to", "these", "brave", "young", "people", ".", "#HongKongProtests"], "text_2_tokenized": ["I", "should", "be", "there", "in", "a", "yellow", "dress", "defending", "my", "childrens", "right", "not", "to", "live", "like", "this", ".", "But", "Im", "scared", "of", "teargas", "and", "the", "streets", "are", "filled", ".", "I", "had", "Covid", "in", "the", "Spring", "and", "I", "still", "have", "days", "where", "I", "cant", "breathe", ".", "If", "we", "dont", "end", "this", "in", "November", "it", "might", "not", "end", ".", "#PortlandMoms"]} -{"id": "2476-teargas", "word": "teargas", "label_binary": 1, "text_1": "I pray I never experience the effects of teargas again \ud83d\ude02", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 48, "date_1": "2019-07", "text_2": "Protesters in Portland are right now clashing with federal thugs who are using teargas again on them. The protesters were peacefully heading up 5th Ave when the clash began. The fed presence has done nothing except to exacerbate the situation. Last week police & protesters along", "token_idx_2": 13, "text_start_2": 79, "text_end_2": 86, "date_2": "2020-07", "text_1_tokenized": ["I", "pray", "I", "never", "experience", "the", "effects", "of", "teargas", "again", "\ud83d\ude02"], "text_2_tokenized": ["Protesters", "in", "Portland", "are", "right", "now", "clashing", "with", "federal", "thugs", "who", "are", "using", "teargas", "again", "on", "them", ".", "The", "protesters", "were", "peacefully", "heading", "up", "5th", "Ave", "when", "the", "clash", "began", ".", "The", "fed", "presence", "has", "done", "nothing", "except", "to", "exacerbate", "the", "situation", ".", "Last", "week", "police", "&", "protesters", "along"]} -{"id": "2477-teargas", "word": "teargas", "label_binary": 1, "text_1": "In any peaceful rally, police should stop using teargas as a means to arrest innocent people please.", "token_idx_1": 9, "text_start_1": 48, "text_end_1": 55, "date_1": "2019-07", "text_2": "Sang to the tune of Yankee Doodle. Feel free to add verses. Went to a protest in Portland through the early morning. When they shot the teargas we continued even footing. We will protest every night Saying Black Lives Matter We will sing ACAB until we get some justice. 
#PDX", "token_idx_2": 29, "text_start_2": 136, "text_end_2": 143, "date_2": "2020-07", "text_1_tokenized": ["In", "any", "peaceful", "rally", ",", "police", "should", "stop", "using", "teargas", "as", "a", "means", "to", "arrest", "innocent", "people", "please", "."], "text_2_tokenized": ["Sang", "to", "the", "tune", "of", "Yankee", "Doodle", ".", "Feel", "free", "to", "add", "verses", ".", "Went", "to", "a", "protest", "in", "Portland", "through", "the", "early", "morning", ".", "When", "they", "shot", "the", "teargas", "we", "continued", "even", "footing", ".", "We", "will", "protest", "every", "night", "Saying", "Black", "Lives", "Matter", "We", "will", "sing", "ACAB", "until", "we", "get", "some", "justice", ".", "#PDX"]} -{"id": "2478-teargas", "word": "teargas", "label_binary": 1, "text_1": "Seeing that police have launched teargas against demonstrators in #PuertoRico's #Guaynabo, where embattled Gov @ricardorossello met with mayors and legislators earlier this evening.", "token_idx_1": 5, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-07", "text_2": "The teargas that's about to fall down on those NUP headquaters might even blind the neighbors era Kamwokya people's better start stalking up on masks because ANYTIME NOW...", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 11, "date_2": "2020-07", "text_1_tokenized": ["Seeing", "that", "police", "have", "launched", "teargas", "against", "demonstrators", "in", "#PuertoRico's", "#Guaynabo", ",", "where", "embattled", "Gov", "@ricardorossello", "met", "with", "mayors", "and", "legislators", "earlier", "this", "evening", "."], "text_2_tokenized": ["The", "teargas", "that's", "about", "to", "fall", "down", "on", "those", "NUP", "headquaters", "might", "even", "blind", "the", "neighbors", "era", "Kamwokya", "people's", "better", "start", "stalking", "up", "on", "masks", "because", "ANYTIME", "NOW", "..."]} -{"id": "2479-teargas", "word": "teargas", "label_binary": 1, "text_1": "So I was at #Banex today just few minutes after the clash with Shiites. Everywhere rowdy, teargas everywhere. I wasn't aware that place was a hot bed today. 
Thank God for safety.", "token_idx_1": 18, "text_start_1": 90, "text_end_1": 97, "date_1": "2019-07", "text_2": "Pliz whoever has a video clip containing the Police operatives flying teargas carnsters to NRM supporters clad in yellow T-shirts, send it to me pliz and whoever wants those that present Opposition suffering, I have them here!", "token_idx_2": 11, "text_start_2": 70, "text_end_2": 77, "date_2": "2020-07", "text_1_tokenized": ["So", "I", "was", "at", "#Banex", "today", "just", "few", "minutes", "after", "the", "clash", "with", "Shiites", ".", "Everywhere", "rowdy", ",", "teargas", "everywhere", ".", "I", "wasn't", "aware", "that", "place", "was", "a", "hot", "bed", "today", ".", "Thank", "God", "for", "safety", "."], "text_2_tokenized": ["Pliz", "whoever", "has", "a", "video", "clip", "containing", "the", "Police", "operatives", "flying", "teargas", "carnsters", "to", "NRM", "supporters", "clad", "in", "yellow", "T-shirts", ",", "send", "it", "to", "me", "pliz", "and", "whoever", "wants", "those", "that", "present", "Opposition", "suffering", ",", "I", "have", "them", "here", "!"]} -{"id": "2480-teargas", "word": "teargas", "label_binary": 1, "text_1": "ah yes, advocating for not using teargas, pinnacle of socialism", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-07", "text_2": "Love, the virus and teargas", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["ah", "yes", ",", "advocating", "for", "not", "using", "teargas", ",", "pinnacle", "of", "socialism"], "text_2_tokenized": ["Love", ",", "the", "virus", "and", "teargas"]} -{"id": "2481-teargas", "word": "teargas", "label_binary": 1, "text_1": "Yo jozi is HOT rn. Rubber bullets, teargas, protests. Its lit", "token_idx_1": 9, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-07", "text_2": "ni sawa acha teargas ikuje Enough is Enough let's #OccupyUhuruPark and claim this country from #Covid19Millionaires and other Selfish greedy people in this country", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-07", "text_1_tokenized": ["Yo", "jozi", "is", "HOT", "rn", ".", "Rubber", "bullets", ",", "teargas", ",", "protests", ".", "Its", "lit"], "text_2_tokenized": ["ni", "sawa", "acha", "teargas", "ikuje", "Enough", "is", "Enough", "let's", "#OccupyUhuruPark", "and", "claim", "this", "country", "from", "#Covid19Millionaires", "and", "other", "Selfish", "greedy", "people", "in", "this", "country"]} -{"id": "2482-teargas", "word": "teargas", "label_binary": 1, "text_1": "Harare Update: Street battles in town now ,police moving around with three water cannons ,people were running kuna Jason Moyo and there was teargas", "token_idx_1": 26, "text_start_1": 140, "text_end_1": 147, "date_1": "2019-07", "text_2": "you go to ONE protest and here comes the teargas, pack it up war criminals", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 48, "date_2": "2020-07", "text_1_tokenized": ["Harare", "Update", ":", "Street", "battles", "in", "town", "now", ",", "police", "moving", "around", "with", "three", "water", "cannons", ",", "people", "were", "running", "kuna", "Jason", "Moyo", "and", "there", "was", "teargas"], "text_2_tokenized": ["you", "go", "to", "ONE", "protest", "and", "here", "comes", "the", "teargas", ",", "pack", "it", "up", "war", "criminals"]} -{"id": "2483-teargas", "word": "teargas", "label_binary": 1, "text_1": "1130 the healthcare personnel will have a press conference about the harm of teargas, especially expired ones. 
#antielab #hongkongprotest", "token_idx_1": 13, "text_start_1": 77, "text_end_1": 84, "date_1": "2019-07", "text_2": "Ati obare anataka tuende tuprotest tukule teargas aachiliwe only for him to expose our entanglementz escapades", "token_idx_2": 6, "text_start_2": 42, "text_end_2": 49, "date_2": "2020-07", "text_1_tokenized": ["1130", "the", "healthcare", "personnel", "will", "have", "a", "press", "conference", "about", "the", "harm", "of", "teargas", ",", "especially", "expired", "ones", ".", "#antielab", "#hongkongprotest"], "text_2_tokenized": ["Ati", "obare", "anataka", "tuende", "tuprotest", "tukule", "teargas", "aachiliwe", "only", "for", "him", "to", "expose", "our", "entanglementz", "escapades"]} -{"id": "2484-teargas", "word": "teargas", "label_binary": 1, "text_1": "Hong Kong citizens are really good teargas experts", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-07", "text_2": "I fucking love the way that Gen Z is so chaotic and rebellious but it's not always throwing teargas cannisters back at cops sometimes it's like \"we're planting monster sunflowers around the entire city without getting anyone's permission fuck yeah\" love that", "token_idx_2": 18, "text_start_2": 92, "text_end_2": 99, "date_2": "2020-07", "text_1_tokenized": ["Hong", "Kong", "citizens", "are", "really", "good", "teargas", "experts"], "text_2_tokenized": ["I", "fucking", "love", "the", "way", "that", "Gen", "Z", "is", "so", "chaotic", "and", "rebellious", "but", "it's", "not", "always", "throwing", "teargas", "cannisters", "back", "at", "cops", "sometimes", "it's", "like", "\"", "we're", "planting", "monster", "sunflowers", "around", "the", "entire", "city", "without", "getting", "anyone's", "permission", "fuck", "yeah", "\"", "love", "that"]} -{"id": "2485-teargas", "word": "teargas", "label_binary": 1, "text_1": "Wuse market Axis is tensed ATM Shiites protesting teargas, everybody scampering for safety", "token_idx_1": 8, "text_start_1": 50, "text_end_1": 57, "date_1": "2019-07", "text_2": "i could probably breathe in more teargas than the average person", "token_idx_2": 6, "text_start_2": 33, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["Wuse", "market", "Axis", "is", "tensed", "ATM", "Shiites", "protesting", "teargas", ",", "everybody", "scampering", "for", "safety"], "text_2_tokenized": ["i", "could", "probably", "breathe", "in", "more", "teargas", "than", "the", "average", "person"]} -{"id": "2486-teargas", "word": "teargas", "label_binary": 1, "text_1": "Nigeria Security agents fires gunshots, teargas at protesters chanting revolutionary songs in Lagos", "token_idx_1": 6, "text_start_1": 40, "text_end_1": 47, "date_1": "2019-07", "text_2": "Protesters in Portland are really using leaf blowers to counter teargas. I'm dying.", "token_idx_2": 10, "text_start_2": 64, "text_end_2": 71, "date_2": "2020-07", "text_1_tokenized": ["Nigeria", "Security", "agents", "fires", "gunshots", ",", "teargas", "at", "protesters", "chanting", "revolutionary", "songs", "in", "Lagos"], "text_2_tokenized": ["Protesters", "in", "Portland", "are", "really", "using", "leaf", "blowers", "to", "counter", "teargas", ".", "I'm", "dying", "."]} -{"id": "2487-teargas", "word": "teargas", "label_binary": 1, "text_1": "Our cousins LUOS why are you trending? 
Ni teargas mnamiss ama?", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 49, "date_1": "2019-07", "text_2": "Trying to get the video now but as teargas was being launched at protesters a tornado started forming", "token_idx_2": 8, "text_start_2": 35, "text_end_2": 42, "date_2": "2020-07", "text_1_tokenized": ["Our", "cousins", "LUOS", "why", "are", "you", "trending", "?", "Ni", "teargas", "mnamiss", "ama", "?"], "text_2_tokenized": ["Trying", "to", "get", "the", "video", "now", "but", "as", "teargas", "was", "being", "launched", "at", "protesters", "a", "tornado", "started", "forming"]} -{"id": "2488-teargas", "word": "teargas", "label_binary": 1, "text_1": "UPDATES: Chaos at Nairobi Central SDA church as rival groups clash over leadership; GSU officers disperse worshippers. Watu wakule teargas kama wametosheka kula neno.", "token_idx_1": 22, "text_start_1": 131, "text_end_1": 138, "date_1": "2019-07", "text_2": "Unidentified Feds: \"We're here to help!\" Mayor: We didn't ask for your help. You're scaring us. Please leave. (Feds teargas Mayor) Unidentified Feds: You're welcome!", "token_idx_2": 28, "text_start_2": 116, "text_end_2": 123, "date_2": "2020-07", "text_1_tokenized": ["UPDATES", ":", "Chaos", "at", "Nairobi", "Central", "SDA", "church", "as", "rival", "groups", "clash", "over", "leadership", ";", "GSU", "officers", "disperse", "worshippers", ".", "Watu", "wakule", "teargas", "kama", "wametosheka", "kula", "neno", "."], "text_2_tokenized": ["Unidentified", "Feds", ":", "\"", "We're", "here", "to", "help", "!", "\"", "Mayor", ":", "We", "didn't", "ask", "for", "your", "help", ".", "You're", "scaring", "us", ".", "Please", "leave", ".", "(", "Feds", "teargas", "Mayor", ")", "Unidentified", "Feds", ":", "You're", "welcome", "!"]} -{"id": "2489-teargas", "word": "teargas", "label_binary": 1, "text_1": "World news | The Guardian Yuen Long protests: screams as Hong Kong police fire teargas on crowds \u2013 video Police fire teargas on thousands of protestors in the Hong Kong village of Yuen Long on Saturday, who had gathered despite a police ban. Last week thugs indiscriminately\u2026", "token_idx_1": 15, "text_start_1": 79, "text_end_1": 86, "date_1": "2019-07", "text_2": "Lebanese protesters take to streets of Beirut to demonstrate against government's handling of port explosion that killed at least 154 people this week. 
Riot police fired teargas at demonstrators trying to break through barrier to get to the parliament building in central Beirut.", "token_idx_2": 27, "text_start_2": 170, "text_end_2": 177, "date_2": "2020-07", "text_1_tokenized": ["World", "news", "|", "The", "Guardian", "Yuen", "Long", "protests", ":", "screams", "as", "Hong", "Kong", "police", "fire", "teargas", "on", "crowds", "\u2013", "video", "Police", "fire", "teargas", "on", "thousands", "of", "protestors", "in", "the", "Hong", "Kong", "village", "of", "Yuen", "Long", "on", "Saturday", ",", "who", "had", "gathered", "despite", "a", "police", "ban", ".", "Last", "week", "thugs", "indiscriminately", "\u2026"], "text_2_tokenized": ["Lebanese", "protesters", "take", "to", "streets", "of", "Beirut", "to", "demonstrate", "against", "government's", "handling", "of", "port", "explosion", "that", "killed", "at", "least", "154", "people", "this", "week", ".", "Riot", "police", "fired", "teargas", "at", "demonstrators", "trying", "to", "break", "through", "barrier", "to", "get", "to", "the", "parliament", "building", "in", "central", "Beirut", "."]} -{"id": "2490-teargas", "word": "teargas", "label_binary": 0, "text_1": "my late uncle used to love umhlobo wam' by teargas. \ud83d\ude2d\ud83d\udc80", "token_idx_1": 10, "text_start_1": 43, "text_end_1": 50, "date_1": "2019-07", "text_2": "Small reminder for all of you to make sure you are hydrating and eating - appetite is completely gone but the body still needs food. We need to fuel ourselves to continue. Many people are fainting (teargas unrelated) due to stress, pressure and exhaustion.", "token_idx_2": 39, "text_start_2": 198, "text_end_2": 205, "date_2": "2020-07", "text_1_tokenized": ["my", "late", "uncle", "used", "to", "love", "umhlobo", "wam", "'", "by", "teargas", ".", "\ud83d\ude2d", "\ud83d\udc80"], "text_2_tokenized": ["Small", "reminder", "for", "all", "of", "you", "to", "make", "sure", "you", "are", "hydrating", "and", "eating", "-", "appetite", "is", "completely", "gone", "but", "the", "body", "still", "needs", "food", ".", "We", "need", "to", "fuel", "ourselves", "to", "continue", ".", "Many", "people", "are", "fainting", "(", "teargas", "unrelated", ")", "due", "to", "stress", ",", "pressure", "and", "exhaustion", "."]} -{"id": "2491-teargas", "word": "teargas", "label_binary": 1, "text_1": "I've chewed teargas enough times this year \ud83d\ude11 and for no justifiable reason GoK can suck a dick", "token_idx_1": 2, "text_start_1": 12, "text_end_1": 19, "date_1": "2019-07", "text_2": "It is not even funny. Kenya is going back to late 80s. Why is oparanya, Wamalwa, Atwoli, Kenneth, hold meetings yet @KBonimtetezi can't hold a matanga event? Why? This is'nt Kenya I envisioned or future of this country. 
Khalwale was delivering water to pple affected by teargas", "token_idx_2": 55, "text_start_2": 270, "text_end_2": 277, "date_2": "2020-07", "text_1_tokenized": ["I've", "chewed", "teargas", "enough", "times", "this", "year", "\ud83d\ude11", "and", "for", "no", "justifiable", "reason", "GoK", "can", "suck", "a", "dick"], "text_2_tokenized": ["It", "is", "not", "even", "funny", ".", "Kenya", "is", "going", "back", "to", "late", "80s", ".", "Why", "is", "oparanya", ",", "Wamalwa", ",", "Atwoli", ",", "Kenneth", ",", "hold", "meetings", "yet", "@KBonimtetezi", "can't", "hold", "a", "matanga", "event", "?", "Why", "?", "This", "is'nt", "Kenya", "I", "envisioned", "or", "future", "of", "this", "country", ".", "Khalwale", "was", "delivering", "water", "to", "pple", "affected", "by", "teargas"]} -{"id": "2492-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police arrassing the press, shooting teargas and bullets at peaceful protesters? #FinishIsOk!", "token_idx_1": 6, "text_start_1": 37, "text_end_1": 44, "date_1": "2019-07", "text_2": "It occurs to me that lacrosse players might be good to have at a protest. Think how quickly they would be able to catch and throw back teargas canisters.... #ProtestThoughts", "token_idx_2": 28, "text_start_2": 135, "text_end_2": 142, "date_2": "2020-07", "text_1_tokenized": ["Police", "arrassing", "the", "press", ",", "shooting", "teargas", "and", "bullets", "at", "peaceful", "protesters", "?", "#FinishIsOk", "!"], "text_2_tokenized": ["It", "occurs", "to", "me", "that", "lacrosse", "players", "might", "be", "good", "to", "have", "at", "a", "protest", ".", "Think", "how", "quickly", "they", "would", "be", "able", "to", "catch", "and", "throw", "back", "teargas", "canisters", "...", "#ProtestThoughts"]} -{"id": "2493-teargas", "word": "teargas", "label_binary": 0, "text_1": "Dear @NPSOfficial_KE @PoliceKE @IG_NPS why are your officers always misusing teargas? Whatever they did this evening at Temple-Ronald Ngala junction is disgusting. Women and children sprayed with teargas.", "token_idx_1": 10, "text_start_1": 77, "text_end_1": 84, "date_1": "2019-07", "text_2": "\ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d. The way my sports twitter is furious today \ud83d\ude2d\ud83d\ude02\ud83d\ude02 some bring teargas \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude02\ud83d\ude02.", "token_idx_2": 20, "text_start_2": 71, "text_end_2": 78, "date_2": "2020-07", "text_1_tokenized": ["Dear", "@NPSOfficial_KE", "@PoliceKE", "@IG_NPS", "why", "are", "your", "officers", "always", "misusing", "teargas", "?", "Whatever", "they", "did", "this", "evening", "at", "Temple-Ronald", "Ngala", "junction", "is", "disgusting", ".", "Women", "and", "children", "sprayed", "with", "teargas", "."], "text_2_tokenized": ["\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", ".", "The", "way", "my", "sports", "twitter", "is", "furious", "today", "\ud83d\ude2d", "\ud83d\ude02", "\ud83d\ude02", "some", "bring", "teargas", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude02", "\ud83d\ude02", "."]} -{"id": "2494-teargas", "word": "teargas", "label_binary": 1, "text_1": "I think ATM in Nigeria needs a timer. When your time is up, it should just seize the card & spread teargas at d person. Some people Just Mad!!! 
@thepamilerin", "token_idx_1": 23, "text_start_1": 103, "text_end_1": 110, "date_1": "2019-07", "text_2": "I love PPB warning us teargas may be used when we are currently choking in a cloud of Fed tear gas", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["I", "think", "ATM", "in", "Nigeria", "needs", "a", "timer", ".", "When", "your", "time", "is", "up", ",", "it", "should", "just", "seize", "the", "card", "&", "spread", "teargas", "at", "d", "person", ".", "Some", "people", "Just", "Mad", "!", "!", "!", "@thepamilerin"], "text_2_tokenized": ["I", "love", "PPB", "warning", "us", "teargas", "may", "be", "used", "when", "we", "are", "currently", "choking", "in", "a", "cloud", "of", "Fed", "tear", "gas"]} -{"id": "2495-teargas", "word": "teargas", "label_binary": 1, "text_1": "\"the whole world is watching\" they dont give a shit. the world watching isn't doing shit because watching doesnt actually stop them. the world is going to watch them continue killing and ruining because theyre the ones with the guns, trucks, teargas and permission to use them", "token_idx_1": 47, "text_start_1": 242, "text_end_1": 249, "date_1": "2019-07", "text_2": "Reports that Beirut police have been using teargas on the protesters in Beirut.", "token_idx_2": 7, "text_start_2": 43, "text_end_2": 50, "date_2": "2020-07", "text_1_tokenized": ["\"", "the", "whole", "world", "is", "watching", "\"", "they", "dont", "give", "a", "shit", ".", "the", "world", "watching", "isn't", "doing", "shit", "because", "watching", "doesnt", "actually", "stop", "them", ".", "the", "world", "is", "going", "to", "watch", "them", "continue", "killing", "and", "ruining", "because", "theyre", "the", "ones", "with", "the", "guns", ",", "trucks", ",", "teargas", "and", "permission", "to", "use", "them"], "text_2_tokenized": ["Reports", "that", "Beirut", "police", "have", "been", "using", "teargas", "on", "the", "protesters", "in", "Beirut", "."]} -{"id": "2496-teargas", "word": "teargas", "label_binary": 1, "text_1": "\u201cNever take a teargas canister for nobody\u201d~ A new Kenyan proverb", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 21, "date_1": "2019-07", "text_2": "Didn't the Governor sign a ban on the use of teargas in Oregon? Or did I just dream that? 
#acab", "token_idx_2": 10, "text_start_2": 45, "text_end_2": 52, "date_2": "2020-07", "text_1_tokenized": ["\u201c", "Never", "take", "a", "teargas", "canister", "for", "nobody", "\u201d", "~", "A", "new", "Kenyan", "proverb"], "text_2_tokenized": ["Didn't", "the", "Governor", "sign", "a", "ban", "on", "the", "use", "of", "teargas", "in", "Oregon", "?", "Or", "did", "I", "just", "dream", "that", "?", "#acab"]} -{"id": "2497-teargas", "word": "teargas", "label_binary": 1, "text_1": "You come against citizens with teargas cannisters and AK-47s, but leave same citizens to rot at the hands of kidnappers, herdsmen and Boko Haram #RevolutionIsNow", "token_idx_1": 5, "text_start_1": 31, "text_end_1": 38, "date_1": "2019-07", "text_2": "Protests looking more and more violent in #Beirut, #Lebanon as police, military are getting involved with teargas and less than lethal weapons.", "token_idx_2": 18, "text_start_2": 106, "text_end_2": 113, "date_2": "2020-07", "text_1_tokenized": ["You", "come", "against", "citizens", "with", "teargas", "cannisters", "and", "AK", "-", "47s", ",", "but", "leave", "same", "citizens", "to", "rot", "at", "the", "hands", "of", "kidnappers", ",", "herdsmen", "and", "Boko", "Haram", "#RevolutionIsNow"], "text_2_tokenized": ["Protests", "looking", "more", "and", "more", "violent", "in", "#Beirut", ",", "#Lebanon", "as", "police", ",", "military", "are", "getting", "involved", "with", "teargas", "and", "less", "than", "lethal", "weapons", "."]} -{"id": "2499-teargas", "word": "teargas", "label_binary": 1, "text_1": "Leo teargas inarushwa west mall\ud83d\udca3\ud83d\udca3", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 11, "date_1": "2019-07", "text_2": "Because nothing says freedom like truncheons and teargas. #DefundDHS", "token_idx_2": 7, "text_start_2": 49, "text_end_2": 56, "date_2": "2020-07", "text_1_tokenized": ["Leo", "teargas", "inarushwa", "west", "mall", "\ud83d\udca3", "\ud83d\udca3"], "text_2_tokenized": ["Because", "nothing", "says", "freedom", "like", "truncheons", "and", "teargas", ".", "#DefundDHS"]} -{"id": "2500-teargas", "word": "teargas", "label_binary": 1, "text_1": "The protests have been disrupted in Mzuzu. Police officers are are firing teargas at the protesters. Reported by Sam Kalimira, @SamuelKalimira #TimesNews #CSODemos", "token_idx_1": 13, "text_start_1": 74, "text_end_1": 81, "date_1": "2019-07", "text_2": "Why are we tearing up Portland. Go to the source. Personally I would not object to dismantling the White House to get this scum out of our government. Do you think he would use teargas in an area where he is?", "token_idx_2": 37, "text_start_2": 177, "text_end_2": 184, "date_2": "2020-07", "text_1_tokenized": ["The", "protests", "have", "been", "disrupted", "in", "Mzuzu", ".", "Police", "officers", "are", "are", "firing", "teargas", "at", "the", "protesters", ".", "Reported", "by", "Sam", "Kalimira", ",", "@SamuelKalimira", "#TimesNews", "#CSODemos"], "text_2_tokenized": ["Why", "are", "we", "tearing", "up", "Portland", ".", "Go", "to", "the", "source", ".", "Personally", "I", "would", "not", "object", "to", "dismantling", "the", "White", "House", "to", "get", "this", "scum", "out", "of", "our", "government", ".", "Do", "you", "think", "he", "would", "use", "teargas", "in", "an", "area", "where", "he", "is", "?"]} -{"id": "2501-teargas", "word": "teargas", "label_binary": 1, "text_1": "Nigerians should buy teargas oh! On how you see any police man protesting. Fire at them with full force!! 
We don't like protests in our country and our wonderful president wouldn't approve it.", "token_idx_1": 3, "text_start_1": 21, "text_end_1": 28, "date_1": "2019-07", "text_2": "OK, July 28. We already have: * It's illegal to criticize the president * SPERM DEMONS (did not have that on the bingo card) * Possible teargas hoses * Protest llama ...and it's not even 9am yet.", "token_idx_2": 31, "text_start_2": 136, "text_end_2": 143, "date_2": "2020-07", "text_1_tokenized": ["Nigerians", "should", "buy", "teargas", "oh", "!", "On", "how", "you", "see", "any", "police", "man", "protesting", ".", "Fire", "at", "them", "with", "full", "force", "!", "!", "We", "don't", "like", "protests", "in", "our", "country", "and", "our", "wonderful", "president", "wouldn't", "approve", "it", "."], "text_2_tokenized": ["OK", ",", "July", "28", ".", "We", "already", "have", ":", "*", "It's", "illegal", "to", "criticize", "the", "president", "*", "SPERM", "DEMONS", "(", "did", "not", "have", "that", "on", "the", "bingo", "card", ")", "*", "Possible", "teargas", "hoses", "*", "Protest", "llama", "...", "and", "it's", "not", "even", "9am", "yet", "."]} -{"id": "2502-teargas", "word": "teargas", "label_binary": 1, "text_1": "Ali is fucking teargas \ud83d\udc4a #Ashes", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 22, "date_1": "2019-07", "text_2": "When the National guard/ Marshalls bring teargas to the party, protesters have a right to protect themselves. They are allowed to bring their , leaf blowers to the fight!", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 48, "date_2": "2020-07", "text_1_tokenized": ["Ali", "is", "fucking", "teargas", "\ud83d\udc4a", "#Ashes"], "text_2_tokenized": ["When", "the", "National", "guard", "/", "Marshalls", "bring", "teargas", "to", "the", "party", ",", "protesters", "have", "a", "right", "to", "protect", "themselves", ".", "They", "are", "allowed", "to", "bring", "their", ",", "leaf", "blowers", "to", "the", "fight", "!"]} -{"id": "2503-teargas", "word": "teargas", "label_binary": 1, "text_1": "The police ought to use teargas, pepper spray, batons and shield not Life rounds in riffles. The police formation should be geared up for crowd control not citizen brutality. RIOT police not ANTI-Robbery squad. #RevolutionNow #RevolutionIsNow", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 31, "date_1": "2019-07", "text_2": "Forget teargas. Use fire hoses and dogs", "token_idx_2": 1, "text_start_2": 7, "text_end_2": 14, "date_2": "2020-07", "text_1_tokenized": ["The", "police", "ought", "to", "use", "teargas", ",", "pepper", "spray", ",", "batons", "and", "shield", "not", "Life", "rounds", "in", "riffles", ".", "The", "police", "formation", "should", "be", "geared", "up", "for", "crowd", "control", "not", "citizen", "brutality", ".", "RIOT", "police", "not", "ANTI-Robbery", "squad", ".", "#RevolutionNow", "#RevolutionIsNow"], "text_2_tokenized": ["Forget", "teargas", ".", "Use", "fire", "hoses", "and", "dogs"]} -{"id": "2504-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police incompetence is to blame for the chaotic scenes in Lilongwe. Recklessly firing teargas at a peaceful crowd is uncalled for. #MalawiDemos #PostElectionsMalawi", "token_idx_1": 14, "text_start_1": 86, "text_end_1": 93, "date_1": "2019-07", "text_2": "So just because you dislike Antifa because communism means you're going to welcome a paramilitary force to kidnap and teargas protestors? 
Sounds a lot like you're on team authoritarian rather than a very messy democracy.", "token_idx_2": 19, "text_start_2": 118, "text_end_2": 125, "date_2": "2020-07", "text_1_tokenized": ["Police", "incompetence", "is", "to", "blame", "for", "the", "chaotic", "scenes", "in", "Lilongwe", ".", "Recklessly", "firing", "teargas", "at", "a", "peaceful", "crowd", "is", "uncalled", "for", ".", "#MalawiDemos", "#PostElectionsMalawi"], "text_2_tokenized": ["So", "just", "because", "you", "dislike", "Antifa", "because", "communism", "means", "you're", "going", "to", "welcome", "a", "paramilitary", "force", "to", "kidnap", "and", "teargas", "protestors", "?", "Sounds", "a", "lot", "like", "you're", "on", "team", "authoritarian", "rather", "than", "a", "very", "messy", "democracy", "."]} -{"id": "2505-teargas", "word": "teargas", "label_binary": 1, "text_1": "Some of y'all got the Ugandan flag in your username but ain't never tasted teargas? Lol, we need y'all back for the the elections.", "token_idx_1": 14, "text_start_1": 75, "text_end_1": 82, "date_1": "2019-07", "text_2": "I have an IUD so I just don't really get my period, but thanks to the fact that we were almost completely surrounded by teargas for hours on Friday night I guess that has now changed (:", "token_idx_2": 25, "text_start_2": 120, "text_end_2": 127, "date_2": "2020-07", "text_1_tokenized": ["Some", "of", "y'all", "got", "the", "Ugandan", "flag", "in", "your", "username", "but", "ain't", "never", "tasted", "teargas", "?", "Lol", ",", "we", "need", "y'all", "back", "for", "the", "the", "elections", "."], "text_2_tokenized": ["I", "have", "an", "IUD", "so", "I", "just", "don't", "really", "get", "my", "period", ",", "but", "thanks", "to", "the", "fact", "that", "we", "were", "almost", "completely", "surrounded", "by", "teargas", "for", "hours", "on", "Friday", "night", "I", "guess", "that", "has", "now", "changed", "(:"]} -{"id": "2506-teargas", "word": "teargas", "label_binary": 1, "text_1": "If just a few house arrests have stopped stone-pelting and teargas then it only means those who are put under house-arrest are the 'culprits' for violence in J&K as well as they 'misguiding' country. Likes of RaGa, Raja,Yechuri, don't really matter. They r irrelevant", "token_idx_1": 10, "text_start_1": 59, "text_end_1": 66, "date_1": "2019-07", "text_2": "Simping is allowed for 3 types of people: - people who throw teargas back at cops - anyone who has executed any billionaire - Jacinda Arden, prime minister of New Zealand", "token_idx_2": 13, "text_start_2": 61, "text_end_2": 68, "date_2": "2020-07", "text_1_tokenized": ["If", "just", "a", "few", "house", "arrests", "have", "stopped", "stone-pelting", "and", "teargas", "then", "it", "only", "means", "those", "who", "are", "put", "under", "house-arrest", "are", "the", "'", "culprits", "'", "for", "violence", "in", "J", "&", "K", "as", "well", "as", "they", "'", "misguiding", "'", "country", ".", "Likes", "of", "RaGa", ",", "Raja", ",", "Yechuri", ",", "don't", "really", "matter", ".", "They", "r", "irrelevant"], "text_2_tokenized": ["Simping", "is", "allowed", "for", "3", "types", "of", "people", ":", "-", "people", "who", "throw", "teargas", "back", "at", "cops", "-", "anyone", "who", "has", "executed", "any", "billionaire", "-", "Jacinda", "Arden", ",", "prime", "minister", "of", "New", "Zealand"]} -{"id": "2507-teargas", "word": "teargas", "label_binary": 1, "text_1": "Well, that was compelling. Stay safe @SheliNBC6, who was just on the air as teargas was deployed in Old San Juan. 
People bumping past her as they ran, SWAT police bumping shields on her shoulder as they passed her. Yikes. @nbc6 #PuertoRicoProtests #PuertoRicoMarcha", "token_idx_1": 17, "text_start_1": 76, "text_end_1": 83, "date_1": "2019-07", "text_2": "Is there any way to find out what the insane amount of teargas is doing to the air quality downtown? I feel like that would be good information to get out there. This can't be good for children or people with breathing difficulties in the city. #PortlandProtests", "token_idx_2": 12, "text_start_2": 55, "text_end_2": 62, "date_2": "2020-07", "text_1_tokenized": ["Well", ",", "that", "was", "compelling", ".", "Stay", "safe", "@SheliNBC6", ",", "who", "was", "just", "on", "the", "air", "as", "teargas", "was", "deployed", "in", "Old", "San", "Juan", ".", "People", "bumping", "past", "her", "as", "they", "ran", ",", "SWAT", "police", "bumping", "shields", "on", "her", "shoulder", "as", "they", "passed", "her", ".", "Yikes", ".", "@nbc6", "#PuertoRicoProtests", "#PuertoRicoMarcha"], "text_2_tokenized": ["Is", "there", "any", "way", "to", "find", "out", "what", "the", "insane", "amount", "of", "teargas", "is", "doing", "to", "the", "air", "quality", "downtown", "?", "I", "feel", "like", "that", "would", "be", "good", "information", "to", "get", "out", "there", ".", "This", "can't", "be", "good", "for", "children", "or", "people", "with", "breathing", "difficulties", "in", "the", "city", ".", "#PortlandProtests"]} -{"id": "2508-teargas", "word": "teargas", "label_binary": 1, "text_1": "Shouldn't the Senators have 'eaten' teargas kidogo? Isn't that the Kenyan way with (peaceful or otherwise) demos and picketing?", "token_idx_1": 7, "text_start_1": 36, "text_end_1": 43, "date_1": "2019-07", "text_2": "This is the dumbest fucking thing I've ever seen The protests were non-violent than this wasn't required but if it was as far as I'm concerned they're just another teargas target", "token_idx_2": 29, "text_start_2": 164, "text_end_2": 171, "date_2": "2020-07", "text_1_tokenized": ["Shouldn't", "the", "Senators", "have", "'", "eaten", "'", "teargas", "kidogo", "?", "Isn't", "that", "the", "Kenyan", "way", "with", "(", "peaceful", "or", "otherwise", ")", "demos", "and", "picketing", "?"], "text_2_tokenized": ["This", "is", "the", "dumbest", "fucking", "thing", "I've", "ever", "seen", "The", "protests", "were", "non-violent", "than", "this", "wasn't", "required", "but", "if", "it", "was", "as", "far", "as", "I'm", "concerned", "they're", "just", "another", "teargas", "target"]} -{"id": "2509-teargas", "word": "teargas", "label_binary": 1, "text_1": "The next time @hkpoliceforce issues statement about how teargas, can womxn bring up their pads/tampons of BLACK BLOOD to the table? A tad gross, but pls. Tell me again how teargas is not messing with our health #StandWithHK #FreeHongKong", "token_idx_1": 8, "text_start_1": 56, "text_end_1": 63, "date_1": "2019-07", "text_2": "All deaths and Figths start when @PoliceUg gets in to people power supporters ....... I wish they can learn how to let things go those guys can't be wild if you do not start teargas and shooting live bullets in them .... 
just a concerned citizen who knows that things change \ud83d\ude12\ud83d\ude12", "token_idx_2": 34, "text_start_2": 174, "text_end_2": 181, "date_2": "2020-07", "text_1_tokenized": ["The", "next", "time", "@hkpoliceforce", "issues", "statement", "about", "how", "teargas", ",", "can", "womxn", "bring", "up", "their", "pads", "/", "tampons", "of", "BLACK", "BLOOD", "to", "the", "table", "?", "A", "tad", "gross", ",", "but", "pls", ".", "Tell", "me", "again", "how", "teargas", "is", "not", "messing", "with", "our", "health", "#StandWithHK", "#FreeHongKong"], "text_2_tokenized": ["All", "deaths", "and", "Figths", "start", "when", "@PoliceUg", "gets", "in", "to", "people", "power", "supporters", "...", "I", "wish", "they", "can", "learn", "how", "to", "let", "things", "go", "those", "guys", "can't", "be", "wild", "if", "you", "do", "not", "start", "teargas", "and", "shooting", "live", "bullets", "in", "them", "...", "just", "a", "concerned", "citizen", "who", "knows", "that", "things", "change", "\ud83d\ude12", "\ud83d\ude12"]} -{"id": "2510-teargas", "word": "teargas", "label_binary": 1, "text_1": "When I moved from Luther Plaza (next to UoN) I thought my teargas days were over.", "token_idx_1": 14, "text_start_1": 58, "text_end_1": 65, "date_1": "2019-07", "text_2": "From Tanga Tanga, ahead of the Afternoon Senate session, a tailored outrage-in whatever form or shape meant to elicit a fierce confrontation btwn RAO PM and the President. Assumption: ODM gets back to the street for teargas and bullets as they abuse and mock him.", "token_idx_2": 40, "text_start_2": 216, "text_end_2": 223, "date_2": "2020-07", "text_1_tokenized": ["When", "I", "moved", "from", "Luther", "Plaza", "(", "next", "to", "UoN", ")", "I", "thought", "my", "teargas", "days", "were", "over", "."], "text_2_tokenized": ["From", "Tanga", "Tanga", ",", "ahead", "of", "the", "Afternoon", "Senate", "session", ",", "a", "tailored", "outrage-in", "whatever", "form", "or", "shape", "meant", "to", "elicit", "a", "fierce", "confrontation", "btwn", "RAO", "PM", "and", "the", "President", ".", "Assumption", ":", "ODM", "gets", "back", "to", "the", "street", "for", "teargas", "and", "bullets", "as", "they", "abuse", "and", "mock", "him", "."]} -{"id": "2511-teargas", "word": "teargas", "label_binary": 1, "text_1": "So there was two different protests in Lagos today, one group received teargas and live bullets (allegedly) from policemen while the other group of protesters received utmost police protection. This govt is a huge joke.", "token_idx_1": 13, "text_start_1": 71, "text_end_1": 78, "date_1": "2019-07", "text_2": "One of my biggest problems with teargas is how indescriminant it is. 
Another is that it's toxic.", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 39, "date_2": "2020-07", "text_1_tokenized": ["So", "there", "was", "two", "different", "protests", "in", "Lagos", "today", ",", "one", "group", "received", "teargas", "and", "live", "bullets", "(", "allegedly", ")", "from", "policemen", "while", "the", "other", "group", "of", "protesters", "received", "utmost", "police", "protection", ".", "This", "govt", "is", "a", "huge", "joke", "."], "text_2_tokenized": ["One", "of", "my", "biggest", "problems", "with", "teargas", "is", "how", "indescriminant", "it", "is", ".", "Another", "is", "that", "it's", "toxic", "."]} -{"id": "2512-teargas", "word": "teargas", "label_binary": 1, "text_1": "Happening Now: Members @PoliceNG taking cover inside Eagle Square are shooting live ammunition and teargas canisters on #FreeZakzaky protesters in Abuja. 1:18 pm", "token_idx_1": 15, "text_start_1": 99, "text_end_1": 106, "date_1": "2019-07", "text_2": "I automatically block people who respond to me re. not using teargas with a \"well they should be shot instead,\" and/or a \"clearly you support murdering Black teens in CHOP.\" Ridiculous. I'm more than happy to have a conversation but not if you're going to say things like that.", "token_idx_2": 12, "text_start_2": 61, "text_end_2": 68, "date_2": "2020-07", "text_1_tokenized": ["Happening", "Now", ":", "Members", "@PoliceNG", "taking", "cover", "inside", "Eagle", "Square", "are", "shooting", "live", "ammunition", "and", "teargas", "canisters", "on", "#FreeZakzaky", "protesters", "in", "Abuja", ".", "1:18", "pm"], "text_2_tokenized": ["I", "automatically", "block", "people", "who", "respond", "to", "me", "re", ".", "not", "using", "teargas", "with", "a", "\"", "well", "they", "should", "be", "shot", "instead", ",", "\"", "and", "/", "or", "a", "\"", "clearly", "you", "support", "murdering", "Black", "teens", "in", "CHOP", ".", "\"", "Ridiculous", ".", "I'm", "more", "than", "happy", "to", "have", "a", "conversation", "but", "not", "if", "you're", "going", "to", "say", "things", "like", "that", "."]} -{"id": "2513-teargas", "word": "teargas", "label_binary": 1, "text_1": "\"Never been called a kaffir before/Can't imagine seeing ten cops and dogs crushing through my front door/Can't say what teargas smelt like/Can't even imagine what the rubber bullet on your back felt like\"", "token_idx_1": 24, "text_start_1": 120, "text_end_1": 127, "date_1": "2019-07", "text_2": "Watching the Portland Moms last night, now outfitted with helmets, holding the line against a federal assault with teargas and flashbangs. 
Congrats feds, you managed to turn a bunch of suburban white women into revolutionaries.", "token_idx_2": 20, "text_start_2": 115, "text_end_2": 122, "date_2": "2020-07", "text_1_tokenized": ["\"", "Never", "been", "called", "a", "kaffir", "before", "/", "Can't", "imagine", "seeing", "ten", "cops", "and", "dogs", "crushing", "through", "my", "front", "door", "/", "Can't", "say", "what", "teargas", "smelt", "like", "/", "Can't", "even", "imagine", "what", "the", "rubber", "bullet", "on", "your", "back", "felt", "like", "\""], "text_2_tokenized": ["Watching", "the", "Portland", "Moms", "last", "night", ",", "now", "outfitted", "with", "helmets", ",", "holding", "the", "line", "against", "a", "federal", "assault", "with", "teargas", "and", "flashbangs", ".", "Congrats", "feds", ",", "you", "managed", "to", "turn", "a", "bunch", "of", "suburban", "white", "women", "into", "revolutionaries", "."]} -{"id": "2514-teargas", "word": "teargas", "label_binary": 1, "text_1": "A Cool Jape would be to replace all the police's teargas with nitrous oxide. The next time they go out to try and corral some civil unrest will be a real laugh riot.", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 56, "date_1": "2019-07", "text_2": "Last nights teargas was unbearable and completely disorienting. I don't know what kind of teargas that was but it was scary. I couldn't breathe and I couldn't see and my whole face burnt. Somehow I made it through the park with some help and there we're people with eyewash. Scary", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 19, "date_2": "2020-07", "text_1_tokenized": ["A", "Cool", "Jape", "would", "be", "to", "replace", "all", "the", "police's", "teargas", "with", "nitrous", "oxide", ".", "The", "next", "time", "they", "go", "out", "to", "try", "and", "corral", "some", "civil", "unrest", "will", "be", "a", "real", "laugh", "riot", "."], "text_2_tokenized": ["Last", "nights", "teargas", "was", "unbearable", "and", "completely", "disorienting", ".", "I", "don't", "know", "what", "kind", "of", "teargas", "that", "was", "but", "it", "was", "scary", ".", "I", "couldn't", "breathe", "and", "I", "couldn't", "see", "and", "my", "whole", "face", "burnt", ".", "Somehow", "I", "made", "it", "through", "the", "park", "with", "some", "help", "and", "there", "we're", "people", "with", "eyewash", ".", "Scary"]} -{"id": "2515-teargas", "word": "teargas", "label_binary": 0, "text_1": "Mxm let me listen to teargas", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 28, "date_1": "2019-07", "text_2": "I was walking near Kencom when all of a sudden a teargas canister exploded and people started running, I too took to my heals and ran away.. I've no idea what was happening..", "token_idx_2": 11, "text_start_2": 49, "text_end_2": 56, "date_2": "2020-07", "text_1_tokenized": ["Mxm", "let", "me", "listen", "to", "teargas"], "text_2_tokenized": ["I", "was", "walking", "near", "Kencom", "when", "all", "of", "a", "sudden", "a", "teargas", "canister", "exploded", "and", "people", "started", "running", ",", "I", "too", "took", "to", "my", "heals", "and", "ran", "away", "..", "I've", "no", "idea", "what", "was", "happening", ".."]} -{"id": "2516-teargas", "word": "teargas", "label_binary": 1, "text_1": "Soshanguve is on fire. I love the smell of teargas in a township. 
-Moses #TheRepublic #TheRepublicMzansi", "token_idx_1": 10, "text_start_1": 43, "text_end_1": 50, "date_1": "2019-07", "text_2": "Use teargas used teargas clear the streets clear the streets", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 11, "date_2": "2020-07", "text_1_tokenized": ["Soshanguve", "is", "on", "fire", ".", "I", "love", "the", "smell", "of", "teargas", "in", "a", "township", ".", "-", "Moses", "#TheRepublic", "#TheRepublicMzansi"], "text_2_tokenized": ["Use", "teargas", "used", "teargas", "clear", "the", "streets", "clear", "the", "streets"]} -{"id": "2517-teargas", "word": "teargas", "label_binary": 1, "text_1": "If you're going towards Surulere, please turn back. The place is crawling with Policemen, soldiers, armed civil defense, mobile policemen all trying to prevent protests. They've already fired teargas twice.", "token_idx_1": 34, "text_start_1": 192, "text_end_1": 199, "date_1": "2019-07", "text_2": "Is there a way to neutralize teargas? Can something be put on top of the canister? A bucket? A bucket of concrete? Surely the military has come up with something?", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-07", "text_1_tokenized": ["If", "you're", "going", "towards", "Surulere", ",", "please", "turn", "back", ".", "The", "place", "is", "crawling", "with", "Policemen", ",", "soldiers", ",", "armed", "civil", "defense", ",", "mobile", "policemen", "all", "trying", "to", "prevent", "protests", ".", "They've", "already", "fired", "teargas", "twice", "."], "text_2_tokenized": ["Is", "there", "a", "way", "to", "neutralize", "teargas", "?", "Can", "something", "be", "put", "on", "top", "of", "the", "canister", "?", "A", "bucket", "?", "A", "bucket", "of", "concrete", "?", "Surely", "the", "military", "has", "come", "up", "with", "something", "?"]} -{"id": "2518-teargas", "word": "teargas", "label_binary": 1, "text_1": "These demonstrating #Senators wanaharibu biashara. 
Where the police and teargas at?", "token_idx_1": 10, "text_start_1": 72, "text_end_1": 79, "date_1": "2019-07", "text_2": "Bobiwine is the new Besigye, wherever he goes, teargas joins the chat\ud83d\ude02", "token_idx_2": 10, "text_start_2": 47, "text_end_2": 54, "date_2": "2020-07", "text_1_tokenized": ["These", "demonstrating", "#Senators", "wanaharibu", "biashara", ".", "Where", "the", "police", "and", "teargas", "at", "?"], "text_2_tokenized": ["Bobiwine", "is", "the", "new", "Besigye", ",", "wherever", "he", "goes", ",", "teargas", "joins", "the", "chat", "\ud83d\ude02"]} -{"id": "2519-teargas", "word": "teargas", "label_binary": 1, "text_1": "Hong Kong police just fired teargas on protesters #PrayForHongkong", "token_idx_1": 5, "text_start_1": 28, "text_end_1": 35, "date_1": "2019-07", "text_2": "Wasn't quite sure what my kink was until I watched protestors throwing teargas back.", "token_idx_2": 12, "text_start_2": 71, "text_end_2": 78, "date_2": "2020-07", "text_1_tokenized": ["Hong", "Kong", "police", "just", "fired", "teargas", "on", "protesters", "#PrayForHongkong"], "text_2_tokenized": ["Wasn't", "quite", "sure", "what", "my", "kink", "was", "until", "I", "watched", "protestors", "throwing", "teargas", "back", "."]} -{"id": "2520-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police using teargas in Harare to disperse people from the CBD", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 20, "date_1": "2019-07", "text_2": "Chairman PPP @BBhuttoZardari condemns the unnecessary use of force, teargas and stone pelting on the PML-N leader", "token_idx_2": 10, "text_start_2": 68, "text_end_2": 75, "date_2": "2020-07", "text_1_tokenized": ["Police", "using", "teargas", "in", "Harare", "to", "disperse", "people", "from", "the", "CBD"], "text_2_tokenized": ["Chairman", "PPP", "@BBhuttoZardari", "condemns", "the", "unnecessary", "use", "of", "force", ",", "teargas", "and", "stone", "pelting", "on", "the", "PML-N", "leader"]} -{"id": "2521-teargas", "word": "teargas", "label_binary": 1, "text_1": "When the teargas got to me & it was too much. This old security guard came & pulled me over, he took me to building, pulled a chair for me. Made me sit there until it was safe. I am thankful for that security guard. 
#16AugustDemo #Zimbabwe", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 16, "date_1": "2019-07", "text_2": "Please I'll like to know if @MBuhari has honoured the memory of Dr Chuba Okadigbo his running mate who died as a result of the teargas by the Police during his protest alongside Buhari in Kano or done anything for the Okadigbos in respect to their father?", "token_idx_2": 25, "text_start_2": 127, "text_end_2": 134, "date_2": "2020-07", "text_1_tokenized": ["When", "the", "teargas", "got", "to", "me", "&", "it", "was", "too", "much", ".", "This", "old", "security", "guard", "came", "&", "pulled", "me", "over", ",", "he", "took", "me", "to", "building", ",", "pulled", "a", "chair", "for", "me", ".", "Made", "me", "sit", "there", "until", "it", "was", "safe", ".", "I", "am", "thankful", "for", "that", "security", "guard", ".", "#16AugustDemo", "#Zimbabwe"], "text_2_tokenized": ["Please", "I'll", "like", "to", "know", "if", "@MBuhari", "has", "honoured", "the", "memory", "of", "Dr", "Chuba", "Okadigbo", "his", "running", "mate", "who", "died", "as", "a", "result", "of", "the", "teargas", "by", "the", "Police", "during", "his", "protest", "alongside", "Buhari", "in", "Kano", "or", "done", "anything", "for", "the", "Okadigbos", "in", "respect", "to", "their", "father", "?"]} -{"id": "2522-teargas", "word": "teargas", "label_binary": 1, "text_1": "At least 7 of the more than 200 Palestinians killed in the Great March of Return Protests in Gaza died as a result of a direct teargas canister hit, according to @btselem @ochaopt", "token_idx_1": 26, "text_start_1": 127, "text_end_1": 134, "date_1": "2019-07", "text_2": "When do they run out of teargas?", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 31, "date_2": "2020-07", "text_1_tokenized": ["At", "least", "7", "of", "the", "more", "than", "200", "Palestinians", "killed", "in", "the", "Great", "March", "of", "Return", "Protests", "in", "Gaza", "died", "as", "a", "result", "of", "a", "direct", "teargas", "canister", "hit", ",", "according", "to", "@btselem", "@ochaopt"], "text_2_tokenized": ["When", "do", "they", "run", "out", "of", "teargas", "?"]} -{"id": "2523-teargas", "word": "teargas", "label_binary": 1, "text_1": "Someone has fed us with teargas along Harambee Avenue.. it's been long kumbe ilikuwa hivi \ud83d\ude2d\ud83d\ude2d", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 31, "date_1": "2019-07", "text_2": "In a separate video showing the damaged windshield, Maryam said that the police was firing teargas at her convoy. She added that police were pelting stones and shelling outside NAB's office to disperse the PML-N workers who had gathered outside. 
#\u062a\u062e\u062a\u06cc_\u0686\u0648\u0631_\u0648\u0632\u06cc\u0631\u0627\u0639\u0638\u0645 152", "token_idx_2": 16, "text_start_2": 91, "text_end_2": 98, "date_2": "2020-07", "text_1_tokenized": ["Someone", "has", "fed", "us", "with", "teargas", "along", "Harambee", "Avenue", "..", "it's", "been", "long", "kumbe", "ilikuwa", "hivi", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["In", "a", "separate", "video", "showing", "the", "damaged", "windshield", ",", "Maryam", "said", "that", "the", "police", "was", "firing", "teargas", "at", "her", "convoy", ".", "She", "added", "that", "police", "were", "pelting", "stones", "and", "shelling", "outside", "NAB's", "office", "to", "disperse", "the", "PML-N", "workers", "who", "had", "gathered", "outside", ".", "#\u062a\u062e\u062a\u06cc_\u0686\u0648\u0631_\u0648\u0632\u06cc\u0631\u0627\u0639\u0638\u0645", "152"]} -{"id": "2524-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police camouflaging themselves as protesters just so the media can continue to demonize them and make the police seem innocent and allowed to spread teargas and to be violent towards them,,", "token_idx_1": 24, "text_start_1": 149, "text_end_1": 156, "date_1": "2019-07", "text_2": "There is Zero difference in the thought pattern separating those attacking a Trump hat and those claiming racism. Mass indoctrination has created a mental block in a large portion of our youth. Until the sting of teargas, a club on the head or welp from rubber bullets snaps em!", "token_idx_2": 38, "text_start_2": 213, "text_end_2": 220, "date_2": "2020-07", "text_1_tokenized": ["Police", "camouflaging", "themselves", "as", "protesters", "just", "so", "the", "media", "can", "continue", "to", "demonize", "them", "and", "make", "the", "police", "seem", "innocent", "and", "allowed", "to", "spread", "teargas", "and", "to", "be", "violent", "towards", "them", ",", ","], "text_2_tokenized": ["There", "is", "Zero", "difference", "in", "the", "thought", "pattern", "separating", "those", "attacking", "a", "Trump", "hat", "and", "those", "claiming", "racism", ".", "Mass", "indoctrination", "has", "created", "a", "mental", "block", "in", "a", "large", "portion", "of", "our", "youth", ".", "Until", "the", "sting", "of", "teargas", ",", "a", "club", "on", "the", "head", "or", "welp", "from", "rubber", "bullets", "snaps", "em", "!"]} -{"id": "2525-teargas", "word": "teargas", "label_binary": 1, "text_1": "#RevolutionIsNow see zombies dey fire teargas haba Nigerians shey Dem talk say na democracy we dey buh now we no get right to protest #thiefgoverment", "token_idx_1": 5, "text_start_1": 38, "text_end_1": 45, "date_1": "2019-07", "text_2": "I spoke to a human who attended exactly one protest in a larger city and were near or in teargas and pepper spray. They told me about a dramatic change in their cycle shortly thereafter. They corroborated this with other humans with vaginas. 
Is this a widespread phenomena?", "token_idx_2": 19, "text_start_2": 89, "text_end_2": 96, "date_2": "2020-07", "text_1_tokenized": ["#RevolutionIsNow", "see", "zombies", "dey", "fire", "teargas", "haba", "Nigerians", "shey", "Dem", "talk", "say", "na", "democracy", "we", "dey", "buh", "now", "we", "no", "get", "right", "to", "protest", "#thiefgoverment"], "text_2_tokenized": ["I", "spoke", "to", "a", "human", "who", "attended", "exactly", "one", "protest", "in", "a", "larger", "city", "and", "were", "near", "or", "in", "teargas", "and", "pepper", "spray", ".", "They", "told", "me", "about", "a", "dramatic", "change", "in", "their", "cycle", "shortly", "thereafter", ".", "They", "corroborated", "this", "with", "other", "humans", "with", "vaginas", ".", "Is", "this", "a", "widespread", "phenomena", "?"]} -{"id": "2526-teargas", "word": "teargas", "label_binary": 1, "text_1": "They could not even use teargas to dispurse the croud ,what a joke!", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 31, "date_1": "2019-07", "text_2": "protesting against covid (?) police: yeah let's just take the mic and tell them to stay 3 feet apart and wear their masks (which they don't) protesting for basic human rights police: yeah we,,, we can't let that happen. lemme get the teargas. also we need to come in full armor.", "token_idx_2": 52, "text_start_2": 234, "text_end_2": 241, "date_2": "2020-07", "text_1_tokenized": ["They", "could", "not", "even", "use", "teargas", "to", "dispurse", "the", "croud", ",", "what", "a", "joke", "!"], "text_2_tokenized": ["protesting", "against", "covid", "(", "?", ")", "police", ":", "yeah", "let's", "just", "take", "the", "mic", "and", "tell", "them", "to", "stay", "3", "feet", "apart", "and", "wear", "their", "masks", "(", "which", "they", "don't", ")", "protesting", "for", "basic", "human", "rights", "police", ":", "yeah", "we", ",", ",", ",", "we", "can't", "let", "that", "happen", ".", "lemme", "get", "the", "teargas", ".", "also", "we", "need", "to", "come", "in", "full", "armor", "."]} -{"id": "2527-teargas", "word": "teargas", "label_binary": 1, "text_1": "RT njokingumi: I was today years old, today...,! When I learned from AJ+ that teargas is BANNED in warfare and is classified as a chemical\u20262019-07-26T08:00:41.000Z TW DRH HOTM", "token_idx_1": 20, "text_start_1": 78, "text_end_1": 85, "date_1": "2019-07", "text_2": "Trump Supporters would be okay with having Military teargas and beat up racial left citizens, they are so blinded by politics that they forget we are only human too. We are more than just a political party. We bleed just like y'all. 
I hate this", "token_idx_2": 8, "text_start_2": 52, "text_end_2": 59, "date_2": "2020-07", "text_1_tokenized": ["RT", "njokingumi", ":", "I", "was", "today", "years", "old", ",", "today", "...", ",", "!", "When", "I", "learned", "from", "AJ", "+", "that", "teargas", "is", "BANNED", "in", "warfare", "and", "is", "classified", "as", "a", "chemical", "\u2026", "2019-07-", "26T08", ":", "00:41", ".", "000Z", "TW", "DRH", "HOTM"], "text_2_tokenized": ["Trump", "Supporters", "would", "be", "okay", "with", "having", "Military", "teargas", "and", "beat", "up", "racial", "left", "citizens", ",", "they", "are", "so", "blinded", "by", "politics", "that", "they", "forget", "we", "are", "only", "human", "too", ".", "We", "are", "more", "than", "just", "a", "political", "party", ".", "We", "bleed", "just", "like", "y'all", ".", "I", "hate", "this"]} -{"id": "2528-teargas", "word": "teargas", "label_binary": 1, "text_1": "These people are not afraid of death, there's not much bullets and teargas can achieve in this needless fight. It's free Zakzakky or nothing.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 74, "date_1": "2019-07", "text_2": "He already knew what he was going to say about the teargas. That's my take.", "token_idx_2": 11, "text_start_2": 51, "text_end_2": 58, "date_2": "2020-07", "text_1_tokenized": ["These", "people", "are", "not", "afraid", "of", "death", ",", "there's", "not", "much", "bullets", "and", "teargas", "can", "achieve", "in", "this", "needless", "fight", ".", "It's", "free", "Zakzakky", "or", "nothing", "."], "text_2_tokenized": ["He", "already", "knew", "what", "he", "was", "going", "to", "say", "about", "the", "teargas", ".", "That's", "my", "take", "."]} -{"id": "2529-teargas", "word": "teargas", "label_binary": 1, "text_1": "In Philadelphia having a standoff because somebody shot three officers . They just said four police on Fox News. Why don't they shoot teargas bombs in those building or homes ? Some type of sleeping gas bombs and shoot in there ? That would stop police or anyone else to get shot.", "token_idx_1": 24, "text_start_1": 134, "text_end_1": 141, "date_1": "2019-07", "text_2": "Police proverb : \"Don't cry over spilled teargas!\"", "token_idx_2": 8, "text_start_2": 41, "text_end_2": 48, "date_2": "2020-07", "text_1_tokenized": ["In", "Philadelphia", "having", "a", "standoff", "because", "somebody", "shot", "three", "officers", ".", "They", "just", "said", "four", "police", "on", "Fox", "News", ".", "Why", "don't", "they", "shoot", "teargas", "bombs", "in", "those", "building", "or", "homes", "?", "Some", "type", "of", "sleeping", "gas", "bombs", "and", "shoot", "in", "there", "?", "That", "would", "stop", "police", "or", "anyone", "else", "to", "get", "shot", "."], "text_2_tokenized": ["Police", "proverb", ":", "\"", "Don't", "cry", "over", "spilled", "teargas", "!", "\""]} -{"id": "2530-teargas", "word": "teargas", "label_binary": 1, "text_1": "The #RevolutionNow campaigners have done well to initiate the move, but trust Nigerians, only teargas and everyone will go home. Nigerians are too lazy for struggles. #FreeSoworeNow", "token_idx_1": 16, "text_start_1": 94, "text_end_1": 101, "date_1": "2019-07", "text_2": "How much is the antifa daily wage for arguing and causing resentments? When teargas is thrown the next day the show with tear gas helmets. When pepper spray is launched, they have shields! Where do they get that stuff? Antifa leaders??? 
Hillary and friends?", "token_idx_2": 14, "text_start_2": 76, "text_end_2": 83, "date_2": "2020-07", "text_1_tokenized": ["The", "#RevolutionNow", "campaigners", "have", "done", "well", "to", "initiate", "the", "move", ",", "but", "trust", "Nigerians", ",", "only", "teargas", "and", "everyone", "will", "go", "home", ".", "Nigerians", "are", "too", "lazy", "for", "struggles", ".", "#FreeSoworeNow"], "text_2_tokenized": ["How", "much", "is", "the", "antifa", "daily", "wage", "for", "arguing", "and", "causing", "resentments", "?", "When", "teargas", "is", "thrown", "the", "next", "day", "the", "show", "with", "tear", "gas", "helmets", ".", "When", "pepper", "spray", "is", "launched", ",", "they", "have", "shields", "!", "Where", "do", "they", "get", "that", "stuff", "?", "Antifa", "leaders", "?", "?", "?", "Hillary", "and", "friends", "?"]} -{"id": "2531-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police teargas activists protesting against high power bills, @nyamita1 Why Don't you support a bill for sustainable energy? #NewsCompass @EbruTVKenya", "token_idx_1": 1, "text_start_1": 7, "text_end_1": 14, "date_1": "2019-07", "text_2": "Send traffic cones to #PDXprotests. It'll piss off the stormtroopers when they find out Hong Kong's protesters taught us all these cool ways to defeat teargas. \u270c", "token_idx_2": 26, "text_start_2": 151, "text_end_2": 158, "date_2": "2020-07", "text_1_tokenized": ["Police", "teargas", "activists", "protesting", "against", "high", "power", "bills", ",", "@nyamita1", "Why", "Don't", "you", "support", "a", "bill", "for", "sustainable", "energy", "?", "#NewsCompass", "@EbruTVKenya"], "text_2_tokenized": ["Send", "traffic", "cones", "to", "#PDXprotests", ".", "It'll", "piss", "off", "the", "stormtroopers", "when", "they", "find", "out", "Hong", "Kong's", "protesters", "taught", "us", "all", "these", "cool", "ways", "to", "defeat", "teargas", ".", "\u270c"]} -{"id": "2532-teargas", "word": "teargas", "label_binary": 1, "text_1": "Foxnews corrupt Puerto Rico government shooting teargas at protesters just wanting answers from there governor. 
Yesterday he was tongue tied stuttering his words on shepherd Smith he couldn't give Smith straight answer are Democrats going to teargas us for believing them", "token_idx_1": 6, "text_start_1": 48, "text_end_1": 55, "date_1": "2019-07", "text_2": "Lol guys,, the teargas is getting old ,, how about you just let up already and stop being pussies,, would you rather keep randomly killing and beating black people or just stop being a prick", "token_idx_2": 5, "text_start_2": 15, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["Foxnews", "corrupt", "Puerto", "Rico", "government", "shooting", "teargas", "at", "protesters", "just", "wanting", "answers", "from", "there", "governor", ".", "Yesterday", "he", "was", "tongue", "tied", "stuttering", "his", "words", "on", "shepherd", "Smith", "he", "couldn't", "give", "Smith", "straight", "answer", "are", "Democrats", "going", "to", "teargas", "us", "for", "believing", "them"], "text_2_tokenized": ["Lol", "guys", ",", ",", "the", "teargas", "is", "getting", "old", ",", ",", "how", "about", "you", "just", "let", "up", "already", "and", "stop", "being", "pussies", ",", ",", "would", "you", "rather", "keep", "randomly", "killing", "and", "beating", "black", "people", "or", "just", "stop", "being", "a", "prick"]} -{"id": "2533-teargas", "word": "teargas", "label_binary": 1, "text_1": "\ud835\udc01\ud835\udc25\ud835\udc28\ud835\udc28\ud835\udc1d\ud835\udc32 \ud835\udc07\ud835\udc22\ud835\udc2c\ud835\udc2d\ud835\udc28\ud835\udc2b\ud835\udc32 #21JulyShahidDibas The youth activists, who were peacefully marching, refused to stop and walked ahead. Many rounds of teargas shells were fired but the supporters kept moving forward.", "token_idx_1": 22, "text_start_1": 134, "text_end_1": 141, "date_1": "2019-07", "text_2": "so what i'm seeing is play hockey and bring your dad so he can use the leafblower on teargas", "token_idx_2": 18, "text_start_2": 85, "text_end_2": 92, "date_2": "2020-07", "text_1_tokenized": ["\ud835\udc01\ud835\udc25\ud835\udc28\ud835\udc28\ud835\udc1d\ud835\udc32", "\ud835\udc07\ud835\udc22\ud835\udc2c\ud835\udc2d\ud835\udc28\ud835\udc2b\ud835\udc32", "#21JulyShahidDibas", "The", "youth", "activists", ",", "who", "were", "peacefully", "marching", ",", "refused", "to", "stop", "and", "walked", "ahead", ".", "Many", "rounds", "of", "teargas", "shells", "were", "fired", "but", "the", "supporters", "kept", "moving", "forward", "."], "text_2_tokenized": ["so", "what", "i'm", "seeing", "is", "play", "hockey", "and", "bring", "your", "dad", "so", "he", "can", "use", "the", "leafblower", "on", "teargas"]} -{"id": "2534-teargas", "word": "teargas", "label_binary": 1, "text_1": "A year ago, on this date, a protest in Bucharest culminated in teargas being thrown amid paceful protesters, and police brutality. The legal case against these aggressions bas stalled for an entire year. Today, we go back, to remind them that we have not forgotten. Stay safe!", "token_idx_1": 14, "text_start_1": 63, "text_end_1": 70, "date_1": "2019-07", "text_2": "PDX PD has deemed the crowd at Courthouse an \"unlawful assembly.\" Being told to \"Disperse to the North or West immediately. 
Failure to adhere...may subject you to arrest.\" Threats of teargas and other war criminal weapons in use.", "token_idx_2": 39, "text_start_2": 183, "text_end_2": 190, "date_2": "2020-07", "text_1_tokenized": ["A", "year", "ago", ",", "on", "this", "date", ",", "a", "protest", "in", "Bucharest", "culminated", "in", "teargas", "being", "thrown", "amid", "paceful", "protesters", ",", "and", "police", "brutality", ".", "The", "legal", "case", "against", "these", "aggressions", "bas", "stalled", "for", "an", "entire", "year", ".", "Today", ",", "we", "go", "back", ",", "to", "remind", "them", "that", "we", "have", "not", "forgotten", ".", "Stay", "safe", "!"], "text_2_tokenized": ["PDX", "PD", "has", "deemed", "the", "crowd", "at", "Courthouse", "an", "\"", "unlawful", "assembly", ".", "\"", "Being", "told", "to", "\"", "Disperse", "to", "the", "North", "or", "West", "immediately", ".", "Failure", "to", "adhere", "...", "may", "subject", "you", "to", "arrest", ".", "\"", "Threats", "of", "teargas", "and", "other", "war", "criminal", "weapons", "in", "use", "."]} -{"id": "2535-teargas", "word": "teargas", "label_binary": 1, "text_1": "Wondering how the Police, @PRPDNoticias, will justify their reasons to fire teargas at a suburb, a residential area in Guaynabo, PR, in response to protests from a crowd of roughly 150 people. People were protesting because Gov. @ricardorossello was there. #RickyRenuncia", "token_idx_1": 13, "text_start_1": 76, "text_end_1": 83, "date_1": "2019-07", "text_2": "Again I am not in Portland but if I have learned anything from living in this country for so long it's that being immune to teargas is a very good thing.", "token_idx_2": 25, "text_start_2": 124, "text_end_2": 131, "date_2": "2020-07", "text_1_tokenized": ["Wondering", "how", "the", "Police", ",", "@PRPDNoticias", ",", "will", "justify", "their", "reasons", "to", "fire", "teargas", "at", "a", "suburb", ",", "a", "residential", "area", "in", "Guaynabo", ",", "PR", ",", "in", "response", "to", "protests", "from", "a", "crowd", "of", "roughly", "150", "people", ".", "People", "were", "protesting", "because", "Gov", ".", "@ricardorossello", "was", "there", ".", "#RickyRenuncia"], "text_2_tokenized": ["Again", "I", "am", "not", "in", "Portland", "but", "if", "I", "have", "learned", "anything", "from", "living", "in", "this", "country", "for", "so", "long", "it's", "that", "being", "immune", "to", "teargas", "is", "a", "very", "good", "thing", "."]} -{"id": "2536-teargas", "word": "teargas", "label_binary": 1, "text_1": "I'll never forget the day the SANDF was on strike and got annihilated by SAPS with teargas and stun grenades at the Union Buildings \ud83d\ude2d", "token_idx_1": 16, "text_start_1": 83, "text_end_1": 90, "date_1": "2019-07", "text_2": "feds shot flashbangs, teargas, and threw schrapnel directly into riot ribs", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-07", "text_1_tokenized": ["I'll", "never", "forget", "the", "day", "the", "SANDF", "was", "on", "strike", "and", "got", "annihilated", "by", "SAPS", "with", "teargas", "and", "stun", "grenades", "at", "the", "Union", "Buildings", "\ud83d\ude2d"], "text_2_tokenized": ["feds", "shot", "flashbangs", ",", "teargas", ",", "and", "threw", "schrapnel", "directly", "into", "riot", "ribs"]} -{"id": "2537-teargas", "word": "teargas", "label_binary": 1, "text_1": "Sources at the protest just told me the cops themselves threw teargas bombs to a car, causing the fire. Fire is under control now. 
#RickyRenunciaYa #PuertoRicoProtests", "token_idx_1": 11, "text_start_1": 62, "text_end_1": 69, "date_1": "2019-07", "text_2": "Just a reminder that chemical agents like teargas are banned in warfare and teargas is an abortifacient.", "token_idx_2": 7, "text_start_2": 42, "text_end_2": 49, "date_2": "2020-07", "text_1_tokenized": ["Sources", "at", "the", "protest", "just", "told", "me", "the", "cops", "themselves", "threw", "teargas", "bombs", "to", "a", "car", ",", "causing", "the", "fire", ".", "Fire", "is", "under", "control", "now", ".", "#RickyRenunciaYa", "#PuertoRicoProtests"], "text_2_tokenized": ["Just", "a", "reminder", "that", "chemical", "agents", "like", "teargas", "are", "banned", "in", "warfare", "and", "teargas", "is", "an", "abortifacient", "."]} -{"id": "2538-teargas", "word": "teargas", "label_binary": 1, "text_1": "With all the demos, teargas and all the shouts, the only thing I know back in my mind is we will never win against the government. It will still happen! #census2019", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 27, "date_1": "2019-07", "text_2": "The leaders a the one teaches youth how betrayal works raila odinga betrayed Kenyans he should know those people who used to eat teargas a not with him today and still he is pushing things which a impossible to be accepted allocation of funds should be equal", "token_idx_2": 23, "text_start_2": 129, "text_end_2": 136, "date_2": "2020-07", "text_1_tokenized": ["With", "all", "the", "demos", ",", "teargas", "and", "all", "the", "shouts", ",", "the", "only", "thing", "I", "know", "back", "in", "my", "mind", "is", "we", "will", "never", "win", "against", "the", "government", ".", "It", "will", "still", "happen", "!", "#census2019"], "text_2_tokenized": ["The", "leaders", "a", "the", "one", "teaches", "youth", "how", "betrayal", "works", "raila", "odinga", "betrayed", "Kenyans", "he", "should", "know", "those", "people", "who", "used", "to", "eat", "teargas", "a", "not", "with", "him", "today", "and", "still", "he", "is", "pushing", "things", "which", "a", "impossible", "to", "be", "accepted", "allocation", "of", "funds", "should", "be", "equal"]} -{"id": "2539-teargas", "word": "teargas", "label_binary": 1, "text_1": "There's an alarming number of police with riot gear and teargas all over town in Harare, what did I miss?", "token_idx_1": 10, "text_start_1": 56, "text_end_1": 63, "date_1": "2019-07", "text_2": "#AmericaOrTrump So 40% of Americans approve everything Trump does! Kids in cages for God's sake! Lying over 19,000 times! Bounty on US troops! Secret Police using teargas on Moms! Killing over 140,000 Americans by not dealing with Coronavirus. The US died in 2016! 
Very sad!", "token_idx_2": 31, "text_start_2": 163, "text_end_2": 170, "date_2": "2020-07", "text_1_tokenized": ["There's", "an", "alarming", "number", "of", "police", "with", "riot", "gear", "and", "teargas", "all", "over", "town", "in", "Harare", ",", "what", "did", "I", "miss", "?"], "text_2_tokenized": ["#AmericaOrTrump", "So", "40", "%", "of", "Americans", "approve", "everything", "Trump", "does", "!", "Kids", "in", "cages", "for", "God's", "sake", "!", "Lying", "over", "19,000", "times", "!", "Bounty", "on", "US", "troops", "!", "Secret", "Police", "using", "teargas", "on", "Moms", "!", "Killing", "over", "140,000", "Americans", "by", "not", "dealing", "with", "Coronavirus", ".", "The", "US", "died", "in", "2016", "!", "Very", "sad", "!"]} -{"id": "2540-teargas", "word": "teargas", "label_binary": 1, "text_1": "If our tax money is abused by the regime to finance purchase of guns, teargas, waters canons & war machinery to crush us; one possible remedy might be to limit the tax we pay to the state. It's for our own safety.", "token_idx_1": 15, "text_start_1": 70, "text_end_1": 77, "date_1": "2019-07", "text_2": "OK we need to do something right now there is protesters destroying things The police are firing teargas rubber Bullets at protesters in Portland Oregon", "token_idx_2": 17, "text_start_2": 97, "text_end_2": 104, "date_2": "2020-07", "text_1_tokenized": ["If", "our", "tax", "money", "is", "abused", "by", "the", "regime", "to", "finance", "purchase", "of", "guns", ",", "teargas", ",", "waters", "canons", "&", "war", "machinery", "to", "crush", "us", ";", "one", "possible", "remedy", "might", "be", "to", "limit", "the", "tax", "we", "pay", "to", "the", "state", ".", "It's", "for", "our", "own", "safety", "."], "text_2_tokenized": ["OK", "we", "need", "to", "do", "something", "right", "now", "there", "is", "protesters", "destroying", "things", "The", "police", "are", "firing", "teargas", "rubber", "Bullets", "at", "protesters", "in", "Portland", "Oregon"]} -{"id": "2541-teargas", "word": "teargas", "label_binary": 1, "text_1": "Open air inasowadi iyi. Aftr teargas people can still dance like this?? Oh wow!!\ud83d\ude4c\ud83d\ude4c", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 36, "date_1": "2019-07", "text_2": "blowing gas back towards the cops is cool and all, but could we focus more on actually extinguishing the teargas? a lot of people live downtown, both indoors and outdoors, and leafblowers don't stop the gas from spreading. 
#defendpdx #portlandprotests", "token_idx_2": 20, "text_start_2": 105, "text_end_2": 112, "date_2": "2020-07", "text_1_tokenized": ["Open", "air", "inasowadi", "iyi", ".", "Aftr", "teargas", "people", "can", "still", "dance", "like", "this", "?", "?", "Oh", "wow", "!", "!", "\ud83d\ude4c", "\ud83d\ude4c"], "text_2_tokenized": ["blowing", "gas", "back", "towards", "the", "cops", "is", "cool", "and", "all", ",", "but", "could", "we", "focus", "more", "on", "actually", "extinguishing", "the", "teargas", "?", "a", "lot", "of", "people", "live", "downtown", ",", "both", "indoors", "and", "outdoors", ",", "and", "leafblowers", "don't", "stop", "the", "gas", "from", "spreading", ".", "#defendpdx", "#portlandprotests"]} -{"id": "2542-teargas", "word": "teargas", "label_binary": 1, "text_1": "September 20 is when a lot a weebs are gonna learn the taste of teargas and sand bags", "token_idx_1": 14, "text_start_1": 64, "text_end_1": 71, "date_1": "2019-07", "text_2": "Incredibly frustrated with everyone who has been making distinctions like *peaceful* protestors & \"outside agitators,\" bc DHS doesn't care. DHS is going to show up in your town to teargas, beat, & disappear you and your neighbors then use your own words to justify it afterwards.", "token_idx_2": 35, "text_start_2": 184, "text_end_2": 191, "date_2": "2020-07", "text_1_tokenized": ["September", "20", "is", "when", "a", "lot", "a", "weebs", "are", "gonna", "learn", "the", "taste", "of", "teargas", "and", "sand", "bags"], "text_2_tokenized": ["Incredibly", "frustrated", "with", "everyone", "who", "has", "been", "making", "distinctions", "like", "*", "peaceful", "*", "protestors", "&", "\"", "outside", "agitators", ",", "\"", "bc", "DHS", "doesn't", "care", ".", "DHS", "is", "going", "to", "show", "up", "in", "your", "town", "to", "teargas", ",", "beat", ",", "&", "disappear", "you", "and", "your", "neighbors", "then", "use", "your", "own", "words", "to", "justify", "it", "afterwards", "."]} -{"id": "2543-teargas", "word": "teargas", "label_binary": 1, "text_1": "Firing teargas where it harms elderly residents... um, what happened to risk assessment? #YuenLong", "token_idx_1": 1, "text_start_1": 7, "text_end_1": 14, "date_1": "2019-07", "text_2": "Can I put immune to teargas on my resume", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["Firing", "teargas", "where", "it", "harms", "elderly", "residents", "...", "um", ",", "what", "happened", "to", "risk", "assessment", "?", "#YuenLong"], "text_2_tokenized": ["Can", "I", "put", "immune", "to", "teargas", "on", "my", "resume"]} -{"id": "2544-teargas", "word": "teargas", "label_binary": 1, "text_1": "Watching protesters in Hongkong (teargas included) and wondering how much has been stolen by their leaders? They are protesting bills! bills people. Let it sink in as you skip lunch, later join traffic home to no food, water or electricity! #IamGoingToNakuru #TheGenerationKE", "token_idx_1": 5, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-07", "text_2": "lmao remember when the cops were like \u201cif you throw our teargas back at us us you'll be arrested for assault\u201d so like... 
you agree you're assaulting ya", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 63, "date_2": "2020-07", "text_1_tokenized": ["Watching", "protesters", "in", "Hongkong", "(", "teargas", "included", ")", "and", "wondering", "how", "much", "has", "been", "stolen", "by", "their", "leaders", "?", "They", "are", "protesting", "bills", "!", "bills", "people", ".", "Let", "it", "sink", "in", "as", "you", "skip", "lunch", ",", "later", "join", "traffic", "home", "to", "no", "food", ",", "water", "or", "electricity", "!", "#IamGoingToNakuru", "#TheGenerationKE"], "text_2_tokenized": ["lmao", "remember", "when", "the", "cops", "were", "like", "\u201c", "if", "you", "throw", "our", "teargas", "back", "at", "us", "us", "you'll", "be", "arrested", "for", "assault", "\u201d", "so", "like", "...", "you", "agree", "you're", "assaulting", "ya"]} -{"id": "2545-teargas", "word": "teargas", "label_binary": 1, "text_1": "Police fire teargas to break up massive protests demanding Puerto Rico governor resign #US | #RTGNews", "token_idx_1": 2, "text_start_1": 12, "text_end_1": 19, "date_1": "2019-07", "text_2": "i have such a headache today from the teargas and fireworks from last night...my body is so sore & i feel hungover", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-07", "text_1_tokenized": ["Police", "fire", "teargas", "to", "break", "up", "massive", "protests", "demanding", "Puerto", "Rico", "governor", "resign", "#US", "|", "#RTGNews"], "text_2_tokenized": ["i", "have", "such", "a", "headache", "today", "from", "the", "teargas", "and", "fireworks", "from", "last", "night", "...", "my", "body", "is", "so", "sore", "&", "i", "feel", "hungover"]} -{"id": "2547-teargas", "word": "teargas", "label_binary": 1, "text_1": "Fred Matiangi, under whose leadership ALL OUR RIGHTS HAVE BEEN ERODED has send cops to met out violence/teargas on DEFENSELESS #SwitchOffKPLC demonstrators. now can you imagine when he becomes president? he is one of those vying in 2022! btw do cops get free elec? @WanjeriNderu", "token_idx_1": 20, "text_start_1": 104, "text_end_1": 111, "date_1": "2019-07", "text_2": "A bunch of pinkos shaking their fists at the scary government don't even know why they are protesting. Hope you coconut heads get hit with a teargas can or get COVID\ud83d\ude02 #PortlandProtests", "token_idx_2": 27, "text_start_2": 141, "text_end_2": 148, "date_2": "2020-07", "text_1_tokenized": ["Fred", "Matiangi", ",", "under", "whose", "leadership", "ALL", "OUR", "RIGHTS", "HAVE", "BEEN", "ERODED", "has", "send", "cops", "to", "met", "out", "violence", "/", "teargas", "on", "DEFENSELESS", "#SwitchOffKPLC", "demonstrators", ".", "now", "can", "you", "imagine", "when", "he", "becomes", "president", "?", "he", "is", "one", "of", "those", "vying", "in", "2022", "!", "btw", "do", "cops", "get", "free", "elec", "?", "@WanjeriNderu"], "text_2_tokenized": ["A", "bunch", "of", "pinkos", "shaking", "their", "fists", "at", "the", "scary", "government", "don't", "even", "know", "why", "they", "are", "protesting", ".", "Hope", "you", "coconut", "heads", "get", "hit", "with", "a", "teargas", "can", "or", "get", "COVID", "\ud83d\ude02", "#PortlandProtests"]} -{"id": "2548-teargas", "word": "teargas", "label_binary": 1, "text_1": "Kings and Queens living in Kaduna, please watch your movement today. 
You won't inhale teargas for nothing in Jesus name.", "token_idx_1": 16, "text_start_1": 86, "text_end_1": 93, "date_1": "2019-07", "text_2": "The @IYCWestBengal supporters who were peacefully rallying refused to stop & walk ahead. The police started lathicharge & many rounds of teargas shell were fired but undeterred the supporters kept on moving. #\u0985\u09ae\u09b0\u09e8\u09e7\u09b6\u09c7\u099c\u09c1\u09b2\u09be\u0987", "token_idx_2": 22, "text_start_2": 145, "text_end_2": 152, "date_2": "2020-07", "text_1_tokenized": ["Kings", "and", "Queens", "living", "in", "Kaduna", ",", "please", "watch", "your", "movement", "today", ".", "You", "won't", "inhale", "teargas", "for", "nothing", "in", "Jesus", "name", "."], "text_2_tokenized": ["The", "@IYCWestBengal", "supporters", "who", "were", "peacefully", "rallying", "refused", "to", "stop", "&", "walk", "ahead", ".", "The", "police", "started", "lathicharge", "&", "many", "rounds", "of", "teargas", "shell", "were", "fired", "but", "undeterred", "the", "supporters", "kept", "on", "moving", ".", "#\u0985\u09ae\u09b0\u09e8\u09e7\u09b6\u09c7\u099c\u09c1\u09b2\u09be\u0987"]} -{"id": "2549-teargas", "word": "teargas", "label_binary": 1, "text_1": "#HongKong: When the demonstrators converge in residential areas, and the Police use teargas to disperse them. Hundreds of young children and babies in those homes are suffocating from the smoke from those Tear Gas canisters. Must those children/babies suffer from your actions?", "token_idx_1": 14, "text_start_1": 84, "text_end_1": 91, "date_1": "2019-07", "text_2": "When a cop sprays teargas on a person standing still, it's an appropriate use of force. But if you use a leaf-blower to revert the tear gas back towards the police, it's assault on an officer. If this isn't ridiculous to you, you aren't paying attention.", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 25, "date_2": "2020-07", "text_1_tokenized": ["#HongKong", ":", "When", "the", "demonstrators", "converge", "in", "residential", "areas", ",", "and", "the", "Police", "use", "teargas", "to", "disperse", "them", ".", "Hundreds", "of", "young", "children", "and", "babies", "in", "those", "homes", "are", "suffocating", "from", "the", "smoke", "from", "those", "Tear", "Gas", "canisters", ".", "Must", "those", "children", "/", "babies", "suffer", "from", "your", "actions", "?"], "text_2_tokenized": ["When", "a", "cop", "sprays", "teargas", "on", "a", "person", "standing", "still", ",", "it's", "an", "appropriate", "use", "of", "force", ".", "But", "if", "you", "use", "a", "leaf-blower", "to", "revert", "the", "tear", "gas", "back", "towards", "the", "police", ",", "it's", "assault", "on", "an", "officer", ".", "If", "this", "isn't", "ridiculous", "to", "you", ",", "you", "aren't", "paying", "attention", "."]} -{"id": "2550-teargas", "word": "teargas", "label_binary": 1, "text_1": "Heavy rain on its way, to wash the toxic teargas residue to our surrounding waterways.", "token_idx_1": 10, "text_start_1": 41, "text_end_1": 48, "date_1": "2019-07", "text_2": "The @GOP refuses to condemn Russia for putting bounties on American soldiers, but sends jackboots in to teargas peacefully protesting mothers. 
#Resist #VoteBlue2020", "token_idx_2": 18, "text_start_2": 104, "text_end_2": 111, "date_2": "2020-07", "text_1_tokenized": ["Heavy", "rain", "on", "its", "way", ",", "to", "wash", "the", "toxic", "teargas", "residue", "to", "our", "surrounding", "waterways", "."], "text_2_tokenized": ["The", "@GOP", "refuses", "to", "condemn", "Russia", "for", "putting", "bounties", "on", "American", "soldiers", ",", "but", "sends", "jackboots", "in", "to", "teargas", "peacefully", "protesting", "mothers", ".", "#Resist", "#VoteBlue2020"]} -{"id": "2551-teargas", "word": "teargas", "label_binary": 1, "text_1": "Sowore's Revolution Now people are protesting o. The police is firing teargas at them.", "token_idx_1": 12, "text_start_1": 70, "text_end_1": 77, "date_1": "2019-07", "text_2": "not my aunt asking if the \u201cacab\u201d on my jacket was a band i liked and then forbidding me from going to another protest when i told her about the teargas - all in the span of two minutes", "token_idx_2": 32, "text_start_2": 144, "text_end_2": 151, "date_2": "2020-07", "text_1_tokenized": ["Sowore's", "Revolution", "Now", "people", "are", "protesting", "o", ".", "The", "police", "is", "firing", "teargas", "at", "them", "."], "text_2_tokenized": ["not", "my", "aunt", "asking", "if", "the", "\u201c", "acab", "\u201d", "on", "my", "jacket", "was", "a", "band", "i", "liked", "and", "then", "forbidding", "me", "from", "going", "to", "another", "protest", "when", "i", "told", "her", "about", "the", "teargas", "-", "all", "in", "the", "span", "of", "two", "minutes"]} -{"id": "2552-teargas", "word": "teargas", "label_binary": 1, "text_1": "Where is our revolution? Sowore bring back our revolution. Chai common teargas.", "token_idx_1": 13, "text_start_1": 71, "text_end_1": 78, "date_1": "2019-07", "text_2": "Just remember at the end of the day Trump would rather injure and teargas moms over Property. (We pay for)", "token_idx_2": 13, "text_start_2": 66, "text_end_2": 73, "date_2": "2020-07", "text_1_tokenized": ["Where", "is", "our", "revolution", "?", "Sowore", "bring", "back", "our", "revolution", ".", "Chai", "common", "teargas", "."], "text_2_tokenized": ["Just", "remember", "at", "the", "end", "of", "the", "day", "Trump", "would", "rather", "injure", "and", "teargas", "moms", "over", "Property", ".", "(", "We", "pay", "for", ")"]} -{"id": "2553-teargas", "word": "teargas", "label_binary": 1, "text_1": "the hong kong teargas videos are beautiful. just *completely* disappointing the cops. they're just launching bomb after bomb and totally wasting them. 
it's gorgeous.", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 21, "date_1": "2019-07", "text_2": "If troops are deployed it should be to enforce mask wearing and social distancing\u2014NOT to fire teargas at and detain protestors.", "token_idx_2": 18, "text_start_2": 94, "text_end_2": 101, "date_2": "2020-07", "text_1_tokenized": ["the", "hong", "kong", "teargas", "videos", "are", "beautiful", ".", "just", "*", "completely", "*", "disappointing", "the", "cops", ".", "they're", "just", "launching", "bomb", "after", "bomb", "and", "totally", "wasting", "them", ".", "it's", "gorgeous", "."], "text_2_tokenized": ["If", "troops", "are", "deployed", "it", "should", "be", "to", "enforce", "mask", "wearing", "and", "social", "distancing", "\u2014", "NOT", "to", "fire", "teargas", "at", "and", "detain", "protestors", "."]} -{"id": "2554-teargas", "word": "teargas", "label_binary": 1, "text_1": "Hong Kong police fire teargas and rubber bullets as demonstrators defy ban on protest against triad thugs Hong Kong police fired tear gas and rubber bullets on Saturday to disperse huge crowds holding a banned rally, snarling the city in its eight consecutive weekend of pro\u2026", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 29, "date_1": "2019-07", "text_2": "Welp. They separated my 15y/o niece from her mom downtown today and started chucking teargas. I'm done with this country.", "token_idx_2": 17, "text_start_2": 85, "text_end_2": 92, "date_2": "2020-07", "text_1_tokenized": ["Hong", "Kong", "police", "fire", "teargas", "and", "rubber", "bullets", "as", "demonstrators", "defy", "ban", "on", "protest", "against", "triad", "thugs", "Hong", "Kong", "police", "fired", "tear", "gas", "and", "rubber", "bullets", "on", "Saturday", "to", "disperse", "huge", "crowds", "holding", "a", "banned", "rally", ",", "snarling", "the", "city", "in", "its", "eight", "consecutive", "weekend", "of", "pro", "\u2026"], "text_2_tokenized": ["Welp", ".", "They", "separated", "my", "15y", "/", "o", "niece", "from", "her", "mom", "downtown", "today", "and", "started", "chucking", "teargas", ".", "I'm", "done", "with", "this", "country", "."]} -{"id": "2555-teargas", "word": "teargas", "label_binary": 1, "text_1": "I think I can smell teargas (we're in Tin Hau, right across Victoria Park from Causeway Bay). 
The missus is clucking at the TV and blaming the smell on the burning bin and not the indiscriminate use of Tear Gas.", "token_idx_1": 5, "text_start_1": 20, "text_end_1": 27, "date_1": "2019-07", "text_2": "Oregon's attorney general has filed a lawsuit against the Department of Homeland Security accusing law-enforcement officials of tossing teargas at demonstrators and detaining them without explanation.", "token_idx_2": 18, "text_start_2": 136, "text_end_2": 143, "date_2": "2020-07", "text_1_tokenized": ["I", "think", "I", "can", "smell", "teargas", "(", "we're", "in", "Tin", "Hau", ",", "right", "across", "Victoria", "Park", "from", "Causeway", "Bay", ")", ".", "The", "missus", "is", "clucking", "at", "the", "TV", "and", "blaming", "the", "smell", "on", "the", "burning", "bin", "and", "not", "the", "indiscriminate", "use", "of", "Tear", "Gas", "."], "text_2_tokenized": ["Oregon's", "attorney", "general", "has", "filed", "a", "lawsuit", "against", "the", "Department", "of", "Homeland", "Security", "accusing", "law-enforcement", "officials", "of", "tossing", "teargas", "at", "demonstrators", "and", "detaining", "them", "without", "explanation", "."]} -{"id": "2556-teargas", "word": "teargas", "label_binary": 1, "text_1": "So we are in this jav that only 3 windows can be opened and to worsened it someone is busy releasing teargas.. \ud83d\ude44\ud83d\ude44\ud83d\ude44", "token_idx_1": 21, "text_start_1": 101, "text_end_1": 108, "date_1": "2019-07", "text_2": "#AfricanLivesMatter I don't how many times we must repeat this ...BUT you cannot beat, teargas, arrest , torture , abduct ,disappear, rape,imprison or kill poverty out of people . You have to build solutions together with the affected, listen to their concerns,needs&aspirations", "token_idx_2": 16, "text_start_2": 87, "text_end_2": 94, "date_2": "2020-07", "text_1_tokenized": ["So", "we", "are", "in", "this", "jav", "that", "only", "3", "windows", "can", "be", "opened", "and", "to", "worsened", "it", "someone", "is", "busy", "releasing", "teargas", "..", "\ud83d\ude44", "\ud83d\ude44", "\ud83d\ude44"], "text_2_tokenized": ["#AfricanLivesMatter", "I", "don't", "how", "many", "times", "we", "must", "repeat", "this", "...", "BUT", "you", "cannot", "beat", ",", "teargas", ",", "arrest", ",", "torture", ",", "abduct", ",", "disappear", ",", "rape", ",", "imprison", "or", "kill", "poverty", "out", "of", "people", ".", "You", "have", "to", "build", "solutions", "together", "with", "the", "affected", ",", "listen", "to", "their", "concerns", ",", "needs", "&", "aspirations"]} -{"id": "2557-teargas", "word": "teargas", "label_binary": 1, "text_1": "What a goddamn mess, the hell is wrong wif hk police?? Assaulting women, firing at random passerbys, using teargas which may have expired = harmful to health. Is this how hongkong actl treats its citizens? Lol carrie lam please hold ur shit tgt and do smth abt it.", "token_idx_1": 23, "text_start_1": 107, "text_end_1": 114, "date_1": "2019-07", "text_2": "Barr's answer is the crux of the problem. 
They justify using teargas on everybody because of \"a few bad apples\" so to speak.", "token_idx_2": 12, "text_start_2": 61, "text_end_2": 68, "date_2": "2020-07", "text_1_tokenized": ["What", "a", "goddamn", "mess", ",", "the", "hell", "is", "wrong", "wif", "hk", "police", "?", "?", "Assaulting", "women", ",", "firing", "at", "random", "passerbys", ",", "using", "teargas", "which", "may", "have", "expired", "=", "harmful", "to", "health", ".", "Is", "this", "how", "hongkong", "actl", "treats", "its", "citizens", "?", "Lol", "carrie", "lam", "please", "hold", "ur", "shit", "tgt", "and", "do", "smth", "abt", "it", "."], "text_2_tokenized": ["Barr's", "answer", "is", "the", "crux", "of", "the", "problem", ".", "They", "justify", "using", "teargas", "on", "everybody", "because", "of", "\"", "a", "few", "bad", "apples", "\"", "so", "to", "speak", "."]} -{"id": "2558-teargas", "word": "teargas", "label_binary": 1, "text_1": "\"I'm not the political type, Not the type to fake an image for the sake of this whole consciousness type, Never been called a Kaffir before, Can't imagine seeing 10 cops and dogs crashing through my front door, Can't say what teargas smells like,", "token_idx_1": 46, "text_start_1": 226, "text_end_1": 233, "date_1": "2019-07", "text_2": "What I've seen on a nightly basis is that the citizens of Portland have been successful in defending their rights and city. In fact, so successful that feds are now copying the protestors (leafblowers to blow teargas)", "token_idx_2": 39, "text_start_2": 209, "text_end_2": 216, "date_2": "2020-07", "text_1_tokenized": ["\"", "I'm", "not", "the", "political", "type", ",", "Not", "the", "type", "to", "fake", "an", "image", "for", "the", "sake", "of", "this", "whole", "consciousness", "type", ",", "Never", "been", "called", "a", "Kaffir", "before", ",", "Can't", "imagine", "seeing", "10", "cops", "and", "dogs", "crashing", "through", "my", "front", "door", ",", "Can't", "say", "what", "teargas", "smells", "like", ","], "text_2_tokenized": ["What", "I've", "seen", "on", "a", "nightly", "basis", "is", "that", "the", "citizens", "of", "Portland", "have", "been", "successful", "in", "defending", "their", "rights", "and", "city", ".", "In", "fact", ",", "so", "successful", "that", "feds", "are", "now", "copying", "the", "protestors", "(", "leafblowers", "to", "blow", "teargas", ")"]} -{"id": "2559-teargas", "word": "teargas", "label_binary": 1, "text_1": "Denying permit to peaceful protesters is wrong! Added to that, police threw teargas at the protesters suffocating them... #NeverAgain", "token_idx_1": 14, "text_start_1": 76, "text_end_1": 83, "date_1": "2019-07", "text_2": "Friendly reminder that teargas is illegal and use of it is a war crime.", "token_idx_2": 3, "text_start_2": 23, "text_end_2": 30, "date_2": "2020-07", "text_1_tokenized": ["Denying", "permit", "to", "peaceful", "protesters", "is", "wrong", "!", "Added", "to", "that", ",", "police", "threw", "teargas", "at", "the", "protesters", "suffocating", "them", "...", "#NeverAgain"], "text_2_tokenized": ["Friendly", "reminder", "that", "teargas", "is", "illegal", "and", "use", "of", "it", "is", "a", "war", "crime", "."]} -{"id": "2560-teargas", "word": "teargas", "label_binary": 1, "text_1": "In HK now, and watching from my hotel room (in WanChai) the protests on the street live from @Reuters twitter feed. So far the streets outside by hotel os quiet, tho round the corner... quite fluid the situation. 
Riot police are out and had fired teargas", "token_idx_1": 52, "text_start_1": 247, "text_end_1": 254, "date_1": "2019-07", "text_2": "The best time for police to unwantedly use teargas in favour of the incumbent president is during election time.", "token_idx_2": 8, "text_start_2": 43, "text_end_2": 50, "date_2": "2020-07", "text_1_tokenized": ["In", "HK", "now", ",", "and", "watching", "from", "my", "hotel", "room", "(", "in", "WanChai", ")", "the", "protests", "on", "the", "street", "live", "from", "@Reuters", "twitter", "feed", ".", "So", "far", "the", "streets", "outside", "by", "hotel", "os", "quiet", ",", "tho", "round", "the", "corner", "...", "quite", "fluid", "the", "situation", ".", "Riot", "police", "are", "out", "and", "had", "fired", "teargas"], "text_2_tokenized": ["The", "best", "time", "for", "police", "to", "unwantedly", "use", "teargas", "in", "favour", "of", "the", "incumbent", "president", "is", "during", "election", "time", "."]} -{"id": "2561-teargas", "word": "teargas", "label_binary": 1, "text_1": "Herb \ud83c\udf3f which is known as Weed\ud83c\udf41\ud83c\udf2c is for small boys. Real men smoke mosquito coils and teargas .\ud83d\ude0e", "token_idx_1": 20, "text_start_1": 85, "text_end_1": 92, "date_1": "2019-07", "text_2": "Condemn the unnecessary use of force, teargas & stone pelting on @MaryamNSharif & her party workers by police in Lahore today.", "token_idx_2": 7, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-07", "text_1_tokenized": ["Herb", "\ud83c\udf3f", "which", "is", "known", "as", "Weed", "\ud83c\udf41", "\ud83c\udf2c", "is", "for", "small", "boys", ".", "Real", "men", "smoke", "mosquito", "coils", "and", "teargas", ".", "\ud83d\ude0e"], "text_2_tokenized": ["Condemn", "the", "unnecessary", "use", "of", "force", ",", "teargas", "&", "stone", "pelting", "on", "@MaryamNSharif", "&", "her", "party", "workers", "by", "police", "in", "Lahore", "today", "."]} -{"id": "2562-teargas", "word": "teargas", "label_binary": 1, "text_1": "Sowore: Dramatic photos, videos as Soldiers, Police fire teargas, disperse #RevolutionNow protesters Let the protest continue!!!", "token_idx_1": 11, "text_start_1": 57, "text_end_1": 64, "date_1": "2019-07", "text_2": "I keep wondering how different it might be if 45 had sent truckloads of Coca-Cola to protests instead of guns and teargas.", "token_idx_2": 21, "text_start_2": 114, "text_end_2": 121, "date_2": "2020-07", "text_1_tokenized": ["Sowore", ":", "Dramatic", "photos", ",", "videos", "as", "Soldiers", ",", "Police", "fire", "teargas", ",", "disperse", "#RevolutionNow", "protesters", "Let", "the", "protest", "continue", "!", "!", "!"], "text_2_tokenized": ["I", "keep", "wondering", "how", "different", "it", "might", "be", "if", "45", "had", "sent", "truckloads", "of", "Coca-Cola", "to", "protests", "instead", "of", "guns", "and", "teargas", "."]} -{"id": "2563-teargas", "word": "teargas", "label_binary": 1, "text_1": "If you show them how their NASA principles are excelling in life their children are being paraded to take teargas, wataanza matusi", "token_idx_1": 19, "text_start_1": 106, "text_end_1": 113, "date_1": "2019-07", "text_2": "It's horrific how police in Seattle wield bikes as weapons. Bikes & teargas have become key weapons in the police surge against peaceful protests. 
#DefundThePolice #BlackLivesMatter", "token_idx_2": 13, "text_start_2": 72, "text_end_2": 79, "date_2": "2020-07", "text_1_tokenized": ["If", "you", "show", "them", "how", "their", "NASA", "principles", "are", "excelling", "in", "life", "their", "children", "are", "being", "paraded", "to", "take", "teargas", ",", "wataanza", "matusi"], "text_2_tokenized": ["It's", "horrific", "how", "police", "in", "Seattle", "wield", "bikes", "as", "weapons", ".", "Bikes", "&", "teargas", "have", "become", "key", "weapons", "in", "the", "police", "surge", "against", "peaceful", "protests", ".", "#DefundThePolice", "#BlackLivesMatter"]} -{"id": "3745-paternity", "word": "paternity", "label_binary": 1, "text_1": "My coworker has been on paternity leave for 2 months now, and he still has a whole month left and I'm in charge of his workload \ud83d\ude2d\ud83d\ude29", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 33, "date_1": "2019-07", "text_2": "asafa a get paternity test ? Child support? Girlfriend ??? Baby girl...If u reading this 25000 a month is NOT enough....80k minimum", "token_idx_2": 3, "text_start_2": 12, "text_end_2": 21, "date_2": "2020-07", "text_1_tokenized": ["My", "coworker", "has", "been", "on", "paternity", "leave", "for", "2", "months", "now", ",", "and", "he", "still", "has", "a", "whole", "month", "left", "and", "I'm", "in", "charge", "of", "his", "workload", "\ud83d\ude2d", "\ud83d\ude29"], "text_2_tokenized": ["asafa", "a", "get", "paternity", "test", "?", "Child", "support", "?", "Girlfriend", "?", "?", "?", "Baby", "girl", "...", "If", "u", "reading", "this", "25000", "a", "month", "is", "NOT", "enough", "...", "80k", "minimum"]} -{"id": "3746-paternity", "word": "paternity", "label_binary": 1, "text_1": "\u201cOnly 9% of U.S. work sites advertise jobs that offer paid paternity leave \u2014 and even most new dads who have that option are back at work within a week of welcoming a child.\u201d @katiecouric", "token_idx_1": 16, "text_start_1": 59, "text_end_1": 68, "date_1": "2019-07", "text_2": "Tiny baby is getting more attitude & sass every day - aka maternity & paternity confirmed", "token_idx_2": 14, "text_start_2": 78, "text_end_2": 87, "date_2": "2020-07", "text_1_tokenized": ["\u201c", "Only", "9", "%", "of", "U", ".", "S", ".", "work", "sites", "advertise", "jobs", "that", "offer", "paid", "paternity", "leave", "\u2014", "and", "even", "most", "new", "dads", "who", "have", "that", "option", "are", "back", "at", "work", "within", "a", "week", "of", "welcoming", "a", "child", ".", "\u201d", "@katiecouric"], "text_2_tokenized": ["Tiny", "baby", "is", "getting", "more", "attitude", "&", "sass", "every", "day", "-", "aka", "maternity", "&", "paternity", "confirmed"]} -{"id": "3747-paternity", "word": "paternity", "label_binary": 1, "text_1": "Yahoo fantasy says Khris Davis is Not Active (paternity list) but also starting and batting 7th tonight. 
So, which is it?", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 55, "date_1": "2019-07", "text_2": "Making your wife get paternity tests isn't the lib own Ben Shapiro thinks it is \ud83d\ude02", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 30, "date_2": "2020-07", "text_1_tokenized": ["Yahoo", "fantasy", "says", "Khris", "Davis", "is", "Not", "Active", "(", "paternity", "list", ")", "but", "also", "starting", "and", "batting", "7th", "tonight", ".", "So", ",", "which", "is", "it", "?"], "text_2_tokenized": ["Making", "your", "wife", "get", "paternity", "tests", "isn't", "the", "lib", "own", "Ben", "Shapiro", "thinks", "it", "is", "\ud83d\ude02"]} -{"id": "3748-paternity", "word": "paternity", "label_binary": 1, "text_1": "I feel the most profound sense of paternity when I look at baby chameleons", "token_idx_1": 7, "text_start_1": 34, "text_end_1": 43, "date_1": "2019-07", "text_2": "Melanie gave DJ a paternity test and told Derwin he wasn't his, immediately after telling him she wouldn't tell him ...", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["I", "feel", "the", "most", "profound", "sense", "of", "paternity", "when", "I", "look", "at", "baby", "chameleons"], "text_2_tokenized": ["Melanie", "gave", "DJ", "a", "paternity", "test", "and", "told", "Derwin", "he", "wasn't", "his", ",", "immediately", "after", "telling", "him", "she", "wouldn't", "tell", "him", "..."]} -{"id": "3749-paternity", "word": "paternity", "label_binary": 0, "text_1": "Lauren lake in paternity court is savage af \ud83d\ude02\ud83d\ude02", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "Period leaves are a very small part of the gamut of reproductive rights: maternity/paternity leaves, birth control, abortion rights, recognition of transmen under the biological rubric of reproductive mechanisms, and accepting non menstruating women as women. Y'all aren't ready.", "token_idx_2": 16, "text_start_2": 83, "text_end_2": 92, "date_2": "2020-07", "text_1_tokenized": ["Lauren", "lake", "in", "paternity", "court", "is", "savage", "af", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["Period", "leaves", "are", "a", "very", "small", "part", "of", "the", "gamut", "of", "reproductive", "rights", ":", "maternity", "/", "paternity", "leaves", ",", "birth", "control", ",", "abortion", "rights", ",", "recognition", "of", "transmen", "under", "the", "biological", "rubric", "of", "reproductive", "mechanisms", ",", "and", "accepting", "non", "menstruating", "women", "as", "women", ".", "Y'all", "aren't", "ready", "."]} -{"id": "3750-paternity", "word": "paternity", "label_binary": 1, "text_1": "These women who rush to court to claim a man's paternity of their children when they are dead,why can't they do it when the man is still alive? I think they ought to be disowned in entirety by the law. That is negligence.", "token_idx_1": 10, "text_start_1": 47, "text_end_1": 56, "date_1": "2019-07", "text_2": "We was just talking about how an increasing number of black women are dying during childbirth last week and now mfs talking about paternity tests being given right after birth... 
lol", "token_idx_2": 23, "text_start_2": 130, "text_end_2": 139, "date_2": "2020-07", "text_1_tokenized": ["These", "women", "who", "rush", "to", "court", "to", "claim", "a", "man's", "paternity", "of", "their", "children", "when", "they", "are", "dead", ",", "why", "can't", "they", "do", "it", "when", "the", "man", "is", "still", "alive", "?", "I", "think", "they", "ought", "to", "be", "disowned", "in", "entirety", "by", "the", "law", ".", "That", "is", "negligence", "."], "text_2_tokenized": ["We", "was", "just", "talking", "about", "how", "an", "increasing", "number", "of", "black", "women", "are", "dying", "during", "childbirth", "last", "week", "and", "now", "mfs", "talking", "about", "paternity", "tests", "being", "given", "right", "after", "birth", "...", "lol"]} -{"id": "3751-paternity", "word": "paternity", "label_binary": 1, "text_1": "Reading about this Jones Day sex discrimination suit re parental leave and have many thoughts. Last month I tweeted that the fight for paternity leave us necessary but isn't worth overshadowing the fight for reasonable maternity leave.", "token_idx_1": 24, "text_start_1": 135, "text_end_1": 144, "date_1": "2019-07", "text_2": "for the record, paternity & maternity leave laws in the us fucking SUCK", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 25, "date_2": "2020-07", "text_1_tokenized": ["Reading", "about", "this", "Jones", "Day", "sex", "discrimination", "suit", "re", "parental", "leave", "and", "have", "many", "thoughts", ".", "Last", "month", "I", "tweeted", "that", "the", "fight", "for", "paternity", "leave", "us", "necessary", "but", "isn't", "worth", "overshadowing", "the", "fight", "for", "reasonable", "maternity", "leave", "."], "text_2_tokenized": ["for", "the", "record", ",", "paternity", "&", "maternity", "leave", "laws", "in", "the", "us", "fucking", "SUCK"]} -{"id": "3752-paternity", "word": "paternity", "label_binary": 1, "text_1": "I'm so glad my coworker did none of his fucking work ahead of time so I now have to do all of it bc he's now on paternity leave \ud83d\ude43", "token_idx_1": 27, "text_start_1": 112, "text_end_1": 121, "date_1": "2019-07", "text_2": "Even though that paternity test video is fake, it's wild seeing people defending the girl as if she didn't do anything wrong", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 26, "date_2": "2020-07", "text_1_tokenized": ["I'm", "so", "glad", "my", "coworker", "did", "none", "of", "his", "fucking", "work", "ahead", "of", "time", "so", "I", "now", "have", "to", "do", "all", "of", "it", "bc", "he's", "now", "on", "paternity", "leave", "\ud83d\ude43"], "text_2_tokenized": ["Even", "though", "that", "paternity", "test", "video", "is", "fake", ",", "it's", "wild", "seeing", "people", "defending", "the", "girl", "as", "if", "she", "didn't", "do", "anything", "wrong"]} -{"id": "3753-paternity", "word": "paternity", "label_binary": 1, "text_1": "Listening to the @Sawbones history of paternity testing episode and somehow only just realized the plot of Mammia Mia didnt need to happen if anybody had done some blood type testing or dna testing.", "token_idx_1": 6, "text_start_1": 38, "text_end_1": 47, "date_1": "2019-07", "text_2": "Lmao wow this new job I just started gives equal opportunity maternity and paternity leave! 
It's 2 days and you have to show the birth certificate", "token_idx_2": 13, "text_start_2": 75, "text_end_2": 84, "date_2": "2020-07", "text_1_tokenized": ["Listening", "to", "the", "@Sawbones", "history", "of", "paternity", "testing", "episode", "and", "somehow", "only", "just", "realized", "the", "plot", "of", "Mammia", "Mia", "didnt", "need", "to", "happen", "if", "anybody", "had", "done", "some", "blood", "type", "testing", "or", "dna", "testing", "."], "text_2_tokenized": ["Lmao", "wow", "this", "new", "job", "I", "just", "started", "gives", "equal", "opportunity", "maternity", "and", "paternity", "leave", "!", "It's", "2", "days", "and", "you", "have", "to", "show", "the", "birth", "certificate"]} -{"id": "3754-paternity", "word": "paternity", "label_binary": 1, "text_1": "I don't even watch tv but, I can binge watch paternity court, orange is the new black & chicago PD.", "token_idx_1": 11, "text_start_1": 45, "text_end_1": 54, "date_1": "2019-07", "text_2": "bro i love lauren lakes paternity court\ud83d\ude02", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["I", "don't", "even", "watch", "tv", "but", ",", "I", "can", "binge", "watch", "paternity", "court", ",", "orange", "is", "the", "new", "black", "&", "chicago", "PD", "."], "text_2_tokenized": ["bro", "i", "love", "lauren", "lakes", "paternity", "court", "\ud83d\ude02"]} -{"id": "3755-paternity", "word": "paternity", "label_binary": 1, "text_1": "Ravi is an idiot for not having Ingrid take a paternity test especially when he can't \"remember\" and doesn't know her. #GrandHotel", "token_idx_1": 10, "text_start_1": 46, "text_end_1": 55, "date_1": "2019-07", "text_2": "Tonight I was watching the baseball game with my 11-year-old-son. After I explained to him that Albert Pujols' last name was pronounced \"Pooh-Holes\", he laughed for 5 minutes straight. No paternity test needed. This is definitely my kid. #YouAreTheFather #LetsGoGiants :)", "token_idx_2": 38, "text_start_2": 188, "text_end_2": 197, "date_2": "2020-07", "text_1_tokenized": ["Ravi", "is", "an", "idiot", "for", "not", "having", "Ingrid", "take", "a", "paternity", "test", "especially", "when", "he", "can't", "\"", "remember", "\"", "and", "doesn't", "know", "her", ".", "#GrandHotel"], "text_2_tokenized": ["Tonight", "I", "was", "watching", "the", "baseball", "game", "with", "my", "11", "-", "year-old-son", ".", "After", "I", "explained", "to", "him", "that", "Albert", "Pujols", "'", "last", "name", "was", "pronounced", "\"", "Pooh-Holes", "\"", ",", "he", "laughed", "for", "5", "minutes", "straight", ".", "No", "paternity", "test", "needed", ".", "This", "is", "definitely", "my", "kid", ".", "#YouAreTheFather", "#LetsGoGiants", ":)"]} -{"id": "3756-paternity", "word": "paternity", "label_binary": 0, "text_1": "people on paternity court don't use condoms clearly\ud83d\ude2d \"plaintiff met defendant in club and they had sexual relations, now ms X is trying to prove mr Y fathered her 2 month old twins\" BATHONG??\ud83d\udc80\ud83e\udd23", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 19, "date_1": "2019-07", "text_2": "Sweating harder than Boris on a paternity test. 
Warm one today \ud83e\udd75\ud83d\udd25", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 41, "date_2": "2020-07", "text_1_tokenized": ["people", "on", "paternity", "court", "don't", "use", "condoms", "clearly", "\ud83d\ude2d", "\"", "plaintiff", "met", "defendant", "in", "club", "and", "they", "had", "sexual", "relations", ",", "now", "ms", "X", "is", "trying", "to", "prove", "mr", "Y", "fathered", "her", "2", "month", "old", "twins", "\"", "BATHONG", "?", "?", "\ud83d\udc80", "\ud83e\udd23"], "text_2_tokenized": ["Sweating", "harder", "than", "Boris", "on", "a", "paternity", "test", ".", "Warm", "one", "today", "\ud83e\udd75", "\ud83d\udd25"]} -{"id": "3757-paternity", "word": "paternity", "label_binary": 0, "text_1": "VERY thankful and grateful for the paternity time I've been able to take, but today's the last day. Back to work tomorrow. I've just accepted the fact that I'm going to randomly burst into tears throughout the day. I love our new little family so much. \ud83d\ude22", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 44, "date_1": "2019-07", "text_2": "Maury show & paternity court>>>", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 26, "date_2": "2020-07", "text_1_tokenized": ["VERY", "thankful", "and", "grateful", "for", "the", "paternity", "time", "I've", "been", "able", "to", "take", ",", "but", "today's", "the", "last", "day", ".", "Back", "to", "work", "tomorrow", ".", "I've", "just", "accepted", "the", "fact", "that", "I'm", "going", "to", "randomly", "burst", "into", "tears", "throughout", "the", "day", ".", "I", "love", "our", "new", "little", "family", "so", "much", ".", "\ud83d\ude22"], "text_2_tokenized": ["Maury", "show", "&", "paternity", "court", ">", ">", ">"]} -{"id": "3758-paternity", "word": "paternity", "label_binary": 1, "text_1": "Update: he is not the father, he took a paternity test to be sure & still has the paperwork as proof. I knew he was one of the good ones. Man some girls tho I swear.... idgi....", "token_idx_1": 11, "text_start_1": 40, "text_end_1": 49, "date_1": "2019-07", "text_2": "Did we have a paternity test yet? #SiestaKey BG", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 23, "date_2": "2020-07", "text_1_tokenized": ["Update", ":", "he", "is", "not", "the", "father", ",", "he", "took", "a", "paternity", "test", "to", "be", "sure", "&", "still", "has", "the", "paperwork", "as", "proof", ".", "I", "knew", "he", "was", "one", "of", "the", "good", "ones", ".", "Man", "some", "girls", "tho", "I", "swear", "...", "idgi", "..."], "text_2_tokenized": ["Did", "we", "have", "a", "paternity", "test", "yet", "?", "#SiestaKey", "BG"]} -{"id": "3759-paternity", "word": "paternity", "label_binary": 1, "text_1": "\"Took the DNA paternity test at a Labcorp in Gretna, LA. Received results in 5 business days. 
Quick reliable service.\" -JBason, Gretna LA #gretna", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 23, "date_1": "2019-07", "text_2": "Paid/ unpaid paternity leave for new parents should be mandatory Period; it is unethical that any employer who considers themselves decent would reject/ retaliate against an employee who Needs time off to care for their child.", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["\"", "Took", "the", "DNA", "paternity", "test", "at", "a", "Labcorp", "in", "Gretna", ",", "LA", ".", "Received", "results", "in", "5", "business", "days", ".", "Quick", "reliable", "service", ".", "\"", "-", "JBason", ",", "Gretna", "LA", "#gretna"], "text_2_tokenized": ["Paid", "/", "unpaid", "paternity", "leave", "for", "new", "parents", "should", "be", "mandatory", "Period", ";", "it", "is", "unethical", "that", "any", "employer", "who", "considers", "themselves", "decent", "would", "reject", "/", "retaliate", "against", "an", "employee", "who", "Needs", "time", "off", "to", "care", "for", "their", "child", "."]} -{"id": "3760-paternity", "word": "paternity", "label_binary": 0, "text_1": "me: soft hours also me: watching paternity court so And I oop- hours", "token_idx_1": 8, "text_start_1": 33, "text_end_1": 42, "date_1": "2019-07", "text_2": "A finding of paternity brings the obligation to pay child support but also includes the right to parenting time with a child.", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["me", ":", "soft", "hours", "also", "me", ":", "watching", "paternity", "court", "so", "And", "I", "oop", "-", "hours"], "text_2_tokenized": ["A", "finding", "of", "paternity", "brings", "the", "obligation", "to", "pay", "child", "support", "but", "also", "includes", "the", "right", "to", "parenting", "time", "with", "a", "child", "."]} -{"id": "3761-paternity", "word": "paternity", "label_binary": 1, "text_1": "What if Harry Potter didn't look like his dad, that's just something Wizarding people always say to children because to do otherwise questions said child's paternity, a deadly insult. What if Harry looked like Lily.", "token_idx_1": 26, "text_start_1": 156, "text_end_1": 165, "date_1": "2019-07", "text_2": "You should get a ring every time you beat a paternity test.", "token_idx_2": 10, "text_start_2": 44, "text_end_2": 53, "date_2": "2020-07", "text_1_tokenized": ["What", "if", "Harry", "Potter", "didn't", "look", "like", "his", "dad", ",", "that's", "just", "something", "Wizarding", "people", "always", "say", "to", "children", "because", "to", "do", "otherwise", "questions", "said", "child's", "paternity", ",", "a", "deadly", "insult", ".", "What", "if", "Harry", "looked", "like", "Lily", "."], "text_2_tokenized": ["You", "should", "get", "a", "ring", "every", "time", "you", "beat", "a", "paternity", "test", "."]} -{"id": "3762-paternity", "word": "paternity", "label_binary": 1, "text_1": "Just opened my A Level results, I got a D an A and another D. In other news, I also opened the paternity test and I got 3 \u2018U's which I'm pretty sure means I'm just the Uncle.", "token_idx_1": 25, "text_start_1": 95, "text_end_1": 104, "date_1": "2019-07", "text_2": "I wish you could get maternity/paternity leave for new dogs. I just need like a week or two so I can welcome my new dog home and get him on a schedule. 
Pawternity leave should be real.", "token_idx_2": 7, "text_start_2": 31, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["Just", "opened", "my", "A", "Level", "results", ",", "I", "got", "a", "D", "an", "A", "and", "another", "D", ".", "In", "other", "news", ",", "I", "also", "opened", "the", "paternity", "test", "and", "I", "got", "3", "\u2018", "U's", "which", "I'm", "pretty", "sure", "means", "I'm", "just", "the", "Uncle", "."], "text_2_tokenized": ["I", "wish", "you", "could", "get", "maternity", "/", "paternity", "leave", "for", "new", "dogs", ".", "I", "just", "need", "like", "a", "week", "or", "two", "so", "I", "can", "welcome", "my", "new", "dog", "home", "and", "get", "him", "on", "a", "schedule", ".", "Pawternity", "leave", "should", "be", "real", "."]} -{"id": "3763-paternity", "word": "paternity", "label_binary": 1, "text_1": "Should've kept Haseley up, used him as the CF and Alternated Dickerson and Bruce in LF. Used Quinn as the PH and late game defense replacement ala micheal bourn. Plus isn't Bryce going out soon for paternity leave? \ud83e\udd14", "token_idx_1": 39, "text_start_1": 198, "text_end_1": 207, "date_1": "2019-07", "text_2": "And I'm not talking about the \u201ctruth\u201d of a political position. Or the truth of a decision between two options. I'm talking about the truth of brotherhood and sisterhood. Of intimacy. Of paternity and maternity. Let that truth guide you.", "token_idx_2": 38, "text_start_2": 186, "text_end_2": 195, "date_2": "2020-07", "text_1_tokenized": ["Should've", "kept", "Haseley", "up", ",", "used", "him", "as", "the", "CF", "and", "Alternated", "Dickerson", "and", "Bruce", "in", "LF", ".", "Used", "Quinn", "as", "the", "PH", "and", "late", "game", "defense", "replacement", "ala", "micheal", "bourn", ".", "Plus", "isn't", "Bryce", "going", "out", "soon", "for", "paternity", "leave", "?", "\ud83e\udd14"], "text_2_tokenized": ["And", "I'm", "not", "talking", "about", "the", "\u201c", "truth", "\u201d", "of", "a", "political", "position", ".", "Or", "the", "truth", "of", "a", "decision", "between", "two", "options", ".", "I'm", "talking", "about", "the", "truth", "of", "brotherhood", "and", "sisterhood", ".", "Of", "intimacy", ".", "Of", "paternity", "and", "maternity", ".", "Let", "that", "truth", "guide", "you", "."]} -{"id": "3764-paternity", "word": "paternity", "label_binary": 1, "text_1": "What's on my mind, FB? Do the people who go on Maury Povich for paternity tests know that they can do that test without going on a TV show and airing all their dirty laundry? \ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14\ud83e\udd14", "token_idx_1": 16, "text_start_1": 64, "text_end_1": 73, "date_1": "2019-07", "text_2": "Bozza was never in hospital. So he's on death's door, almost on a ventilator, yet days later he's bounced back & tip-top. Caz had the kid earlier than reported and he's had 2 extra week's paternity leave while feeding his propaganda machine. 
#boristellslies #BorisMustGo #KBF", "token_idx_2": 39, "text_start_2": 192, "text_end_2": 201, "date_2": "2020-07", "text_1_tokenized": ["What's", "on", "my", "mind", ",", "FB", "?", "Do", "the", "people", "who", "go", "on", "Maury", "Povich", "for", "paternity", "tests", "know", "that", "they", "can", "do", "that", "test", "without", "going", "on", "a", "TV", "show", "and", "airing", "all", "their", "dirty", "laundry", "?", "\ud83e\udd14", "\ud83e\udd14", "\ud83e\udd14"], "text_2_tokenized": ["Bozza", "was", "never", "in", "hospital", ".", "So", "he's", "on", "death's", "door", ",", "almost", "on", "a", "ventilator", ",", "yet", "days", "later", "he's", "bounced", "back", "&", "tip-top", ".", "Caz", "had", "the", "kid", "earlier", "than", "reported", "and", "he's", "had", "2", "extra", "week's", "paternity", "leave", "while", "feeding", "his", "propaganda", "machine", ".", "#boristellslies", "#BorisMustGo", "#KBF"]} -{"id": "3765-paternity", "word": "paternity", "label_binary": 0, "text_1": "All I do is watch paternity court on YouTube", "token_idx_1": 5, "text_start_1": 18, "text_end_1": 27, "date_1": "2019-07", "text_2": "I wish I was on paternity leave", "token_idx_2": 5, "text_start_2": 16, "text_end_2": 25, "date_2": "2020-07", "text_1_tokenized": ["All", "I", "do", "is", "watch", "paternity", "court", "on", "YouTube"], "text_2_tokenized": ["I", "wish", "I", "was", "on", "paternity", "leave"]} -{"id": "3766-paternity", "word": "paternity", "label_binary": 1, "text_1": "Finally catching up on #TheOrville while on paternity leave and I kinda like the grungy, dystopian version instead of the sterile \u201cTrek\u201d knock off.", "token_idx_1": 7, "text_start_1": 44, "text_end_1": 53, "date_1": "2019-07", "text_2": "One player has the power to end the entire @MLB season and that player is @MikeTrout - if Trout says \u201cno I'm not coming back from paternity leave...dosent feel safe for my family or my new daughter\u201d then boom MLB season over - guaranteed \u26be\ufe0f \ud83d\udca5 #shutdownmlb", "token_idx_2": 27, "text_start_2": 130, "text_end_2": 139, "date_2": "2020-07", "text_1_tokenized": ["Finally", "catching", "up", "on", "#TheOrville", "while", "on", "paternity", "leave", "and", "I", "kinda", "like", "the", "grungy", ",", "dystopian", "version", "instead", "of", "the", "sterile", "\u201c", "Trek", "\u201d", "knock", "off", "."], "text_2_tokenized": ["One", "player", "has", "the", "power", "to", "end", "the", "entire", "@MLB", "season", "and", "that", "player", "is", "@MikeTrout", "-", "if", "Trout", "says", "\u201c", "no", "I'm", "not", "coming", "back", "from", "paternity", "leave", "...", "dosent", "feel", "safe", "for", "my", "family", "or", "my", "new", "daughter", "\u201d", "then", "boom", "MLB", "season", "over", "-", "guaranteed", "\u26be", "\ufe0f", "\ud83d\udca5", "#shutdownmlb"]} -{"id": "3767-paternity", "word": "paternity", "label_binary": 1, "text_1": "SEEKING Stories : If you have ever appeared on the Murray Show for a paternity test , I would love to hear your story!!! If anyone knows someone send em my way @WhatsRaySaying", "token_idx_1": 14, "text_start_1": 69, "text_end_1": 78, "date_1": "2019-07", "text_2": "How would women feel if paternity tests were required right after birth? 
\ud83e\udd74", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["SEEKING", "Stories", ":", "If", "you", "have", "ever", "appeared", "on", "the", "Murray", "Show", "for", "a", "paternity", "test", ",", "I", "would", "love", "to", "hear", "your", "story", "!", "!", "!", "If", "anyone", "knows", "someone", "send", "em", "my", "way", "@WhatsRaySaying"], "text_2_tokenized": ["How", "would", "women", "feel", "if", "paternity", "tests", "were", "required", "right", "after", "birth", "?", "\ud83e\udd74"]} -{"id": "3768-paternity", "word": "paternity", "label_binary": 1, "text_1": "I'm back from paternity leave, and tried my best to stay off this website. So what did I miss in the world of news since July 19??", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 23, "date_1": "2019-07", "text_2": "Thanks to the never-ending bounty of capitalism, I am on never-ending paternity leave with my three week old daughter (furlough led to lay-off, health insurance dries up end of month). \u2764\ufe0f \u2764\ufe0f \u2764\ufe0f", "token_idx_2": 12, "text_start_2": 70, "text_end_2": 79, "date_2": "2020-07", "text_1_tokenized": ["I'm", "back", "from", "paternity", "leave", ",", "and", "tried", "my", "best", "to", "stay", "off", "this", "website", ".", "So", "what", "did", "I", "miss", "in", "the", "world", "of", "news", "since", "July", "19", "?", "?"], "text_2_tokenized": ["Thanks", "to", "the", "never-ending", "bounty", "of", "capitalism", ",", "I", "am", "on", "never-ending", "paternity", "leave", "with", "my", "three", "week", "old", "daughter", "(", "furlough", "led", "to", "lay-off", ",", "health", "insurance", "dries", "up", "end", "of", "month", ")", ".", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f"]} -{"id": "3769-paternity", "word": "paternity", "label_binary": 1, "text_1": "Brian Dozier (paternity list) and Max Scherzer (10-day IL) have been reinstated. Adrian Sanchez and Kyle McGowin both optioned to Harrisburg in corresponding moves.", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 23, "date_1": "2019-07", "text_2": "Aye @urltv ... do y'all offer paternity leave?", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 39, "date_2": "2020-07", "text_1_tokenized": ["Brian", "Dozier", "(", "paternity", "list", ")", "and", "Max", "Scherzer", "(", "10", "-", "day", "IL", ")", "have", "been", "reinstated", ".", "Adrian", "Sanchez", "and", "Kyle", "McGowin", "both", "optioned", "to", "Harrisburg", "in", "corresponding", "moves", "."], "text_2_tokenized": ["Aye", "@urltv", "...", "do", "y'all", "offer", "paternity", "leave", "?"]} -{"id": "3770-paternity", "word": "paternity", "label_binary": 1, "text_1": "It's like sitting in the paternity waiting room.. is it a signing or just a bad case of wind? 
@LUFC", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 34, "date_1": "2019-07", "text_2": "\"fuck them other niggaz, cuz i'm down for my niggaz: an exploration of paternity and homoeroticism within gangster rap\"", "token_idx_2": 16, "text_start_2": 71, "text_end_2": 80, "date_2": "2020-07", "text_1_tokenized": ["It's", "like", "sitting", "in", "the", "paternity", "waiting", "room", "..", "is", "it", "a", "signing", "or", "just", "a", "bad", "case", "of", "wind", "?", "@LUFC"], "text_2_tokenized": ["\"", "fuck", "them", "other", "niggaz", ",", "cuz", "i'm", "down", "for", "my", "niggaz", ":", "an", "exploration", "of", "paternity", "and", "homoeroticism", "within", "gangster", "rap", "\""]} -{"id": "3771-paternity", "word": "paternity", "label_binary": 1, "text_1": "I can't help but imagine that the calls encouraging paternity leave in the US are for Silicon Valley types because regular everyday women struggle with unpaid maternity leave.", "token_idx_1": 9, "text_start_1": 52, "text_end_1": 61, "date_1": "2019-07", "text_2": "Never wa paternity when yuh ina di belly doh... wasteman", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["I", "can't", "help", "but", "imagine", "that", "the", "calls", "encouraging", "paternity", "leave", "in", "the", "US", "are", "for", "Silicon", "Valley", "types", "because", "regular", "everyday", "women", "struggle", "with", "unpaid", "maternity", "leave", "."], "text_2_tokenized": ["Never", "wa", "paternity", "when", "yuh", "ina", "di", "belly", "doh", "...", "wasteman"]} -{"id": "3772-paternity", "word": "paternity", "label_binary": 0, "text_1": "I gotta stop watching paternity court cuz whew", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 31, "date_1": "2019-07", "text_2": "Imagine you have a BLACK SON, who the paternity test determined was yours, and you sued for custody, but you talking crazy about BLACK MEN on the internet trying to advocate for positive change and progress for the child you created? 
That's white privilege...", "token_idx_2": 9, "text_start_2": 38, "text_end_2": 47, "date_2": "2020-07", "text_1_tokenized": ["I", "gotta", "stop", "watching", "paternity", "court", "cuz", "whew"], "text_2_tokenized": ["Imagine", "you", "have", "a", "BLACK", "SON", ",", "who", "the", "paternity", "test", "determined", "was", "yours", ",", "and", "you", "sued", "for", "custody", ",", "but", "you", "talking", "crazy", "about", "BLACK", "MEN", "on", "the", "internet", "trying", "to", "advocate", "for", "positive", "change", "and", "progress", "for", "the", "child", "you", "created", "?", "That's", "white", "privilege", "..."]} -{"id": "3773-paternity", "word": "paternity", "label_binary": 1, "text_1": "Nice that the @Yankees sport announcers are talking about paternity leave for pitchers what do you think @kavithadavidson", "token_idx_1": 9, "text_start_1": 58, "text_end_1": 67, "date_1": "2019-07", "text_2": "If gender doesn't exist then my company owes me 8 more weeks of paternity leave Men - 2 weeks Women - 10 weeks The fuck is that all about?", "token_idx_2": 13, "text_start_2": 64, "text_end_2": 73, "date_2": "2020-07", "text_1_tokenized": ["Nice", "that", "the", "@Yankees", "sport", "announcers", "are", "talking", "about", "paternity", "leave", "for", "pitchers", "what", "do", "you", "think", "@kavithadavidson"], "text_2_tokenized": ["If", "gender", "doesn't", "exist", "then", "my", "company", "owes", "me", "8", "more", "weeks", "of", "paternity", "leave", "Men", "-", "2", "weeks", "Women", "-", "10", "weeks", "The", "fuck", "is", "that", "all", "about", "?"]} -{"id": "3774-paternity", "word": "paternity", "label_binary": 1, "text_1": "Interesting fact about paternity. I am the only MEN in an hotel of 60 rooms that take care of his child for breakfast. It become to change at age 7-8. Then there are more MEN. #Feminism #Equality #why", "token_idx_1": 3, "text_start_1": 23, "text_end_1": 32, "date_1": "2019-07", "text_2": "First show back on the radio today since coming off paternity leave. Let's see if I remember how to press all the buttons. Covering for the quiz master general @pauladdo. Some good tunes, the sport quiz and a load of interviews from the week in sport as well! We're on 2-6pm!", "token_idx_2": 10, "text_start_2": 52, "text_end_2": 61, "date_2": "2020-07", "text_1_tokenized": ["Interesting", "fact", "about", "paternity", ".", "I", "am", "the", "only", "MEN", "in", "an", "hotel", "of", "60", "rooms", "that", "take", "care", "of", "his", "child", "for", "breakfast", ".", "It", "become", "to", "change", "at", "age", "7-8", ".", "Then", "there", "are", "more", "MEN", ".", "#Feminism", "#Equality", "#why"], "text_2_tokenized": ["First", "show", "back", "on", "the", "radio", "today", "since", "coming", "off", "paternity", "leave", ".", "Let's", "see", "if", "I", "remember", "how", "to", "press", "all", "the", "buttons", ".", "Covering", "for", "the", "quiz", "master", "general", "@pauladdo", ".", "Some", "good", "tunes", ",", "the", "sport", "quiz", "and", "a", "load", "of", "interviews", "from", "the", "week", "in", "sport", "as", "well", "!", "We're", "on", "2-6", "pm", "!"]} -{"id": "3775-paternity", "word": "paternity", "label_binary": 1, "text_1": "Back to work tomorrow after 2 weeks off on paternity leave. Welcoming our son into the world has been the most beautiful experience of my life. Any plans at government level to make changes to statutory paternity pay and leave? Asking for a friend... 
@NicolaSturgeon @scotgov", "token_idx_1": 9, "text_start_1": 43, "text_end_1": 52, "date_1": "2019-07", "text_2": "Any woman who lies to a man about the true paternity of a child is a bird. That is a horrible thing to do to someone.", "token_idx_2": 10, "text_start_2": 43, "text_end_2": 52, "date_2": "2020-07", "text_1_tokenized": ["Back", "to", "work", "tomorrow", "after", "2", "weeks", "off", "on", "paternity", "leave", ".", "Welcoming", "our", "son", "into", "the", "world", "has", "been", "the", "most", "beautiful", "experience", "of", "my", "life", ".", "Any", "plans", "at", "government", "level", "to", "make", "changes", "to", "statutory", "paternity", "pay", "and", "leave", "?", "Asking", "for", "a", "friend", "...", "@NicolaSturgeon", "@scotgov"], "text_2_tokenized": ["Any", "woman", "who", "lies", "to", "a", "man", "about", "the", "true", "paternity", "of", "a", "child", "is", "a", "bird", ".", "That", "is", "a", "horrible", "thing", "to", "do", "to", "someone", "."]} -{"id": "3776-paternity", "word": "paternity", "label_binary": 1, "text_1": "Since when does jerry springer do paternity tests? #bb21", "token_idx_1": 6, "text_start_1": 34, "text_end_1": 43, "date_1": "2019-07", "text_2": "Yesterday unno loud up unno self withering the paternity thing and today unno again a loud up yourself with this child support thing", "token_idx_2": 8, "text_start_2": 47, "text_end_2": 56, "date_2": "2020-07", "text_1_tokenized": ["Since", "when", "does", "jerry", "springer", "do", "paternity", "tests", "?", "#bb21"], "text_2_tokenized": ["Yesterday", "unno", "loud", "up", "unno", "self", "withering", "the", "paternity", "thing", "and", "today", "unno", "again", "a", "loud", "up", "yourself", "with", "this", "child", "support", "thing"]} -{"id": "3777-paternity", "word": "paternity", "label_binary": 1, "text_1": "Darla used to have The Bordelons on her side, but then the truth about Blue's paternity came out. It's amazing she hadnt already relapsed before Nova's book. #QueenSugar", "token_idx_1": 16, "text_start_1": 78, "text_end_1": 87, "date_1": "2019-07", "text_2": "Girl Melanie was sooooo trash. 
She swabbed a toddler that wasn't hers for a paternity test", "token_idx_2": 15, "text_start_2": 76, "text_end_2": 85, "date_2": "2020-07", "text_1_tokenized": ["Darla", "used", "to", "have", "The", "Bordelons", "on", "her", "side", ",", "but", "then", "the", "truth", "about", "Blue's", "paternity", "came", "out", ".", "It's", "amazing", "she", "hadnt", "already", "relapsed", "before", "Nova's", "book", ".", "#QueenSugar"], "text_2_tokenized": ["Girl", "Melanie", "was", "sooooo", "trash", ".", "She", "swabbed", "a", "toddler", "that", "wasn't", "hers", "for", "a", "paternity", "test"]} -{"id": "3778-paternity", "word": "paternity", "label_binary": 1, "text_1": "MLB: BRYCE HARPER was placed on paternity leave & didn't play last night against the Marlins - He could miss up to three games with he & his wife , expecting their first child, a boy - Congratulations to the Harper's!", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 41, "date_1": "2019-07", "text_2": "mentioned to my mum that work has been stressful and she's like \"oh you'll be getting two weeks off soon\" yes that's called paternity leave and i'm not exactly banking on it to be altogether relaxing", "token_idx_2": 25, "text_start_2": 124, "text_end_2": 133, "date_2": "2020-07", "text_1_tokenized": ["MLB", ":", "BRYCE", "HARPER", "was", "placed", "on", "paternity", "leave", "&", "didn't", "play", "last", "night", "against", "the", "Marlins", "-", "He", "could", "miss", "up", "to", "three", "games", "with", "he", "&", "his", "wife", ",", "expecting", "their", "first", "child", ",", "a", "boy", "-", "Congratulations", "to", "the", "Harper's", "!"], "text_2_tokenized": ["mentioned", "to", "my", "mum", "that", "work", "has", "been", "stressful", "and", "she's", "like", "\"", "oh", "you'll", "be", "getting", "two", "weeks", "off", "soon", "\"", "yes", "that's", "called", "paternity", "leave", "and", "i'm", "not", "exactly", "banking", "on", "it", "to", "be", "altogether", "relaxing"]} -{"id": "3779-paternity", "word": "paternity", "label_binary": 1, "text_1": "I'm just going to throw this out there \u2014 it's never appropriate to ask a woman about the paternity of her children. Don't ask if they have the same dad. Don't ask if she's still with their dad.", "token_idx_1": 18, "text_start_1": 89, "text_end_1": 98, "date_1": "2019-07", "text_2": "Blake still has a whole month off left!!! He got 10 weeks paternity leave and I'm the one who had the damn baby and I only got 6..... \ud83d\ude42", "token_idx_2": 15, "text_start_2": 58, "text_end_2": 67, "date_2": "2020-07", "text_1_tokenized": ["I'm", "just", "going", "to", "throw", "this", "out", "there", "\u2014", "it's", "never", "appropriate", "to", "ask", "a", "woman", "about", "the", "paternity", "of", "her", "children", ".", "Don't", "ask", "if", "they", "have", "the", "same", "dad", ".", "Don't", "ask", "if", "she's", "still", "with", "their", "dad", "."], "text_2_tokenized": ["Blake", "still", "has", "a", "whole", "month", "off", "left", "!", "!", "!", "He", "got", "10", "weeks", "paternity", "leave", "and", "I'm", "the", "one", "who", "had", "the", "damn", "baby", "and", "I", "only", "got", "6", "...", "\ud83d\ude42"]} -{"id": "3780-paternity", "word": "paternity", "label_binary": 1, "text_1": "Tigers are likely to bring up a reliever tomorrow to take Matthew Boyd's spot on the active roster. Boyd, of course, will get his four-day paternity leave.", "token_idx_1": 28, "text_start_1": 139, "text_end_1": 148, "date_1": "2019-07", "text_2": "Have you listened to Ep.1 our podcast? 
We talk about Black is King and how it represents the african culture and paternity issues in relation to infidelity. #bantstothebonepodcast", "token_idx_2": 24, "text_start_2": 113, "text_end_2": 122, "date_2": "2020-07", "text_1_tokenized": ["Tigers", "are", "likely", "to", "bring", "up", "a", "reliever", "tomorrow", "to", "take", "Matthew", "Boyd's", "spot", "on", "the", "active", "roster", ".", "Boyd", ",", "of", "course", ",", "will", "get", "his", "four-day", "paternity", "leave", "."], "text_2_tokenized": ["Have", "you", "listened", "to", "Ep", ".", "1", "our", "podcast", "?", "We", "talk", "about", "Black", "is", "King", "and", "how", "it", "represents", "the", "african", "culture", "and", "paternity", "issues", "in", "relation", "to", "infidelity", ".", "#bantstothebonepodcast"]}
-{"id": "3781-paternity", "word": "paternity", "label_binary": 0, "text_1": "paternity court is a reminder of why sex education is sooo important", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 9, "date_1": "2019-07", "text_2": "I just really think that if I sign an affidavit promising not to have a kid for five years, I should be allowed at least a month of paid \u201cpaternity\u201d leave.", "token_idx_2": 31, "text_start_2": 138, "text_end_2": 147, "date_2": "2020-07", "text_1_tokenized": ["paternity", "court", "is", "a", "reminder", "of", "why", "sex", "education", "is", "sooo", "important"], "text_2_tokenized": ["I", "just", "really", "think", "that", "if", "I", "sign", "an", "affidavit", "promising", "not", "to", "have", "a", "kid", "for", "five", "years", ",", "I", "should", "be", "allowed", "at", "least", "a", "month", "of", "paid", "\u201c", "paternity", "\u201d", "leave", "."]}
-{"id": "3782-paternity", "word": "paternity", "label_binary": 1, "text_1": "Victor Robles chases down another liner in center field for the last out of the Nationals win 13-0. Asdrubal Cabrera, getting the start with Brian Dozier on the paternity lists, hits a homer in the ninth and ends the day with five RBIs. #Nats @nationals", "token_idx_1": 30, "text_start_1": 161, "text_end_1": 170, "date_1": "2019-07", "text_2": "I hope the paternity test shows 99.999% and sis triple the money", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 20, "date_2": "2020-07", "text_1_tokenized": ["Victor", "Robles", "chases", "down", "another", "liner", "in", "center", "field", "for", "the", "last", "out", "of", "the", "Nationals", "win", "13-0", ".", "Asdrubal", "Cabrera", ",", "getting", "the", "start", "with", "Brian", "Dozier", "on", "the", "paternity", "lists", ",", "hits", "a", "homer", "in", "the", "ninth", "and", "ends", "the", "day", "with", "five", "RBIs", ".", "#Nats", "@nationals"], "text_2_tokenized": ["I", "hope", "the", "paternity", "test", "shows", "99.999", "%", "and", "sis", "triple", "the", "money"]}
-{"id": "3783-paternity", "word": "paternity", "label_binary": 1, "text_1": "Price was placed on the paternity list on Fri. Unknown if he's able to pitch on Sun. If he does, looking at the Over, depending on the total. Over is 7-2-1 in his L10 starts. Last 5 visits to Yankee Stadium, all went Over. Unfortunately, both Happ starts vs BOS stayed Under", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 33, "date_1": "2019-07", "text_2": "If you're going to make up a fake tweet claiming someone said they got paternity tests maybe check to see that you have the right number of children? \nThis is just sloppy.", "token_idx_2": 14, "text_start_2": 71, "text_end_2": 80, "date_2": "2020-07", "text_1_tokenized": ["Price", "was", "placed", "on", "the", "paternity", "list", "on", "Fri", ".", "Unknown", "if", "he's", "able", "to", "pitch", "on", "Sun", ".", "If", "he", "does", ",", "looking", "at", "the", "Over", ",", "depending", "on", "the", "total", ".", "Over", "is", "7-2-", "1", "in", "his", "L10", "starts", ".", "Last", "5", "visits", "to", "Yankee", "Stadium", ",", "all", "went", "Over", ".", "Unfortunately", ",", "both", "Happ", "starts", "vs", "BOS", "stayed", "Under"], "text_2_tokenized": ["If", "you're", "going", "to", "make", "up", "a", "fake", "tweet", "claiming", "someone", "said", "they", "got", "paternity", "tests", "maybe", "check", "to", "see", "that", "you", "have", "the", "right", "number", "of", "children", "?", "This", "is", "just", "sloppy", "."]}
-{"id": "3784-paternity", "word": "paternity", "label_binary": 1, "text_1": "Just back from paternity leave and I am so grateful to my wife for committing 100% of her TIME and her ENERGY do my two boys. There is nothing harder or more self sacrificing and the reason they are so wonderful is because of this 100% dedication. Period.", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "If my kid grew up a bit and started playing fortnite can't lie I'd do paternity test", "token_idx_2": 15, "text_start_2": 70, "text_end_2": 79, "date_2": "2020-07", "text_1_tokenized": ["Just", "back", "from", "paternity", "leave", "and", "I", "am", "so", "grateful", "to", "my", "wife", "for", "committing", "100", "%", "of", "her", "TIME", "and", "her", "ENERGY", "do", "my", "two", "boys", ".", "There", "is", "nothing", "harder", "or", "more", "self", "sacrificing", "and", "the", "reason", "they", "are", "so", "wonderful", "is", "because", "of", "this", "100", "%", "dedication", ".", "Period", "."], "text_2_tokenized": ["If", "my", "kid", "grew", "up", "a", "bit", "and", "started", "playing", "fortnite", "can't", "lie", "I'd", "do", "paternity", "test"]}
-{"id": "3785-paternity", "word": "paternity", "label_binary": 1, "text_1": "Today: Actress @dianeguerrero__ tells the story of her parents being deported, D.C. Councilmember @CMBrandonTodd on news that a shelter for migrant children may be placed in his district, and @alexisohanian says more fathers should take paternity leave. \nListen in!\ud83c\udfa7", "token_idx_1": 41, "text_start_1": 237, "text_end_1": 246, "date_1": "2019-07", "text_2": "The Rays have placed RH Charlie Morton (right shoulder inflammation) on the 10-day IL and reinstated LH Jos\u00e9 Alvarado from the paternity list.", "token_idx_2": 25, "text_start_2": 127, "text_end_2": 136, "date_2": "2020-07", "text_1_tokenized": ["Today", ":", "Actress", "@dianeguerrero__", "tells", "the", "story", "of", "her", "parents", "being", "deported", ",", "D", ".", "C", ".", "Councilmember", "@CMBrandonTodd", "on", "news", "that", "a", "shelter", "for", "migrant", "children", "may", "be", "placed", "in", "his", "district", ",", "and", "@alexisohanian", "says", "more", "fathers", "should", "take", "paternity", "leave", ".", "Listen", "in", "!", "\ud83c\udfa7"], "text_2_tokenized": ["The", "Rays", "have", "placed", "RH", "Charlie", "Morton", "(", "right", "shoulder", "inflammation", ")", "on", "the", "10", "-", "day", "IL", "and", "reinstated", "LH", "Jos\u00e9", "Alvarado", "from", "the", "paternity", "list", "."]}
-{"id": "3786-paternity", "word": "paternity", "label_binary": 1, "text_1": "All you nigg@$ in Houston brothers IDC where's the paternity results", "token_idx_1": 11, "text_start_1": 51, "text_end_1": 60, "date_1": "2019-07", "text_2": "Adam has one more night then we are officially on VACATION! Since we got off maternity and paternity leave we have worked NONSTOP and we are both so so deserving of this time off!! Our mental health is very excited \ud83d\ude4c\ud83c\udffc", "token_idx_2": 18, "text_start_2": 91, "text_end_2": 100, "date_2": "2020-07", "text_1_tokenized": ["All", "you", "nigg", "@", "$", "in", "Houston", "brothers", "IDC", "where's", "the", "paternity", "results"], "text_2_tokenized": ["Adam", "has", "one", "more", "night", "then", "we", "are", "officially", "on", "VACATION", "!", "Since", "we", "got", "off", "maternity", "and", "paternity", "leave", "we", "have", "worked", "NONSTOP", "and", "we", "are", "both", "so", "so", "deserving", "of", "this", "time", "off", "!", "!", "Our", "mental", "health", "is", "very", "excited", "\ud83d\ude4c\ud83c\udffc"]}
-{"id": "3787-paternity", "word": "paternity", "label_binary": 1, "text_1": "I am officially muting anyone advocating for paternity tests for all marriages, every time. There are ZERO circumstances where I would cheat on my husband and any decent woman should be offended by this suggestion because that is the ONLY implication.", "token_idx_1": 7, "text_start_1": 45, "text_end_1": 54, "date_1": "2019-07", "text_2": "2 - Why is maternity longer than paternity? \nIf we're trying to make genders more equal - surely it would be an easy fix to say either parent has the option to take the longer time?", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 42, "date_2": "2020-07", "text_1_tokenized": ["I", "am", "officially", "muting", "anyone", "advocating", "for", "paternity", "tests", "for", "all", "marriages", ",", "every", "time", ".", "There", "are", "ZERO", "circumstances", "where", "I", "would", "cheat", "on", "my", "husband", "and", "any", "decent", "woman", "should", "be", "offended", "by", "this", "suggestion", "because", "that", "is", "the", "ONLY", "implication", "."], "text_2_tokenized": ["2", "-", "Why", "is", "maternity", "longer", "than", "paternity", "?", "If", "we're", "trying", "to", "make", "genders", "more", "equal", "-", "surely", "it", "would", "be", "an", "easy", "fix", "to", "say", "either", "parent", "has", "the", "option", "to", "take", "the", "longer", "time", "?"]}
-{"id": "3788-paternity", "word": "paternity", "label_binary": 1, "text_1": "#BlueJays Ken Giles is back from the paternity leave list #FantasyBaseball", "token_idx_1": 7, "text_start_1": 37, "text_end_1": 46, "date_1": "2019-07", "text_2": "Lito went hard on paternity leave intro", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["#BlueJays", "Ken", "Giles", "is", "back", "from", "the", "paternity", "leave", "list", "#FantasyBaseball"], "text_2_tokenized": ["Lito", "went", "hard", "on", "paternity", "leave", "intro"]}
-{"id": "3789-paternity", "word": "paternity", "label_binary": 1, "text_1": "New job gives me 8 weeks paid paternity leave... I've never seen that, and it's awesome.", "token_idx_1": 7, "text_start_1": 30, "text_end_1": 39, "date_1": "2019-07", "text_2": "timeout lol . So yall saying that man in that video is wrong because he waited to get a paternity test ?????? what about when shorty cheated on him and got pregnant by another man\ud83d\ude29", "token_idx_2": 19, "text_start_2": 88, "text_end_2": 97, "date_2": "2020-07", "text_1_tokenized": ["New", "job", "gives", "me", "8", "weeks", "paid", "paternity", "leave", "...", "I've", "never", "seen", "that", ",", "and", "it's", "awesome", "."], "text_2_tokenized": ["timeout", "lol", ".", "So", "yall", "saying", "that", "man", "in", "that", "video", "is", "wrong", "because", "he", "waited", "to", "get", "a", "paternity", "test", "?", "?", "?", "what", "about", "when", "shorty", "cheated", "on", "him", "and", "got", "pregnant", "by", "another", "man", "\ud83d\ude29"]}
-{"id": "3790-paternity", "word": "paternity", "label_binary": 1, "text_1": "I'm taking paternity leave lol", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 20, "date_1": "2019-07", "text_2": "Parents will do anything to protect their kids. Imagine taking a bullet for another man's child. Thixo! This is why paternity tests are important.", "token_idx_2": 23, "text_start_2": 116, "text_end_2": 125, "date_2": "2020-07", "text_1_tokenized": ["I'm", "taking", "paternity", "leave", "lol"], "text_2_tokenized": ["Parents", "will", "do", "anything", "to", "protect", "their", "kids", ".", "Imagine", "taking", "a", "bullet", "for", "another", "man's", "child", ".", "Thixo", "!", "This", "is", "why", "paternity", "tests", "are", "important", "."]}
-{"id": "3791-paternity", "word": "paternity", "label_binary": 0, "text_1": "An employer whose policy was not to pay Statutory Maternity Pay has (unsurprisingly) lost a discrimination claim. Don't let this be you. \nFor advice on maternity/paternity/shared parental leave and other HR policies contact Haslers HR 020 8418 3333 or email HR@Haslers.com", "token_idx_1": 31, "text_start_1": 161, "text_end_1": 170, "date_1": "2019-07", "text_2": "gonna watch paternity court until I fall asleep \u2764\u2764 stream dynamite", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 21, "date_2": "2020-07", "text_1_tokenized": ["An", "employer", "whose", "policy", "was", "not", "to", "pay", "Statutory", "Maternity", "Pay", "has", "(", "unsurprisingly", ")", "lost", "a", "discrimination", "claim", ".", "Don't", "let", "this", "be", "you", ".", "For", "advice", "on", "maternity", "/", "paternity", "/", "shared", "parental", "leave", "and", "other", "HR", "policies", "contact", "Haslers", "HR", "020 8418", "3333", "or", "email", "HR@Haslers.com"], "text_2_tokenized": ["gonna", "watch", "paternity", "court", "until", "I", "fall", "asleep", "\u2764", "\u2764", "stream", "dynamite"]}
-{"id": "3792-paternity", "word": "paternity", "label_binary": 1, "text_1": "Just a little more paperwork and I'm finally on paternity leave...", "token_idx_1": 9, "text_start_1": 48, "text_end_1": 57, "date_1": "2019-07", "text_2": "Universal state healthcare, shared parental leave and open employer policies on maternity paternity and parental leave. @DFSDublin #workequal report 1/", "token_idx_2": 13, "text_start_2": 90, "text_end_2": 99, "date_2": "2020-07", "text_1_tokenized": ["Just", "a", "little", "more", "paperwork", "and", "I'm", "finally", "on", "paternity", "leave", "..."], "text_2_tokenized": ["Universal", "state", "healthcare", ",", "shared", "parental", "leave", "and", "open", "employer", "policies", "on", "maternity", "paternity", "and", "parental", "leave", ".", "@DFSDublin", "#workequal", "report", "1", "/"]}
-{"id": "3793-paternity", "word": "paternity", "label_binary": 0, "text_1": "What takes are worse, the \"He makes $330 million, he shouldn't get paternity leave\" takes or the \"They should've planned better so they could have the baby in the offseason\" takes?", "token_idx_1": 16, "text_start_1": 67, "text_end_1": 76, "date_1": "2019-07", "text_2": "Y'all need to watch a few episodes of paternity court while y'all talking about these DNA tests \ud83d\ude02\ud83e\udd74", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 47, "date_2": "2020-07", "text_1_tokenized": ["What", "takes", "are", "worse", ",", "the", "\"", "He", "makes", "$", "330", "million", ",", "he", "shouldn't", "get", "paternity", "leave", "\"", "takes", "or", "the", "\"", "They", "should've", "planned", "better", "so", "they", "could", "have", "the", "baby", "in", "the", "offseason", "\"", "takes", "?"], "text_2_tokenized": ["Y'all", "need", "to", "watch", "a", "few", "episodes", "of", "paternity", "court", "while", "y'all", "talking", "about", "these", "DNA", "tests", "\ud83d\ude02", "\ud83e\udd74"]}
-{"id": "3794-paternity", "word": "paternity", "label_binary": 1, "text_1": "Men: I don't want to date a feminist. I hate feminism. I want a traditional relationship with a woman. Also men: What the fuck? My missus expects me to pay for everything, go to work whilst she stays home with the kids, I don't get any paternity leave. \nThis is so unfair!", "token_idx_1": 54, "text_start_1": 236, "text_end_1": 245, "date_1": "2019-07", "text_2": "No to the paternity tests just cuff different bitches", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 19, "date_2": "2020-07", "text_1_tokenized": ["Men", ":", "I", "don't", "want", "to", "date", "a", "feminist", ".", "I", "hate", "feminism", ".", "I", "want", "a", "traditional", "relationship", "with", "a", "woman", ".", "Also", "men", ":", "What", "the", "fuck", "?", "My", "missus", "expects", "me", "to", "pay", "for", "everything", ",", "go", "to", "work", "whilst", "she", "stays", "home", "with", "the", "kids", ",", "I", "don't", "get", "any", "paternity", "leave", ".", "This", "is", "so", "unfair", "!"], "text_2_tokenized": ["No", "to", "the", "paternity", "tests", "just", "cuff", "different", "bitches"]}
-{"id": "3795-paternity", "word": "paternity", "label_binary": 1, "text_1": "when my ex is gonna be on one of these daytime DNA paternity test shows someone please let me record it for personal reasons", "token_idx_1": 12, "text_start_1": 51, "text_end_1": 60, "date_1": "2019-07", "text_2": "I hope #BG was smart and got a paternity test. #SiestaKey \ud83e\udd37\ud83c\udffc\u200d\u2640\ufe0f", "token_idx_2": 8, "text_start_2": 31, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["when", "my", "ex", "is", "gonna", "be", "on", "one", "of", "these", "daytime", "DNA", "paternity", "test", "shows", "someone", "please", "let", "me", "record", "it", "for", "personal", "reasons"], "text_2_tokenized": ["I", "hope", "#BG", "was", "smart", "and", "got", "a", "paternity", "test", ".", "#SiestaKey", "\ud83e\udd37\ud83c\udffc\u200d\u2640", "\ufe0f"]}
-{"id": "3796-paternity", "word": "paternity", "label_binary": 0, "text_1": "true life: I cant stop watching paternity court with lauren lake", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 41, "date_1": "2019-07", "text_2": "mom: you're an asshole, you got all of michael jonas's traits down to the damn face me: at least you know you never needed a paternity test\ud83e\udd37\ud83c\udffe\u200d\u2640\ufe0f", "token_idx_2": 28, "text_start_2": 125, "text_end_2": 134, "date_2": "2020-07", "text_1_tokenized": ["true", "life", ":", "I", "cant", "stop", "watching", "paternity", "court", "with", "lauren", "lake"], "text_2_tokenized": ["mom", ":", "you're", "an", "asshole", ",", "you", "got", "all", "of", "michael", "jonas's", "traits", "down", "to", "the", "damn", "face", "me", ":", "at", "least", "you", "know", "you", "never", "needed", "a", "paternity", "test", "\ud83e\udd37\ud83c\udffe\u200d\u2640", "\ufe0f"]}
-{"id": "3797-paternity", "word": "paternity", "label_binary": 1, "text_1": "My boss is back from paternity leave and I still haven't finished Hegel", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 30, "date_1": "2019-07", "text_2": "Normalize paternity tests fellas \ud83e\udd26\ud83c\udffd\u200d\u2642\ufe0f", "token_idx_2": 1, "text_start_2": 10, "text_end_2": 19, "date_2": "2020-07", "text_1_tokenized": ["My", "boss", "is", "back", "from", "paternity", "leave", "and", "I", "still", "haven't", "finished", "Hegel"], "text_2_tokenized": ["Normalize", "paternity", "tests", "fellas", "\ud83e\udd26\ud83c\udffd\u200d\u2642", "\ufe0f"]}
-{"id": "3798-paternity", "word": "paternity", "label_binary": 1, "text_1": "Weekend paternity leave: wayyyyy too much time off. Hes got 30 million reasons to take ONe day off. Chime in here. \nAm i wrong???", "token_idx_1": 1, "text_start_1": 8, "text_end_1": 17, "date_1": "2019-07", "text_2": "I bought a salmon steak that's Beatles-esque(bigger than Jesus) and my dad declined my cooking because he's not much of a fish eater. I'm getting a fucking paternity test.", "token_idx_2": 31, "text_start_2": 156, "text_end_2": 165, "date_2": "2020-07", "text_1_tokenized": ["Weekend", "paternity", "leave", ":", "wayyyyy", "too", "much", "time", "off", ".", "Hes", "got", "30", "million", "reasons", "to", "take", "ONe", "day", "off", ".", "Chime", "in", "here", ".", "Am", "i", "wrong", "?", "?", "?"], "text_2_tokenized": ["I", "bought", "a", "salmon", "steak", "that's", "Beatles-esque", "(", "bigger", "than", "Jesus", ")", "and", "my", "dad", "declined", "my", "cooking", "because", "he's", "not", "much", "of", "a", "fish", "eater", ".", "I'm", "getting", "a", "fucking", "paternity", "test", "."]}
-{"id": "3799-paternity", "word": "paternity", "label_binary": 1, "text_1": "Because I'm on paternity leave, I'm missing this year's college retreat and our first program meeting. I admit I am having some major FOMO. Then I think about the two student papers I turned around and the R&R I got to this am... and then FOMO \u2b07\ufe0f", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "The Lily paternity issue/ Malcolm & Dru sleeping together was one of the few regrets Bill Bell said he had with what he did. He wanted Lily to be Neil's child not Malcolm's #YR", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["Because", "I'm", "on", "paternity", "leave", ",", "I'm", "missing", "this", "year's", "college", "retreat", "and", "our", "first", "program", "meeting", ".", "I", "admit", "I", "am", "having", "some", "major", "FOMO", ".", "Then", "I", "think", "about", "the", "two", "student", "papers", "I", "turned", "around", "and", "the", "R", "&", "R", "I", "got", "to", "this", "am", "...", "and", "then", "FOMO", "\u2b07", "\ufe0f"], "text_2_tokenized": ["The", "Lily", "paternity", "issue", "/", "Malcolm", "&", "Dru", "sleeping", "together", "was", "one", "of", "the", "few", "regrets", "Bill", "Bell", "said", "he", "had", "with", "what", "he", "did", ".", "He", "wanted", "Lily", "to", "be", "Neil's", "child", "not", "Malcolm's", "#YR"]}
-{"id": "3800-paternity", "word": "paternity", "label_binary": 1, "text_1": "Manager is on paternity leave. Sr Engineer = out . Another Engineer = out. Today, I lead a team that consists of one engineer, a temp, and an intern. #LetsGetIt", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 23, "date_1": "2019-07", "text_2": "My aim is to offer free confidential telephone support to men who have fallen victim to paternity fraud or suspect they have. I can only offer telephone to people in the UK by appointment and within 24 hours. \nFor people outside of the UK I can help through the DM .", "token_idx_2": 16, "text_start_2": 88, "text_end_2": 97, "date_2": "2020-07", "text_1_tokenized": ["Manager", "is", "on", "paternity", "leave", ".", "Sr", "Engineer", "=", "out", ".", "Another", "Engineer", "=", "out", ".", "Today", ",", "I", "lead", "a", "team", "that", "consists", "of", "one", "engineer", ",", "a", "temp", ",", "and", "an", "intern", ".", "#LetsGetIt"], "text_2_tokenized": ["My", "aim", "is", "to", "offer", "free", "confidential", "telephone", "support", "to", "men", "who", "have", "fallen", "victim", "to", "paternity", "fraud", "or", "suspect", "they", "have", ".", "I", "can", "only", "offer", "telephone", "to", "people", "in", "the", "UK", "by", "appointment", "and", "within", "24", "hours", ".", "For", "people", "outside", "of", "the", "UK", "I", "can", "help", "through", "the", "DM", "."]}
-{"id": "3801-paternity", "word": "paternity", "label_binary": 0, "text_1": "Perfect timing for Lil to get induced. . . 2 weeks paternity, back to back tests \ud83d\udc76\ud83c\udffb\ud83c\udfcf #bbccricket", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 60, "date_1": "2019-07", "text_2": "paternity court be hilarious..", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 9, "date_2": "2020-07", "text_1_tokenized": ["Perfect", "timing", "for", "Lil", "to", "get", "induced", ". . .", "2", "weeks", "paternity", ",", "back", "to", "back", "tests", "\ud83d\udc76\ud83c\udffb", "\ud83c\udfcf", "#bbccricket"], "text_2_tokenized": ["paternity", "court", "be", "hilarious", ".."]}
-{"id": "3802-paternity", "word": "paternity", "label_binary": 1, "text_1": "Naira needs to speak up. Need that sherni back. She should at least defend herself like Kartik. She could say if he can question her love and character then he could have doubted the paternity of the child. That was one of the main reason she left. Sherni speak up! #Naira #yrkkh", "token_idx_1": 37, "text_start_1": 183, "text_end_1": 192, "date_1": "2019-07", "text_2": "my mom doesn't like harry potter what the FUCK ?!!!!?????!!!!!! i want a paternity test", "token_idx_2": 22, "text_start_2": 73, "text_end_2": 82, "date_2": "2020-07", "text_1_tokenized": ["Naira", "needs", "to", "speak", "up", ".", "Need", "that", "sherni", "back", ".", "She", "should", "at", "least", "defend", "herself", "like", "Kartik", ".", "She", "could", "say", "if", "he", "can", "question", "her", "love", "and", "character", "then", "he", "could", "have", "doubted", "the", "paternity", "of", "the", "child", ".", "That", "was", "one", "of", "the", "main", "reason", "she", "left", ".", "Sherni", "speak", "up", "!", "#Naira", "#yrkkh"], "text_2_tokenized": ["my", "mom", "doesn't", "like", "harry", "potter", "what", "the", "FUCK", "?", "!", "!", "!", "?", "?", "?", "!", "!", "!", "i", "want", "a", "paternity", "test"]}
-{"id": "3803-paternity", "word": "paternity", "label_binary": 1, "text_1": "Ummm yeah Anthony need to get that paternity test #unexpected", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 44, "date_1": "2019-07", "text_2": "Smaak n paternity leave.. \nolye wii?", "token_idx_2": 2, "text_start_2": 8, "text_end_2": 17, "date_2": "2020-07", "text_1_tokenized": ["Ummm", "yeah", "Anthony", "need", "to", "get", "that", "paternity", "test", "#unexpected"], "text_2_tokenized": ["Smaak", "n", "paternity", "leave", "..", "olye", "wii", "?"]}
-{"id": "3804-paternity", "word": "paternity", "label_binary": 1, "text_1": "\"paternity leave: get the following documentation from your wife/partner/civil partner\" first of all you could save so much space by just saying \"partner\" second of all if you're so set on that then say \"husband\" as well coward", "token_idx_1": 1, "text_start_1": 1, "text_end_1": 10, "date_1": "2019-07", "text_2": "idk why i keep getting paternity clothing adds but it's getting disrespectful", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 32, "date_2": "2020-07", "text_1_tokenized": ["\"", "paternity", "leave", ":", "get", "the", "following", "documentation", "from", "your", "wife", "/", "partner", "/", "civil", "partner", "\"", "first", "of", "all", "you", "could", "save", "so", "much", "space", "by", "just", "saying", "\"", "partner", "\"", "second", "of", "all", "if", "you're", "so", "set", "on", "that", "then", "say", "\"", "husband", "\"", "as", "well", "coward"], "text_2_tokenized": ["idk", "why", "i", "keep", "getting", "paternity", "clothing", "adds", "but", "it's", "getting", "disrespectful"]}
-{"id": "3805-paternity", "word": "paternity", "label_binary": 1, "text_1": "Never knew it was a such thing as paternity leave \ud83d\ude2b\ud83d\ude2b am I tweaking or what? \ud83e\udd37\ud83c\udffd\u200d\u2640\ufe0f", "token_idx_1": 8, "text_start_1": 34, "text_end_1": 43, "date_1": "2019-07", "text_2": "Jordan's nearly finished his 2 weeks paternity and I wanted him to sort the garden but parenting 3 kids in the day actually is pretty difficult!!", "token_idx_2": 6, "text_start_2": 37, "text_end_2": 46, "date_2": "2020-07", "text_1_tokenized": ["Never", "knew", "it", "was", "a", "such", "thing", "as", "paternity", "leave", "\ud83d\ude2b", "\ud83d\ude2b", "am", "I", "tweaking", "or", "what", "?", "\ud83e\udd37\ud83c\udffd\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["Jordan's", "nearly", "finished", "his", "2", "weeks", "paternity", "and", "I", "wanted", "him", "to", "sort", "the", "garden", "but", "parenting", "3", "kids", "in", "the", "day", "actually", "is", "pretty", "difficult", "!", "!"]}
-{"id": "3806-paternity", "word": "paternity", "label_binary": 0, "text_1": "Just when I thought sports fans couldn't get lower, there are actually people angry at Bryce Harper for going on paternity leave to be there for his wife as she gives birth to their child... \nlmfaooo imagine being that much of an insensitive freak", "token_idx_1": 21, "text_start_1": 113, "text_end_1": 122, "date_1": "2019-07", "text_2": "Watching paternity court how you gone come up there with 3 possible men as the father with no shame, whew chile", "token_idx_2": 1, "text_start_2": 9, "text_end_2": 18, "date_2": "2020-07", "text_1_tokenized": ["Just", "when", "I", "thought", "sports", "fans", "couldn't", "get", "lower", ",", "there", "are", "actually", "people", "angry", "at", "Bryce", "Harper", "for", "going", "on", "paternity", "leave", "to", "be", "there", "for", "his", "wife", "as", "she", "gives", "birth", "to", "their", "child", "...", "lmfaooo", "imagine", "being", "that", "much", "of", "an", "insensitive", "freak"], "text_2_tokenized": ["Watching", "paternity", "court", "how", "you", "gone", "come", "up", "there", "with", "3", "possible", "men", "as", "the", "father", "with", "no", "shame", ",", "whew", "chile"]}
-{"id": "3807-paternity", "word": "paternity", "label_binary": 1, "text_1": "We have a \"Taking Care of Baby\" class tonight. I'm not really sure what the curriculum is, but hopefully we learn something! Chris has been a trooper going to things w/ me and getting 0 sleep. His schedule is way off working 3rd shift. At least he'll get 2 wks pd paternity leave!", "token_idx_1": 59, "text_start_1": 264, "text_end_1": 273, "date_1": "2019-07", "text_2": "Our pensions and sick pay is one of the worst in Europe. Our maternity and paternity leave is one of the worst is Europe. We have been conned. This government has managed to dupe those towns and cities into thinking that they will work for you. Open your eyes!!!", "token_idx_2": 16, "text_start_2": 75, "text_end_2": 84, "date_2": "2020-07", "text_1_tokenized": ["We", "have", "a", "\"", "Taking", "Care", "of", "Baby", "\"", "class", "tonight", ".", "I'm", "not", "really", "sure", "what", "the", "curriculum", "is", ",", "but", "hopefully", "we", "learn", "something", "!", "Chris", "has", "been", "a", "trooper", "going", "to", "things", "w", "/", "me", "and", "getting", "0", "sleep", ".", "His", "schedule", "is", "way", "off", "working", "3rd", "shift", ".", "At", "least", "he'll", "get", "2", "wks", "pd", "paternity", "leave", "!"], "text_2_tokenized": ["Our", "pensions", "and", "sick", "pay", "is", "one", "of", "the", "worst", "in", "Europe", ".", "Our", "maternity", "and", "paternity", "leave", "is", "one", "of", "the", "worst", "is", "Europe", ".", "We", "have", "been", "conned", ".", "This", "government", "has", "managed", "to", "dupe", "those", "towns", "and", "cities", "into", "thinking", "that", "they", "will", "work", "for", "you", ".", "Open", "your", "eyes", "!", "!", "!"]}
-{"id": "3808-paternity", "word": "paternity", "label_binary": 1, "text_1": "If I'm Forrest I woulda got a paternity test done right away. No way little Forrest was his the way Jenny was fucking around. #YouareNOTthefather", "token_idx_1": 7, "text_start_1": 30, "text_end_1": 39, "date_1": "2019-07", "text_2": "Wow you've brought secret families into this thing. (sigh) Careful with that one. Too many tweets about men finding out the paternity of their kids. \nAnd there was that recent lie many told here about the domestic violence victim", "token_idx_2": 25, "text_start_2": 124, "text_end_2": 133, "date_2": "2020-07", "text_1_tokenized": ["If", "I'm", "Forrest", "I", "woulda", "got", "a", "paternity", "test", "done", "right", "away", ".", "No", "way", "little", "Forrest", "was", "his", "the", "way", "Jenny", "was", "fucking", "around", ".", "#YouareNOTthefather"], "text_2_tokenized": ["Wow", "you've", "brought", "secret", "families", "into", "this", "thing", ".", "(", "sigh", ")", "Careful", "with", "that", "one", ".", "Too", "many", "tweets", "about", "men", "finding", "out", "the", "paternity", "of", "their", "kids", ".", "And", "there", "was", "that", "recent", "lie", "many", "told", "here", "about", "the", "domestic", "violence", "victim"]}
-{"id": "3809-paternity", "word": "paternity", "label_binary": 1, "text_1": "until men's rights activists fight for paternity leave instead of owning the feminists theyll always be lame.", "token_idx_1": 6, "text_start_1": 39, "text_end_1": 48, "date_1": "2019-07", "text_2": "I find it hilarious that Hunter Biden is used to bolster Joe's character. Let's see- Dishonorable Discharge for Drugs; Denies paternity and gets caught, Takes millions from Ukraine and China b/c he can as Joe's son. Use this guy for character?\ud83d\ude09\ud83d\udc4e", "token_idx_2": 23, "text_start_2": 126, "text_end_2": 135, "date_2": "2020-07", "text_1_tokenized": ["until", "men's", "rights", "activists", "fight", "for", "paternity", "leave", "instead", "of", "owning", "the", "feminists", "theyll", "always", "be", "lame", "."], "text_2_tokenized": ["I", "find", "it", "hilarious", "that", "Hunter", "Biden", "is", "used", "to", "bolster", "Joe's", "character", ".", "Let's", "see", "-", "Dishonorable", "Discharge", "for", "Drugs", ";", "Denies", "paternity", "and", "gets", "caught", ",", "Takes", "millions", "from", "Ukraine", "and", "China", "b", "/", "c", "he", "can", "as", "Joe's", "son", ".", "Use", "this", "guy", "for", "character", "?", "\ud83d\ude09", "\ud83d\udc4e"]}
-{"id": "3810-paternity", "word": "paternity", "label_binary": 1, "text_1": "The longer paternity leave talk would make sense to me if they actually did fatherly things during the leave.", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 20, "date_1": "2019-07", "text_2": "Up next on Unmasking manhood on 3FM 92.7 we're discussing paternity tests! Tune in and let's get interactive! You can also tweet at me what you think! #monkeydeyworkbaboondeychop", "token_idx_2": 10, "text_start_2": 58, "text_end_2": 67, "date_2": "2020-07", "text_1_tokenized": ["The", "longer", "paternity", "leave", "talk", "would", "make", "sense", "to", "me", "if", "they", "actually", "did", "fatherly", "things", "during", "the", "leave", "."], "text_2_tokenized": ["Up", "next", "on", "Unmasking", "manhood", "on", "3FM", "92.7", "we're", "discussing", "paternity", "tests", "!", "Tune", "in", "and", "let's", "get", "interactive", "!", "You", "can", "also", "tweet", "at", "me", "what", "you", "think", "!", "#monkeydeyworkbaboondeychop"]}
-{"id": "3811-paternity", "word": "paternity", "label_binary": 1, "text_1": "asking your gf/wife for a paternity test is disrespectful af.. asking the bitch you accidentally nutted in for a test is protecting yourself.", "token_idx_1": 7, "text_start_1": 26, "text_end_1": 35, "date_1": "2019-07", "text_2": "lool wait men are trying to normalise getting dna tests for their children? \ni'm sorry if my kids dad tries to get a paternity test i will acc break his head bc wtf??", "token_idx_2": 24, "text_start_2": 116, "text_end_2": 125, "date_2": "2020-07", "text_1_tokenized": ["asking", "your", "gf", "/", "wife", "for", "a", "paternity", "test", "is", "disrespectful", "af", "..", "asking", "the", "bitch", "you", "accidentally", "nutted", "in", "for", "a", "test", "is", "protecting", "yourself", "."], "text_2_tokenized": ["lool", "wait", "men", "are", "trying", "to", "normalise", "getting", "dna", "tests", "for", "their", "children", "?", "i'm", "sorry", "if", "my", "kids", "dad", "tries", "to", "get", "a", "paternity", "test", "i", "will", "acc", "break", "his", "head", "bc", "wtf", "?", "?"]}
-{"id": "3812-paternity", "word": "paternity", "label_binary": 1, "text_1": "Hammer the under tonight Sox vs Yanks. Both starting pitchers just came off the paternity list. It's the ultimate matchup of dad strength vs dad strength", "token_idx_1": 15, "text_start_1": 80, "text_end_1": 89, "date_1": "2019-07", "text_2": "The Hayward injury is a significant immediate loss for the Celtics, but ultimately could better prepare them for when he eventually leaves on paternity leave. Next man up.", "token_idx_2": 24, "text_start_2": 142, "text_end_2": 151, "date_2": "2020-07", "text_1_tokenized": ["Hammer", "the", "under", "tonight", "Sox", "vs", "Yanks", ".", "Both", "starting", "pitchers", "just", "came", "off", "the", "paternity", "list", ".", "It's", "the", "ultimate", "matchup", "of", "dad", "strength", "vs", "dad", "strength"], "text_2_tokenized": ["The", "Hayward", "injury", "is", "a", "significant", "immediate", "loss", "for", "the", "Celtics", ",", "but", "ultimately", "could", "better", "prepare", "them", "for", "when", "he", "eventually", "leaves", "on", "paternity", "leave", ".", "Next", "man", "up", "."]}
-{"id": "3813-paternity", "word": "paternity", "label_binary": 1, "text_1": "Nice try @bryceharper3 , but I called paternity leave first. Stop trying to copy me brah. #BabyLemOnTheWay", "token_idx_1": 7, "text_start_1": 38, "text_end_1": 47, "date_1": "2019-07", "text_2": "I truly believe that people are bring inseminated while at the doctors office . I dont even think its their DNA snd it causes false paternity tests Every system has been interfered with.", "token_idx_2": 25, "text_start_2": 132, "text_end_2": 141, "date_2": "2020-07", "text_1_tokenized": ["Nice", "try", "@bryceharper3", ",", "but", "I", "called", "paternity", "leave", "first", ".", "Stop", "trying", "to", "copy", "me", "brah", ".", "#BabyLemOnTheWay"], "text_2_tokenized": ["I", "truly", "believe", "that", "people", "are", "bring", "inseminated", "while", "at", "the", "doctors", "office", ".", "I", "dont", "even", "think", "its", "their", "DNA", "snd", "it", "causes", "false", "paternity", "tests", "Every", "system", "has", "been", "interfered", "with", "."]}
-{"id": "3814-paternity", "word": "paternity", "label_binary": 1, "text_1": "Men should also have the right to abort their paternity if women can abort their maternity. They should have the right as soon as notified. Women should be able to continue having abortions as well. \n#EqualityCantWait", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 55, "date_1": "2019-07", "text_2": "She really snuck in that paternity test and thought shit was sweet \ud83e\udd74", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 34, "date_2": "2020-07", "text_1_tokenized": ["Men", "should", "also", "have", "the", "right", "to", "abort", "their", "paternity", "if", "women", "can", "abort", "their", "maternity", ".", "They", "should", "have", "the", "right", "as", "soon", "as", "notified", ".", "Women", "should", "be", "able", "to", "continue", "having", "abortions", "as", "well", ".", "#EqualityCantWait"], "text_2_tokenized": ["She", "really", "snuck", "in", "that", "paternity", "test", "and", "thought", "shit", "was", "sweet", "\ud83e\udd74"]}
-{"id": "3815-paternity", "word": "paternity", "label_binary": 1, "text_1": "Have no idea why but #Phillies betting line went up from -165 to -170 tonight when it was announced Bryce Harper out on paternity leave. Crazy, but I would lay the number tonight. @SportsRadioWIP", "token_idx_1": 25, "text_start_1": 120, "text_end_1": 129, "date_1": "2019-07", "text_2": "If a man ever gets me pregnant and wants a paternity test I'm 100% here for it! I can't believe the amount of women who lie to these men about these innocenr babies. That's dry coochie energy fasho.", "token_idx_2": 10, "text_start_2": 43, "text_end_2": 52, "date_2": "2020-07", "text_1_tokenized": ["Have", "no", "idea", "why", "but", "#Phillies", "betting", "line", "went", "up", "from", "-", "165", "to", "-", "170", "tonight", "when", "it", "was", "announced", "Bryce", "Harper", "out", "on", "paternity", "leave", ".", "Crazy", ",", "but", "I", "would", "lay", "the", "number", "tonight", ".", "@SportsRadioWIP"], "text_2_tokenized": ["If", "a", "man", "ever", "gets", "me", "pregnant", "and", "wants", "a", "paternity", "test", "I'm", "100", "%", "here", "for", "it", "!", "I", "can't", "believe", "the", "amount", "of", "women", "who", "lie", "to", "these", "men", "about", "these", "innocenr", "babies", ".", "That's", "dry", "coochie", "energy", "fasho", "."]}
-{"id": "3816-paternity", "word": "paternity", "label_binary": 1, "text_1": "How long was Tiffany there that she got pregnant that quickly? If this was Paul she'd be doing a paternity test lol #90DayFiance", "token_idx_1": 20, "text_start_1": 97, "text_end_1": 106, "date_1": "2019-07", "text_2": "My baby daddy asked for a paternity test.... to SKIP HIS COURT ORDERED ONE. Wait it gets better, Only to hire an attorney for a paternity test. What a quality father my son has \ud83e\udd23\ud83e\udd23\ud83e\udd23", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 35, "date_2": "2020-07", "text_1_tokenized": ["How", "long", "was", "Tiffany", "there", "that", "she", "got", "pregnant", "that", "quickly", "?", "If", "this", "was", "Paul", "she'd", "be", "doing", "a", "paternity", "test", "lol", "#90DayFiance"], "text_2_tokenized": ["My", "baby", "daddy", "asked", "for", "a", "paternity", "test", "...", "to", "SKIP", "HIS", "COURT", "ORDERED", "ONE", ".", "Wait", "it", "gets", "better", ",", "Only", "to", "hire", "an", "attorney", "for", "a", "paternity", "test", ".", "What", "a", "quality", "father", "my", "son", "has", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23"]}
-{"id": "3817-paternity", "word": "paternity", "label_binary": 1, "text_1": "Adam says everyone knew but Nick about Christian's paternity. WHY isn't Nick bringing up Adam's letter!? Jesus Josh c'mon now. \n#YR", "token_idx_1": 8, "text_start_1": 51, "text_end_1": 60, "date_1": "2019-07", "text_2": "Capitalism in a nutshell: 4782 types of nappies, baby formula, prams ans strollers. 0 affordable childcare, free & safe playgrounds, paid maternity and paternity leaves.", "token_idx_2": 29, "text_start_2": 156, "text_end_2": 165, "date_2": "2020-07", "text_1_tokenized": ["Adam", "says", "everyone", "knew", "but", "Nick", "about", "Christian's", "paternity", ".", "WHY", "isn't", "Nick", "bringing", "up", "Adam's", "letter", "!", "?", "Jesus", "Josh", "c'mon", "now", ".", "#YR"], "text_2_tokenized": ["Capitalism", "in", "a", "nutshell", ":", "4782", "types", "of", "nappies", ",", "baby", "formula", ",", "prams", "ans", "strollers", ".", "0", "affordable", "childcare", ",", "free", "&", "safe", "playgrounds", ",", "paid", "maternity", "and", "paternity", "leaves", "."]}
-{"id": "3818-paternity", "word": "paternity", "label_binary": 1, "text_1": "I was literally mid sentence speaking about how I hope Harper's paternity leave didn't throw off his groove as he hit that home run. The world is crazy man", "token_idx_1": 11, "text_start_1": 64, "text_end_1": 73, "date_1": "2019-07", "text_2": "#boris #government The buck stops, over there. I am on holiday or paternity leave or whatever", "token_idx_2": 14, "text_start_2": 66, "text_end_2": 75, "date_2": "2020-07", "text_1_tokenized": ["I", "was", "literally", "mid", "sentence", "speaking", "about", "how", "I", "hope", "Harper's", "paternity", "leave", "didn't", "throw", "off", "his", "groove", "as", "he", "hit", "that", "home", "run", ".", "The", "world", "is", "crazy", "man"], "text_2_tokenized": ["#boris", "#government", "The", "buck", "stops", ",", "over", "there", ".", "I", "am", "on", "holiday", "or", "paternity", "leave", "or", "whatever"]}
-{"id": "3819-paternity", "word": "paternity", "label_binary": 1, "text_1": "#Phillies will be without Bryce Harper - he's out on paternity leave!", "token_idx_1": 10, "text_start_1": 53, "text_end_1": 62, "date_1": "2019-07", "text_2": "No maternity/paternity leave in America? Rah.", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 22, "date_2": "2020-07", "text_1_tokenized": ["#Phillies", "will", "be", "without", "Bryce", "Harper", "-", "he's", "out", "on", "paternity", "leave", "!"], "text_2_tokenized": ["No", "maternity", "/", "paternity", "leave", "in", "America", "?", "Rah", "."]}
-{"id": "3820-paternity", "word": "paternity", "label_binary": 0, "text_1": "I've run out of series and movies, even watching paternity court on YouTube now\ud83d\ude43 please share stuff?", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 58, "date_1": "2019-07", "text_2": "There were no paternity tests available around 0 Bc , So 3 dudes showing up at a birth with gifts was probably pretty common. \n#Showerthoughts", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 23, "date_2": "2020-07", "text_1_tokenized": ["I've", "run", "out", "of", "series", "and", "movies", ",", "even", "watching", "paternity", "court", "on", "YouTube", "now", "\ud83d\ude43", "please", "share", "stuff", "?"], "text_2_tokenized": ["There", "were", "no", "paternity", "tests", "available", "around", "0", "Bc", ",", "So", "3", "dudes", "showing", "up", "at", "a", "birth", "with", "gifts", "was", "probably", "pretty", "common", ".", "#Showerthoughts"]}
-{"id": "3821-paternity", "word": "paternity", "label_binary": 1, "text_1": "Tyga and Kylie paternity tests could be differant reasons from mine and Jacob what ever being 0 percent the father of mi daughter ... I had him in court fore the wrong reasons ; Lucky him ...", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "America isn't Canada. It in some instances we should look to them on healthcare, maternity and paternity leave and more! \ud83e\udd37\ud83c\udffd\u200d\u2640\ufe0f", "token_idx_2": 18, "text_start_2": 95, "text_end_2": 104, "date_2": "2020-07", "text_1_tokenized": ["Tyga", "and", "Kylie", "paternity", "tests", "could", "be", "differant", "reasons", "from", "mine", "and", "Jacob", "what", "ever", "being", "0", "percent", "the", "father", "of", "mi", "daughter", "...", "I", "had", "him", "in", "court", "fore", "the", "wrong", "reasons", ";", "Lucky", "him", "..."], "text_2_tokenized": ["America", "isn't", "Canada", ".", "It", "in", "some", "instances", "we", "should", "look", "to", "them", "on", "healthcare", ",", "maternity", "and", "paternity", "leave", "and", "more", "!", "\ud83e\udd37\ud83c\udffd\u200d\u2640", "\ufe0f"]}
-{"id": "3822-paternity", "word": "paternity", "label_binary": 1, "text_1": "My job doesn't offer paternity leave...might gotta quit and sell dope instead.", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 30, "date_1": "2019-07", "text_2": "Last day of work before being off for 16 weeks paternity leave. Tbh, I've been coasting for the past two months with this looming.", "token_idx_2": 10, "text_start_2": 47, "text_end_2": 56, "date_2": "2020-07", "text_1_tokenized": ["My", "job", "doesn't", "offer", "paternity", "leave", "...", "might", "gotta", "quit", "and", "sell", "dope", "instead", "."], "text_2_tokenized": ["Last", "day", "of", "work", "before", "being", "off", "for", "16", "weeks", "paternity", "leave", ".", "Tbh", ",", "I've", "been", "coasting", "for", "the", "past", "two", "months", "with", "this", "looming", "."]}
-{"id": "3823-paternity", "word": "paternity", "label_binary": 0, "text_1": "just gonna watch paternity court and eat ice cream", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-07", "text_2": "How long should you wait to hear back off a job? \nInterview on 2nd July then guy goes off on paternity, emailed for an update and they said they're still working through interviews but will be in touch then nothing for 2 weeks \ud83e\udd14", "token_idx_2": 21, "text_start_2": 92, "text_end_2": 101, "date_2": "2020-07", "text_1_tokenized": ["just", "gonna", "watch", "paternity", "court", "and", "eat", "ice", "cream"], "text_2_tokenized": ["How", "long", "should", "you", "wait", "to", "hear", "back", "off", "a", "job", "?", "Interview", "on", "2nd", "July", "then", "guy", "goes", "off", "on", "paternity", ",", "emailed", "for", "an", "update", "and", "they", "said", "they're", "still", "working", "through", "interviews", "but", "will", "be", "in", "touch", "then", "nothing", "for", "2", "weeks", "\ud83e\udd14"]}
-{"id": "3824-paternity", "word": "paternity", "label_binary": 1, "text_1": "He is on paternity leave today. My day started at 7am. After almost a day of housework + caregiving + meal prep, I fully support his plan to semi retire at 42-43yo. We don't need that much money. Enough to lead the life we defined as comfortable.", "token_idx_1": 3, "text_start_1": 9, "text_end_1": 18, "date_1": "2019-07", "text_2": "Mike Trout has 3 dingers in two games since coming off the paternity list Which of course means he must just like being called daddy", "token_idx_2": 12, "text_start_2": 59, "text_end_2": 68, "date_2": "2020-07", "text_1_tokenized": ["He", "is", "on", "paternity", "leave", "today", ".", "My", "day", "started", "at", "7am", ".", "After", "almost", "a", "day", "of", "housework", "+", "caregiving", "+", "meal", "prep", ",", "I", "fully", "support", "his", "plan", "to", "semi", "retire", "at", "42-43", "yo", ".", "We", "don't", "need", "that", "much", "money", ".", "Enough", "to", "lead", "the", "life", "we", "defined", "as", "comfortable", "."], "text_2_tokenized": ["Mike", "Trout", "has", "3", "dingers", "in", "two", "games", "since", "coming", "off", "the", "paternity", "list", "Which", "of", "course", "means", "he", "must", "just", "like", "being", "called", "daddy"]}
-{"id": "3825-paternity", "word": "paternity", "label_binary": 0, "text_1": "these women on paternity court don't be giving that pussy no time to air out!", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "Only 1 day left of my paternity/annual leave and then back to work after nearly 4 weeks off \ud83d\ude2d\ud83d\ude43\ud83d\udd2b\u26b0\ud83d\udde1", "token_idx_2": 6, "text_start_2": 22, "text_end_2": 31, "date_2": "2020-07", "text_1_tokenized": ["these", "women", "on", "paternity", "court", "don't", "be", "giving", "that", "pussy", "no", "time", "to", "air", "out", "!"], "text_2_tokenized": ["Only", "1", "day", "left", "of", "my", "paternity", "/", "annual", "leave", "and", "then", "back", "to", "work", "after", "nearly", "4", "weeks", "off", "\ud83d\ude2d", "\ud83d\ude43", "\ud83d\udd2b", "\u26b0", "\ud83d\udde1"]}
-{"id": "3826-paternity", "word": "paternity", "label_binary": 1, "text_1": "meg: that teacher was on fraternity leave--i mean paternity pat: ah yes, fraternity leave where youre just chuggin beers", "token_idx_1": 9, "text_start_1": 50, "text_end_1": 59, "date_1": "2019-07", "text_2": "DNA paternity test wi sey ASAFA if it NUH fit they must aquit .", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 13, "date_2": "2020-07", "text_1_tokenized": ["meg", ":", "that", "teacher", "was", "on", "fraternity", "leave--i", "mean", "paternity", "pat", ":", "ah", "yes", ",", "fraternity", "leave", "where", "youre", "just", "chuggin", "beers"], "text_2_tokenized": ["DNA", "paternity", "test", "wi", "sey", "ASAFA", "if", "it", "NUH", "fit", "they", "must", "aquit", "."]}
-{"id": "3827-paternity", "word": "paternity", "label_binary": 0, "text_1": "Am I slightly addicted to paternity court? Mayyyyybbeee", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 35, "date_1": "2019-07", "text_2": "I notice a lot of these paternity test shows feature many black people. Is this type of \"entertainment\" damaging to us? It seems like it pushes the stereotype that black women are hypersexual and promiscuous and black men are deadbeat fathers which is deeply damaging in my view.", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["Am", "I", "slightly", "addicted", "to", "paternity", "court", "?", "Mayyyyybbeee"], "text_2_tokenized": ["I", "notice", "a", "lot", "of", "these", "paternity", "test", "shows", "feature", "many", "black", "people", ".", "Is", "this", "type", "of", "\"", "entertainment", "\"", "damaging", "to", "us", "?", "It", "seems", "like", "it", "pushes", "the", "stereotype", "that", "black", "women", "are", "hypersexual", "and", "promiscuous", "and", "black", "men", "are", "deadbeat", "fathers", "which", "is", "deeply", "damaging", "in", "my", "view", "."]}
-{"id": "3828-paternity", "word": "paternity", "label_binary": 1, "text_1": "Bryce is on paternity leave, a few thoughts: 1. Congrats Bryce, prayers will be with Kayla and am genuinely happy for you both. Treasure the moment, its one of the best a Dad can ever have. 2. Take as long as you need, Nats fans won't mind!", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 21, "date_1": "2019-07", "text_2": "Wait why did Ben Shapiro need to have paternity tests for \u201chis\u201d kids\uff1f", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 47, "date_2": "2020-07", "text_1_tokenized": ["Bryce", "is", "on", "paternity", "leave", ",", "a", "few", "thoughts", ":", "1", ".", "Congrats", "Bryce", ",", "prayers", "will", "be", "with", "Kayla", "and", "am", "genuinely", "happy", "for", "you", "both", ".", "Treasure", "the", "moment", ",", "its", "one", "of", "the", "best", "a", "Dad", "can", "ever", "have", ".", "2", ".", "Take", "as", "long", "as", "you", "need", ",", "Nats", "fans", "won't", "mind", "!"], "text_2_tokenized": ["Wait", "why", "did", "Ben", "Shapiro", "need", "to", "have", "paternity", "tests", "for", "\u201c", "his", "\u201d", "kids", "\uff1f"]}
-{"id": "3829-paternity", "word": "paternity", "label_binary": 0, "text_1": "ingl, i love watching paternity court", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 31, "date_1": "2019-07", "text_2": "Frank talking about Moesha need to fake a drug test... why don't you take a paternity test WHORE \ud83e\uddd0", "token_idx_2": 16, "text_start_2": 76, "text_end_2": 85, "date_2": "2020-07", "text_1_tokenized": ["ingl", ",", "i", "love", "watching", "paternity", "court"], "text_2_tokenized": ["Frank", "talking", "about", "Moesha", "need", "to", "fake", "a", "drug", "test", "...", "why", "don't", "you", "take", "a", "paternity", "test", "WHORE", "\ud83e\uddd0"]}
-{"id": "3830-paternity", "word": "paternity", "label_binary": 1, "text_1": "And that's why I always said I'm getting paternity tested immediately upon birth idc if it's my wife or whoever. \nI refuse to be an example for some random bitch's narrative if I flash out from the worse occurring", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 50, "date_1": "2019-07", "text_2": "Females can be to sensitive sometimes if he ask u for a paternity test why be mad cuz he want proof that u were honest ijs \ud83d\ude42\ud83d\udcaf", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 65, "date_2": "2020-07", "text_1_tokenized": ["And", "that's", "why", "I", "always", "said", "I'm", "getting", "paternity", "tested", "immediately", "upon", "birth", "idc", "if", "it's", "my", "wife", "or", "whoever", ".", "I", "refuse", "to", "be", "an", "example", "for", "some", "random", "bitch's", "narrative", "if", "I", "flash", "out", "from", "the", "worse", "occurring"], "text_2_tokenized": ["Females", "can", "be", "to", "sensitive", "sometimes", "if", "he", "ask", "u", "for", "a", "paternity", "test", "why", "be", "mad", "cuz", "he", "want", "proof", "that", "u", "were", "honest", "ijs", "\ud83d\ude42", "\ud83d\udcaf"]}
-{"id": "3831-paternity", "word": "paternity", "label_binary": 1, "text_1": "This woman really got me in a paternity store rn smh", "token_idx_1": 7, "text_start_1": 30, "text_end_1": 39, "date_1": "2019-07", "text_2": "Just got approved for a month of paid paternity leave for the month of October. Can I say again how grateful I am I got this job, and got it when I did?", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 47, "date_2": "2020-07", "text_1_tokenized": ["This", "woman", "really", "got", "me", "in", "a", "paternity", "store", "rn", "smh"], "text_2_tokenized": ["Just", "got", "approved", "for", "a", "month", "of", "paid", "paternity", "leave", "for", "the", "month", "of", "October", ".", "Can", "I", "say", "again", "how", "grateful", "I", "am", "I", "got", "this", "job", ",", "and", "got", "it", "when", "I", "did", "?"]}
-{"id": "3832-paternity", "word": "paternity", "label_binary": 1, "text_1": "Now he's doing a paternity test?!#BingeWatchingVeronicaMars", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 26, "date_1": "2019-07", "text_2": "Did Asafa get that paternity test done?", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 28, "date_2": "2020-07", "text_1_tokenized": ["Now", "he's", "doing", "a", "paternity", "test", "?", "!", "#BingeWatchingVeronicaMars"], "text_2_tokenized": ["Did", "Asafa", "get", "that", "paternity", "test", "done", "?"]}
-{"id": "3833-paternity", "word": "paternity", "label_binary": 1, "text_1": "The time for paternity is now, phakama Sechaba. #isidingo", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 22, "date_1": "2019-07", "text_2": "Careful to pass judgment when you don't have all the facts. Asafa muss know why him a do paternity test", "token_idx_2": 19, "text_start_2": 89, "text_end_2": 98, "date_2": "2020-07", "text_1_tokenized": ["The", "time", "for", "paternity", "is", "now", ",", "phakama", "Sechaba", ".", "#isidingo"], "text_2_tokenized": ["Careful", "to", "pass", "judgment", "when", "you", "don't", "have", "all", "the", "facts", ".", "Asafa", "muss", "know", "why", "him", "a", "do", "paternity", "test"]}
-{"id": "3834-paternity", "word": "paternity", "label_binary": 1, "text_1": "Since when was paternity established at funerals? \n\ud83d\uddd1 gafana", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 24, "date_1": "2019-07", "text_2": "The Phillies have made one roster move (outside putting Reggie McClain on the paternity list) and that was sending down Cole Irvin for Spencer Howard.", "token_idx_2": 14, "text_start_2": 78, "text_end_2": 87, "date_2": "2020-07", "text_1_tokenized": ["Since", "when", "was", "paternity", "established", "at", "funerals", "?", "\ud83d\uddd1", "gafana"], "text_2_tokenized": ["The", "Phillies", "have", "made", "one", "roster", "move", "(", "outside", "putting", "Reggie", "McClain", "on", "the", "paternity", "list", ")", "and", "that", "was", "sending", "down", "Cole", "Irvin", "for", "Spencer", "Howard", "."]}
-{"id": "3835-paternity", "word": "paternity", "label_binary": 1, "text_1": "I'm balling my eyes out watching paternity court at work \ud83d\ude2b\ud83d\ude2b\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 42, "date_1": "2019-07", "text_2": "Idk why people on paternity court be so forgiving lol. You deny my kids FUCK YOU!", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["I'm", "balling", "my", "eyes", "out", "watching", "paternity", "court", "at", "work", "\ud83d\ude2b", "\ud83d\ude2b", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["Idk", "why", "people", "on", "paternity", "court", "be", "so", "forgiving", "lol", ".", "You", "deny", "my", "kids", "FUCK", "YOU", "!"]}
-{"id": "3836-paternity", "word": "paternity", "label_binary": 0, "text_1": "All I do is watch paternity court on YouTube. Lmao \ud83d\ude2d\ud83e\udd26\ud83c\udffe\u200d\u2640\ufe0f", "token_idx_1": 5, "text_start_1": 18, "text_end_1": 27, "date_1": "2019-07", "text_2": "No penis, no opinion on paternity.", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["All", "I", "do", "is", "watch", "paternity", "court", "on", "YouTube", ".", "Lmao", "\ud83d\ude2d", "\ud83e\udd26\ud83c\udffe\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["No", "penis", ",", "no", "opinion", "on", "paternity", "."]}
-{"id": "3837-paternity", "word": "paternity", "label_binary": 1, "text_1": "Two doctors coming on bold ..... either someone is pregnant or a paternity switch is going to happen. \n#BoldandBeautiful", "token_idx_1": 12, "text_start_1": 65, "text_end_1": 74, "date_1": "2019-07", "text_2": "Are you open to a paternity test, if your spouse asks for it?", "token_idx_2": 5, "text_start_2": 18, "text_end_2": 27, "date_2": "2020-07", "text_1_tokenized": ["Two", "doctors", "coming", "on", "bold", "...", "either", "someone", "is", "pregnant", "or", "a", "paternity", "switch", "is", "going", "to", "happen", ".", "#BoldandBeautiful"], "text_2_tokenized": ["Are", "you", "open", "to", "a", "paternity", "test", ",", "if", "your", "spouse", "asks", "for", "it", "?"]}
-{"id": "3838-paternity", "word": "paternity", "label_binary": 0, "text_1": "applying for a week of paternity leave 9.5 months after my baby was born is a p cool feeling", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 32, "date_1": "2019-07", "text_2": "these ppl wildin out on paternity court \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["applying", "for", "a", "week", "of", "paternity", "leave", "9.5", "months", "after", "my", "baby", "was", "born", "is", "a", "p", "cool", "feeling"], "text_2_tokenized": ["these", "ppl", "wildin", "out", "on", "paternity", "court", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"]}
-{"id": "3839-paternity", "word": "paternity", "label_binary": 1, "text_1": "The positive spillover and crossover of paternity leave use: A new perspective", "token_idx_1": 6, "text_start_1": 40, "text_end_1": 49, "date_1": "2019-07", "text_2": "Wait! Y'all ain't know that paternity video was fake?! That's why y'all were so pressed on it", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 37, "date_2": "2020-07", "text_1_tokenized": ["The", "positive", "spillover", "and", "crossover", "of", "paternity", "leave", "use", ":", "A", "new", "perspective"], "text_2_tokenized": ["Wait", "!", "Y'all", "ain't", "know", "that", "paternity", "video", "was", "fake", "?", "!", "That's", "why", "y'all", "were", "so", "pressed", "on", "it"]}
-{"id": "3840-paternity", "word": "paternity", "label_binary": 0, "text_1": "The New York Times uses race to attack liberalism, capitalism and America because the New York Times knows that most American intellectuals & professors will protect themselves by shutting up rather than get smeared and damaged personally defending American's liberal paternity.", "token_idx_1": 41, "text_start_1": 272, "text_end_1": 281, "date_1": "2019-07", "text_2": "My daddy gotta stop watching paternity court. \nHe asked his BM is that my son \ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude02 my brother is 43", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 38, "date_2": "2020-07", "text_1_tokenized": ["The", "New", "York", "Times", "uses", "race", "to", "attack", "liberalism", ",", "capitalism", "and", "America", "because", "the", "New", "York", "Times", "knows", "that", "most", "American", "intellectuals", "&", "professors", "will", "protect", "themselves", "by", "shutting", "up", "rather", "than", "get", "smeared", "and", "damaged", "personally", "defending", "American's", "liberal", "paternity", "."], "text_2_tokenized": ["My", "daddy", "gotta", "stop", "watching", "paternity", "court", ".", "He", "asked", "his", "BM", "is", "that", "my", "son", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02", "my", "brother", "is", "43"]}
-{"id": "3841-paternity", "word": "paternity", "label_binary": 0, "text_1": "I can't sleep for shit so the only thing I can do is watch paternity court \ud83d\ude2d", "token_idx_1": 14, "text_start_1": 59, "text_end_1": 68, "date_1": "2019-07", "text_2": "Ah Jamaica dis... woman dem nuh easy! Dem always get exposed when a filing time fi gaah Merikah... drop eh paternity test deh\ud83e\udd28\ud83e\udd28\ud83e\udd28", "token_idx_2": 23, "text_start_2": 107, "text_end_2": 116, "date_2": "2020-07", "text_1_tokenized": ["I", "can't", "sleep", "for", "shit", "so", "the", "only", "thing", "I", "can", "do", "is", "watch", "paternity", "court", "\ud83d\ude2d"], "text_2_tokenized": ["Ah", "Jamaica", "dis", "...", "woman", "dem", "nuh", "easy", "!", "Dem", "always", "get", "exposed", "when", "a", "filing", "time", "fi", "gaah", "Merikah", "...", "drop", "eh", "paternity", "test", "deh", "\ud83e\udd28", "\ud83e\udd28", "\ud83e\udd28"]}
-{"id": "3842-paternity", "word": "paternity", "label_binary": 1, "text_1": "#adopted #hertfordshire have any of you succeeded in getting birth father's name out of @hertscc? The original adoption agency redacted the name (even though he admitted paternity and gave plenty of identifying information (subsequently redacted by agency). #HCCTellMeMyDadsName", "token_idx_1": 28, "text_start_1": 170, "text_end_1": 179, "date_1": "2019-07", "text_2": "forcing my wife to take a paternity test to own the libs", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 35, "date_2": "2020-07", "text_1_tokenized": ["#adopted", "#hertfordshire", "have", "any", "of", "you", "succeeded", "in", "getting", "birth", "father's", "name", "out", "of", "@hertscc", "?", "The", "original", "adoption", "agency", "redacted", "the", "name", "(", "even", "though", "he", "admitted", "paternity", "and", "gave", "plenty", "of", "identifying", "information", "(", "subsequently", "redacted", "by", "agency", ")", ".", "#HCCTellMeMyDadsName"], "text_2_tokenized": ["forcing", "my", "wife", "to", "take", "a", "paternity", "test", "to", "own", "the", "libs"]}
-{"id": "3843-paternity", "word": "paternity", "label_binary": 1, "text_1": "Its funny how in well developed countries population growth is never an issue ,Its women having rights and autonomy and ofc paternity leave", "token_idx_1": 22, "text_start_1": 124, "text_end_1": 133, "date_1": "2019-07", "text_2": "Mike Trout had to go on paternity leave then hit a home run on his very first swing back because he just needed the freakin' challenge. \n#LAAvsSEA", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 33, "date_2": "2020-07", "text_1_tokenized": ["Its", "funny", "how", "in", "well", "developed", "countries", "population", "growth", "is", "never", "an", "issue", ",", "Its", "women", "having", "rights", "and", "autonomy", "and", "ofc", "paternity", "leave"], "text_2_tokenized": ["Mike", "Trout", "had", "to", "go", "on", "paternity", "leave", "then", "hit", "a", "home", "run", "on", "his", "very", "first", "swing", "back", "because", "he", "just", "needed", "the", "freakin", "'", "challenge", ".", "#LAAvsSEA"]}
-{"id": "3844-paternity", "word": "paternity", "label_binary": 0, "text_1": "I'm giving paternity court a rest. These last 2 episodes take me out.", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 20, "date_1": "2019-07", "text_2": "They fighting on my block over paternity test results. Yikes \ud83e\udd26\ud83c\udffe\u200d\u2642\ufe0f", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 40, "date_2": "2020-07", "text_1_tokenized": ["I'm", "giving", "paternity", "court", "a", "rest", ".", "These", "last", "2", "episodes", "take", "me", "out", "."], "text_2_tokenized": ["They", "fighting", "on", "my", "block", "over", "paternity", "test", "results", ".", "Yikes", "\ud83e\udd26\ud83c\udffe\u200d\u2642", "\ufe0f"]}
-{"id": "2763-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Brilliant brilliant brilliant effulgent white \ud83d\udd49\ufe0f Restless mind is the cause of entanglement of the relative world which is the physical world. \u2638\ufe0f This will help me and my Sadhana of the path get me right on track.\ud83d\udd2f Thank you Dada ji. Baba Nam Kevalam. Namaskar \ud83d\ude4f", "token_idx_1": 13, "text_start_1": 79, "text_end_1": 91, "date_1": "2019-08", "text_2": "#BBPrince and three other ladies in an entanglement? #BBWathoni said she knows who prince really has eyes for #BBTolanibaj said she has something to tell prince but she doesn't want to say it so she doesn't ruin his birthday because it isn't nice sho she'll tell him Monday", "token_idx_2": 7, "text_start_2": 39, "text_end_2": 51, "date_2": "2020-08", "text_1_tokenized": ["Brilliant", "brilliant", "brilliant", "effulgent", "white", "\ud83d\udd49", "\ufe0f", "Restless", "mind", "is", "the", "cause", "of", "entanglement", "of", "the", "relative", "world", "which", "is", "the", "physical", "world", ".", "\u2638", "\ufe0f", "This", "will", "help", "me", "and", "my", "Sadhana", "of", "the", "path", "get", "me", "right", "on", "track", ".", "\ud83d\udd2f", "Thank", "you", "Dada", "ji", ".", "Baba", "Nam", "Kevalam", ".", "Namaskar", "\ud83d\ude4f"], "text_2_tokenized": ["#BBPrince", "and", "three", "other", "ladies", "in", "an", "entanglement", "?", "#BBWathoni", "said", "she", "knows", "who", "prince", "really", "has", "eyes", "for", "#BBTolanibaj", "said", "she", "has", "something", "to", "tell", "prince", "but", "she", "doesn't", "want", "to", "say", "it", "so", "she", "doesn't", "ruin", "his", "birthday", "because", "it", "isn't", "nice", "sho", "she'll", "tell", "him", "Monday"]}
-{"id": "2764-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Access to credit in order finance a need that has a small capacity to earn is an entanglement to strangling poverty and harassment of debt. \n#MyMorningMuse #WithMuyiwaOludayo", "token_idx_1": 17, "text_start_1": 81, "text_end_1": 93, "date_1": "2019-08", "text_2": "Will smith having a hell of a year, Jada came clean about her \u201centanglement\u201d now dude gets his front teeth knocked out \ud83d\ude02", "token_idx_2": 15, "text_start_2": 63, "text_end_2": 75, "date_2": "2020-08", "text_1_tokenized": ["Access", "to", "credit", "in", "order", "finance", "a", "need", "that", "has", "a", "small", "capacity", "to", "earn", "is", "an", "entanglement", "to", "strangling", "poverty", "and", "harassment", "of", "debt", ".", "#MyMorningMuse", "#WithMuyiwaOludayo"], "text_2_tokenized": ["Will", "smith", "having", "a", "hell", "of", "a", "year", ",", "Jada", "came", "clean", "about", "her", "\u201c", "entanglement", "\u201d", "now", "dude", "gets", "his", "front", "teeth", "knocked", "out", "\ud83d\ude02"]}
-{"id": "2765-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "some kind of heavy quantum entanglement thing i reckon", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 39, "date_1": "2019-08", "text_2": "I don't want no damn boyfriend. Just a entanglement they less complicated \ud83d\ude2d", "token_idx_2": 9, "text_start_2": 39, "text_end_2": 51, "date_2": "2020-08", "text_1_tokenized": ["some", "kind", "of", "heavy", "quantum", "entanglement", "thing", "i", "reckon"], "text_2_tokenized": ["I", "don't", "want", "no", "damn", "boyfriend", ".", "Just", "a", "entanglement", "they", "less", "complicated", "\ud83d\ude2d"]}
-{"id": "2766-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "my not-man just offered to pay for my phone bill (all I did was ask him what phone service he used!) and its like @imani_yvonne2 vibes here. i'm not sure i want the entanglement tho", "token_idx_1": 37, "text_start_1": 165, "text_end_1": 177, "date_1": "2019-08", "text_2": "I guess COVID got into an entanglement with August too", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 38, "date_2": "2020-08", "text_1_tokenized": ["my", "not-man", "just", "offered", "to", "pay", "for", "my", "phone", "bill", "(", "all", "I", "did", "was", "ask", "him", "what", "phone", "service", "he", "used", "!", ")", "and", "its", "like", "@imani_yvonne2", "vibes", "here", ".", "i'm", "not", "sure", "i", "want", "the", "entanglement", "tho"], "text_2_tokenized": ["I", "guess", "COVID", "got", "into", "an", "entanglement", "with", "August", "too"]}
-{"id": "2767-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Nigeria - A dangerous entanglement of weeds.", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 34, "date_1": "2019-08", "text_2": "I want an entanglement \ud83d\ude2d somebody hmu", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 22, "date_2": "2020-08", "text_1_tokenized": ["Nigeria", "-", "A", "dangerous", "entanglement", "of", "weeds", "."], "text_2_tokenized": ["I", "want", "an", "entanglement", "\ud83d\ude2d", "somebody", "hmu"]}
-{"id": "2768-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "quantum superposition when the entire event exists simultaneously? What cause do you attribute to entanglement? Causality works well at the Newtonian level but is inadequate to explain the deepest workings of the universe, and even more so for conjectures about the start of", "token_idx_1": 15, "text_start_1": 98, "text_end_1": 110, "date_1": "2019-08", "text_2": "Oh dear.... folks saying it was an entanglement..... you know what...
Let me get back to minding my business....", "token_idx_2": 8, "text_start_2": 35, "text_end_2": 47, "date_2": "2020-08", "text_1_tokenized": ["quantum", "superposition", "when", "the", "entire", "event", "exists", "simultaneously", "?", "What", "cause", "do", "you", "attribute", "to", "entanglement", "?", "Causality", "works", "well", "at", "the", "Newtonian", "level", "but", "is", "inadequate", "to", "explain", "the", "deepest", "workings", "of", "the", "universe", ",", "and", "even", "more", "so", "for", "conjectures", "about", "the", "start", "of"], "text_2_tokenized": ["Oh", "dear", "...", "folks", "saying", "it", "was", "an", "entanglement", "...", "you", "know", "what", "...", "Let", "me", "get", "back", "to", "minding", "my", "business", "..."]} -{"id": "2769-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Rugs isn't going to be able to rough Lachlan up...it'lkll probably just take one leg entanglement. #Kinektic", "token_idx_1": 17, "text_start_1": 85, "text_end_1": 97, "date_1": "2019-08", "text_2": "Falwells got themselves an entanglement!", "token_idx_2": 4, "text_start_2": 27, "text_end_2": 39, "date_2": "2020-08", "text_1_tokenized": ["Rugs", "isn't", "going", "to", "be", "able", "to", "rough", "Lachlan", "up", "...", "it'lkll", "probably", "just", "take", "one", "leg", "entanglement", ".", "#Kinektic"], "text_2_tokenized": ["Falwells", "got", "themselves", "an", "entanglement", "!"]} -{"id": "2770-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "So many of us jump from one toxicity entanglement right into another.. These webs of trauma perceptually make us feel less then, adding romantic hopelessness right into the equation. Simply because we haven't began to untangle ourselves from our own webs of trauma.", "token_idx_1": 8, "text_start_1": 37, "text_end_1": 49, "date_1": "2019-08", "text_2": "me : the way you've been my JC and ive known you since 2013-2015 and then never saw you until I came to college and i'm a sister now\ud83e\udd70 my whole role model and our lives intersected here my big: girl we in an entanglement", "token_idx_2": 46, "text_start_2": 207, "text_end_2": 219, "date_2": "2020-08", "text_1_tokenized": ["So", "many", "of", "us", "jump", "from", "one", "toxicity", "entanglement", "right", "into", "another", "..", "These", "webs", "of", "trauma", "perceptually", "make", "us", "feel", "less", "then", ",", "adding", "romantic", "hopelessness", "right", "into", "the", "equation", ".", "Simply", "because", "we", "haven't", "began", "to", "untangle", "ourselves", "from", "our", "own", "webs", "of", "trauma", "."], "text_2_tokenized": ["me", ":", "the", "way", "you've", "been", "my", "JC", "and", "ive", "known", "you", "since", "2013-2015", "and", "then", "never", "saw", "you", "until", "I", "came", "to", "college", "and", "i'm", "a", "sister", "now", "\ud83e\udd70", "my", "whole", "role", "model", "and", "our", "lives", "intersected", "here", "my", "big", ":", "girl", "we", "in", "an", "entanglement"]} -{"id": "2771-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I think multi-dimensional might be a way to express quantum entanglement. If both exist in the same space, then it's also time. But that's just to us. 
Perhaps the \u201cdimensions\u201d aren't fixed, but rather are actually different times only perceived in our dimension of time.", "token_idx_1": 10, "text_start_1": 60, "text_end_1": 72, "date_1": "2019-08", "text_2": "i didn't cheat on my test, i had an entanglement with google.", "token_idx_2": 10, "text_start_2": 36, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["I", "think", "multi-dimensional", "might", "be", "a", "way", "to", "express", "quantum", "entanglement", ".", "If", "both", "exist", "in", "the", "same", "space", ",", "then", "it's", "also", "time", ".", "But", "that's", "just", "to", "us", ".", "Perhaps", "the", "\u201c", "dimensions", "\u201d", "aren't", "fixed", ",", "but", "rather", "are", "actually", "different", "times", "only", "perceived", "in", "our", "dimension", "of", "time", "."], "text_2_tokenized": ["i", "didn't", "cheat", "on", "my", "test", ",", "i", "had", "an", "entanglement", "with", "google", "."]} -{"id": "2772-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "#Qantum entanglement is how deep #connections are at sub atomic level. Game of Chance ? NO. #STL\u270d\ufe0f", "token_idx_1": 1, "text_start_1": 8, "text_end_1": 20, "date_1": "2019-08", "text_2": "Omo no go mistake your entanglement for relationship or else my na premium tears o", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["#Qantum", "entanglement", "is", "how", "deep", "#connections", "are", "at", "sub", "atomic", "level", ".", "Game", "of", "Chance", "?", "NO", ".", "#STL", "\u270d", "\ufe0f"], "text_2_tokenized": ["Omo", "no", "go", "mistake", "your", "entanglement", "for", "relationship", "or", "else", "my", "na", "premium", "tears", "o"]} -{"id": "2773-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Would a #CallbacksAndConsequences zini be interesting to folks? 
A small supplement for prompting you / helping you inject longer term consequences for player actions and world events and call back to prior characters/boons to keep up the entanglement of characters w/ the fiction?", "token_idx_1": 40, "text_start_1": 238, "text_end_1": 250, "date_1": "2019-08", "text_2": "We have our very own Mr entanglement in SA #Vusi , Will Smith is proud of u seun \ud83d\ude02 #ZodwaWabantu", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 36, "date_2": "2020-08", "text_1_tokenized": ["Would", "a", "#CallbacksAndConsequences", "zini", "be", "interesting", "to", "folks", "?", "A", "small", "supplement", "for", "prompting", "you", "/", "helping", "you", "inject", "longer", "term", "consequences", "for", "player", "actions", "and", "world", "events", "and", "call", "back", "to", "prior", "characters", "/", "boons", "to", "keep", "up", "the", "entanglement", "of", "characters", "w", "/", "the", "fiction", "?"], "text_2_tokenized": ["We", "have", "our", "very", "own", "Mr", "entanglement", "in", "SA", "#Vusi", ",", "Will", "Smith", "is", "proud", "of", "u", "seun", "\ud83d\ude02", "#ZodwaWabantu"]} -{"id": "2774-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Today's social structure, calls for the destruction of oppressive religions, and the rebirth of spiritually, understanding that our awareness is our-aspect of God pulsating within us, is one aspect of God, as described by Quantum entanglement, new song coming, expressing this.", "token_idx_1": 40, "text_start_1": 230, "text_end_1": 242, "date_1": "2019-08", "text_2": "These year #BBNajia lockdown season 5 should be changed to#bbnaija entanglement season 5..\ud83e\udd23\ud83e\udd23", "token_idx_2": 11, "text_start_2": 67, "text_end_2": 79, "date_2": "2020-08", "text_1_tokenized": ["Today's", "social", "structure", ",", "calls", "for", "the", "destruction", "of", "oppressive", "religions", ",", "and", "the", "rebirth", "of", "spiritually", ",", "understanding", "that", "our", "awareness", "is", "our-aspect", "of", "God", "pulsating", "within", "us", ",", "is", "one", "aspect", "of", "God", ",", "as", "described", "by", "Quantum", "entanglement", ",", "new", "song", "coming", ",", "expressing", "this", "."], "text_2_tokenized": ["These", "year", "#BBNajia", "lockdown", "season", "5", "should", "be", "changed", "to", "#bbnaija", "entanglement", "season", "5", "..", "\ud83e\udd23", "\ud83e\udd23"]} -{"id": "2775-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "In the Third Chapter, verse thirteen, Sri Krsna explains that only the remains of sacrifice are purified and fit for consumption by those who are seeking advancement in life and release from the clutches of the material entanglement.", "token_idx_1": 39, "text_start_1": 220, "text_end_1": 232, "date_1": "2019-08", "text_2": "Yea neh, Vusi had to choose a month of entanglement,,, August \ud83e\udd26\ud83c\udffe\u200d\u2640\ufe0f\ud83e\udd26\ud83c\udffe\u200d\u2640\ufe0f #VusiForPresident #ZodwaFindsVusi #ZodwaWabantu", "token_idx_2": 10, "text_start_2": 39, "text_end_2": 51, "date_2": "2020-08", "text_1_tokenized": ["In", "the", "Third", "Chapter", ",", "verse", "thirteen", ",", "Sri", "Krsna", "explains", "that", "only", "the", "remains", "of", "sacrifice", "are", "purified", "and", "fit", "for", "consumption", "by", "those", "who", "are", "seeking", "advancement", "in", "life", "and", "release", "from", "the", "clutches", "of", "the", "material", "entanglement", "."], "text_2_tokenized": ["Yea", "neh", ",", "Vusi", "had", "to", "choose", "a", "month", "of", "entanglement", ",", ",", ",", "August", "\ud83e\udd26\ud83c\udffe\u200d\u2640", "\ufe0f", "\ud83e\udd26\ud83c\udffe\u200d\u2640", "\ufe0f", "#VusiForPresident", "#ZodwaFindsVusi", "#ZodwaWabantu"]} -{"id": "2776-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "For today's #modwrite I completed my third chapter on Nancy Cunard and Ezra Pound's primitivist rhetorics of land and stone and their entanglement in modernist discourses of race and nature. Onward to Lewis and Jones tomorrow!", "token_idx_1": 22, "text_start_1": 134, "text_end_1": 146, "date_1": "2019-08", "text_2": "That's some real entanglement and only Zolani can see it #TheRiverMzansi \ud83e\udd23\ud83e\udd23\ud83e\udd23", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 29, "date_2": "2020-08", "text_1_tokenized": ["For", "today's", "#modwrite", "I", "completed", "my", "third", "chapter", "on", "Nancy", "Cunard", "and", "Ezra", "Pound's", "primitivist", "rhetorics", "of", "land", "and", "stone", "and", "their", "entanglement", "in", "modernist", "discourses", "of", "race", "and", "nature", ".", "Onward", "to", "Lewis", "and", "Jones", "tomorrow", "!"], "text_2_tokenized": ["That's", "some", "real", "entanglement", "and", "only", "Zolani", "can", "see", "it", "#TheRiverMzansi", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23"]} -{"id": "2777-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "My shrink i had as an adolescent told my parents to cut me off of any special purchases beyond basic needs for misbehaving. I think on some level i've always recognized a lack of $ entanglement as freedom lol. I'm a minimalist, but i do like to have a few nice/useful things. >", "token_idx_1": 36, "text_start_1": 181, "text_end_1": 193, "date_1": "2019-08", "text_2": "Us: well i hope August is better than July Jokester: yea, I'm not looking forward to an entanglement with August", "token_idx_2": 20, "text_start_2": 88, "text_end_2": 100, "date_2": "2020-08", "text_1_tokenized": ["My", "shrink", "i", "had", "as", "an", "adolescent", "told", "my", "parents", "to", "cut", "me", "off", "of", "any", "special", "purchases", "beyond", "basic", "needs", "for", "misbehaving", ".", "I", "think", "on", "some", "level", "i've", "always", "recognized", "a", "lack", "of", "$", "entanglement", "as", "freedom", "lol", ".", "I'm", "a", "minimalist", ",", "but", "i", "do", "like", "to", "have", "a", "few", "nice", "/", "useful", "things", ".", ">"], "text_2_tokenized": ["Us", ":", "well", "i", "hope", "August", "is", "better", "than", "July", "Jokester", ":", "yea", ",", "I'm", "not", "looking", "forward", "to", "an", "entanglement", "with", "August"]} -{"id": "2778-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Right Whale, Wrong Place, Wrong Time The Right Whale was so named as it was consideredby whalers as the right whale to catch as it floated after death and could be cut up off ship. 
The Northern Right Whale is no longer hunted but faces entanglement by old fishing nets.", "token_idx_1": 48, "text_start_1": 236, "text_end_1": 248, "date_1": "2019-08", "text_2": "Ozo, Nengi, Dora, Erica, Kiddwaya and Laycon, the viewers top 6 on #BBNaija this year are all in an entanglement \ud83d\ude02", "token_idx_2": 24, "text_start_2": 100, "text_end_2": 112, "date_2": "2020-08", "text_1_tokenized": ["Right", "Whale", ",", "Wrong", "Place", ",", "Wrong", "Time", "The", "Right", "Whale", "was", "so", "named", "as", "it", "was", "consideredby", "whalers", "as", "the", "right", "whale", "to", "catch", "as", "it", "floated", "after", "death", "and", "could", "be", "cut", "up", "off", "ship", ".", "The", "Northern", "Right", "Whale", "is", "no", "longer", "hunted", "but", "faces", "entanglement", "by", "old", "fishing", "nets", "."], "text_2_tokenized": ["Ozo", ",", "Nengi", ",", "Dora", ",", "Erica", ",", "Kiddwaya", "and", "Laycon", ",", "the", "viewers", "top", "6", "on", "#BBNaija", "this", "year", "are", "all", "in", "an", "entanglement", "\ud83d\ude02"]} -{"id": "2779-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Seeing we are surrounded by this great cloud of witnesses,let us lay aside every weight and and every entanglement that easily interferes with our speed and run the race set for us. I see easy pace and speed on your path in the name of Jesus Christ.Amen", "token_idx_1": 20, "text_start_1": 102, "text_end_1": 114, "date_1": "2019-08", "text_2": "My longest and most enjoyable entanglement to date was with another Leo so y'all really can't tell me shit", "token_idx_2": 5, "text_start_2": 30, "text_end_2": 42, "date_2": "2020-08", "text_1_tokenized": ["Seeing", "we", "are", "surrounded", "by", "this", "great", "cloud", "of", "witnesses", ",", "let", "us", "lay", "aside", "every", "weight", "and", "and", "every", "entanglement", "that", "easily", "interferes", "with", "our", "speed", "and", "run", "the", "race", "set", "for", "us", ".", "I", "see", "easy", "pace", "and", "speed", "on", "your", "path", "in", "the", "name", "of", "Jesus", "Christ.Amen"], "text_2_tokenized": ["My", "longest", "and", "most", "enjoyable", "entanglement", "to", "date", "was", "with", "another", "Leo", "so", "y'all", "really", "can't", "tell", "me", "shit"]} -{"id": "2780-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Have any of you ever read on quantum entanglement? Talk about mind boggling!", "token_idx_1": 8, "text_start_1": 37, "text_end_1": 49, "date_1": "2019-08", "text_2": "midway threw my sentence this man Zae talking about \u201cdon't tell me this a entanglement\u201d like Zae \ud83d\udc80", "token_idx_2": 15, "text_start_2": 74, "text_end_2": 86, "date_2": "2020-08", "text_1_tokenized": ["Have", "any", "of", "you", "ever", "read", "on", "quantum", "entanglement", "?", "Talk", "about", "mind", "boggling", "!"], "text_2_tokenized": ["midway", "threw", "my", "sentence", "this", "man", "Zae", "talking", "about", "\u201c", "don't", "tell", "me", "this", "a", "entanglement", "\u201d", "like", "Zae", "\ud83d\udc80"]} -{"id": "2781-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "FAYE: Friendship is different from a romantic entanglement. And you all know it. HARVEY: We also all trust each other. ALEX: Which is why we all waived your so-called conflict so there isn't a problem anymore. FAYE: Says who? #Suits", "token_idx_1": 8, "text_start_1": 46, "text_end_1": 58, "date_1": "2019-08", "text_2": "Prince! 
Better extract yourself from this Nengi entanglement.", "token_idx_2": 8, "text_start_2": 48, "text_end_2": 60, "date_2": "2020-08", "text_1_tokenized": ["FAYE", ":", "Friendship", "is", "different", "from", "a", "romantic", "entanglement", ".", "And", "you", "all", "know", "it", ".", "HARVEY", ":", "We", "also", "all", "trust", "each", "other", ".", "ALEX", ":", "Which", "is", "why", "we", "all", "waived", "your", "so-called", "conflict", "so", "there", "isn't", "a", "problem", "anymore", ".", "FAYE", ":", "Says", "who", "?", "#Suits"], "text_2_tokenized": ["Prince", "!", "Better", "extract", "yourself", "from", "this", "Nengi", "entanglement", "."]} -{"id": "2782-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Psychic brain to brain communication via quantum entanglement", "token_idx_1": 7, "text_start_1": 49, "text_end_1": 61, "date_1": "2019-08", "text_2": "\u201ci'm not cheating on dream with george, it was just an entanglement\u201d -my sister 2020", "token_idx_2": 13, "text_start_2": 55, "text_end_2": 67, "date_2": "2020-08", "text_1_tokenized": ["Psychic", "brain", "to", "brain", "communication", "via", "quantum", "entanglement"], "text_2_tokenized": ["\u201c", "i'm", "not", "cheating", "on", "dream", "with", "george", ",", "it", "was", "just", "an", "entanglement", "\u201d", "-", "my", "sister", "2020"]} -{"id": "2783-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "me: this is a fun MG book about rune magic with anime adventure vibes also me: this book deals with deep existential despair about the entanglement of memory, embodiment, and identity me: a girl and her demon, power of friendship also me: deepest dream leads to darkest fear", "token_idx_1": 27, "text_start_1": 135, "text_end_1": 147, "date_1": "2019-08", "text_2": "August, but without the entanglement", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 36, "date_2": "2020-08", "text_1_tokenized": ["me", ":", "this", "is", "a", "fun", "MG", "book", "about", "rune", "magic", "with", "anime", "adventure", "vibes", "also", "me", ":", "this", "book", "deals", "with", "deep", "existential", "despair", "about", "the", "entanglement", "of", "memory", ",", "embodiment", ",", "and", "identity", "me", ":", "a", "girl", "and", "her", "demon", ",", "power", "of", "friendship", "also", "me", ":", "deepest", "dream", "leads", "to", "darkest", "fear"], "text_2_tokenized": ["August", ",", "but", "without", "the", "entanglement"]} -{"id": "2784-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Quantum entanglement in chemical reactions? 
Now there's a way to find out", "token_idx_1": 1, "text_start_1": 8, "text_end_1": 20, "date_1": "2019-08", "text_2": "Twitter 2020: \"normalize manifesting a toxic entanglement\" \ud83d\ude4f\ud83d\udc96\ud83d\udc51", "token_idx_2": 8, "text_start_2": 45, "text_end_2": 57, "date_2": "2020-08", "text_1_tokenized": ["Quantum", "entanglement", "in", "chemical", "reactions", "?", "Now", "there's", "a", "way", "to", "find", "out"], "text_2_tokenized": ["Twitter", "2020", ":", "\"", "normalize", "manifesting", "a", "toxic", "entanglement", "\"", "\ud83d\ude4f", "\ud83d\udc96", "\ud83d\udc51"]} -{"id": "2785-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Methods to study entanglement using argon-dependent self-replication", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 29, "date_1": "2019-08", "text_2": "Being single & in an entanglement at the same time wearin me thee fuck out \ud83e\udd28", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 37, "date_2": "2020-08", "text_1_tokenized": ["Methods", "to", "study", "entanglement", "using", "argon-dependent", "self-replication"], "text_2_tokenized": ["Being", "single", "&", "in", "an", "entanglement", "at", "the", "same", "time", "wearin", "me", "thee", "fuck", "out", "\ud83e\udd28"]} -{"id": "2786-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "On the toilet I mused if quantum compute could be distributed, thus must expand to ensure entanglement, and arrived back at a multiverse + simulation theory. Was thinking about how to time-shift correlation engines for AI/ML and how to get enough compute/states as data...", "token_idx_1": 17, "text_start_1": 90, "text_end_1": 102, "date_1": "2019-08", "text_2": "\"I can't even date you.\" Me: huh, date me. Calendar mi ti kun. Mi o ni empty space fun date kankan. I'm happy without any form of entanglement", "token_idx_2": 35, "text_start_2": 130, "text_end_2": 142, "date_2": "2020-08", "text_1_tokenized": ["On", "the", "toilet", "I", "mused", "if", "quantum", "compute", "could", "be", "distributed", ",", "thus", "must", "expand", "to", "ensure", "entanglement", ",", "and", "arrived", "back", "at", "a", "multiverse", "+", "simulation", "theory", ".", "Was", "thinking", "about", "how", "to", "time-shift", "correlation", "engines", "for", "AI", "/", "ML", "and", "how", "to", "get", "enough", "compute", "/", "states", "as", "data", "..."], "text_2_tokenized": ["\"", "I", "can't", "even", "date", "you", ".", "\"", "Me", ":", "huh", ",", "date", "me", ".", "Calendar", "mi", "ti", "kun", ".", "Mi", "o", "ni", "empty", "space", "fun", "date", "kankan", ".", "I'm", "happy", "without", "any", "form", "of", "entanglement"]} -{"id": "2787-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "\"Now, quantum entanglement is when two particles are far apart, and yet somehow remain in contact with each other.\"", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 26, "date_1": "2019-08", "text_2": "This whole place na entanglement. 
Venn diagram with mad intersections.", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 32, "date_2": "2020-08", "text_1_tokenized": ["\"", "Now", ",", "quantum", "entanglement", "is", "when", "two", "particles", "are", "far", "apart", ",", "and", "yet", "somehow", "remain", "in", "contact", "with", "each", "other", ".", "\""], "text_2_tokenized": ["This", "whole", "place", "na", "entanglement", ".", "Venn", "diagram", "with", "mad", "intersections", "."]} -{"id": "2788-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Is gravity the measured strengthening of an approaching entanglement? And are all energy particles entangled until chaos de-entangles the energy particles? Could interaction with other world energy particles add to the chaos. If tuned to an interactable frequency?", "token_idx_1": 8, "text_start_1": 56, "text_end_1": 68, "date_1": "2019-08", "text_2": "Will Smith falls in an entanglement, now everyone wanna hop on the trend. \ud83d\ude02\ud83e\udd23 #DenleChain #SupportYourLocalSanchoSancha \ud83d\ude02\ud83d\udc80", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["Is", "gravity", "the", "measured", "strengthening", "of", "an", "approaching", "entanglement", "?", "And", "are", "all", "energy", "particles", "entangled", "until", "chaos", "de-entangles", "the", "energy", "particles", "?", "Could", "interaction", "with", "other", "world", "energy", "particles", "add", "to", "the", "chaos", ".", "If", "tuned", "to", "an", "interactable", "frequency", "?"], "text_2_tokenized": ["Will", "Smith", "falls", "in", "an", "entanglement", ",", "now", "everyone", "wanna", "hop", "on", "the", "trend", ".", "\ud83d\ude02", "\ud83e\udd23", "#DenleChain", "#SupportYourLocalSanchoSancha", "\ud83d\ude02", "\ud83d\udc80"]} -{"id": "2789-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I ground myself to be present rather than get caught up within the entanglement of wanting & wishing. I want you (us) I need you (us) I love you (us) I am grateful for what we share & how deeply we love. Our time is coming. \ud83e\udde1 Never let go.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 79, "date_1": "2019-08", "text_2": "So Prince and Dorathy should not talk again since they are in an entanglement with Ozwor and Delilah. 
Please this show is rated 18 if u have a mentality of a 10year old, don't analyze please it is irritating #BBNaija", "token_idx_2": 13, "text_start_2": 65, "text_end_2": 77, "date_2": "2020-08", "text_1_tokenized": ["I", "ground", "myself", "to", "be", "present", "rather", "than", "get", "caught", "up", "within", "the", "entanglement", "of", "wanting", "&", "wishing", ".", "I", "want", "you", "(", "us", ")", "I", "need", "you", "(", "us", ")", "I", "love", "you", "(", "us", ")", "I", "am", "grateful", "for", "what", "we", "share", "&", "how", "deeply", "we", "love", ".", "Our", "time", "is", "coming", ".", "\ud83e\udde1", "Never", "let", "go", "."], "text_2_tokenized": ["So", "Prince", "and", "Dorathy", "should", "not", "talk", "again", "since", "they", "are", "in", "an", "entanglement", "with", "Ozwor", "and", "Delilah", ".", "Please", "this", "show", "is", "rated", "18", "if", "u", "have", "a", "mentality", "of", "a", "10year", "old", ",", "don't", "analyze", "please", "it", "is", "irritating", "#BBNaija"]} -{"id": "2790-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "\"The field is the sole governing agency of the particle\u201d --Einstein we live in a quantum soup of entanglement....the love we give helps us ALL #LoveWins", "token_idx_1": 22, "text_start_1": 97, "text_end_1": 109, "date_1": "2019-08", "text_2": "Erica and Kid are not even dating. It isn't even an entanglement. It is omo gba mapami now she wants to k*ll Kid and baba can't deal again. He's for everyone, abi dem Dey cage breeze? #bbnaijalockdown2020", "token_idx_2": 12, "text_start_2": 52, "text_end_2": 64, "date_2": "2020-08", "text_1_tokenized": ["\"", "The", "field", "is", "the", "sole", "governing", "agency", "of", "the", "particle", "\u201d", "-", "-", "Einstein", "we", "live", "in", "a", "quantum", "soup", "of", "entanglement", "...", "the", "love", "we", "give", "helps", "us", "ALL", "#LoveWins"], "text_2_tokenized": ["Erica", "and", "Kid", "are", "not", "even", "dating", ".", "It", "isn't", "even", "an", "entanglement", ".", "It", "is", "omo", "gba", "mapami", "now", "she", "wants", "to", "k", "*", "ll", "Kid", "and", "baba", "can't", "deal", "again", ".", "He's", "for", "everyone", ",", "abi", "dem", "Dey", "cage", "breeze", "?", "#bbnaijalockdown2020"]} -{"id": "2791-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Two things on my mind tonight: #Parhelion (sun dog) Einstein's characterization of quantum entanglement as \u201cspukhafte Fernwirkung\u201d = \u201cspooky action at a distance\u201d", "token_idx_1": 16, "text_start_1": 91, "text_end_1": 103, "date_1": "2019-08", "text_2": "Maybe that sexy person that only offered you platonic LOVE and healing wasn't rejecting you, but saving you from an entanglement.", "token_idx_2": 21, "text_start_2": 116, "text_end_2": 128, "date_2": "2020-08", "text_1_tokenized": ["Two", "things", "on", "my", "mind", "tonight", ":", "#Parhelion", "(", "sun", "dog", ")", "Einstein's", "characterization", "of", "quantum", "entanglement", "as", "\u201c", "spukhafte", "Fernwirkung", "\u201d", "=", "\u201c", "spooky", "action", "at", "a", "distance", "\u201d"], "text_2_tokenized": ["Maybe", "that", "sexy", "person", "that", "only", "offered", "you", "platonic", "LOVE", "and", "healing", "wasn't", "rejecting", "you", ",", "but", "saving", "you", "from", "an", "entanglement", "."]} -{"id": "2792-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "They all come to #TheMe as if I'm weak, not knowing I'm just a nice person that can 
feel your pain so you don't have to,.. What some have failed to realize is the entanglement of it's opposite and not so nice.. a flip of a sort..", "token_idx_1": 37, "text_start_1": 163, "text_end_1": 175, "date_1": "2019-08", "text_2": "I'm toxic. You're toxic. Let's be toxic together. But not in a relationship just an entanglement.", "token_idx_2": 18, "text_start_2": 84, "text_end_2": 96, "date_2": "2020-08", "text_1_tokenized": ["They", "all", "come", "to", "#TheMe", "as", "if", "I'm", "weak", ",", "not", "knowing", "I'm", "just", "a", "nice", "person", "that", "can", "feel", "your", "pain", "so", "you", "don't", "have", "to", ",", "..", "What", "some", "have", "failed", "to", "realize", "is", "the", "entanglement", "of", "it's", "opposite", "and", "not", "so", "nice", "..", "a", "flip", "of", "a", "sort", ".."], "text_2_tokenized": ["I'm", "toxic", ".", "You're", "toxic", ".", "Let's", "be", "toxic", "together", ".", "But", "not", "in", "a", "relationship", "just", "an", "entanglement", "."]} -{"id": "2793-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "out the purity and sincerity of motives before those who have misunderstood and misjudged. Perhaps it is a boy or girl whose life is clouded by some entanglement from which it seems impossible to get free.", "token_idx_1": 28, "text_start_1": 149, "text_end_1": 161, "date_1": "2019-08", "text_2": "My depression lasted longer than that entanglement.", "token_idx_2": 6, "text_start_2": 38, "text_end_2": 50, "date_2": "2020-08", "text_1_tokenized": ["out", "the", "purity", "and", "sincerity", "of", "motives", "before", "those", "who", "have", "misunderstood", "and", "misjudged", ".", "Perhaps", "it", "is", "a", "boy", "or", "girl", "whose", "life", "is", "clouded", "by", "some", "entanglement", "from", "which", "it", "seems", "impossible", "to", "get", "free", "."], "text_2_tokenized": ["My", "depression", "lasted", "longer", "than", "that", "entanglement", "."]} -{"id": "2794-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "17.25. Without desiring fruitive results, one should perform various kinds of sacrifice, penance and charity with the word tat. The purpose of such transcendental activities is to get free from material entanglement.", "token_idx_1": 35, "text_start_1": 203, "text_end_1": 215, "date_1": "2019-08", "text_2": "I'm in a love entanglement with captain Morgan admiral Nelson and lady Bligh", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 26, "date_2": "2020-08", "text_1_tokenized": ["17.25", ".", "Without", "desiring", "fruitive", "results", ",", "one", "should", "perform", "various", "kinds", "of", "sacrifice", ",", "penance", "and", "charity", "with", "the", "word", "tat", ".", "The", "purpose", "of", "such", "transcendental", "activities", "is", "to", "get", "free", "from", "material", "entanglement", "."], "text_2_tokenized": ["I'm", "in", "a", "love", "entanglement", "with", "captain", "Morgan", "admiral", "Nelson", "and", "lady", "Bligh"]} -{"id": "2795-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "you heard it here first. Voice over the Quantum Internet Protocol (vquip) - more secure telepathy through entanglement.", "token_idx_1": 20, "text_start_1": 106, "text_end_1": 118, "date_1": "2019-08", "text_2": "I want to heal, and I want you to heal. Although we are not hurting for the same reason, our entanglement opened my eyes to the beautiful pain of trauma bonding. I wish we met at a different time in life. 
I wish it didn't have to be terminated so abruptly. I wish you the best \u2764\ufe0f.", "token_idx_2": 23, "text_start_2": 93, "text_end_2": 105, "date_2": "2020-08", "text_1_tokenized": ["you", "heard", "it", "here", "first", ".", "Voice", "over", "the", "Quantum", "Internet", "Protocol", "(", "vquip", ")", "-", "more", "secure", "telepathy", "through", "entanglement", "."], "text_2_tokenized": ["I", "want", "to", "heal", ",", "and", "I", "want", "you", "to", "heal", ".", "Although", "we", "are", "not", "hurting", "for", "the", "same", "reason", ",", "our", "entanglement", "opened", "my", "eyes", "to", "the", "beautiful", "pain", "of", "trauma", "bonding", ".", "I", "wish", "we", "met", "at", "a", "different", "time", "in", "life", ".", "I", "wish", "it", "didn't", "have", "to", "be", "terminated", "so", "abruptly", ".", "I", "wish", "you", "the", "best", "\u2764", "\ufe0f", "."]} -{"id": "2796-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Our very being is utterly dependent on submitting to a relational identity with the divine narrative. You cannot have rationality within a relationship while denying emotional entanglement with the other.", "token_idx_1": 27, "text_start_1": 176, "text_end_1": 188, "date_1": "2019-08", "text_2": "Erica is enjoying this entanglement.", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["Our", "very", "being", "is", "utterly", "dependent", "on", "submitting", "to", "a", "relational", "identity", "with", "the", "divine", "narrative", ".", "You", "cannot", "have", "rationality", "within", "a", "relationship", "while", "denying", "emotional", "entanglement", "with", "the", "other", "."], "text_2_tokenized": ["Erica", "is", "enjoying", "this", "entanglement", "."]} -{"id": "2797-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "For my blood pressure's sake tolonglaa aku nak stop study quantum entanglement stuffs ni. Rumusannya I know nothing thank you no next", "token_idx_1": 11, "text_start_1": 66, "text_end_1": 78, "date_1": "2019-08", "text_2": "\u201cTo my entanglement\u201d lmao \ud83d\ude02 bitch bye", "token_idx_2": 3, "text_start_2": 7, "text_end_2": 19, "date_2": "2020-08", "text_1_tokenized": ["For", "my", "blood", "pressure's", "sake", "tolonglaa", "aku", "nak", "stop", "study", "quantum", "entanglement", "stuffs", "ni", ".", "Rumusannya", "I", "know", "nothing", "thank", "you", "no", "next"], "text_2_tokenized": ["\u201c", "To", "my", "entanglement", "\u201d", "lmao", "\ud83d\ude02", "bitch", "bye"]} -{"id": "2798-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "You know what's crazy? Quantum entanglement. Seriously. Read up on that shit.", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 43, "date_1": "2019-08", "text_2": "2020 is one big entanglement. 
Complicated asl!", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 28, "date_2": "2020-08", "text_1_tokenized": ["You", "know", "what's", "crazy", "?", "Quantum", "entanglement", ".", "Seriously", ".", "Read", "up", "on", "that", "shit", "."], "text_2_tokenized": ["2020", "is", "one", "big", "entanglement", ".", "Complicated", "asl", "!"]} -{"id": "2799-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "you was doing up \u201c\ud83d\ude0d\ud83d\ude0d\ud83d\ude0d\u201d when I first told you im an actual real life nerd but 3 minutes into me rambling about quantum entanglement your eyes start to glaze over kmt keep up lil hot girl", "token_idx_1": 28, "text_start_1": 118, "text_end_1": 130, "date_1": "2019-08", "text_2": "Relationships, there was open relationship, situationship, entanglement now there is a triangle relationship \ud83d\ude0f", "token_idx_2": 9, "text_start_2": 59, "text_end_2": 71, "date_2": "2020-08", "text_1_tokenized": ["you", "was", "doing", "up", "\u201c", "\ud83d\ude0d", "\ud83d\ude0d", "\ud83d\ude0d", "\u201d", "when", "I", "first", "told", "you", "im", "an", "actual", "real", "life", "nerd", "but", "3", "minutes", "into", "me", "rambling", "about", "quantum", "entanglement", "your", "eyes", "start", "to", "glaze", "over", "kmt", "keep", "up", "lil", "hot", "girl"], "text_2_tokenized": ["Relationships", ",", "there", "was", "open", "relationship", ",", "situationship", ",", "entanglement", "now", "there", "is", "a", "triangle", "relationship", "\ud83d\ude0f"]} -{"id": "2800-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "how to watch your thoughts without belief or disbelief, without entanglement, without struggle.", "token_idx_1": 11, "text_start_1": 64, "text_end_1": 76, "date_1": "2019-08", "text_2": "Remember when I had a two year entanglement with a guitarist so I was like, \u201cNo more musicians\u201d and then I dated a drummer for 7 months", "token_idx_2": 7, "text_start_2": 31, "text_end_2": 43, "date_2": "2020-08", "text_1_tokenized": ["how", "to", "watch", "your", "thoughts", "without", "belief", "or", "disbelief", ",", "without", "entanglement", ",", "without", "struggle", "."], "text_2_tokenized": ["Remember", "when", "I", "had", "a", "two", "year", "entanglement", "with", "a", "guitarist", "so", "I", "was", "like", ",", "\u201c", "No", "more", "musicians", "\u201d", "and", "then", "I", "dated", "a", "drummer", "for", "7", "months"]} -{"id": "2801-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "That I'm aware of... No one can properly explain gravity. No one can properly explain time. No one can properly explain the existence of subatomic particles. No one can properly explain entanglement. And that's not all\u2026 All they can explain is the effects/results thereof.", "token_idx_1": 35, "text_start_1": 186, "text_end_1": 198, "date_1": "2019-08", "text_2": "The only entanglement we had last season was just Gedeoni-Khafi-Venita. Just one simple triangle. This season we are just having entangled heptagons in every corner. 
#bbnaija", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 21, "date_2": "2020-08", "text_1_tokenized": ["That", "I'm", "aware", "of", "...", "No", "one", "can", "properly", "explain", "gravity", ".", "No", "one", "can", "properly", "explain", "time", ".", "No", "one", "can", "properly", "explain", "the", "existence", "of", "subatomic", "particles", ".", "No", "one", "can", "properly", "explain", "entanglement", ".", "And", "that's", "not", "all", "\u2026", "All", "they", "can", "explain", "is", "the", "effects", "/", "results", "thereof", "."], "text_2_tokenized": ["The", "only", "entanglement", "we", "had", "last", "season", "was", "just", "Gedeoni-Khafi-Venita", ".", "Just", "one", "simple", "triangle", ".", "This", "season", "we", "are", "just", "having", "entangled", "heptagons", "in", "every", "corner", ".", "#bbnaija"]} -{"id": "2802-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Printing out the wikipedia page on quantum entanglement so I can give it to a girl and say \u201cthis made me think of us\u201d", "token_idx_1": 7, "text_start_1": 43, "text_end_1": 55, "date_1": "2019-08", "text_2": "I love my current ... entanglement, if you will.", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 34, "date_2": "2020-08", "text_1_tokenized": ["Printing", "out", "the", "wikipedia", "page", "on", "quantum", "entanglement", "so", "I", "can", "give", "it", "to", "a", "girl", "and", "say", "\u201c", "this", "made", "me", "think", "of", "us", "\u201d"], "text_2_tokenized": ["I", "love", "my", "current", "...", "entanglement", ",", "if", "you", "will", "."]} -{"id": "2803-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "In 2016Moscow Mitch would not sign onto Mr Obama's alert to citizens about Moscow attack on our elections! Now that the financials are known of Moscow Mich,s entanglement in the Aluminium Russion Oolagarki and removal on sanction, KY needs to send Moscow Mitch to ash pile!", "token_idx_1": 30, "text_start_1": 158, "text_end_1": 170, "date_1": "2019-08", "text_2": "bruh august really dropped a song called entanglement........baby if you low on income this not the way to do it \ud83d\ude2d", "token_idx_2": 7, "text_start_2": 41, "text_end_2": 53, "date_2": "2020-08", "text_1_tokenized": ["In", "2016Moscow", "Mitch", "would", "not", "sign", "onto", "Mr", "Obama's", "alert", "to", "citizens", "about", "Moscow", "attack", "on", "our", "elections", "!", "Now", "that", "the", "financials", "are", "known", "of", "Moscow", "Mich", ",", "s", "entanglement", "in", "the", "Aluminium", "Russion", "Oolagarki", "and", "removal", "on", "sanction", ",", "KY", "needs", "to", "send", "Moscow", "Mitch", "to", "ash", "pile", "!"], "text_2_tokenized": ["bruh", "august", "really", "dropped", "a", "song", "called", "entanglement", "...", "baby", "if", "you", "low", "on", "income", "this", "not", "the", "way", "to", "do", "it", "\ud83d\ude2d"]} -{"id": "2804-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I will probably get laughed at for this, but if there are any #aliens reading this who want to discuss some intriguing ideas on faster than light travel, artificial singularities or macroscopic quantum entanglement drop me a line. 
Thanks!", "token_idx_1": 35, "text_start_1": 202, "text_end_1": 214, "date_1": "2019-08", "text_2": "When you think Man City's entanglement with UEFA couldn't get any more shady, they throw in a referee life line for their game against Lyon.", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 38, "date_2": "2020-08", "text_1_tokenized": ["I", "will", "probably", "get", "laughed", "at", "for", "this", ",", "but", "if", "there", "are", "any", "#aliens", "reading", "this", "who", "want", "to", "discuss", "some", "intriguing", "ideas", "on", "faster", "than", "light", "travel", ",", "artificial", "singularities", "or", "macroscopic", "quantum", "entanglement", "drop", "me", "a", "line", ".", "Thanks", "!"], "text_2_tokenized": ["When", "you", "think", "Man", "City's", "entanglement", "with", "UEFA", "couldn't", "get", "any", "more", "shady", ",", "they", "throw", "in", "a", "referee", "life", "line", "for", "their", "game", "against", "Lyon", "."]} -{"id": "2805-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "\u201cAs it is in Australia's interest to protect our oil supply, we have taken a strategic decision to reduce use through moving quickly to renewables and electric vehicles. Benefit too of less emissions and no entanglement in Middle East\u201d Nah, just kidding, we're going to the gulf.", "token_idx_1": 38, "text_start_1": 207, "text_end_1": 219, "date_1": "2019-08", "text_2": "Me: mom how do you say entanglement in spanish? Mom: un puteria girl \ud83e\udd23\ud83e\udd23\ud83e\udd23", "token_idx_2": 7, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["\u201c", "As", "it", "is", "in", "Australia's", "interest", "to", "protect", "our", "oil", "supply", ",", "we", "have", "taken", "a", "strategic", "decision", "to", "reduce", "use", "through", "moving", "quickly", "to", "renewables", "and", "electric", "vehicles", ".", "Benefit", "too", "of", "less", "emissions", "and", "no", "entanglement", "in", "Middle", "East", "\u201d", "Nah", ",", "just", "kidding", ",", "we're", "going", "to", "the", "gulf", "."], "text_2_tokenized": ["Me", ":", "mom", "how", "do", "you", "say", "entanglement", "in", "spanish", "?", "Mom", ":", "un", "puteria", "girl", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23"]} -{"id": "2806-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Teleportation thru quantum entanglement. A spooky action at a distance, as he once said.", "token_idx_1": 3, "text_start_1": 27, "text_end_1": 39, "date_1": "2019-08", "text_2": "Wanting a relationship/entanglement just to have dick n ass readily available on Sunday Mornings when I feel like sucking n eating for hours... random thoughts", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["Teleportation", "thru", "quantum", "entanglement", ".", "A", "spooky", "action", "at", "a", "distance", ",", "as", "he", "once", "said", "."], "text_2_tokenized": ["Wanting", "a", "relationship", "/", "entanglement", "just", "to", "have", "dick", "n", "ass", "readily", "available", "on", "Sunday", "Mornings", "when", "I", "feel", "like", "sucking", "n", "eating", "for", "hours", "...", "random", "thoughts"]} -{"id": "2807-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I believe \"quantum entanglement\" - since that's what modern science decides to call it - explains this ability to sense people remotely and their feelings. 
It's like, feelings are real and they actually move from one subject to another through an unseen plane. Hate, love, etc.", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 31, "date_1": "2019-08", "text_2": "so if you have kids during an entanglement are the kids called knots ?\ud83e\udd74\ud83e\udd14", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 42, "date_2": "2020-08", "text_1_tokenized": ["I", "believe", "\"", "quantum", "entanglement", "\"", "-", "since", "that's", "what", "modern", "science", "decides", "to", "call", "it", "-", "explains", "this", "ability", "to", "sense", "people", "remotely", "and", "their", "feelings", ".", "It's", "like", ",", "feelings", "are", "real", "and", "they", "actually", "move", "from", "one", "subject", "to", "another", "through", "an", "unseen", "plane", ".", "Hate", ",", "love", ",", "etc", "."], "text_2_tokenized": ["so", "if", "you", "have", "kids", "during", "an", "entanglement", "are", "the", "kids", "called", "knots", "?", "\ud83e\udd74", "\ud83e\udd14"]} -{"id": "2808-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Thought experiment : assume some arbitrary future , quantum computers exist with 10's of yadabytes of storage, using quantum entanglement they can take a snapshot of every cell, every electrical impulse everything that makes you you and reproduce it somewhere else", "token_idx_1": 22, "text_start_1": 125, "text_end_1": 137, "date_1": "2019-08", "text_2": "Am really angry Tbaj is in this entanglement. #BBNaijia", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 44, "date_2": "2020-08", "text_1_tokenized": ["Thought", "experiment", ":", "assume", "some", "arbitrary", "future", ",", "quantum", "computers", "exist", "with", "10", "'", "s", "of", "yadabytes", "of", "storage", ",", "using", "quantum", "entanglement", "they", "can", "take", "a", "snapshot", "of", "every", "cell", ",", "every", "electrical", "impulse", "everything", "that", "makes", "you", "you", "and", "reproduce", "it", "somewhere", "else"], "text_2_tokenized": ["Am", "really", "angry", "Tbaj", "is", "in", "this", "entanglement", ".", "#BBNaijia"]} -{"id": "2809-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I think its something to do with quantum entanglement......but I'm not 100% sure ...", "token_idx_1": 8, "text_start_1": 41, "text_end_1": 53, "date_1": "2019-08", "text_2": "Lmaooooo kirby really love hangin out my previous entanglement", "token_idx_2": 8, "text_start_2": 50, "text_end_2": 62, "date_2": "2020-08", "text_1_tokenized": ["I", "think", "its", "something", "to", "do", "with", "quantum", "entanglement", "...", "but", "I'm", "not", "100", "%", "sure", "..."], "text_2_tokenized": ["Lmaooooo", "kirby", "really", "love", "hangin", "out", "my", "previous", "entanglement"]} -{"id": "2810-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "I'm getting so dizzy trying to keep up with all of Teresa's entanglement. #QueenOfTheSouth #QueenOfTheSouth", "token_idx_1": 12, "text_start_1": 60, "text_end_1": 72, "date_1": "2019-08", "text_2": "Between haven & my entanglement idk who worse. 
Keep waking me up \ud83d\ude41", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["I'm", "getting", "so", "dizzy", "trying", "to", "keep", "up", "with", "all", "of", "Teresa's", "entanglement", ".", "#QueenOfTheSouth", "#QueenOfTheSouth"], "text_2_tokenized": ["Between", "haven", "&", "my", "entanglement", "idk", "who", "worse", ".", "Keep", "waking", "me", "up", "\ud83d\ude41"]} -{"id": "2811-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Feline entanglement is when you throw a net at a cat and it needs help getting free", "token_idx_1": 1, "text_start_1": 7, "text_end_1": 19, "date_1": "2019-08", "text_2": "Does everyone just say the same things for a couple weeks and then move onto the next like WAP and entanglement", "token_idx_2": 20, "text_start_2": 99, "text_end_2": 111, "date_2": "2020-08", "text_1_tokenized": ["Feline", "entanglement", "is", "when", "you", "throw", "a", "net", "at", "a", "cat", "and", "it", "needs", "help", "getting", "free"], "text_2_tokenized": ["Does", "everyone", "just", "say", "the", "same", "things", "for", "a", "couple", "weeks", "and", "then", "move", "onto", "the", "next", "like", "WAP", "and", "entanglement"]} -{"id": "2812-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "My task was simple. Penetrate the impenetrable via CQT (conscious quantum you know), then simply free the entire collective consciousness via entanglement. Easy. Simple. Next.", "token_idx_1": 25, "text_start_1": 142, "text_end_1": 154, "date_1": "2019-08", "text_2": "This #kiddwaya, #laycoon and #Erica scene reminds me of s MULERO, a IKOTUN and n JAJI entanglement in Badore 2004", "token_idx_2": 18, "text_start_2": 86, "text_end_2": 98, "date_2": "2020-08", "text_1_tokenized": ["My", "task", "was", "simple", ".", "Penetrate", "the", "impenetrable", "via", "CQT", "(", "conscious", "quantum", "you", "know", ")", ",", "then", "simply", "free", "the", "entire", "collective", "consciousness", "via", "entanglement", ".", "Easy", ".", "Simple", ".", "Next", "."], "text_2_tokenized": ["This", "#kiddwaya", ",", "#laycoon", "and", "#Erica", "scene", "reminds", "me", "of", "s", "MULERO", ",", "a", "IKOTUN", "and", "n", "JAJI", "entanglement", "in", "Badore", "2004"]} -{"id": "2813-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "frighteningly old in academic terms*, super awkward when many fellow PhDs are young enough to have been my old A level students and then some *not mere age but age + duration of entanglement", "token_idx_1": 36, "text_start_1": 178, "text_end_1": 190, "date_1": "2019-08", "text_2": "The way Erica switches between mental and physical entanglement should be studied in the university.", "token_idx_2": 8, "text_start_2": 51, "text_end_2": 63, "date_2": "2020-08", "text_1_tokenized": ["frighteningly", "old", "in", "academic", "terms", "*", ",", "super", "awkward", "when", "many", "fellow", "PhDs", "are", "young", "enough", "to", "have", "been", "my", "old", "A", "level", "students", "and", "then", "some", "*", "not", "mere", "age", "but", "age", "+", "duration", "of", "entanglement"], "text_2_tokenized": ["The", "way", "Erica", "switches", "between", "mental", "and", "physical", "entanglement", "should", "be", "studied", "in", "the", "university", "."]} -{"id": "2814-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "I'm just not about to do this with Nova. She jumps from entanglement to entanglement. And she clearly loves a tap back. 
\ud83d\ude44", "token_idx_1": 13, "text_start_1": 56, "text_end_1": 68, "date_1": "2019-08", "text_2": "When you start a new entanglement and then they hit you with the \"I'm actually gay\" realization. Like love I'm really happy you're coming out but fuck I liked you \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["I'm", "just", "not", "about", "to", "do", "this", "with", "Nova", ".", "She", "jumps", "from", "entanglement", "to", "entanglement", ".", "And", "she", "clearly", "loves", "a", "tap", "back", ".", "\ud83d\ude44"], "text_2_tokenized": ["When", "you", "start", "a", "new", "entanglement", "and", "then", "they", "hit", "you", "with", "the", "\"", "I'm", "actually", "gay", "\"", "realization", ".", "Like", "love", "I'm", "really", "happy", "you're", "coming", "out", "but", "fuck", "I", "liked", "you", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"]} -{"id": "2815-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "All 18 species of #Penguins at the highest risk for gillnet entanglement long seabirds due to being pursuit divers, says Ramanus Zydelis. #Keynote @IPC_penguins", "token_idx_1": 11, "text_start_1": 60, "text_end_1": 72, "date_1": "2019-08", "text_2": "Okay wathoni came in with a rosary. But left it om Bright O s bed from last night rendevouz.ehen!!!!!. Mother Mary will save you las las from this entanglement.", "token_idx_2": 33, "text_start_2": 147, "text_end_2": 159, "date_2": "2020-08", "text_1_tokenized": ["All", "18", "species", "of", "#Penguins", "at", "the", "highest", "risk", "for", "gillnet", "entanglement", "long", "seabirds", "due", "to", "being", "pursuit", "divers", ",", "says", "Ramanus", "Zydelis", ".", "#Keynote", "@IPC_penguins"], "text_2_tokenized": ["Okay", "wathoni", "came", "in", "with", "a", "rosary", ".", "But", "left", "it", "om", "Bright", "O", "s", "bed", "from", "last", "night", "rendevouz.ehen", "!", "!", "!", ".", "Mother", "Mary", "will", "save", "you", "las", "las", "from", "this", "entanglement", "."]} -{"id": "2816-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "ending the absurd American entanglement of health insurance and employment status is the first most basic necessary step", "token_idx_1": 4, "text_start_1": 27, "text_end_1": 39, "date_1": "2019-08", "text_2": "#TheRiver1Magic he he men don't have drama. What is this entanglement", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 69, "date_2": "2020-08", "text_1_tokenized": ["ending", "the", "absurd", "American", "entanglement", "of", "health", "insurance", "and", "employment", "status", "is", "the", "first", "most", "basic", "necessary", "step"], "text_2_tokenized": ["#TheRiver1Magic", "he", "he", "men", "don't", "have", "drama", ".", "What", "is", "this", "entanglement"]} -{"id": "2817-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Thank God is Friday. 
Waiting for my chick at transcorp for tonight entanglement.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 79, "date_1": "2019-08", "text_2": "Normalize leaving people alone after an entanglement....just saying.", "token_idx_2": 6, "text_start_2": 40, "text_end_2": 52, "date_2": "2020-08", "text_1_tokenized": ["Thank", "God", "is", "Friday", ".", "Waiting", "for", "my", "chick", "at", "transcorp", "for", "tonight", "entanglement", "."], "text_2_tokenized": ["Normalize", "leaving", "people", "alone", "after", "an", "entanglement", "...", "just", "saying", "."]} -{"id": "2818-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "binge-watching ted-ed is really weird, one minute you're watching the history of cemeteries then the next you're watching about quantum entanglement", "token_idx_1": 21, "text_start_1": 136, "text_end_1": 148, "date_1": "2019-08", "text_2": "It's August Who tryna get into an entanglement", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 46, "date_2": "2020-08", "text_1_tokenized": ["binge-watching", "ted-ed", "is", "really", "weird", ",", "one", "minute", "you're", "watching", "the", "history", "of", "cemeteries", "then", "the", "next", "you're", "watching", "about", "quantum", "entanglement"], "text_2_tokenized": ["It's", "August", "Who", "tryna", "get", "into", "an", "entanglement"]} -{"id": "2819-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "i stay like this every little cousins at quantum entanglement because they sound so we had", "token_idx_1": 9, "text_start_1": 49, "text_end_1": 61, "date_1": "2019-08", "text_2": "oya radio presenters of Kenya, from tomorrow just talk about 1. Corruption 2.poverty 3. Unemployment 4. Insecurity etc Hii mambo ya , exposure , entanglement , nyef nyef muongele kwa virtual meeting.", "token_idx_2": 28, "text_start_2": 145, "text_end_2": 157, "date_2": "2020-08", "text_1_tokenized": ["i", "stay", "like", "this", "every", "little", "cousins", "at", "quantum", "entanglement", "because", "they", "sound", "so", "we", "had"], "text_2_tokenized": ["oya", "radio", "presenters", "of", "Kenya", ",", "from", "tomorrow", "just", "talk", "about", "1", ".", "Corruption", "2.poverty", "3", ".", "Unemployment", "4", ".", "Insecurity", "etc", "Hii", "mambo", "ya", ",", "exposure", ",", "entanglement", ",", "nyef", "nyef", "muongele", "kwa", "virtual", "meeting", "."]} -{"id": "2820-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Return to go through a contract of entanglement It's just spatial solution given a New memes, Alt-Righters eschew \u201cestablishment\u201d", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 47, "date_1": "2019-08", "text_2": "I find that being clear about my intentions from just the onset saves time and emotional investment. 
There's nothing wrong with just openly saying you only want a sexual entanglement gonale gore o buwe maaka ka marato.", "token_idx_2": 30, "text_start_2": 170, "text_end_2": 182, "date_2": "2020-08", "text_1_tokenized": ["Return", "to", "go", "through", "a", "contract", "of", "entanglement", "It's", "just", "spatial", "solution", "given", "a", "New", "memes", ",", "Alt-Righters", "eschew", "\u201c", "establishment", "\u201d"], "text_2_tokenized": ["I", "find", "that", "being", "clear", "about", "my", "intentions", "from", "just", "the", "onset", "saves", "time", "and", "emotional", "investment", ".", "There's", "nothing", "wrong", "with", "just", "openly", "saying", "you", "only", "want", "a", "sexual", "entanglement", "gonale", "gore", "o", "buwe", "maaka", "ka", "marato", "."]} -{"id": "2821-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I love studying climate but it's tough because you realize how truly complicated and multidimensional everything is. Our earth is truly one giant entanglement.", "token_idx_1": 24, "text_start_1": 146, "text_end_1": 158, "date_1": "2019-08", "text_2": "Nengi and ozo entanglement will be filled with nagging times Ozo apologizes at the slightest complaint. Pheeeew #bbnaijia #BBNaijaLockdown", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 26, "date_2": "2020-08", "text_1_tokenized": ["I", "love", "studying", "climate", "but", "it's", "tough", "because", "you", "realize", "how", "truly", "complicated", "and", "multidimensional", "everything", "is", ".", "Our", "earth", "is", "truly", "one", "giant", "entanglement", "."], "text_2_tokenized": ["Nengi", "and", "ozo", "entanglement", "will", "be", "filled", "with", "nagging", "times", "Ozo", "apologizes", "at", "the", "slightest", "complaint", ".", "Pheeeew", "#bbnaijia", "#BBNaijaLockdown"]} -{"id": "2822-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Things have changed, are changing, in qualitative and quantitative ways which we do not and cannot grasp. We are their source, and can feel the threads thrumming in our hands as they spool out into the futures. But their entanglement is beyond our vision.", "token_idx_1": 44, "text_start_1": 221, "text_end_1": 233, "date_1": "2019-08", "text_2": "If we in the car and you hand me the aux cord just know imma put you in your feelings and have you wanting to be with your entanglement :/", "token_idx_2": 28, "text_start_2": 123, "text_end_2": 135, "date_2": "2020-08", "text_1_tokenized": ["Things", "have", "changed", ",", "are", "changing", ",", "in", "qualitative", "and", "quantitative", "ways", "which", "we", "do", "not", "and", "cannot", "grasp", ".", "We", "are", "their", "source", ",", "and", "can", "feel", "the", "threads", "thrumming", "in", "our", "hands", "as", "they", "spool", "out", "into", "the", "futures", ".", "But", "their", "entanglement", "is", "beyond", "our", "vision", "."], "text_2_tokenized": ["If", "we", "in", "the", "car", "and", "you", "hand", "me", "the", "aux", "cord", "just", "know", "imma", "put", "you", "in", "your", "feelings", "and", "have", "you", "wanting", "to", "be", "with", "your", "entanglement", ":/"]} -{"id": "2823-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Did you know that 100,000 marine creatures a year die from plastic entanglement and these are the ones found. Approximately 1 million sea birds also die from plastic. 
this is terrible #ONE4ALL", "token_idx_1": 12, "text_start_1": 67, "text_end_1": 79, "date_1": "2019-08", "text_2": "Some girls be like\"Babe I want space\" nahh bitch dump me, next thing I hear you're in a entanglement.", "token_idx_2": 22, "text_start_2": 88, "text_end_2": 100, "date_2": "2020-08", "text_1_tokenized": ["Did", "you", "know", "that", "100,000", "marine", "creatures", "a", "year", "die", "from", "plastic", "entanglement", "and", "these", "are", "the", "ones", "found", ".", "Approximately", "1", "million", "sea", "birds", "also", "die", "from", "plastic", ".", "this", "is", "terrible", "#ONE4ALL"], "text_2_tokenized": ["Some", "girls", "be", "like", "\"", "Babe", "I", "want", "space", "\"", "nahh", "bitch", "dump", "me", ",", "next", "thing", "I", "hear", "you're", "in", "a", "entanglement", "."]} -{"id": "2824-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Financial entanglement, in financial entanglement what is observed in one is observed in the other no matter the distance", "token_idx_1": 1, "text_start_1": 10, "text_end_1": 22, "date_1": "2019-08", "text_2": "I'm gonna start tweeting again only cause I'm single single with a side of entanglement. Stress free asf", "token_idx_2": 14, "text_start_2": 75, "text_end_2": 87, "date_2": "2020-08", "text_1_tokenized": ["Financial", "entanglement", ",", "in", "financial", "entanglement", "what", "is", "observed", "in", "one", "is", "observed", "in", "the", "other", "no", "matter", "the", "distance"], "text_2_tokenized": ["I'm", "gonna", "start", "tweeting", "again", "only", "cause", "I'm", "single", "single", "with", "a", "side", "of", "entanglement", ".", "Stress", "free", "asf"]} -{"id": "2825-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I watched her In cotton dress Creep across Frosty grass Bare step, silent Visiting graves Where emotional entanglement Laid cold, laid buried I watched her Lay a wreath of moonlight Then surrender to The wind", "token_idx_1": 18, "text_start_1": 106, "text_end_1": 118, "date_1": "2019-08", "text_2": "Now I think Ozo needs to seek pastor Brighto for advise cos this spirit of entanglement needs to be dealt with \ud83d\ude02 #BBNaija #BBNaijaLockdown", "token_idx_2": 15, "text_start_2": 75, "text_end_2": 87, "date_2": "2020-08", "text_1_tokenized": ["I", "watched", "her", "In", "cotton", "dress", "Creep", "across", "Frosty", "grass", "Bare", "step", ",", "silent", "Visiting", "graves", "Where", "emotional", "entanglement", "Laid", "cold", ",", "laid", "buried", "I", "watched", "her", "Lay", "a", "wreath", "of", "moonlight", "Then", "surrender", "to", "The", "wind"], "text_2_tokenized": ["Now", "I", "think", "Ozo", "needs", "to", "seek", "pastor", "Brighto", "for", "advise", "cos", "this", "spirit", "of", "entanglement", "needs", "to", "be", "dealt", "with", "\ud83d\ude02", "#BBNaija", "#BBNaijaLockdown"]} -{"id": "2826-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "1. Reggie was wrong for not coming to the event with Joelle. 2. Fan prediction: someone is gonna have a romantic entanglement with Moses. 
#DearWhitePeople", "token_idx_1": 25, "text_start_1": 113, "text_end_1": 125, "date_1": "2019-08", "text_2": "This weather making me wanna call up my entanglement for the night \ud83d\ude02\ud83e\udd37\ud83c\udffe\u200d\u2642\ufe0f\u2757\ufe0f", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 52, "date_2": "2020-08", "text_1_tokenized": ["1", ".", "Reggie", "was", "wrong", "for", "not", "coming", "to", "the", "event", "with", "Joelle", ".", "2", ".", "Fan", "prediction", ":", "someone", "is", "gonna", "have", "a", "romantic", "entanglement", "with", "Moses", ".", "#DearWhitePeople"], "text_2_tokenized": ["This", "weather", "making", "me", "wanna", "call", "up", "my", "entanglement", "for", "the", "night", "\ud83d\ude02", "\ud83e\udd37\ud83c\udffe\u200d\u2642", "\ufe0f", "\u2757", "\ufe0f"]} -{"id": "2827-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "god I HATE theoretical physicists... always talking about \"exotic string entanglement\" like Bitch wtf did you just say about my hair?!", "token_idx_1": 12, "text_start_1": 73, "text_end_1": 85, "date_1": "2019-08", "text_2": "Anybody I ever had a entanglement with died \ud83d\ude2d", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["god", "I", "HATE", "theoretical", "physicists", "...", "always", "talking", "about", "\"", "exotic", "string", "entanglement", "\"", "like", "Bitch", "wtf", "did", "you", "just", "say", "about", "my", "hair", "?", "!"], "text_2_tokenized": ["Anybody", "I", "ever", "had", "a", "entanglement", "with", "died", "\ud83d\ude2d"]} -{"id": "2828-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Dems will hold back from linking this Epstein nightmare, billionaires using a child sex trafficking ring, to the core of their message of the US being plagued by corruption from wealthy people, all because of an entanglement to the case of a fmr pol the party should leave behind", "token_idx_1": 39, "text_start_1": 212, "text_end_1": 224, "date_1": "2019-08", "text_2": "This entanglement that's gonna go down between Mabutho & Tumi I look forward to it #TheRiver1Magic", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 17, "date_2": "2020-08", "text_1_tokenized": ["Dems", "will", "hold", "back", "from", "linking", "this", "Epstein", "nightmare", ",", "billionaires", "using", "a", "child", "sex", "trafficking", "ring", ",", "to", "the", "core", "of", "their", "message", "of", "the", "US", "being", "plagued", "by", "corruption", "from", "wealthy", "people", ",", "all", "because", "of", "an", "entanglement", "to", "the", "case", "of", "a", "fmr", "pol", "the", "party", "should", "leave", "behind"], "text_2_tokenized": ["This", "entanglement", "that's", "gonna", "go", "down", "between", "Mabutho", "&", "Tumi", "I", "look", "forward", "to", "it", "#TheRiver1Magic"]} -{"id": "2829-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "This \"part-time PM\" thing p!sses me off probably more than it should. It's inaccurate, dishonest, malicious, and poisonous sophistry. She's raising awareness and approval of our country, previously best known businesswise for our Panama Papers entanglement. 
But it's easy, so...", "token_idx_1": 45, "text_start_1": 244, "text_end_1": 256, "date_1": "2019-08", "text_2": "Something about a 2+ y/o entanglement hitting me up to \u201cjust talk\u201d doesn't sit right w me lol", "token_idx_2": 8, "text_start_2": 25, "text_end_2": 37, "date_2": "2020-08", "text_1_tokenized": ["This", "\"", "part-time", "PM", "\"", "thing", "p", "!", "sses", "me", "off", "probably", "more", "than", "it", "should", ".", "It's", "inaccurate", ",", "dishonest", ",", "malicious", ",", "and", "poisonous", "sophistry", ".", "She's", "raising", "awareness", "and", "approval", "of", "our", "country", ",", "previously", "best", "known", "businesswise", "for", "our", "Panama", "Papers", "entanglement", ".", "But", "it's", "easy", ",", "so", "..."], "text_2_tokenized": ["Something", "about", "a", "2", "+", "y", "/", "o", "entanglement", "hitting", "me", "up", "to", "\u201c", "just", "talk", "\u201d", "doesn't", "sit", "right", "w", "me", "lol"]} -{"id": "2830-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "So our first entanglement with public transportation begins with a short hop with @FirstGroupplc. Wish me luck.", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 25, "date_1": "2019-08", "text_2": "Guys did you get your entanglement with august?", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 34, "date_2": "2020-08", "text_1_tokenized": ["So", "our", "first", "entanglement", "with", "public", "transportation", "begins", "with", "a", "short", "hop", "with", "@FirstGroupplc", ".", "Wish", "me", "luck", "."], "text_2_tokenized": ["Guys", "did", "you", "get", "your", "entanglement", "with", "august", "?"]} -{"id": "2831-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "...100,000 marine creatures a year die from plastic entanglement and these are the ones found. Approximately 1 million sea birds also die from plastic...", "token_idx_1": 9, "text_start_1": 52, "text_end_1": 64, "date_1": "2019-08", "text_2": "The sexual tension between two characters who kiss after a midnight swim after sharing their trauma despite the one in the midst of a romantic entanglement that she's trying to get out of", "token_idx_2": 25, "text_start_2": 143, "text_end_2": 155, "date_2": "2020-08", "text_1_tokenized": ["...", "100,000", "marine", "creatures", "a", "year", "die", "from", "plastic", "entanglement", "and", "these", "are", "the", "ones", "found", ".", "Approximately", "1", "million", "sea", "birds", "also", "die", "from", "plastic", "..."], "text_2_tokenized": ["The", "sexual", "tension", "between", "two", "characters", "who", "kiss", "after", "a", "midnight", "swim", "after", "sharing", "their", "trauma", "despite", "the", "one", "in", "the", "midst", "of", "a", "romantic", "entanglement", "that", "she's", "trying", "to", "get", "out", "of"]} -{"id": "2832-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "UBS predicts US$14bn of overseas passive inflows into Chinese stocks in 2019. 
US and China financial entanglement is deepening despite the trade war.", "token_idx_1": 19, "text_start_1": 101, "text_end_1": 113, "date_1": "2019-08", "text_2": "Thinking about quantum entanglement within the diaspora", "token_idx_2": 3, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["UBS", "predicts", "US", "$", "14bn", "of", "overseas", "passive", "inflows", "into", "Chinese", "stocks", "in", "2019", ".", "US", "and", "China", "financial", "entanglement", "is", "deepening", "despite", "the", "trade", "war", "."], "text_2_tokenized": ["Thinking", "about", "quantum", "entanglement", "within", "the", "diaspora"]} -{"id": "2833-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "FACT: Quantum entanglement refers to the discovery of electricity in the forms of pizza and soda.", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 26, "date_1": "2019-08", "text_2": "God if I see this word entanglement again.. it's been 3 weeks bro", "token_idx_2": 6, "text_start_2": 23, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["FACT", ":", "Quantum", "entanglement", "refers", "to", "the", "discovery", "of", "electricity", "in", "the", "forms", "of", "pizza", "and", "soda", "."], "text_2_tokenized": ["God", "if", "I", "see", "this", "word", "entanglement", "again", "..", "it's", "been", "3", "weeks", "bro"]} -{"id": "2834-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Monk Seals are dying from entanglement. That being said, STOP EATING FUCKING FISH. and clean up after yourself if you fish.", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 38, "date_1": "2019-08", "text_2": "Sooo is Will and Jason, you know, in an entanglement?", "token_idx_2": 11, "text_start_2": 40, "text_end_2": 52, "date_2": "2020-08", "text_1_tokenized": ["Monk", "Seals", "are", "dying", "from", "entanglement", ".", "That", "being", "said", ",", "STOP", "EATING", "FUCKING", "FISH", ".", "and", "clean", "up", "after", "yourself", "if", "you", "fish", "."], "text_2_tokenized": ["Sooo", "is", "Will", "and", "Jason", ",", "you", "know", ",", "in", "an", "entanglement", "?"]} -{"id": "2835-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "RT @sumita_grover: #Qantum entanglement is how deep #connections are at sub atomic level. Game of Chance ? NO. 
#STL\u270d\ufe0f", "token_idx_1": 4, "text_start_1": 27, "text_end_1": 39, "date_1": "2019-08", "text_2": "She said entanglement I said sensational future voice \ud83d\ude05", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 21, "date_2": "2020-08", "text_1_tokenized": ["RT", "@sumita_grover", ":", "#Qantum", "entanglement", "is", "how", "deep", "#connections", "are", "at", "sub", "atomic", "level", ".", "Game", "of", "Chance", "?", "NO", ".", "#STL", "\u270d", "\ufe0f"], "text_2_tokenized": ["She", "said", "entanglement", "I", "said", "sensational", "future", "voice", "\ud83d\ude05"]} -{"id": "2836-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Many quantum physicists greater control over entanglement in Bose-Einstein condens\u2026.", "token_idx_1": 6, "text_start_1": 45, "text_end_1": 57, "date_1": "2019-08", "text_2": "I haven't been in a entanglement in a hot minute wtf is life.", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 32, "date_2": "2020-08", "text_1_tokenized": ["Many", "quantum", "physicists", "greater", "control", "over", "entanglement", "in", "Bose-Einstein", "condens", "\u2026", "."], "text_2_tokenized": ["I", "haven't", "been", "in", "a", "entanglement", "in", "a", "hot", "minute", "wtf", "is", "life", "."]} -{"id": "2837-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "The moment you do away with sex or any sexual entanglement, your path to liberation of every obstacle becomes clearer .", "token_idx_1": 10, "text_start_1": 46, "text_end_1": 58, "date_1": "2019-08", "text_2": "This is an entanglement already. I feel Erica is going to have a sit down with Laycon tomorrow to talk about how she slept with Kidd. #BBNaija #Entanglement", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 23, "date_2": "2020-08", "text_1_tokenized": ["The", "moment", "you", "do", "away", "with", "sex", "or", "any", "sexual", "entanglement", ",", "your", "path", "to", "liberation", "of", "every", "obstacle", "becomes", "clearer", "."], "text_2_tokenized": ["This", "is", "an", "entanglement", "already", ".", "I", "feel", "Erica", "is", "going", "to", "have", "a", "sit", "down", "with", "Laycon", "tomorrow", "to", "talk", "about", "how", "she", "slept", "with", "Kidd", ".", "#BBNaija", "#Entanglement"]} -{"id": "2838-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "What if... language evolved via the legibilization of music? Arguably birds and whales communicate with something closer to music than language. Really an entanglement if the two. 
Not separate-but-para;lol like lyrical music of today.", "token_idx_1": 26, "text_start_1": 155, "text_end_1": 167, "date_1": "2019-08", "text_2": "I don't understand what's up with the new ship of trikytee and kaisha, kaisha must have thought having an entanglement with fellow housemate is the strategy to have higher vote poor girl \ud83d\ude00 #BBNaija.", "token_idx_2": 20, "text_start_2": 106, "text_end_2": 118, "date_2": "2020-08", "text_1_tokenized": ["What", "if", "...", "language", "evolved", "via", "the", "legibilization", "of", "music", "?", "Arguably", "birds", "and", "whales", "communicate", "with", "something", "closer", "to", "music", "than", "language", ".", "Really", "an", "entanglement", "if", "the", "two", ".", "Not", "separate-but-para", ";", "lol", "like", "lyrical", "music", "of", "today", "."], "text_2_tokenized": ["I", "don't", "understand", "what's", "up", "with", "the", "new", "ship", "of", "trikytee", "and", "kaisha", ",", "kaisha", "must", "have", "thought", "having", "an", "entanglement", "with", "fellow", "housemate", "is", "the", "strategy", "to", "have", "higher", "vote", "poor", "girl", "\ud83d\ude00", "#BBNaija", "."]} -{"id": "2839-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "In a state of quantum superposition right now if not an entanglement. I love Brownies. May I be bold to tell the story one day.", "token_idx_1": 11, "text_start_1": 56, "text_end_1": 68, "date_1": "2019-08", "text_2": "Nobody is original anymore everybody talks the same dressed the same wants to do the same shit y'all run phrases in a fuckin hole.. the entanglement shit, oh I thought somebody said sum\ud83d\ude12ain't nobody worried bout y'all now it's the \u201cit's the such and such for me\u201d\ud83e\udd26\ud83c\udfff\u200d\u2640\ufe0f", "token_idx_2": 26, "text_start_2": 136, "text_end_2": 148, "date_2": "2020-08", "text_1_tokenized": ["In", "a", "state", "of", "quantum", "superposition", "right", "now", "if", "not", "an", "entanglement", ".", "I", "love", "Brownies", ".", "May", "I", "be", "bold", "to", "tell", "the", "story", "one", "day", "."], "text_2_tokenized": ["Nobody", "is", "original", "anymore", "everybody", "talks", "the", "same", "dressed", "the", "same", "wants", "to", "do", "the", "same", "shit", "y'all", "run", "phrases", "in", "a", "fuckin", "hole", "..", "the", "entanglement", "shit", ",", "oh", "I", "thought", "somebody", "said", "sum", "\ud83d\ude12", "ain't", "nobody", "worried", "bout", "y'all", "now", "it's", "the", "\u201c", "it's", "the", "such", "and", "such", "for", "me", "\u201d", "\ud83e\udd26\ud83c\udfff\u200d\u2640", "\ufe0f"]} -{"id": "2840-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "What happens is a man can only hear the outside and she can only hear the inside. It's nature. 
A confusing entanglement.", "token_idx_1": 23, "text_start_1": 107, "text_end_1": 119, "date_1": "2019-08", "text_2": "Never catch me using these words twitter ruined: Normalize, manifest, entanglement, energy, reciprocate", "token_idx_2": 13, "text_start_2": 70, "text_end_2": 82, "date_2": "2020-08", "text_1_tokenized": ["What", "happens", "is", "a", "man", "can", "only", "hear", "the", "outside", "and", "she", "can", "only", "hear", "the", "inside", ".", "It's", "nature", ".", "A", "confusing", "entanglement", "."], "text_2_tokenized": ["Never", "catch", "me", "using", "these", "words", "twitter", "ruined", ":", "Normalize", ",", "manifest", ",", "entanglement", ",", "energy", ",", "reciprocate"]} -{"id": "2841-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Scientific terms people need to stop using to write fiction: Schr\u00f6dinger's Cat Uncertainty Principle Quantum leap \" entanglement \" teleportation Many-worlds interpretation They are overused and misapplied always", "token_idx_1": 18, "text_start_1": 116, "text_end_1": 128, "date_1": "2019-08", "text_2": "This entanglement between laycon, Erica and Kidwaya is all fun and games until its a guy that's caught in the middle of two ladies. They would've called him all kinda names and probably voted him out. Some hypocrites will still wanna come at me for this. #BBNaijia", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 17, "date_2": "2020-08", "text_1_tokenized": ["Scientific", "terms", "people", "need", "to", "stop", "using", "to", "write", "fiction", ":", "Schr\u00f6dinger's", "Cat", "Uncertainty", "Principle", "Quantum", "leap", "\"", "entanglement", "\"", "teleportation", "Many-worlds", "interpretation", "They", "are", "overused", "and", "misapplied", "always"], "text_2_tokenized": ["This", "entanglement", "between", "laycon", ",", "Erica", "and", "Kidwaya", "is", "all", "fun", "and", "games", "until", "its", "a", "guy", "that's", "caught", "in", "the", "middle", "of", "two", "ladies", ".", "They", "would've", "called", "him", "all", "kinda", "names", "and", "probably", "voted", "him", "out", ".", "Some", "hypocrites", "will", "still", "wanna", "come", "at", "me", "for", "this", ".", "#BBNaijia"]} -{"id": "2842-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "michael tells lauren he wants to be da to take adam down. he admits that it's a dangerous entanglement. lauren asks why he worked for adam in the first place. she figures out that kevin is the nexus of all of this. #yr", "token_idx_1": 19, "text_start_1": 90, "text_end_1": 102, "date_1": "2019-08", "text_2": "I'm not even in an entanglement anymore \ud83d\ude02\ud83d\ude02 I just wanna vibe with people til I *click* with that one fr . 
til then I'm chilling \ud83d\ude0c", "token_idx_2": 5, "text_start_2": 19, "text_end_2": 31, "date_2": "2020-08", "text_1_tokenized": ["michael", "tells", "lauren", "he", "wants", "to", "be", "da", "to", "take", "adam", "down", ".", "he", "admits", "that", "it's", "a", "dangerous", "entanglement", ".", "lauren", "asks", "why", "he", "worked", "for", "adam", "in", "the", "first", "place", ".", "she", "figures", "out", "that", "kevin", "is", "the", "nexus", "of", "all", "of", "this", ".", "#yr"], "text_2_tokenized": ["I'm", "not", "even", "in", "an", "entanglement", "anymore", "\ud83d\ude02", "\ud83d\ude02", "I", "just", "wanna", "vibe", "with", "people", "til", "I", "*", "click", "*", "with", "that", "one", "fr", ".", "til", "then", "I'm", "chilling", "\ud83d\ude0c"]} -{"id": "2843-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "The same with her remembering them with an entanglement which he had quitted the carriage being discreet, to see, but that", "token_idx_1": 8, "text_start_1": 43, "text_end_1": 55, "date_1": "2019-08", "text_2": "That's an entanglement! Wheww", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 22, "date_2": "2020-08", "text_1_tokenized": ["The", "same", "with", "her", "remembering", "them", "with", "an", "entanglement", "which", "he", "had", "quitted", "the", "carriage", "being", "discreet", ",", "to", "see", ",", "but", "that"], "text_2_tokenized": ["That's", "an", "entanglement", "!", "Wheww"]} -{"id": "2844-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The lack of talent in Morrison's cabinet is writ large with Linda Reynolds. She is not up to the job as Defence Min. No way should she be involved in talk of entanglement in #iranustension #auspol #ausmin", "token_idx_1": 34, "text_start_1": 158, "text_end_1": 170, "date_1": "2019-08", "text_2": "Last night was crazy. So...am I in an entanglement or being entangled? 
\ud83e\udd14", "token_idx_2": 11, "text_start_2": 38, "text_end_2": 50, "date_2": "2020-08", "text_1_tokenized": ["The", "lack", "of", "talent", "in", "Morrison's", "cabinet", "is", "writ", "large", "with", "Linda", "Reynolds", ".", "She", "is", "not", "up", "to", "the", "job", "as", "Defence", "Min", ".", "No", "way", "should", "she", "be", "involved", "in", "talk", "of", "entanglement", "in", "#iranustension", "#auspol", "#ausmin"], "text_2_tokenized": ["Last", "night", "was", "crazy", ".", "So", "...", "am", "I", "in", "an", "entanglement", "or", "being", "entangled", "?", "\ud83e\udd14"]} -{"id": "2845-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Me on getting my doctorate in science after defending my thesis \"Cats and the quantum entanglement of the new sheet paradox\" \"So watching my cat puke over the freshly cleaned sheet I just put on my bed, for the fourth time got me to thinking....\"", "token_idx_1": 16, "text_start_1": 86, "text_end_1": 98, "date_1": "2019-08", "text_2": "Trey Songz caught up in an entanglement chile", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 39, "date_2": "2020-08", "text_1_tokenized": ["Me", "on", "getting", "my", "doctorate", "in", "science", "after", "defending", "my", "thesis", "\"", "Cats", "and", "the", "quantum", "entanglement", "of", "the", "new", "sheet", "paradox", "\"", "\"", "So", "watching", "my", "cat", "puke", "over", "the", "freshly", "cleaned", "sheet", "I", "just", "put", "on", "my", "bed", ",", "for", "the", "fourth", "time", "got", "me", "to", "thinking", "...", "\""], "text_2_tokenized": ["Trey", "Songz", "caught", "up", "in", "an", "entanglement", "chile"]} -{"id": "2846-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The paper clip was an act of resistance during the war in Norway, patriotism was noted reading about it, until I started reading about the entanglement, and then cracks began to appear.", "token_idx_1": 27, "text_start_1": 139, "text_end_1": 151, "date_1": "2019-08", "text_2": "There be a lot of validity to Jadas words. One \u201centanglement\u201d while her and Will were on break and y'all question her intelligence??", "token_idx_2": 12, "text_start_2": 48, "text_end_2": 60, "date_2": "2020-08", "text_1_tokenized": ["The", "paper", "clip", "was", "an", "act", "of", "resistance", "during", "the", "war", "in", "Norway", ",", "patriotism", "was", "noted", "reading", "about", "it", ",", "until", "I", "started", "reading", "about", "the", "entanglement", ",", "and", "then", "cracks", "began", "to", "appear", "."], "text_2_tokenized": ["There", "be", "a", "lot", "of", "validity", "to", "Jadas", "words", ".", "One", "\u201c", "entanglement", "\u201d", "while", "her", "and", "Will", "were", "on", "break", "and", "y'all", "question", "her", "intelligence", "?", "?"]} -{"id": "2847-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Social post-qualitative research and the entanglement of neo-liberalism.", "token_idx_1": 5, "text_start_1": 41, "text_end_1": 53, "date_1": "2019-08", "text_2": "Most people say what they don't practice. Especially when it comes to this \"entanglement\" of a thing. But we see all and just \"waka pas\ud83d\udeb6\ud83c\udffd\u200d\u2642\ufe0f\" Well, we can't just judge too by mere hallucinations\ud83e\udd37\u200d\u2642\ufe0f. 
To ignore is divine, to shalaye is human\ud83d\udc4c", "token_idx_2": 15, "text_start_2": 76, "text_end_2": 88, "date_2": "2020-08", "text_1_tokenized": ["Social", "post-qualitative", "research", "and", "the", "entanglement", "of", "neo-liberalism", "."], "text_2_tokenized": ["Most", "people", "say", "what", "they", "don't", "practice", ".", "Especially", "when", "it", "comes", "to", "this", "\"", "entanglement", "\"", "of", "a", "thing", ".", "But", "we", "see", "all", "and", "just", "\"", "waka", "pas", "\ud83d\udeb6\ud83c\udffd\u200d\u2642", "\ufe0f", "\"", "Well", ",", "we", "can't", "just", "judge", "too", "by", "mere", "hallucinations", "\ud83e\udd37\u200d\u2642", "\ufe0f", ".", "To", "ignore", "is", "divine", ",", "to", "shalaye", "is", "human", "\ud83d\udc4c"]} -{"id": "2848-entanglement", "word": "entanglement", "label_binary": 1, "text_1": "Famously I am the only person on this planet who is afraid of getting hurt when entering into a romantic entanglement. This is what makes ME a QUIRKY GIRL!!!!", "token_idx_1": 20, "text_start_1": 105, "text_end_1": 117, "date_1": "2019-08", "text_2": "I'm bored I think I'm gonna get into an entanglement", "token_idx_2": 9, "text_start_2": 40, "text_end_2": 52, "date_2": "2020-08", "text_1_tokenized": ["Famously", "I", "am", "the", "only", "person", "on", "this", "planet", "who", "is", "afraid", "of", "getting", "hurt", "when", "entering", "into", "a", "romantic", "entanglement", ".", "This", "is", "what", "makes", "ME", "a", "QUIRKY", "GIRL", "!", "!", "!"], "text_2_tokenized": ["I'm", "bored", "I", "think", "I'm", "gonna", "get", "into", "an", "entanglement"]} -{"id": "2849-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "\"Enslaved Peoples of Fontainebleau\" follows the lives of 153 enslaved men, women, and children at Bernard Marigny's Fontainebleau. 
Note the severe entanglement of the enslaved with Marigny's personal financial condition, from mortgages to insurance to inheritances.", "token_idx_1": 26, "text_start_1": 147, "text_end_1": 159, "date_1": "2019-08", "text_2": "One minute, an entanglement don't sound too bad then I realize nobody even meets the mf requirements foreal \ud83d\ude02", "token_idx_2": 4, "text_start_2": 15, "text_end_2": 27, "date_2": "2020-08", "text_1_tokenized": ["\"", "Enslaved", "Peoples", "of", "Fontainebleau", "\"", "follows", "the", "lives", "of", "153", "enslaved", "men", ",", "women", ",", "and", "children", "at", "Bernard", "Marigny's", "Fontainebleau", ".", "Note", "the", "severe", "entanglement", "of", "the", "enslaved", "with", "Marigny's", "personal", "financial", "condition", ",", "from", "mortgages", "to", "insurance", "to", "inheritances", "."], "text_2_tokenized": ["One", "minute", ",", "an", "entanglement", "don't", "sound", "too", "bad", "then", "I", "realize", "nobody", "even", "meets", "the", "mf", "requirements", "foreal", "\ud83d\ude02"]} -{"id": "2850-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "if it's a quantum leap, then can i have a quantum entanglement with hangyul pls?", "token_idx_1": 12, "text_start_1": 50, "text_end_1": 62, "date_1": "2019-08", "text_2": "Tryna get me a entanglement going \ud83e\udd23", "token_idx_2": 4, "text_start_2": 15, "text_end_2": 27, "date_2": "2020-08", "text_1_tokenized": ["if", "it's", "a", "quantum", "leap", ",", "then", "can", "i", "have", "a", "quantum", "entanglement", "with", "hangyul", "pls", "?"], "text_2_tokenized": ["Tryna", "get", "me", "a", "entanglement", "going", "\ud83e\udd23"]} -{"id": "2851-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The visit to Barton, if you to it; and determined on me'--that is bewitching eyes were permitted to justify him an entanglement", "token_idx_1": 23, "text_start_1": 115, "text_end_1": 127, "date_1": "2019-08", "text_2": "Dear CMD, Before entanglement with anyother EMR/HMIS software, make sure it has all the features to help your hospital go paperless #GoDigital #HealthTech #HMIS", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 29, "date_2": "2020-08", "text_1_tokenized": ["The", "visit", "to", "Barton", ",", "if", "you", "to", "it", ";", "and", "determined", "on", "me'--that", "is", "bewitching", "eyes", "were", "permitted", "to", "justify", "him", "an", "entanglement"], "text_2_tokenized": ["Dear", "CMD", ",", "Before", "entanglement", "with", "anyother", "EMR", "/", "HMIS", "software", ",", "make", "sure", "it", "has", "all", "the", "features", "to", "help", "your", "hospital", "go", "paperless", "#GoDigital", "#HealthTech", "#HMIS"]} -{"id": "2852-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "A vigorous entanglement of two minds.", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 23, "date_1": "2019-08", "text_2": "I hope Ivanka comes out with an entanglement", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 44, "date_2": "2020-08", "text_1_tokenized": ["A", "vigorous", "entanglement", "of", "two", "minds", "."], "text_2_tokenized": ["I", "hope", "Ivanka", "comes", "out", "with", "an", "entanglement"]} -{"id": "2853-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "\u201cI have never watched an episode of Drag Race!\u201d and \u201cI'm over it and won't watch another episode of Drag Race!\u201d are sentiments in a quantum entanglement.", "token_idx_1": 32, "text_start_1": 140, "text_end_1": 152, "date_1": "2019-08", "text_2": "do ur nonexistent hoes ever cost u an entanglement?\ud83e\udd74", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 50, "date_2": "2020-08", "text_1_tokenized": ["\u201c", "I", "have", "never", "watched", "an", "episode", "of", "Drag", "Race", "!", "\u201d", "and", "\u201c", "I'm", "over", "it", "and", "won't", "watch", "another", "episode", "of", "Drag", "Race", "!", "\u201d", "are", "sentiments", "in", "a", "quantum", "entanglement", "."], "text_2_tokenized": ["do", "ur", "nonexistent", "hoes", "ever", "cost", "u", "an", "entanglement", "?", "\ud83e\udd74"]} -{"id": "2854-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Today Every weight and entanglement hindering my life, career and business, break now. l am free and delivered, no more bondage or limitation in my life. Amen \ud83d\ude4f", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 35, "date_1": "2019-08", "text_2": "bruh i just i wanna be in a entanglement with you. \ud83d\ude2d", "token_idx_2": 8, "text_start_2": 28, "text_end_2": 40, "date_2": "2020-08", "text_1_tokenized": ["Today", "Every", "weight", "and", "entanglement", "hindering", "my", "life", ",", "career", "and", "business", ",", "break", "now", ".", "l", "am", "free", "and", "delivered", ",", "no", "more", "bondage", "or", "limitation", "in", "my", "life", ".", "Amen", "\ud83d\ude4f"], "text_2_tokenized": ["bruh", "i", "just", "i", "wanna", "be", "in", "a", "entanglement", "with", "you", ".", "\ud83d\ude2d"]} -{"id": "2855-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The proposed changes to reduce SI and mortality are insufficient unless we also control the widespread sublethal trauma \u2013 25% of the population of NARW get new scars each year. Each new entanglement scar chips away at their body condition: (2/5)", "token_idx_1": 34, "text_start_1": 186, "text_end_1": 198, "date_1": "2019-08", "text_2": "\u25a1 Single \u25a1 Taken \u2611 In an entanglement with my education\ud83e\udd13", "token_idx_2": 7, "text_start_2": 25, "text_end_2": 37, "date_2": "2020-08", "text_1_tokenized": ["The", "proposed", "changes", "to", "reduce", "SI", "and", "mortality", "are", "insufficient", "unless", "we", "also", "control", "the", "widespread", "sublethal", "trauma", "\u2013", "25", "%", "of", "the", "population", "of", "NARW", "get", "new", "scars", "each", "year", ".", "Each", "new", "entanglement", "scar", "chips", "away", "at", "their", "body", "condition", ":", "(", "2/5", ")"], "text_2_tokenized": ["\u25a1", "Single", "\u25a1", "Taken", "\u2611", "In", "an", "entanglement", "with", "my", "education", "\ud83e\udd13"]} -{"id": "2856-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Loving froddveni entanglement..... 
#BBNaija", "token_idx_1": 2, "text_start_1": 17, "text_end_1": 29, "date_1": "2019-08", "text_2": "I need an entanglement in my life", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 22, "date_2": "2020-08", "text_1_tokenized": ["Loving", "froddveni", "entanglement", "...", "#BBNaija"], "text_2_tokenized": ["I", "need", "an", "entanglement", "in", "my", "life"]} -{"id": "2857-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The double split experiment and quantum entanglement really has me questioning wtf kinda world we live in", "token_idx_1": 6, "text_start_1": 40, "text_end_1": 52, "date_1": "2019-08", "text_2": "I think I've stumbled onto an entanglement", "token_idx_2": 6, "text_start_2": 30, "text_end_2": 42, "date_2": "2020-08", "text_1_tokenized": ["The", "double", "split", "experiment", "and", "quantum", "entanglement", "really", "has", "me", "questioning", "wtf", "kinda", "world", "we", "live", "in"], "text_2_tokenized": ["I", "think", "I've", "stumbled", "onto", "an", "entanglement"]} -{"id": "2858-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "I can't see Brexit as a land of milk and honey. It is at best a 10 year project to unwind decades of unwanted EU entanglement. Not sure I'm prepared to wait around for it to come good. Long term it might be fine if the socialist tendencies can be resisted. Tax is still too high..", "token_idx_1": 26, "text_start_1": 113, "text_end_1": 125, "date_1": "2019-08", "text_2": "But did you know Mun G has a hit song entitled entanglement", "token_idx_2": 11, "text_start_2": 47, "text_end_2": 59, "date_2": "2020-08", "text_1_tokenized": ["I", "can't", "see", "Brexit", "as", "a", "land", "of", "milk", "and", "honey", ".", "It", "is", "at", "best", "a", "10", "year", "project", "to", "unwind", "decades", "of", "unwanted", "EU", "entanglement", ".", "Not", "sure", "I'm", "prepared", "to", "wait", "around", "for", "it", "to", "come", "good", ".", "Long", "term", "it", "might", "be", "fine", "if", "the", "socialist", "tendencies", "can", "be", "resisted", ".", "Tax", "is", "still", "too", "high", ".."], "text_2_tokenized": ["But", "did", "you", "know", "Mun", "G", "has", "a", "hit", "song", "entitled", "entanglement"]} -{"id": "2859-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "the way my dad just explained to me what quantum entanglement is and i was able to connect it to loona theories.... big brain energy ladies", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 61, "date_1": "2019-08", "text_2": "Hahahahaha Ghana woman u go take do ur entanglement things so say ein body barb am aaa, she go come blackmail you?? Sagaaaa\ud83d\ude05\ud83d\ude05\ud83d\ude05", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 51, "date_2": "2020-08", "text_1_tokenized": ["the", "way", "my", "dad", "just", "explained", "to", "me", "what", "quantum", "entanglement", "is", "and", "i", "was", "able", "to", "connect", "it", "to", "loona", "theories", "...", "big", "brain", "energy", "ladies"], "text_2_tokenized": ["Hahahahaha", "Ghana", "woman", "u", "go", "take", "do", "ur", "entanglement", "things", "so", "say", "ein", "body", "barb", "am", "aaa", ",", "she", "go", "come", "blackmail", "you", "?", "?", "Sagaaaa", "\ud83d\ude05", "\ud83d\ude05", "\ud83d\ude05"]} -{"id": "2860-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "The human brain is a warm, wet environment. In these conditions, quantum entanglement between particles can't be maintained. 
But Phosphorus-31 is a peculiar isotope with an isolated nuclear spin nestled inside of Posner Molecules. It might be be using entanglement for cognition.", "token_idx_1": 15, "text_start_1": 73, "text_end_1": 85, "date_1": "2019-08", "text_2": "We should call this month entanglement", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 38, "date_2": "2020-08", "text_1_tokenized": ["The", "human", "brain", "is", "a", "warm", ",", "wet", "environment", ".", "In", "these", "conditions", ",", "quantum", "entanglement", "between", "particles", "can't", "be", "maintained", ".", "But", "Phosphorus", "-", "31", "is", "a", "peculiar", "isotope", "with", "an", "isolated", "nuclear", "spin", "nestled", "inside", "of", "Posner", "Molecules", ".", "It", "might", "be", "be", "using", "entanglement", "for", "cognition", "."], "text_2_tokenized": ["We", "should", "call", "this", "month", "entanglement"]} -{"id": "2861-entanglement", "word": "entanglement", "label_binary": 0, "text_1": "Fall Camp News! After what appeared to be a scary entanglement yesterday. It looks like junior safety Cash Gilliam is going to be a full participant at practice today!", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 62, "date_1": "2019-08", "text_2": "if you weren't about me in hs you aren't about me now...your just lonely and I don't want an \u2728entanglement\u2728\ud83d\ude0c", "token_idx_2": 22, "text_start_2": 94, "text_end_2": 106, "date_2": "2020-08", "text_1_tokenized": ["Fall", "Camp", "News", "!", "After", "what", "appeared", "to", "be", "a", "scary", "entanglement", "yesterday", ".", "It", "looks", "like", "junior", "safety", "Cash", "Gilliam", "is", "going", "to", "be", "a", "full", "participant", "at", "practice", "today", "!"], "text_2_tokenized": ["if", "you", "weren't", "about", "me", "in", "hs", "you", "aren't", "about", "me", "now", "...", "your", "just", "lonely", "and", "I", "don't", "want", "an", "\u2728", "entanglement", "\u2728", "\ud83d\ude0c"]} -{"id": "0100-folklore", "word": "folklore", "label_binary": 0, "text_1": "Lol a lot of historical events/folklore would've been funny as hell through a Twitter feed", "token_idx_1": 7, "text_start_1": 31, "text_end_1": 39, "date_1": "2019-08", "text_2": "Has it been 16 weeks since I ordered the folklore vinyl bc it sure feels like it and I would like it to have been shipped yesterday please", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 49, "date_2": "2020-08", "text_1_tokenized": ["Lol", "a", "lot", "of", "historical", "events", "/", "folklore", "would've", "been", "funny", "as", "hell", "through", "a", "Twitter", "feed"], "text_2_tokenized": ["Has", "it", "been", "16", "weeks", "since", "I", "ordered", "the", "folklore", "vinyl", "bc", "it", "sure", "feels", "like", "it", "and", "I", "would", "like", "it", "to", "have", "been", "shipped", "yesterday", "please"]} -{"id": "0101-folklore", "word": "folklore", "label_binary": 0, "text_1": "Seeing @CERobbAuthor on my timeline makes me want to return to my folklore/Mythology roots. Persephone and I may have some catching up to do.", "token_idx_1": 12, "text_start_1": 66, "text_end_1": 74, "date_1": "2019-08", "text_2": "road trip. fall. leaves changing. rain. pastries. chai. folklore. 
yes.", "token_idx_2": 14, "text_start_2": 56, "text_end_2": 64, "date_2": "2020-08", "text_1_tokenized": ["Seeing", "@CERobbAuthor", "on", "my", "timeline", "makes", "me", "want", "to", "return", "to", "my", "folklore", "/", "Mythology", "roots", ".", "Persephone", "and", "I", "may", "have", "some", "catching", "up", "to", "do", "."], "text_2_tokenized": ["road", "trip", ".", "fall", ".", "leaves", "changing", ".", "rain", ".", "pastries", ".", "chai", ".", "folklore", ".", "yes", "."]} -{"id": "0102-folklore", "word": "folklore", "label_binary": 0, "text_1": "Discovered that there's a teacher in Quebec using my THE AULD MITHER to show kids how to weave folklore into their stories. This makes me happy.", "token_idx_1": 18, "text_start_1": 95, "text_end_1": 103, "date_1": "2019-08", "text_2": "i got my folklore cd in the mail and told my mom it was the \u201crunning like water\u201d edition & she said \u201cisnt the line before that \u2018leaving like a father'?\u201d which made me lose it because: 1. she exposed herself as a folklore stan 2. she dragged the fuck out of me & my daddy issues", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-08", "text_1_tokenized": ["Discovered", "that", "there's", "a", "teacher", "in", "Quebec", "using", "my", "THE", "AULD", "MITHER", "to", "show", "kids", "how", "to", "weave", "folklore", "into", "their", "stories", ".", "This", "makes", "me", "happy", "."], "text_2_tokenized": ["i", "got", "my", "folklore", "cd", "in", "the", "mail", "and", "told", "my", "mom", "it", "was", "the", "\u201c", "running", "like", "water", "\u201d", "edition", "&", "she", "said", "\u201c", "isnt", "the", "line", "before", "that", "\u2018", "leaving", "like", "a", "father", "'", "?", "\u201d", "which", "made", "me", "lose", "it", "because", ":", "1", ".", "she", "exposed", "herself", "as", "a", "folklore", "stan", "2", ".", "she", "dragged", "the", "fuck", "out", "of", "me", "&", "my", "daddy", "issues"]} -{"id": "0103-folklore", "word": "folklore", "label_binary": 0, "text_1": "Does anyone have any book recommendations about Nordic folklore?? I feel the need to read about that Right Now", "token_idx_1": 8, "text_start_1": 55, "text_end_1": 63, "date_1": "2019-08", "text_2": "want taylor to do a tiny desk concert for folklore", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 50, "date_2": "2020-08", "text_1_tokenized": ["Does", "anyone", "have", "any", "book", "recommendations", "about", "Nordic", "folklore", "?", "?", "I", "feel", "the", "need", "to", "read", "about", "that", "Right", "Now"], "text_2_tokenized": ["want", "taylor", "to", "do", "a", "tiny", "desk", "concert", "for", "folklore"]} -{"id": "0104-folklore", "word": "folklore", "label_binary": 0, "text_1": "A young person I work with taught me some Scottish folklore I'd never heard of today. Anyone else heard of Alexander \u201cSawney\u201d Bean? #Folklore #Scotland #Myth", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-08", "text_2": "It's wild how different the standard edition of folklore feels compared to the deluxe edition. 
the lakes completely changes the take-home feeling when it ends it's wild how one song can be that powerful!!", "token_idx_2": 8, "text_start_2": 48, "text_end_2": 56, "date_2": "2020-08", "text_1_tokenized": ["A", "young", "person", "I", "work", "with", "taught", "me", "some", "Scottish", "folklore", "I'd", "never", "heard", "of", "today", ".", "Anyone", "else", "heard", "of", "Alexander", "\u201c", "Sawney", "\u201d", "Bean", "?", "#Folklore", "#Scotland", "#Myth"], "text_2_tokenized": ["It's", "wild", "how", "different", "the", "standard", "edition", "of", "folklore", "feels", "compared", "to", "the", "deluxe", "edition", ".", "the", "lakes", "completely", "changes", "the", "take-home", "feeling", "when", "it", "ends", "it's", "wild", "how", "one", "song", "can", "be", "that", "powerful", "!", "!"]} -{"id": "0105-folklore", "word": "folklore", "label_binary": 0, "text_1": "It appears that they have given the oil tanker a new name (tracking id), anyone familiar with maritime folklore will say it's bad luck to re-name a boat/ship, it should be burned(if wooden) or dismantled for scrap", "token_idx_1": 21, "text_start_1": 103, "text_end_1": 111, "date_1": "2019-08", "text_2": "folklore is really THAT girl..... and oh my god I am so in love with her", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 8, "date_2": "2020-08", "text_1_tokenized": ["It", "appears", "that", "they", "have", "given", "the", "oil", "tanker", "a", "new", "name", "(", "tracking", "id", ")", ",", "anyone", "familiar", "with", "maritime", "folklore", "will", "say", "it's", "bad", "luck", "to", "re-name", "a", "boat", "/", "ship", ",", "it", "should", "be", "burned", "(", "if", "wooden", ")", "or", "dismantled", "for", "scrap"], "text_2_tokenized": ["folklore", "is", "really", "THAT", "girl", "...", "and", "oh", "my", "god", "I", "am", "so", "in", "love", "with", "her"]} -{"id": "0106-folklore", "word": "folklore", "label_binary": 1, "text_1": "Say, call that girl with the tan tress awn! Call Wolfhound! Wolf of the sea. Folchu! Folchu! \u2014 Very good now. That folklore's straight from the ass his mouth.", "token_idx_1": 29, "text_start_1": 115, "text_end_1": 125, "date_1": "2019-08", "text_2": "Bloody Mary's are the worst drink. 1: you're just drinking a tomato with celery. 2: it's named after a folklore where a ghost lady scares the shit out of you through a mirror in the dark These are just the facts", "token_idx_2": 23, "text_start_2": 103, "text_end_2": 111, "date_2": "2020-08", "text_1_tokenized": ["Say", ",", "call", "that", "girl", "with", "the", "tan", "tress", "awn", "!", "Call", "Wolfhound", "!", "Wolf", "of", "the", "sea", ".", "Folchu", "!", "Folchu", "!", "\u2014", "Very", "good", "now", ".", "That", "folklore's", "straight", "from", "the", "ass", "his", "mouth", "."], "text_2_tokenized": ["Bloody", "Mary's", "are", "the", "worst", "drink", ".", "1", ":", "you're", "just", "drinking", "a", "tomato", "with", "celery", ".", "2", ":", "it's", "named", "after", "a", "folklore", "where", "a", "ghost", "lady", "scares", "the", "shit", "out", "of", "you", "through", "a", "mirror", "in", "the", "dark", "These", "are", "just", "the", "facts"]} -{"id": "0107-folklore", "word": "folklore", "label_binary": 0, "text_1": "My superficial staddy folklore is RT all her compliments . Grand opening. 
Grand closing", "token_idx_1": 3, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-08", "text_2": "i asked to my irls what album should win aoty and they said folklore i love them \ud83d\ude2d\ud83d\ude2d", "token_idx_2": 13, "text_start_2": 60, "text_end_2": 68, "date_2": "2020-08", "text_1_tokenized": ["My", "superficial", "staddy", "folklore", "is", "RT", "all", "her", "compliments", ".", "Grand", "opening", ".", "Grand", "closing"], "text_2_tokenized": ["i", "asked", "to", "my", "irls", "what", "album", "should", "win", "aoty", "and", "they", "said", "folklore", "i", "love", "them", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "0108-folklore", "word": "folklore", "label_binary": 1, "text_1": "(sweats immensely) growing up as a girl in SEA is not fun but hey 90% folklore is just dont wander around so much or else you'll get cursed In the weirdest most obscure way you can think of", "token_idx_1": 18, "text_start_1": 70, "text_end_1": 78, "date_1": "2019-08", "text_2": "#TheVigil I can't help but feel that this film wasted it's potential. I find it interesting that writer Keith Thomas explores Jewish culture in detail which is not often seen. Following the religious act of Vigil eventually releases a dybbuk - a demon from Jewish folklore.", "token_idx_2": 47, "text_start_2": 264, "text_end_2": 272, "date_2": "2020-08", "text_1_tokenized": ["(", "sweats", "immensely", ")", "growing", "up", "as", "a", "girl", "in", "SEA", "is", "not", "fun", "but", "hey", "90", "%", "folklore", "is", "just", "dont", "wander", "around", "so", "much", "or", "else", "you'll", "get", "cursed", "In", "the", "weirdest", "most", "obscure", "way", "you", "can", "think", "of"], "text_2_tokenized": ["#TheVigil", "I", "can't", "help", "but", "feel", "that", "this", "film", "wasted", "it's", "potential", ".", "I", "find", "it", "interesting", "that", "writer", "Keith", "Thomas", "explores", "Jewish", "culture", "in", "detail", "which", "is", "not", "often", "seen", ".", "Following", "the", "religious", "act", "of", "Vigil", "eventually", "releases", "a", "dybbuk", "-", "a", "demon", "from", "Jewish", "folklore", "."]} -{"id": "0109-folklore", "word": "folklore", "label_binary": 0, "text_1": "ordered some new books on sustainability & sewing, coastal accessibility & racism, and old ass lit. 
toss in horror/folklore and shady crime and we'll have the usual mix.", "token_idx_1": 23, "text_start_1": 123, "text_end_1": 131, "date_1": "2019-08", "text_2": "If you see me driving an hour into the countryside to listen to folklore with Starbucks no u didn't \u2764\ufe0f", "token_idx_2": 13, "text_start_2": 64, "text_end_2": 72, "date_2": "2020-08", "text_1_tokenized": ["ordered", "some", "new", "books", "on", "sustainability", "&", "sewing", ",", "coastal", "accessibility", "&", "racism", ",", "and", "old", "ass", "lit", ".", "toss", "in", "horror", "/", "folklore", "and", "shady", "crime", "and", "we'll", "have", "the", "usual", "mix", "."], "text_2_tokenized": ["If", "you", "see", "me", "driving", "an", "hour", "into", "the", "countryside", "to", "listen", "to", "folklore", "with", "Starbucks", "no", "u", "didn't", "\u2764", "\ufe0f"]} -{"id": "0110-folklore", "word": "folklore", "label_binary": 0, "text_1": "I was just about to get out of character but I been doing folklore good and said I wasn't gone let nothing else negative get in my way.", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 66, "date_1": "2019-08", "text_2": "i broke three toes today while dramatically pacing around the house to folklore - it hurt, but i think i deserve praise for lining up my yelp with the key change in betty", "token_idx_2": 12, "text_start_2": 71, "text_end_2": 79, "date_2": "2020-08", "text_1_tokenized": ["I", "was", "just", "about", "to", "get", "out", "of", "character", "but", "I", "been", "doing", "folklore", "good", "and", "said", "I", "wasn't", "gone", "let", "nothing", "else", "negative", "get", "in", "my", "way", "."], "text_2_tokenized": ["i", "broke", "three", "toes", "today", "while", "dramatically", "pacing", "around", "the", "house", "to", "folklore", "-", "it", "hurt", ",", "but", "i", "think", "i", "deserve", "praise", "for", "lining", "up", "my", "yelp", "with", "the", "key", "change", "in", "betty"]} -{"id": "0116-folklore", "word": "folklore", "label_binary": 0, "text_1": "Music playing in my office means: you can enter; I can stop at any point. True crime podcast like @SwordAndScale or a folklore podcast like @lorepodcast that means: Come back later; I have some heavy work to do; I'm focused. #truecrime #podcast #lore", "token_idx_1": 25, "text_start_1": 118, "text_end_1": 126, "date_1": "2019-08", "text_2": "we don't talk enough about folklore being the best soundtrack to any book. just playing the album the moment you begin reading is an outter world experience", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["Music", "playing", "in", "my", "office", "means", ":", "you", "can", "enter", ";", "I", "can", "stop", "at", "any", "point", ".", "True", "crime", "podcast", "like", "@SwordAndScale", "or", "a", "folklore", "podcast", "like", "@lorepodcast", "that", "means", ":", "Come", "back", "later", ";", "I", "have", "some", "heavy", "work", "to", "do;", "I'm", "focused", ".", "#truecrime", "#podcast", "#lore"], "text_2_tokenized": ["we", "don't", "talk", "enough", "about", "folklore", "being", "the", "best", "soundtrack", "to", "any", "book", ".", "just", "playing", "the", "album", "the", "moment", "you", "begin", "reading", "is", "an", "outter", "world", "experience"]} -{"id": "0117-folklore", "word": "folklore", "label_binary": 0, "text_1": "*grump face* I feel like an article on folklore from 1924 should not cost 20 bucks or more to access. 
I know there's server upkeep and such but dang.", "token_idx_1": 10, "text_start_1": 39, "text_end_1": 47, "date_1": "2019-08", "text_2": "staring pensively out the window while it thunderstorms listening to folklore and thinking about how much fucking work i have to do", "token_idx_2": 10, "text_start_2": 69, "text_end_2": 77, "date_2": "2020-08", "text_1_tokenized": ["*", "grump", "face", "*", "I", "feel", "like", "an", "article", "on", "folklore", "from", "1924", "should", "not", "cost", "20", "bucks", "or", "more", "to", "access", ".", "I", "know", "there's", "server", "upkeep", "and", "such", "but", "dang", "."], "text_2_tokenized": ["staring", "pensively", "out", "the", "window", "while", "it", "thunderstorms", "listening", "to", "folklore", "and", "thinking", "about", "how", "much", "fucking", "work", "i", "have", "to", "do"]} -{"id": "0118-folklore", "word": "folklore", "label_binary": 0, "text_1": "If you see this tweet and have the time, reply with an entity, creature or spirit from your ethnicity's folklore. Positive or negative.", "token_idx_1": 21, "text_start_1": 104, "text_end_1": 112, "date_1": "2019-08", "text_2": "walking @ the pond by my house listening to @taylorswift13 & thinking how folklore makes me wanna learn acoustic, and then I walk past someone that's playing the guitar on a bench. Sounds like this is a sign me thinks", "token_idx_2": 13, "text_start_2": 78, "text_end_2": 86, "date_2": "2020-08", "text_1_tokenized": ["If", "you", "see", "this", "tweet", "and", "have", "the", "time", ",", "reply", "with", "an", "entity", ",", "creature", "or", "spirit", "from", "your", "ethnicity's", "folklore", ".", "Positive", "or", "negative", "."], "text_2_tokenized": ["walking", "@", "the", "pond", "by", "my", "house", "listening", "to", "@taylorswift13", "&", "thinking", "how", "folklore", "makes", "me", "wanna", "learn", "acoustic", ",", "and", "then", "I", "walk", "past", "someone", "that's", "playing", "the", "guitar", "on", "a", "bench", ".", "Sounds", "like", "this", "is", "a", "sign", "me", "thinks"]} -{"id": "0119-folklore", "word": "folklore", "label_binary": 0, "text_1": "There's a thunderstorm outside so clearly it's the perfect time to watch videos about folklore monsters.", "token_idx_1": 14, "text_start_1": 86, "text_end_1": 94, "date_1": "2019-08", "text_2": "Cardigan on folklore is my favorite song. I wish @taylorswift13 would love me", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 20, "date_2": "2020-08", "text_1_tokenized": ["There's", "a", "thunderstorm", "outside", "so", "clearly", "it's", "the", "perfect", "time", "to", "watch", "videos", "about", "folklore", "monsters", "."], "text_2_tokenized": ["Cardigan", "on", "folklore", "is", "my", "favorite", "song", ".", "I", "wish", "@taylorswift13", "would", "love", "me"]} -{"id": "0120-folklore", "word": "folklore", "label_binary": 0, "text_1": "Match tha ya Abbas Mustan ki movie. Big Ben Stokes has wrote his name in the cricketing folklore. It is moments like these why Test cricket stays alive. 
#Ashes2019", "token_idx_1": 18, "text_start_1": 88, "text_end_1": 96, "date_1": "2019-08", "text_2": "ever since folklore i've been listening to taylor way more and omg i love her music it's so good", "token_idx_2": 2, "text_start_2": 11, "text_end_2": 19, "date_2": "2020-08", "text_1_tokenized": ["Match", "tha", "ya", "Abbas", "Mustan", "ki", "movie", ".", "Big", "Ben", "Stokes", "has", "wrote", "his", "name", "in", "the", "cricketing", "folklore", ".", "It", "is", "moments", "like", "these", "why", "Test", "cricket", "stays", "alive", ".", "#Ashes2019"], "text_2_tokenized": ["ever", "since", "folklore", "i've", "been", "listening", "to", "taylor", "way", "more", "and", "omg", "i", "love", "her", "music", "it's", "so", "good"]} -{"id": "0121-folklore", "word": "folklore", "label_binary": 0, "text_1": "Are there any pacific island folklore that hint at mermaid like creatures?", "token_idx_1": 5, "text_start_1": 29, "text_end_1": 37, "date_1": "2019-08", "text_2": "the way exile is the best song on folklore", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-08", "text_1_tokenized": ["Are", "there", "any", "pacific", "island", "folklore", "that", "hint", "at", "mermaid", "like", "creatures", "?"], "text_2_tokenized": ["the", "way", "exile", "is", "the", "best", "song", "on", "folklore"]} -{"id": "0122-folklore", "word": "folklore", "label_binary": 0, "text_1": "We have planets and other celestial bodies named after: Aztec folklore, Celtic, Easter Island, Gallic, German, Greek, Hawaiian, Inuit, Japanese, Mesopotamian, Norse, Roman and Thai. Don't worry! There are a billion stars up there. The Indian and Chinese gods will get their turn", "token_idx_1": 11, "text_start_1": 62, "text_end_1": 70, "date_1": "2019-08", "text_2": "Also I'm feeling alot of feelings about folklore right now thanks taylor swift", "token_idx_2": 7, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["We", "have", "planets", "and", "other", "celestial", "bodies", "named", "after", ":", "Aztec", "folklore", ",", "Celtic", ",", "Easter", "Island", ",", "Gallic", ",", "German", ",", "Greek", ",", "Hawaiian", ",", "Inuit", ",", "Japanese", ",", "Mesopotamian", ",", "Norse", ",", "Roman", "and", "Thai", ".", "Don't", "worry", "!", "There", "are", "a", "billion", "stars", "up", "there", ".", "The", "Indian", "and", "Chinese", "gods", "will", "get", "their", "turn"], "text_2_tokenized": ["Also", "I'm", "feeling", "alot", "of", "feelings", "about", "folklore", "right", "now", "thanks", "taylor", "swift"]} -{"id": "0123-folklore", "word": "folklore", "label_binary": 0, "text_1": "FINAL SCORE | One of the greatest games held at the Gabba! Lincoln McCarthy writes himself into footy folklore. BL 10.15 (75) def GEEL 10.14 (74) #AFLLionsCats", "token_idx_1": 19, "text_start_1": 102, "text_end_1": 110, "date_1": "2019-08", "text_2": "i'm still not convinced folklore is real... you mean to tell me taylor swift dropped a surprise album?? 
sounds fake", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 32, "date_2": "2020-08", "text_1_tokenized": ["FINAL", "SCORE", "|", "One", "of", "the", "greatest", "games", "held", "at", "the", "Gabba", "!", "Lincoln", "McCarthy", "writes", "himself", "into", "footy", "folklore", ".", "BL", "10.15", "(", "75", ")", "def", "GEEL", "10.14", "(", "74", ")", "#AFLLionsCats"], "text_2_tokenized": ["i'm", "still", "not", "convinced", "folklore", "is", "real", "...", "you", "mean", "to", "tell", "me", "taylor", "swift", "dropped", "a", "surprise", "album", "?", "?", "sounds", "fake"]} -{"id": "0124-folklore", "word": "folklore", "label_binary": 0, "text_1": "Divock Origi etching his name into Liverpool folklore a little more every week", "token_idx_1": 7, "text_start_1": 45, "text_end_1": 53, "date_1": "2019-08", "text_2": "I would do anything for a signed folklore cd \ud83d\ude1e", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 41, "date_2": "2020-08", "text_1_tokenized": ["Divock", "Origi", "etching", "his", "name", "into", "Liverpool", "folklore", "a", "little", "more", "every", "week"], "text_2_tokenized": ["I", "would", "do", "anything", "for", "a", "signed", "folklore", "cd", "\ud83d\ude1e"]} -{"id": "0125-folklore", "word": "folklore", "label_binary": 0, "text_1": "An urban legend, urban myth, urban tale, or contemporary legend is a genre of folklore comprising stories circulated as true, especially...", "token_idx_1": 17, "text_start_1": 78, "text_end_1": 86, "date_1": "2019-08", "text_2": "i have a hard time listening to taylor's folklore because it's just a little too pretty and emotional for me to listen to on a normal occasion and not when i'm having an anxiety/depression spout", "token_idx_2": 8, "text_start_2": 41, "text_end_2": 49, "date_2": "2020-08", "text_1_tokenized": ["An", "urban", "legend", ",", "urban", "myth", ",", "urban", "tale", ",", "or", "contemporary", "legend", "is", "a", "genre", "of", "folklore", "comprising", "stories", "circulated", "as", "true", ",", "especially", "..."], "text_2_tokenized": ["i", "have", "a", "hard", "time", "listening", "to", "taylor's", "folklore", "because", "it's", "just", "a", "little", "too", "pretty", "and", "emotional", "for", "me", "to", "listen", "to", "on", "a", "normal", "occasion", "and", "not", "when", "i'm", "having", "an", "anxiety", "/", "depression", "spout"]} -{"id": "0126-folklore", "word": "folklore", "label_binary": 0, "text_1": "Sat in the Spoon and it's like the United Nations here, I thought the folklore finished yesterday. But seems they are having an impromptu party. Just watched Steven Small cross the demarcation line, won't be the first time and I'm sure not the last. 
\ud83d\ude02\ud83d\ude01\ud83d\ude05\ud83d\ude06\ud83d\ude00", "token_idx_1": 15, "text_start_1": 70, "text_end_1": 78, "date_1": "2019-08", "text_2": "Hi @halsey what's your favorite song in folklore ?", "token_idx_2": 7, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["Sat", "in", "the", "Spoon", "and", "it's", "like", "the", "United", "Nations", "here", ",", "I", "thought", "the", "folklore", "finished", "yesterday", ".", "But", "seems", "they", "are", "having", "an", "impromptu", "party", ".", "Just", "watched", "Steven", "Small", "cross", "the", "demarcation", "line", ",", "won't", "be", "the", "first", "time", "and", "I'm", "sure", "not", "the", "last", ".", "\ud83d\ude02", "\ud83d\ude01", "\ud83d\ude05", "\ud83d\ude06", "\ud83d\ude00"], "text_2_tokenized": ["Hi", "@halsey", "what's", "your", "favorite", "song", "in", "folklore", "?"]} -{"id": "0127-folklore", "word": "folklore", "label_binary": 0, "text_1": "I personally find a certain magnificence in the way the verses of Surah Kahf end. No form of poetry, no folklore, no song can come close to how aesthetically Allah narrates us through his own words a message that deserves to be captivated in our hearts.", "token_idx_1": 22, "text_start_1": 104, "text_end_1": 112, "date_1": "2019-08", "text_2": "The lakes is a masterpiece, the way folklore has no skips. I am not surprised tho its taylor", "token_idx_2": 8, "text_start_2": 36, "text_end_2": 44, "date_2": "2020-08", "text_1_tokenized": ["I", "personally", "find", "a", "certain", "magnificence", "in", "the", "way", "the", "verses", "of", "Surah", "Kahf", "end", ".", "No", "form", "of", "poetry", ",", "no", "folklore", ",", "no", "song", "can", "come", "close", "to", "how", "aesthetically", "Allah", "narrates", "us", "through", "his", "own", "words", "a", "message", "that", "deserves", "to", "be", "captivated", "in", "our", "hearts", "."], "text_2_tokenized": ["The", "lakes", "is", "a", "masterpiece", ",", "the", "way", "folklore", "has", "no", "skips", ".", "I", "am", "not", "surprised", "tho", "its", "taylor"]} -{"id": "0128-folklore", "word": "folklore", "label_binary": 0, "text_1": "I just learned that there's a type of birthmark called a Mongolian spot that babies get and in Korean folklore it's said to come from a spirit slapping the lazy baby's arse to tell it to hurry up and be born I LOVE", "token_idx_1": 19, "text_start_1": 102, "text_end_1": 110, "date_1": "2019-08", "text_2": "reply w ur folklore cd posts i'll rt !", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 19, "date_2": "2020-08", "text_1_tokenized": ["I", "just", "learned", "that", "there's", "a", "type", "of", "birthmark", "called", "a", "Mongolian", "spot", "that", "babies", "get", "and", "in", "Korean", "folklore", "it's", "said", "to", "come", "from", "a", "spirit", "slapping", "the", "lazy", "baby's", "arse", "to", "tell", "it", "to", "hurry", "up", "and", "be", "born", "I", "LOVE"], "text_2_tokenized": ["reply", "w", "ur", "folklore", "cd", "posts", "i'll", "rt", "!"]} -{"id": "0129-folklore", "word": "folklore", "label_binary": 0, "text_1": "Hey guys if you can leave me some cool creatures and ghosts from various things whether it's books, films, myths, legends, and folklore. Would love some things to look up for ideas and inspiration.", "token_idx_1": 26, "text_start_1": 127, "text_end_1": 135, "date_1": "2019-08", "text_2": "just listened to london boy for the first time since folklore came out. 
serotonin", "token_idx_2": 10, "text_start_2": 53, "text_end_2": 61, "date_2": "2020-08", "text_1_tokenized": ["Hey", "guys", "if", "you", "can", "leave", "me", "some", "cool", "creatures", "and", "ghosts", "from", "various", "things", "whether", "it's", "books", ",", "films", ",", "myths", ",", "legends", ",", "and", "folklore", ".", "Would", "love", "some", "things", "to", "look", "up", "for", "ideas", "and", "inspiration", "."], "text_2_tokenized": ["just", "listened", "to", "london", "boy", "for", "the", "first", "time", "since", "folklore", "came", "out", ".", "serotonin"]} -{"id": "0130-folklore", "word": "folklore", "label_binary": 0, "text_1": "want... to write. a fic based on irish folklore", "token_idx_1": 10, "text_start_1": 39, "text_end_1": 47, "date_1": "2019-08", "text_2": "since the explosion i couldn't stream any music but now I'm streaming folklore for the 1st time w epiphany is hitting so hard omg\ud83d\ude2d", "token_idx_2": 12, "text_start_2": 70, "text_end_2": 78, "date_2": "2020-08", "text_1_tokenized": ["want", "...", "to", "write", ".", "a", "fic", "based", "on", "irish", "folklore"], "text_2_tokenized": ["since", "the", "explosion", "i", "couldn't", "stream", "any", "music", "but", "now", "I'm", "streaming", "folklore", "for", "the", "1st", "time", "w", "epiphany", "is", "hitting", "so", "hard", "omg", "\ud83d\ude2d"]} -{"id": "0131-folklore", "word": "folklore", "label_binary": 0, "text_1": "Countries that were not colonised have the language of home which is the language of governance, of the market place and of science. Most of us that were colonised speak more than one language. Our languages have only kept at the folklore level. Prof Mamdani #PALFA2019Kampala", "token_idx_1": 44, "text_start_1": 230, "text_end_1": 238, "date_1": "2019-08", "text_2": "The best songs on folklore are : my tears ricochet, the last great american dynasty, the one, and my personal favorite, exile", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-08", "text_1_tokenized": ["Countries", "that", "were", "not", "colonised", "have", "the", "language", "of", "home", "which", "is", "the", "language", "of", "governance", ",", "of", "the", "market", "place", "and", "of", "science", ".", "Most", "of", "us", "that", "were", "colonised", "speak", "more", "than", "one", "language", ".", "Our", "languages", "have", "only", "kept", "at", "the", "folklore", "level", ".", "Prof", "Mamdani", "#PALFA2019Kampala"], "text_2_tokenized": ["The", "best", "songs", "on", "folklore", "are", ":", "my", "tears", "ricochet", ",", "the", "last", "great", "american", "dynasty", ",", "the", "one", ",", "and", "my", "personal", "favorite", ",", "exile"]} -{"id": "0132-folklore", "word": "folklore", "label_binary": 0, "text_1": "Adrian has his first chance to write himself into Liverpool folklore", "token_idx_1": 10, "text_start_1": 60, "text_end_1": 68, "date_1": "2019-08", "text_2": "I got 2 folklore CDs today! And the star confetti. 
Picked up my mail, played it in the car, heard the Lakes \ud83c\udf1f, went grocery shopping where they were playing Lover on the speakers yay \ud83c\udf1f\ud83c\udf1f\ud83c\udf1f", "token_idx_2": 3, "text_start_2": 8, "text_end_2": 16, "date_2": "2020-08", "text_1_tokenized": ["Adrian", "has", "his", "first", "chance", "to", "write", "himself", "into", "Liverpool", "folklore"], "text_2_tokenized": ["I", "got", "2", "folklore", "CDs", "today", "!", "And", "the", "star", "confetti", ".", "Picked", "up", "my", "mail", ",", "played", "it", "in", "the", "car", ",", "heard", "the", "Lakes", "\ud83c\udf1f", ",", "went", "grocery", "shopping", "where", "they", "were", "playing", "Lover", "on", "the", "speakers", "yay", "\ud83c\udf1f", "\ud83c\udf1f", "\ud83c\udf1f"]} -{"id": "0133-folklore", "word": "folklore", "label_binary": 1, "text_1": "Ben Stokes is going down in English cricket folklore. #Ashes", "token_idx_1": 8, "text_start_1": 44, "text_end_1": 52, "date_1": "2019-08", "text_2": "Brief Synopsis: A story about learning and maturing with nature, between adverse and poetic situations. The lessons of ecology, folklore and connections with the environment, a rebirth within us.", "token_idx_2": 23, "text_start_2": 128, "text_end_2": 136, "date_2": "2020-08", "text_1_tokenized": ["Ben", "Stokes", "is", "going", "down", "in", "English", "cricket", "folklore", ".", "#Ashes"], "text_2_tokenized": ["Brief", "Synopsis", ":", "A", "story", "about", "learning", "and", "maturing", "with", "nature", ",", "between", "adverse", "and", "poetic", "situations", ".", "The", "lessons", "of", "ecology", ",", "folklore", "and", "connections", "with", "the", "environment", ",", "a", "rebirth", "within", "us", "."]} -{"id": "0134-folklore", "word": "folklore", "label_binary": 0, "text_1": "On way back I reached out to a healthy bit of hawthorn and noticed that the oak next to it was filled with disease. I took this to mean we are in a time of foxes not lions but hey...who needs folklore to deduce we are in such an age?", "token_idx_1": 44, "text_start_1": 192, "text_end_1": 200, "date_1": "2019-08", "text_2": "crying hours provided by folklore", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["On", "way", "back", "I", "reached", "out", "to", "a", "healthy", "bit", "of", "hawthorn", "and", "noticed", "that", "the", "oak", "next", "to", "it", "was", "filled", "with", "disease", ".", "I", "took", "this", "to", "mean", "we", "are", "in", "a", "time", "of", "foxes", "not", "lions", "but", "hey", "...", "who", "needs", "folklore", "to", "deduce", "we", "are", "in", "such", "an", "age", "?"], "text_2_tokenized": ["crying", "hours", "provided", "by", "folklore"]} -{"id": "0135-folklore", "word": "folklore", "label_binary": 0, "text_1": "hey, Linoone got a 3rd evolve and a Galarian form you think Mightyena could get one too ? 
Linoone got the type Dark after all so, Mightyena being a Dark type, it could become a Dark/Fairy - Dark/Ghost type as some legend ghost dog like in the UK folklore", "token_idx_1": 55, "text_start_1": 246, "text_end_1": 254, "date_1": "2019-08", "text_2": "folklore is truly an amazing album me thinks", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 8, "date_2": "2020-08", "text_1_tokenized": ["hey", ",", "Linoone", "got", "a", "3rd", "evolve", "and", "a", "Galarian", "form", "you", "think", "Mightyena", "could", "get", "one", "too", "?", "Linoone", "got", "the", "type", "Dark", "after", "all", "so", ",", "Mightyena", "being", "a", "Dark", "type", ",", "it", "could", "become", "a", "Dark", "/", "Fairy", "-", "Dark", "/", "Ghost", "type", "as", "some", "legend", "ghost", "dog", "like", "in", "the", "UK", "folklore"], "text_2_tokenized": ["folklore", "is", "truly", "an", "amazing", "album", "me", "thinks"]} -{"id": "0136-folklore", "word": "folklore", "label_binary": 0, "text_1": "Anyone have any recommendations for books on European folklore, mainly interested in the central and northern regions rather than Mediterranean.", "token_idx_1": 8, "text_start_1": 54, "text_end_1": 62, "date_1": "2019-08", "text_2": "another day of not getting through folklore without crying", "token_idx_2": 6, "text_start_2": 35, "text_end_2": 43, "date_2": "2020-08", "text_1_tokenized": ["Anyone", "have", "any", "recommendations", "for", "books", "on", "European", "folklore", ",", "mainly", "interested", "in", "the", "central", "and", "northern", "regions", "rather", "than", "Mediterranean", "."], "text_2_tokenized": ["another", "day", "of", "not", "getting", "through", "folklore", "without", "crying"]} -{"id": "0137-folklore", "word": "folklore", "label_binary": 0, "text_1": "my classes this semester are in the most random freakin buildings on campus. community nursing in the school of design? folklore and sexuality in one of the engineering buildings? asian american cinema in freaking DRL????", "token_idx_1": 22, "text_start_1": 120, "text_end_1": 128, "date_1": "2019-08", "text_2": "can taylor sing call it what you want on the lover folklore tour", "token_idx_2": 11, "text_start_2": 51, "text_end_2": 59, "date_2": "2020-08", "text_1_tokenized": ["my", "classes", "this", "semester", "are", "in", "the", "most", "random", "freakin", "buildings", "on", "campus", ".", "community", "nursing", "in", "the", "school", "of", "design", "?", "folklore", "and", "sexuality", "in", "one", "of", "the", "engineering", "buildings", "?", "asian", "american", "cinema", "in", "freaking", "DRL", "?", "?", "?"], "text_2_tokenized": ["can", "taylor", "sing", "call", "it", "what", "you", "want", "on", "the", "lover", "folklore", "tour"]} -{"id": "0138-folklore", "word": "folklore", "label_binary": 0, "text_1": "Are there any documentaries about folklore that WILL NOT try to claim that fairy folk/cryptids/etc. 
are real?", "token_idx_1": 5, "text_start_1": 34, "text_end_1": 42, "date_1": "2019-08", "text_2": "watching f*gs on a blanket listening to folklore do poppers in golden gate park is transporting me to another dimension", "token_idx_2": 9, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["Are", "there", "any", "documentaries", "about", "folklore", "that", "WILL", "NOT", "try", "to", "claim", "that", "fairy", "folk", "/", "cryptids", "/", "etc", ".", "are", "real", "?"], "text_2_tokenized": ["watching", "f", "*", "gs", "on", "a", "blanket", "listening", "to", "folklore", "do", "poppers", "in", "golden", "gate", "park", "is", "transporting", "me", "to", "another", "dimension"]} -{"id": "0140-folklore", "word": "folklore", "label_binary": 0, "text_1": "My most recent episode was about sasquatch, which has its roots in the folklore of both Native American and First Nations people as well as European wildman folklore! #FolkloreThursday", "token_idx_1": 14, "text_start_1": 71, "text_end_1": 79, "date_1": "2019-08", "text_2": "Currently lying on the floor listening to folklore and astral projecting", "token_idx_2": 7, "text_start_2": 42, "text_end_2": 50, "date_2": "2020-08", "text_1_tokenized": ["My", "most", "recent", "episode", "was", "about", "sasquatch", ",", "which", "has", "its", "roots", "in", "the", "folklore", "of", "both", "Native", "American", "and", "First", "Nations", "people", "as", "well", "as", "European", "wildman", "folklore", "!", "#FolkloreThursday"], "text_2_tokenized": ["Currently", "lying", "on", "the", "floor", "listening", "to", "folklore", "and", "astral", "projecting"]} -{"id": "0141-folklore", "word": "folklore", "label_binary": 0, "text_1": "this business is full of snake oil salesmen under the guise of some sort of academia (PhD and such). These people are stupid. I hear one more explanation that an inverted yield curve is calling a recession by these fools I will puke. 
Nothing but folklore by clowns", "token_idx_1": 50, "text_start_1": 246, "text_end_1": 254, "date_1": "2019-08", "text_2": "im streaming all day folklore and august hits different rn", "token_idx_2": 4, "text_start_2": 21, "text_end_2": 29, "date_2": "2020-08", "text_1_tokenized": ["this", "business", "is", "full", "of", "snake", "oil", "salesmen", "under", "the", "guise", "of", "some", "sort", "of", "academia", "(", "PhD", "and", "such", ")", ".", "These", "people", "are", "stupid", ".", "I", "hear", "one", "more", "explanation", "that", "an", "inverted", "yield", "curve", "is", "calling", "a", "recession", "by", "these", "fools", "I", "will", "puke", ".", "Nothing", "but", "folklore", "by", "clowns"], "text_2_tokenized": ["im", "streaming", "all", "day", "folklore", "and", "august", "hits", "different", "rn"]} -{"id": "0142-folklore", "word": "folklore", "label_binary": 0, "text_1": "message for the PhD student who will see this tweet while piecing together a \u201cdigital folklore of early social media\u201d dissertation several decades from now: sup dude .", "token_idx_1": 16, "text_start_1": 86, "text_end_1": 94, "date_1": "2019-08", "text_2": "listen i have so many taylor swift hating years to compensate for so let me spam folklore lyrics", "token_idx_2": 16, "text_start_2": 81, "text_end_2": 89, "date_2": "2020-08", "text_1_tokenized": ["message", "for", "the", "PhD", "student", "who", "will", "see", "this", "tweet", "while", "piecing", "together", "a", "\u201c", "digital", "folklore", "of", "early", "social", "media", "\u201d", "dissertation", "several", "decades", "from", "now", ":", "sup", "dude", "."], "text_2_tokenized": ["listen", "i", "have", "so", "many", "taylor", "swift", "hating", "years", "to", "compensate", "for", "so", "let", "me", "spam", "folklore", "lyrics"]} -{"id": "0143-folklore", "word": "folklore", "label_binary": 0, "text_1": "My great grandfather was a Grand Mason and he was a successful landowner. I wish I could learn more about him. I've always been fascinated about the folklore of #secretsocieties and how some people were given certain privileges. 
#Americanhistory #SaturdayThoughts", "token_idx_1": 29, "text_start_1": 149, "text_end_1": 157, "date_1": "2019-08", "text_2": "i feel like folklore has pretty much cemented taylor's status as a legend", "token_idx_2": 3, "text_start_2": 12, "text_end_2": 20, "date_2": "2020-08", "text_1_tokenized": ["My", "great", "grandfather", "was", "a", "Grand", "Mason", "and", "he", "was", "a", "successful", "landowner", ".", "I", "wish", "I", "could", "learn", "more", "about", "him", ".", "I've", "always", "been", "fascinated", "about", "the", "folklore", "of", "#secretsocieties", "and", "how", "some", "people", "were", "given", "certain", "privileges", ".", "#Americanhistory", "#SaturdayThoughts"], "text_2_tokenized": ["i", "feel", "like", "folklore", "has", "pretty", "much", "cemented", "taylor's", "status", "as", "a", "legend"]} -{"id": "0144-folklore", "word": "folklore", "label_binary": 0, "text_1": "affordability - i think we all have to agree racing is on a high atm not necessarily because it's an in thing but because technology apps, corporate bookies within a few clicks of the smartphone allow it to be, that and the Australian lifestyle through development of folklore", "token_idx_1": 49, "text_start_1": 268, "text_end_1": 276, "date_1": "2019-08", "text_2": "wendy listening to folklore >>>>", "token_idx_2": 3, "text_start_2": 19, "text_end_2": 27, "date_2": "2020-08", "text_1_tokenized": ["affordability", "-", "i", "think", "we", "all", "have", "to", "agree", "racing", "is", "on", "a", "high", "atm", "not", "necessarily", "because", "it's", "an", "in", "thing", "but", "because", "technology", "apps", ",", "corporate", "bookies", "within", "a", "few", "clicks", "of", "the", "smartphone", "allow", "it", "to", "be", ",", "that", "and", "the", "Australian", "lifestyle", "through", "development", "of", "folklore"], "text_2_tokenized": ["wendy", "listening", "to", "folklore", ">", ">", ">"]} -{"id": "0145-folklore", "word": "folklore", "label_binary": 0, "text_1": "Thank you Dale Morris. A @westernbulldogs legend, through your deeds in red, white and blue you've cast yourself into Footscray folklore. When our dream dangled in front of us, you reached out and grabbed it for us. 
The #MightyWest salutes you and wishes you well in retirement.", "token_idx_1": 23, "text_start_1": 128, "text_end_1": 136, "date_1": "2019-08", "text_2": "me after listening to folklore in full for the first time, feeling 2863 emotions at once and not being able to process any of them: so she has daddy issues?", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 30, "date_2": "2020-08", "text_1_tokenized": ["Thank", "you", "Dale", "Morris", ".", "A", "@westernbulldogs", "legend", ",", "through", "your", "deeds", "in", "red", ",", "white", "and", "blue", "you've", "cast", "yourself", "into", "Footscray", "folklore", ".", "When", "our", "dream", "dangled", "in", "front", "of", "us", ",", "you", "reached", "out", "and", "grabbed", "it", "for", "us", ".", "The", "#MightyWest", "salutes", "you", "and", "wishes", "you", "well", "in", "retirement", "."], "text_2_tokenized": ["me", "after", "listening", "to", "folklore", "in", "full", "for", "the", "first", "time", ",", "feeling", "2863", "emotions", "at", "once", "and", "not", "being", "able", "to", "process", "any", "of", "them", ":", "so", "she", "has", "daddy", "issues", "?"]} -{"id": "0146-folklore", "word": "folklore", "label_binary": 0, "text_1": "I can't wait for the day that there's an anime about American folklore and we get a sexy chupacabra", "token_idx_1": 12, "text_start_1": 62, "text_end_1": 70, "date_1": "2019-08", "text_2": "The only thing that's keeping me alive through this era is you swifties and the anticipation of folklore's numbers on charts and records being broken, all that too being done by the fans themselves \ud83e\udd74", "token_idx_2": 17, "text_start_2": 96, "text_end_2": 106, "date_2": "2020-08", "text_1_tokenized": ["I", "can't", "wait", "for", "the", "day", "that", "there's", "an", "anime", "about", "American", "folklore", "and", "we", "get", "a", "sexy", "chupacabra"], "text_2_tokenized": ["The", "only", "thing", "that's", "keeping", "me", "alive", "through", "this", "era", "is", "you", "swifties", "and", "the", "anticipation", "of", "folklore's", "numbers", "on", "charts", "and", "records", "being", "broken", ",", "all", "that", "too", "being", "done", "by", "the", "fans", "themselves", "\ud83e\udd74"]} -{"id": "0147-folklore", "word": "folklore", "label_binary": 0, "text_1": "This dude just said \"Boys of the Backstreet\" He made em sound like a whole folklore\ud83d\ude02\ud83d\ude02", "token_idx_1": 17, "text_start_1": 75, "text_end_1": 83, "date_1": "2019-08", "text_2": "my target app said they didn't have folklore cds but when i went inside they had some i'm so happy \ud83e\udd7a", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 44, "date_2": "2020-08", "text_1_tokenized": ["This", "dude", "just", "said", "\"", "Boys", "of", "the", "Backstreet", "\"", "He", "made", "em", "sound", "like", "a", "whole", "folklore", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["my", "target", "app", "said", "they", "didn't", "have", "folklore", "cds", "but", "when", "i", "went", "inside", "they", "had", "some", "i'm", "so", "happy", "\ud83e\udd7a"]} -{"id": "0148-folklore", "word": "folklore", "label_binary": 0, "text_1": "Cometh the hour, cometh the man. @benstokes38 you single handedly wrote your name into Cricket folklore today. I'm in complete astonishment and awe. Very well done man. 
\ud83d\udc4f\ud83c\udffc\ud83d\udc4f\ud83c\udffc\ud83d\udc4f\ud83c\udffc #Ashes", "token_idx_1": 17, "text_start_1": 95, "text_end_1": 103, "date_1": "2019-08", "text_2": "someone snag me a signed folklore cd I'm begging", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["Cometh", "the", "hour", ",", "cometh", "the", "man", ".", "@benstokes38", "you", "single", "handedly", "wrote", "your", "name", "into", "Cricket", "folklore", "today", ".", "I'm", "in", "complete", "astonishment", "and", "awe", ".", "Very", "well", "done", "man", ".", "\ud83d\udc4f\ud83c\udffc", "\ud83d\udc4f\ud83c\udffc", "\ud83d\udc4f\ud83c\udffc", "#Ashes"], "text_2_tokenized": ["someone", "snag", "me", "a", "signed", "folklore", "cd", "I'm", "begging"]} -{"id": "0149-folklore", "word": "folklore", "label_binary": 0, "text_1": "suggestions for online resources on Native American mythology/folklore please (and thank you in advance)", "token_idx_1": 9, "text_start_1": 62, "text_end_1": 70, "date_1": "2019-08", "text_2": "listening to folklore in the kitchen of my man who\u2018s not my man but who is my man hits differently........", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 21, "date_2": "2020-08", "text_1_tokenized": ["suggestions", "for", "online", "resources", "on", "Native", "American", "mythology", "/", "folklore", "please", "(", "and", "thank", "you", "in", "advance", ")"], "text_2_tokenized": ["listening", "to", "folklore", "in", "the", "kitchen", "of", "my", "man", "who", "\u2018", "s", "not", "my", "man", "but", "who", "is", "my", "man", "hits", "differently", "..."]} -{"id": "0150-folklore", "word": "folklore", "label_binary": 0, "text_1": "Considering I'll never make it to The UK, even when my book came out from Future Fiction London 10 years ago, I didn't go to a launch or anything. Pesky psychotic breaks. I always enjoy the wee hours dose of British Twitter esp folklore Thursday & the medievalists/history people.", "token_idx_1": 47, "text_start_1": 228, "text_end_1": 236, "date_1": "2019-08", "text_2": "might get snacks, listen to music, and then go cry over folklore hbu?", "token_idx_2": 13, "text_start_2": 56, "text_end_2": 64, "date_2": "2020-08", "text_1_tokenized": ["Considering", "I'll", "never", "make", "it", "to", "The", "UK", ",", "even", "when", "my", "book", "came", "out", "from", "Future", "Fiction", "London", "10", "years", "ago", ",", "I", "didn't", "go", "to", "a", "launch", "or", "anything", ".", "Pesky", "psychotic", "breaks", ".", "I", "always", "enjoy", "the", "wee", "hours", "dose", "of", "British", "Twitter", "esp", "folklore", "Thursday", "&", "the", "medievalists", "/", "history", "people", "."], "text_2_tokenized": ["might", "get", "snacks", ",", "listen", "to", "music", ",", "and", "then", "go", "cry", "over", "folklore", "hbu", "?"]} -{"id": "0151-folklore", "word": "folklore", "label_binary": 0, "text_1": "making a reading list of books and academic articles about the connection between sociology/ culture and folklore, because that's what my bard's area of interest is ... you know, like what normal people do", "token_idx_1": 17, "text_start_1": 105, "text_end_1": 113, "date_1": "2019-08", "text_2": "So jealous of everyone getting signed folklore CDs. Wonder when they will come to the UK... 
@taylornation13 @taylorswift13", "token_idx_2": 6, "text_start_2": 38, "text_end_2": 46, "date_2": "2020-08", "text_1_tokenized": ["making", "a", "reading", "list", "of", "books", "and", "academic", "articles", "about", "the", "connection", "between", "sociology", "/", "culture", "and", "folklore", ",", "because", "that's", "what", "my", "bard's", "area", "of", "interest", "is", "...", "you", "know", ",", "like", "what", "normal", "people", "do"], "text_2_tokenized": ["So", "jealous", "of", "everyone", "getting", "signed", "folklore", "CDs", ".", "Wonder", "when", "they", "will", "come", "to", "the", "UK", "...", "@taylornation13", "@taylorswift13"]} -{"id": "0152-folklore", "word": "folklore", "label_binary": 0, "text_1": "Is there a book that you read a year or more ago but still think about regularly? For me, it's To Kill A Kingdom by @alliechristo. That book had such a lasting impact on my reading, with its siren folklore and intense action scenes. I think I need a reread ASAP.", "token_idx_1": 43, "text_start_1": 197, "text_end_1": 205, "date_1": "2019-08", "text_2": "cruel summer hits differently after listening to folklore", "token_idx_2": 7, "text_start_2": 49, "text_end_2": 57, "date_2": "2020-08", "text_1_tokenized": ["Is", "there", "a", "book", "that", "you", "read", "a", "year", "or", "more", "ago", "but", "still", "think", "about", "regularly", "?", "For", "me", ",", "it's", "To", "Kill", "A", "Kingdom", "by", "@alliechristo", ".", "That", "book", "had", "such", "a", "lasting", "impact", "on", "my", "reading", ",", "with", "its", "siren", "folklore", "and", "intense", "action", "scenes", ".", "I", "think", "I", "need", "a", "reread", "ASAP", "."], "text_2_tokenized": ["cruel", "summer", "hits", "differently", "after", "listening", "to", "folklore"]} -{"id": "0153-folklore", "word": "folklore", "label_binary": 0, "text_1": "I think @Jaspritbumrah93 is already a part of Indian cricket folklore, whatever happens from here now in cricket with him, he will be talked about always! 
#ChampionBowler #WhatABowler #INDvsWI", "token_idx_1": 10, "text_start_1": 61, "text_end_1": 69, "date_1": "2019-08", "text_2": "folklore really is a whole ass experience her best lyrics me thinks", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 8, "date_2": "2020-08", "text_1_tokenized": ["I", "think", "@Jaspritbumrah93", "is", "already", "a", "part", "of", "Indian", "cricket", "folklore", ",", "whatever", "happens", "from", "here", "now", "in", "cricket", "with", "him", ",", "he", "will", "be", "talked", "about", "always", "!", "#ChampionBowler", "#WhatABowler", "#INDvsWI"], "text_2_tokenized": ["folklore", "really", "is", "a", "whole", "ass", "experience", "her", "best", "lyrics", "me", "thinks"]} -{"id": "0154-folklore", "word": "folklore", "label_binary": 1, "text_1": "Pip's sorrow; and when one mixes them with folklore, and tries to make Jennifer miss him more.", "token_idx_1": 9, "text_start_1": 43, "text_end_1": 51, "date_1": "2019-08", "text_2": "For the last time mum I'm not a fox furry, this is a japanese inari mask and it stems from centuries old folklore", "token_idx_2": 23, "text_start_2": 105, "text_end_2": 113, "date_2": "2020-08", "text_1_tokenized": ["Pip's", "sorrow", ";", "and", "when", "one", "mixes", "them", "with", "folklore", ",", "and", "tries", "to", "make", "Jennifer", "miss", "him", "more", "."], "text_2_tokenized": ["For", "the", "last", "time", "mum", "I'm", "not", "a", "fox", "furry", ",", "this", "is", "a", "japanese", "inari", "mask", "and", "it", "stems", "from", "centuries", "old", "folklore"]} -{"id": "0155-folklore", "word": "folklore", "label_binary": 0, "text_1": "The point of delaying the #TheOpenBookAPT \ud83d\udcd6 release was so I could sketch out/observe more dynamic, ethereal & distinct characters... Always making sure to alert my friends what's in the storyboard \ud83e\udd14 some became folklore. 
Now that I've pretty much finished it, I love the 012 \ud83d\udc99", "token_idx_1": 38, "text_start_1": 216, "text_end_1": 224, "date_1": "2019-08", "text_2": "what's your favorite song from folklore and why is it illicit affairs?", "token_idx_2": 5, "text_start_2": 31, "text_end_2": 39, "date_2": "2020-08", "text_1_tokenized": ["The", "point", "of", "delaying", "the", "#TheOpenBookAPT", "\ud83d\udcd6", "release", "was", "so", "I", "could", "sketch", "out", "/", "observe", "more", "dynamic", ",", "ethereal", "&", "distinct", "characters", "...", "Always", "making", "sure", "to", "alert", "my", "friends", "what's", "in", "the", "storyboard", "\ud83e\udd14", "some", "became", "folklore", ".", "Now", "that", "I've", "pretty", "much", "finished", "it", ",", "I", "love", "the", "012", "\ud83d\udc99"], "text_2_tokenized": ["what's", "your", "favorite", "song", "from", "folklore", "and", "why", "is", "it", "illicit", "affairs", "?"]} -{"id": "0156-folklore", "word": "folklore", "label_binary": 0, "text_1": "That's a dope folklore, but not as dope as you", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 22, "date_1": "2019-08", "text_2": "new swiftie is reviewing folklore i\u2018m not ready for seeing swifties hetwash seven", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["That's", "a", "dope", "folklore", ",", "but", "not", "as", "dope", "as", "you"], "text_2_tokenized": ["new", "swiftie", "is", "reviewing", "folklore", "i", "\u2018", "m", "not", "ready", "for", "seeing", "swifties", "hetwash", "seven"]} -{"id": "0157-folklore", "word": "folklore", "label_binary": 0, "text_1": "You'll go down in folklore @markduffy7. Thank you #bouncekiller #sufc #twitterblades", "token_idx_1": 4, "text_start_1": 18, "text_end_1": 26, "date_1": "2019-08", "text_2": "Can't get over that the biggest slap on folklore is a song about Taylor Swift buying a house", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["You'll", "go", "down", "in", "folklore", "@markduffy7", ".", "Thank", "you", "#bouncekiller", "#sufc", "#twitterblades"], "text_2_tokenized": ["Can't", "get", "over", "that", "the", "biggest", "slap", "on", "folklore", "is", "a", "song", "about", "Taylor", "Swift", "buying", "a", "house"]} -{"id": "0158-folklore", "word": "folklore", "label_binary": 0, "text_1": "The lesser known and creepier #LocalLegend is little feet. They are a small extra-dimensional race from Native American folklore that mimic local customs, but are only a foot tall and are known to abduct people. 
Most famously depicted in Louis L'Amour Haunted Mesa.", "token_idx_1": 19, "text_start_1": 120, "text_end_1": 128, "date_1": "2019-08", "text_2": "just randomly started singing exhile while doing dishes, one of the few songs ive been skipping, so i officially love all of folklore", "token_idx_2": 24, "text_start_2": 125, "text_end_2": 133, "date_2": "2020-08", "text_1_tokenized": ["The", "lesser", "known", "and", "creepier", "#LocalLegend", "is", "little", "feet", ".", "They", "are", "a", "small", "extra-dimensional", "race", "from", "Native", "American", "folklore", "that", "mimic", "local", "customs", ",", "but", "are", "only", "a", "foot", "tall", "and", "are", "known", "to", "abduct", "people", ".", "Most", "famously", "depicted", "in", "Louis", "L'Amour", "Haunted", "Mesa", "."], "text_2_tokenized": ["just", "randomly", "started", "singing", "exhile", "while", "doing", "dishes", ",", "one", "of", "the", "few", "songs", "ive", "been", "skipping", ",", "so", "i", "officially", "love", "all", "of", "folklore"]} -{"id": "0159-folklore", "word": "folklore", "label_binary": 0, "text_1": "China and the Asians don't write down, the speak in oral tradition folklore. Confucius and Buddha and the Dao aren't meant to be read at all, they are meant to be heard through voice.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 75, "date_1": "2019-08", "text_2": "listening to folklore for the first time and wow this is going to hurt", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 21, "date_2": "2020-08", "text_1_tokenized": ["China", "and", "the", "Asians", "don't", "write", "down", ",", "the", "speak", "in", "oral", "tradition", "folklore", ".", "Confucius", "and", "Buddha", "and", "the", "Dao", "aren't", "meant", "to", "be", "read", "at", "all", ",", "they", "are", "meant", "to", "be", "heard", "through", "voice", "."], "text_2_tokenized": ["listening", "to", "folklore", "for", "the", "first", "time", "and", "wow", "this", "is", "going", "to", "hurt"]} -{"id": "0160-folklore", "word": "folklore", "label_binary": 1, "text_1": "Just got home from a drive out of town. So glad I stayed up reading Twitter to relax from several hours behind the wheel (no sarcasm!) - caught a series of threads on Babylonian/Akkadian demons, plus djinn. New folx to follow. Fun folklore to rabbithole down. \ud83d\ude08", "token_idx_1": 51, "text_start_1": 231, "text_end_1": 239, "date_1": "2019-08", "text_2": "can u believe I was working at a summer camp and none of the kids knew folklore", "token_idx_2": 16, "text_start_2": 71, "text_end_2": 79, "date_2": "2020-08", "text_1_tokenized": ["Just", "got", "home", "from", "a", "drive", "out", "of", "town", ".", "So", "glad", "I", "stayed", "up", "reading", "Twitter", "to", "relax", "from", "several", "hours", "behind", "the", "wheel", "(", "no", "sarcasm", "!", ")", "-", "caught", "a", "series", "of", "threads", "on", "Babylonian", "/", "Akkadian", "demons", ",", "plus", "djinn", ".", "New", "folx", "to", "follow", ".", "Fun", "folklore", "to", "rabbithole", "down", ".", "\ud83d\ude08"], "text_2_tokenized": ["can", "u", "believe", "I", "was", "working", "at", "a", "summer", "camp", "and", "none", "of", "the", "kids", "knew", "folklore"]} -{"id": "0161-folklore", "word": "folklore", "label_binary": 0, "text_1": "#Bids.........Blinkers on.....ignore the noise and when theres thats small element of doubt creeping in absorb on repeat JDs words on the podcast. 
The story will go down in aim folklore.", "token_idx_1": 34, "text_start_1": 177, "text_end_1": 185, "date_1": "2019-08", "text_2": "if you see me in the stop and shop parking lot eating cool ranch doritos and listening to folklore...mind your business", "token_idx_2": 18, "text_start_2": 90, "text_end_2": 98, "date_2": "2020-08", "text_1_tokenized": ["#Bids", "...", "Blinkers", "on", "...", "ignore", "the", "noise", "and", "when", "theres", "thats", "small", "element", "of", "doubt", "creeping", "in", "absorb", "on", "repeat", "JDs", "words", "on", "the", "podcast", ".", "The", "story", "will", "go", "down", "in", "aim", "folklore", "."], "text_2_tokenized": ["if", "you", "see", "me", "in", "the", "stop", "and", "shop", "parking", "lot", "eating", "cool", "ranch", "doritos", "and", "listening", "to", "folklore", "...", "mind", "your", "business"]} -{"id": "0162-folklore", "word": "folklore", "label_binary": 0, "text_1": "For those of you who think African Americans have no \"real\" culture, please see the works of Toni Morrison--it's all in there. The folklore, the music, the religion, the history, the fashion.", "token_idx_1": 27, "text_start_1": 131, "text_end_1": 139, "date_1": "2019-08", "text_2": "How many Grammys do u think Taylor will get for folklore", "token_idx_2": 10, "text_start_2": 48, "text_end_2": 56, "date_2": "2020-08", "text_1_tokenized": ["For", "those", "of", "you", "who", "think", "African", "Americans", "have", "no", "\"", "real", "\"", "culture", ",", "please", "see", "the", "works", "of", "Toni", "Morrison--it's", "all", "in", "there", ".", "The", "folklore", ",", "the", "music", ",", "the", "religion", ",", "the", "history", ",", "the", "fashion", "."], "text_2_tokenized": ["How", "many", "Grammys", "do", "u", "think", "Taylor", "will", "get", "for", "folklore"]} -{"id": "0163-folklore", "word": "folklore", "label_binary": 0, "text_1": "Any Twitter friends here that are versed in Filipino mythology and folklore? Would love to talk about this for this thing I'm doing. Haha. 
Let me know if you're up for a chat!", "token_idx_1": 11, "text_start_1": 67, "text_end_1": 75, "date_1": "2019-08", "text_2": "Betty is the best song on folklore", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 34, "date_2": "2020-08", "text_1_tokenized": ["Any", "Twitter", "friends", "here", "that", "are", "versed", "in", "Filipino", "mythology", "and", "folklore", "?", "Would", "love", "to", "talk", "about", "this", "for", "this", "thing", "I'm", "doing", ".", "Haha", ".", "Let", "me", "know", "if", "you're", "up", "for", "a", "chat", "!"], "text_2_tokenized": ["Betty", "is", "the", "best", "song", "on", "folklore"]} -{"id": "0164-folklore", "word": "folklore", "label_binary": 0, "text_1": "The referendum was so long ago it's now become a part of political folklore, it's purpose lost in the mists of time.", "token_idx_1": 13, "text_start_1": 67, "text_end_1": 75, "date_1": "2019-08", "text_2": "lets makeout while we play folklore in the bg", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 35, "date_2": "2020-08", "text_1_tokenized": ["The", "referendum", "was", "so", "long", "ago", "it's", "now", "become", "a", "part", "of", "political", "folklore", ",", "it's", "purpose", "lost", "in", "the", "mists", "of", "time", "."], "text_2_tokenized": ["lets", "makeout", "while", "we", "play", "folklore", "in", "the", "bg"]} -{"id": "0165-folklore", "word": "folklore", "label_binary": 0, "text_1": "If you liked Midsommar and want more fucked up Swedish folklore go watch The Ritual on Netflix that shit is so scary", "token_idx_1": 10, "text_start_1": 55, "text_end_1": 63, "date_1": "2019-08", "text_2": "folklore deserves to be at the top, and i'm so proud that taylor's finally getting the recognition she deserves", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 8, "date_2": "2020-08", "text_1_tokenized": ["If", "you", "liked", "Midsommar", "and", "want", "more", "fucked", "up", "Swedish", "folklore", "go", "watch", "The", "Ritual", "on", "Netflix", "that", "shit", "is", "so", "scary"], "text_2_tokenized": ["folklore", "deserves", "to", "be", "at", "the", "top", ",", "and", "i'm", "so", "proud", "that", "taylor's", "finally", "getting", "the", "recognition", "she", "deserves"]} -{"id": "0166-folklore", "word": "folklore", "label_binary": 0, "text_1": "Dude being a creature from folklore would be lit, imagine if your only job was to, like, steal spoons", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 35, "date_1": "2019-08", "text_2": "it's september \ud83d\ude2d it's the genuine folklore season", "token_idx_2": 6, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-08", "text_1_tokenized": ["Dude", "being", "a", "creature", "from", "folklore", "would", "be", "lit", ",", "imagine", "if", "your", "only", "job", "was", "to", ",", "like", ",", "steal", "spoons"], "text_2_tokenized": ["it's", "september", "\ud83d\ude2d", "it's", "the", "genuine", "folklore", "season"]} -{"id": "0167-folklore", "word": "folklore", "label_binary": 0, "text_1": "dnd campaign inspired by asian folklore and culture with elments from the avatar universe? 
yes please", "token_idx_1": 5, "text_start_1": 31, "text_end_1": 39, "date_1": "2019-08", "text_2": "Right time to ascoltare folklore", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 32, "date_2": "2020-08", "text_1_tokenized": ["dnd", "campaign", "inspired", "by", "asian", "folklore", "and", "culture", "with", "elments", "from", "the", "avatar", "universe", "?", "yes", "please"], "text_2_tokenized": ["Right", "time", "to", "ascoltare", "folklore"]} -{"id": "0168-folklore", "word": "folklore", "label_binary": 0, "text_1": "So I love the paranormal as well as the unexplained, and am totally into listening to narrations pulled from subreddits like letsnotmeet and nosleep. Stuff like : Dyatlov Pass, Crosswade interlopers, Yuba county 5, Henry McCabe, Elisa Lam, cryptids and folklore creatures, etc.", "token_idx_1": 47, "text_start_1": 253, "text_end_1": 261, "date_1": "2019-08", "text_2": "Finally just listened to folklore and you can catch me in 2021, no sooner.", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-08", "text_1_tokenized": ["So", "I", "love", "the", "paranormal", "as", "well", "as", "the", "unexplained", ",", "and", "am", "totally", "into", "listening", "to", "narrations", "pulled", "from", "subreddits", "like", "letsnotmeet", "and", "nosleep", ".", "Stuff", "like", ":", "Dyatlov", "Pass", ",", "Crosswade", "interlopers", ",", "Yuba", "county", "5", ",", "Henry", "McCabe", ",", "Elisa", "Lam", ",", "cryptids", "and", "folklore", "creatures", ",", "etc", "."], "text_2_tokenized": ["Finally", "just", "listened", "to", "folklore", "and", "you", "can", "catch", "me", "in", "2021", ",", "no", "sooner", "."]} -{"id": "0169-folklore", "word": "folklore", "label_binary": 0, "text_1": "Griffin Mcelroy:\u201dso many things in life are a prison of your own creation. 
If you look around you'll really start to see the bars showing\u201d Irish folklore", "token_idx_1": 31, "text_start_1": 145, "text_end_1": 153, "date_1": "2019-08", "text_2": "just sitting here thinking about the folklore album", "token_idx_2": 6, "text_start_2": 37, "text_end_2": 45, "date_2": "2020-08", "text_1_tokenized": ["Griffin", "Mcelroy", ":", "\u201d", "so", "many", "things", "in", "life", "are", "a", "prison", "of", "your", "own", "creation", ".", "If", "you", "look", "around", "you'll", "really", "start", "to", "see", "the", "bars", "showing", "\u201d", "Irish", "folklore"], "text_2_tokenized": ["just", "sitting", "here", "thinking", "about", "the", "folklore", "album"]} -{"id": "0170-folklore", "word": "folklore", "label_binary": 0, "text_1": "Eid AlAdha which is linked to the sacrifice done by Prophet Ibrahim (as) teaches us that religion shouldn't be viewd as a folklore or to be limited to the known rituals.", "token_idx_1": 24, "text_start_1": 122, "text_end_1": 130, "date_1": "2019-08", "text_2": "i missed my opportunity to buy a signed folklore cd and now my day is ruined", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 48, "date_2": "2020-08", "text_1_tokenized": ["Eid", "AlAdha", "which", "is", "linked", "to", "the", "sacrifice", "done", "by", "Prophet", "Ibrahim", "(", "as", ")", "teaches", "us", "that", "religion", "shouldn't", "be", "viewd", "as", "a", "folklore", "or", "to", "be", "limited", "to", "the", "known", "rituals", "."], "text_2_tokenized": ["i", "missed", "my", "opportunity", "to", "buy", "a", "signed", "folklore", "cd", "and", "now", "my", "day", "is", "ruined"]} -{"id": "0171-folklore", "word": "folklore", "label_binary": 0, "text_1": "In the early 19th century, a series of mysterious deaths in Ireland led townspeople to share a folklore of a silent hunter with gigantic trunks that lurked around the swamps in the morning. They called it the 'Trunked Dragon of the Western Jungles'.", "token_idx_1": 18, "text_start_1": 95, "text_end_1": 103, "date_1": "2019-08", "text_2": "im so happy its SEPTEMBER!!!!! this summer was LAME!!!!!! except taylor releasing folklore. that was definitely not lame", "token_idx_2": 18, "text_start_2": 82, "text_end_2": 90, "date_2": "2020-08", "text_1_tokenized": ["In", "the", "early", "19th", "century", ",", "a", "series", "of", "mysterious", "deaths", "in", "Ireland", "led", "townspeople", "to", "share", "a", "folklore", "of", "a", "silent", "hunter", "with", "gigantic", "trunks", "that", "lurked", "around", "the", "swamps", "in", "the", "morning", ".", "They", "called", "it", "the", "'", "Trunked", "Dragon", "of", "the", "Western", "Jungles", "'", "."], "text_2_tokenized": ["im", "so", "happy", "its", "SEPTEMBER", "!", "!", "!", "this", "summer", "was", "LAME", "!", "!", "!", "except", "taylor", "releasing", "folklore", ".", "that", "was", "definitely", "not", "lame"]} -{"id": "0172-folklore", "word": "folklore", "label_binary": 0, "text_1": "fr y'all should watch victor and valentino. it's about these brothers who live in a spooky Mexican town and they go on adventures together. 
it talks a lot about mesoamerican folklore and is honestly straight up refreshing:D", "token_idx_1": 32, "text_start_1": 174, "text_end_1": 182, "date_1": "2019-08", "text_2": "Alanis Morissette's \"Ablaze\" is better than every single folklore song excepting \"exile\"", "token_idx_2": 10, "text_start_2": 57, "text_end_2": 65, "date_2": "2020-08", "text_1_tokenized": ["fr", "y'all", "should", "watch", "victor", "and", "valentino", ".", "it's", "about", "these", "brothers", "who", "live", "in", "a", "spooky", "Mexican", "town", "and", "they", "go", "on", "adventures", "together", ".", "it", "talks", "a", "lot", "about", "mesoamerican", "folklore", "and", "is", "honestly", "straight", "up", "refreshing", ":D"], "text_2_tokenized": ["Alanis", "Morissette's", "\"", "Ablaze", "\"", "is", "better", "than", "every", "single", "folklore", "song", "excepting", "\"", "exile", "\""]} -{"id": "0173-folklore", "word": "folklore", "label_binary": 0, "text_1": "This is your occasional reminder that, given both the names (latin) and folklore, succubus and incubus are not the gendered terms for two different demons, but the two shapes of one exicitly male shapechanging demon (else it'd be \"succubae\").", "token_idx_1": 15, "text_start_1": 72, "text_end_1": 80, "date_1": "2019-08", "text_2": "illicit affairs might be my stan song from folklore omg", "token_idx_2": 8, "text_start_2": 43, "text_end_2": 51, "date_2": "2020-08", "text_1_tokenized": ["This", "is", "your", "occasional", "reminder", "that", ",", "given", "both", "the", "names", "(", "latin", ")", "and", "folklore", ",", "succubus", "and", "incubus", "are", "not", "the", "gendered", "terms", "for", "two", "different", "demons", ",", "but", "the", "two", "shapes", "of", "one", "exicitly", "male", "shapechanging", "demon", "(", "else", "it'd", "be", "\"", "succubae", "\"", ")", "."], "text_2_tokenized": ["illicit", "affairs", "might", "be", "my", "stan", "song", "from", "folklore", "omg"]} -{"id": "0174-folklore", "word": "folklore", "label_binary": 0, "text_1": "#growingupperuvian is your parents making dance Peruvian folklore even tho you were born in the US because it's culture.", "token_idx_1": 7, "text_start_1": 57, "text_end_1": 65, "date_1": "2019-08", "text_2": "did I spend even more money on folklore merch? of course. will i spend anymore? NO! am i lying? 
yes.", "token_idx_2": 7, "text_start_2": 31, "text_end_2": 39, "date_2": "2020-08", "text_1_tokenized": ["#growingupperuvian", "is", "your", "parents", "making", "dance", "Peruvian", "folklore", "even", "tho", "you", "were", "born", "in", "the", "US", "because", "it's", "culture", "."], "text_2_tokenized": ["did", "I", "spend", "even", "more", "money", "on", "folklore", "merch", "?", "of", "course", ".", "will", "i", "spend", "anymore", "?", "NO", "!", "am", "i", "lying", "?", "yes", "."]} -{"id": "0175-folklore", "word": "folklore", "label_binary": 0, "text_1": "a stage musical about traditional faerie folklore ughhhhh it would be the perfect medium I want it so bad #musicals #faeries", "token_idx_1": 6, "text_start_1": 41, "text_end_1": 49, "date_1": "2019-08", "text_2": "I bought three folklore albums off Taylor's website and I got one the CD's like a good 4 days ago and I just realized I haven't gotten the other two yet so I'm getting a lil sus.....", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 23, "date_2": "2020-08", "text_1_tokenized": ["a", "stage", "musical", "about", "traditional", "faerie", "folklore", "ughhhhh", "it", "would", "be", "the", "perfect", "medium", "I", "want", "it", "so", "bad", "#musicals", "#faeries"], "text_2_tokenized": ["I", "bought", "three", "folklore", "albums", "off", "Taylor's", "website", "and", "I", "got", "one", "the", "CD's", "like", "a", "good", "4", "days", "ago", "and", "I", "just", "realized", "I", "haven't", "gotten", "the", "other", "two", "yet", "so", "I'm", "getting", "a", "lil", "sus", "..."]} -{"id": "0176-folklore", "word": "folklore", "label_binary": 0, "text_1": "Let me clear this misconceptions; Most folks think that hell will a party folklore, where there will be dancing and endless drinking. My friend you will never SEE a single person in there. That place is too lonely. That what Jesus said. Believe the gospel now.", "token_idx_1": 14, "text_start_1": 74, "text_end_1": 82, "date_1": "2019-08", "text_2": "imagine if folklore had the same promotion as these new albums \ud83d\ude2d hello maam promote please @taylorswift13", "token_idx_2": 2, "text_start_2": 11, "text_end_2": 19, "date_2": "2020-08", "text_1_tokenized": ["Let", "me", "clear", "this", "misconceptions", ";", "Most", "folks", "think", "that", "hell", "will", "a", "party", "folklore", ",", "where", "there", "will", "be", "dancing", "and", "endless", "drinking", ".", "My", "friend", "you", "will", "never", "SEE", "a", "single", "person", "in", "there", ".", "That", "place", "is", "too", "lonely", ".", "That", "what", "Jesus", "said", ".", "Believe", "the", "gospel", "now", "."], "text_2_tokenized": ["imagine", "if", "folklore", "had", "the", "same", "promotion", "as", "these", "new", "albums", "\ud83d\ude2d", "hello", "maam", "promote", "please", "@taylorswift13"]} -{"id": "0177-folklore", "word": "folklore", "label_binary": 0, "text_1": "Fatalistic Hope is something I never thought would crop up in a paper about folklore and comics. #PhD", "token_idx_1": 14, "text_start_1": 76, "text_end_1": 84, "date_1": "2019-08", "text_2": "I've listened to @taylorswift13 folklore album maybe 20x now & I can say ... 
she's the greatest thing that ever happened to America.", "token_idx_2": 4, "text_start_2": 32, "text_end_2": 40, "date_2": "2020-08", "text_1_tokenized": ["Fatalistic", "Hope", "is", "something", "I", "never", "thought", "would", "crop", "up", "in", "a", "paper", "about", "folklore", "and", "comics", ".", "#PhD"], "text_2_tokenized": ["I've", "listened", "to", "@taylorswift13", "folklore", "album", "maybe", "20x", "now", "&", "I", "can", "say", "...", "she's", "the", "greatest", "thing", "that", "ever", "happened", "to", "America", "."]} -{"id": "0178-folklore", "word": "folklore", "label_binary": 0, "text_1": "The truth is,... Italians are morbid as f*** and I live for it. I have so many examples. A personal one that will go unnamed, literally Lady Gaga, Ariana Grande's whole family apparently, hefty amount of folklore,", "token_idx_1": 46, "text_start_1": 204, "text_end_1": 212, "date_1": "2019-08", "text_2": "not me checking to see if there are any copies of folklore left at the other record store that got it near me \ud83e\udd21\ud83e\udd21\ud83e\udd21 clown nose: on", "token_idx_2": 11, "text_start_2": 50, "text_end_2": 58, "date_2": "2020-08", "text_1_tokenized": ["The", "truth", "is", ",", "...", "Italians", "are", "morbid", "as", "f", "*", "*", "*", "and", "I", "live", "for", "it", ".", "I", "have", "so", "many", "examples", ".", "A", "personal", "one", "that", "will", "go", "unnamed", ",", "literally", "Lady", "Gaga", ",", "Ariana", "Grande's", "whole", "family", "apparently", ",", "hefty", "amount", "of", "folklore", ","], "text_2_tokenized": ["not", "me", "checking", "to", "see", "if", "there", "are", "any", "copies", "of", "folklore", "left", "at", "the", "other", "record", "store", "that", "got", "it", "near", "me", "\ud83e\udd21", "\ud83e\udd21", "\ud83e\udd21", "clown", "nose", ":", "on"]} -{"id": "0179-folklore", "word": "folklore", "label_binary": 0, "text_1": "the world requires success, but instead we allow ideals, laws and folklore to kill our hopes, dreams and aspiration, even as simple as someone else's voice of opinion is enough to shatter confidence, that is how delicate a spieces we really are.", "token_idx_1": 13, "text_start_1": 66, "text_end_1": 74, "date_1": "2019-08", "text_2": "I so desperately want to go aisle through aisle in Target with folklore on my headphones. 
I would probably cry.", "token_idx_2": 12, "text_start_2": 63, "text_end_2": 71, "date_2": "2020-08", "text_1_tokenized": ["the", "world", "requires", "success", ",", "but", "instead", "we", "allow", "ideals", ",", "laws", "and", "folklore", "to", "kill", "our", "hopes", ",", "dreams", "and", "aspiration", ",", "even", "as", "simple", "as", "someone", "else's", "voice", "of", "opinion", "is", "enough", "to", "shatter", "confidence", ",", "that", "is", "how", "delicate", "a", "spieces", "we", "really", "are", "."], "text_2_tokenized": ["I", "so", "desperately", "want", "to", "go", "aisle", "through", "aisle", "in", "Target", "with", "folklore", "on", "my", "headphones", ".", "I", "would", "probably", "cry", "."]} -{"id": "0180-folklore", "word": "folklore", "label_binary": 0, "text_1": "We advise music is folklore.", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 27, "date_1": "2019-08", "text_2": "the amount of times I think to myself imma do sth in 3 mins like watch a movie or w/e & then I forget & put on folklore & then have 2 listen to the entire thing before I can do the thing I was planning to but then forget again & just end up listening 2 folklore on repeat all day", "token_idx_2": 29, "text_start_2": 119, "text_end_2": 127, "date_2": "2020-08", "text_1_tokenized": ["We", "advise", "music", "is", "folklore", "."], "text_2_tokenized": ["the", "amount", "of", "times", "I", "think", "to", "myself", "imma", "do", "sth", "in", "3", "mins", "like", "watch", "a", "movie", "or", "w", "/", "e", "&", "then", "I", "forget", "&", "put", "on", "folklore", "&", "then", "have", "2", "listen", "to", "the", "entire", "thing", "before", "I", "can", "do", "the", "thing", "I", "was", "planning", "to", "but", "then", "forget", "again", "&", "just", "end", "up", "listening", "2", "folklore", "on", "repeat", "all", "day"]} -{"id": "0181-folklore", "word": "folklore", "label_binary": 1, "text_1": "Me: deep occult exploration into folklore, metaphysics, and Witchery Also me: Glutengala! Super Badger Bourbon Moon! Yeap, living the Gemini life.", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 41, "date_1": "2019-08", "text_2": "In Asian folklore and tales, most commonly in Japan, red is the color of the invisible thread that connects two people that are meant and destined to be together. However, in invisible string, Taylor sings, \"one single thread of gold tied me to you.\" [1/2]", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-08", "text_1_tokenized": ["Me", ":", "deep", "occult", "exploration", "into", "folklore", ",", "metaphysics", ",", "and", "Witchery", "Also", "me", ":", "Glutengala", "!", "Super", "Badger", "Bourbon", "Moon", "!", "Yeap", ",", "living", "the", "Gemini", "life", "."], "text_2_tokenized": ["In", "Asian", "folklore", "and", "tales", ",", "most", "commonly", "in", "Japan", ",", "red", "is", "the", "color", "of", "the", "invisible", "thread", "that", "connects", "two", "people", "that", "are", "meant", "and", "destined", "to", "be", "together", ".", "However", ",", "in", "invisible", "string", ",", "Taylor", "sings", ",", "\"", "one", "single", "thread", "of", "gold", "tied", "me", "to", "you", ".", "\"", "[", "1/2", "]"]} -{"id": "0379-parasol", "word": "parasol", "label_binary": 0, "text_1": "hot take: if bandana dee gets in smash he should use his parasol too, not just the spear", "token_idx_1": 13, "text_start_1": 57, "text_end_1": 64, "date_1": "2019-09", "text_2": "Trading mood changing necklace! 
(looking for diamonds and parasol)", "token_idx_2": 10, "text_start_2": 58, "text_end_2": 65, "date_2": "2020-09", "text_1_tokenized": ["hot", "take", ":", "if", "bandana", "dee", "gets", "in", "smash", "he", "should", "use", "his", "parasol", "too", ",", "not", "just", "the", "spear"], "text_2_tokenized": ["Trading", "mood", "changing", "necklace", "!", "(", "looking", "for", "diamonds", "and", "parasol", ")"]} -{"id": "0380-parasol", "word": "parasol", "label_binary": 0, "text_1": "I got some fairy lights to add to my DB parasol, it's going to look magnificent \ud83d\udc9c", "token_idx_1": 10, "text_start_1": 40, "text_end_1": 47, "date_1": "2019-09", "text_2": "tbh rtds are the only way i can get a halo or parasol now, but im not cool enough to get a halo rtd \ud83d\udc41\ud83d\udc44\ud83d\udc41", "token_idx_2": 12, "text_start_2": 46, "text_end_2": 53, "date_2": "2020-09", "text_1_tokenized": ["I", "got", "some", "fairy", "lights", "to", "add", "to", "my", "DB", "parasol", ",", "it's", "going", "to", "look", "magnificent", "\ud83d\udc9c"], "text_2_tokenized": ["tbh", "rtds", "are", "the", "only", "way", "i", "can", "get", "a", "halo", "or", "parasol", "now", ",", "but", "im", "not", "cool", "enough", "to", "get", "a", "halo", "rtd", "\ud83d\udc41", "\ud83d\udc44", "\ud83d\udc41"]} -{"id": "0381-parasol", "word": "parasol", "label_binary": 0, "text_1": "Ong seongwu umbrella is so fucking big For a second i thought he stole a parasol from an outdoor restaurant or a beach", "token_idx_1": 15, "text_start_1": 73, "text_end_1": 80, "date_1": "2019-09", "text_2": "Trading my old Halloween halo! Mainly looking for a lower tier and parasol <3 IA: Old winter halo, parasol, SE boots (just an IA ;-;) #royalehightrades #royalehightradings #RoyaleHighHalo", "token_idx_2": 13, "text_start_2": 67, "text_end_2": 74, "date_2": "2020-09", "text_1_tokenized": ["Ong", "seongwu", "umbrella", "is", "so", "fucking", "big", "For", "a", "second", "i", "thought", "he", "stole", "a", "parasol", "from", "an", "outdoor", "restaurant", "or", "a", "beach"], "text_2_tokenized": ["Trading", "my", "old", "Halloween", "halo", "!", "Mainly", "looking", "for", "a", "lower", "tier", "and", "parasol", "<3", "IA", ":", "Old", "winter", "halo", ",", "parasol", ",", "SE", "boots", "(", "just", "an", "IA", ";", "-", ";)", "#royalehightrades", "#royalehightradings", "#RoyaleHighHalo"]} -{"id": "0382-parasol", "word": "parasol", "label_binary": 0, "text_1": "I just went to check the mail and the sun is too bright I need a parasol this sucks why isn't it a month later right now", "token_idx_1": 16, "text_start_1": 65, "text_end_1": 72, "date_1": "2019-09", "text_2": "Trading 2020 parasol for ltbs #royalehightrades", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["I", "just", "went", "to", "check", "the", "mail", "and", "the", "sun", "is", "too", "bright", "I", "need", "a", "parasol", "this", "sucks", "why", "isn't", "it", "a", "month", "later", "right", "now"], "text_2_tokenized": ["Trading", "2020", "parasol", "for", "ltbs", "#royalehightrades"]} -{"id": "0383-parasol", "word": "parasol", "label_binary": 0, "text_1": "WHEW hot walk hot sun hot hot thank ya uv protec parasol I get to be cute AND not burn to death", "token_idx_1": 11, "text_start_1": 49, "text_end_1": 56, "date_1": "2019-09", "text_2": "I'm giving my nfr frost dragon and a ride giffafe and a parasol and 100k in RH for Playful Vampire face or Purple or Pink galaxy gaze-NGF", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 63, "date_2": "2020-09", "text_1_tokenized": ["WHEW", "hot", "walk", "hot", "sun", "hot", "hot", "thank", "ya", "uv", "protec", "parasol", "I", "get", "to", "be", "cute", "AND", "not", "burn", "to", "death"], "text_2_tokenized": ["I'm", "giving", "my", "nfr", "frost", "dragon", "and", "a", "ride", "giffafe", "and", "a", "parasol", "and", "100k", "in", "RH", "for", "Playful", "Vampire", "face", "or", "Purple", "or", "Pink", "galaxy", "gaze-NGF"]} -{"id": "0384-parasol", "word": "parasol", "label_binary": 0, "text_1": "Smacked my head in the corner house last night + walked into a parasol at the beach house last week. Nailing it", "token_idx_1": 13, "text_start_1": 63, "text_end_1": 70, "date_1": "2019-09", "text_2": "WINNER: @HannahSonido1 (of parasol giveaway)", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 34, "date_2": "2020-09", "text_1_tokenized": ["Smacked", "my", "head", "in", "the", "corner", "house", "last", "night", "+", "walked", "into", "a", "parasol", "at", "the", "beach", "house", "last", "week", ".", "Nailing", "it"], "text_2_tokenized": ["WINNER", ":", "@HannahSonido1", "(", "of", "parasol", "giveaway", ")"]} -{"id": "0385-parasol", "word": "parasol", "label_binary": 0, "text_1": "Laundry has been hung. Sheets are drying on the line. \"Alright, you lot,\" Evelyn clapped, \"off you pop. Time to get your armor on.\" Walking shoes. Airy clothes. The baby's stroller. A parasol for her. It's time to march with the People. Hail Freya! #Fridays4Future", "token_idx_1": 46, "text_start_1": 184, "text_end_1": 191, "date_1": "2019-09", "text_2": "Update: Bidding the OG parasol* Highest bid so far: 300k anyone bidding higher? :)", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["Laundry", "has", "been", "hung", ".", "Sheets", "are", "drying", "on", "the", "line", ".", "\"", "Alright", ",", "you", "lot", ",", "\"", "Evelyn", "clapped", ",", "\"", "off", "you", "pop", ".", "Time", "to", "get", "your", "armor", "on", ".", "\"", "Walking", "shoes", ".", "Airy", "clothes", ".", "The", "baby's", "stroller", ".", "A", "parasol", "for", "her", ".", "It's", "time", "to", "march", "with", "the", "People", ".", "Hail", "Freya", "!", "#Fridays4Future"], "text_2_tokenized": ["Update", ":", "Bidding", "the", "OG", "parasol", "*", "Highest", "bid", "so", "far", ":", "300k", "anyone", "bidding", "higher", "?", ":)"]} -{"id": "0386-parasol", "word": "parasol", "label_binary": 0, "text_1": "Who wants to trade me the parasol for something?", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-09", "text_2": "Starting the final year of my LPC under the parasol thanks to the new online virtual streaming \u2600\ufe0f\ud83c\udfd6 back to the classroom tomorrow though!", "token_idx_2": 9, "text_start_2": 44, "text_end_2": 51, "date_2": "2020-09", "text_1_tokenized": ["Who", "wants", "to", "trade", "me", "the", "parasol", "for", "something", "?"], "text_2_tokenized": ["Starting", "the", "final", "year", "of", "my", "LPC", "under", "the", "parasol", "thanks", "to", "the", "new", "online", "virtual", "streaming", "\u2600", "\ufe0f", "\ud83c\udfd6", "back", "to", "the", "classroom", "tomorrow", "though", "!"]} -{"id": "0387-parasol", "word": "parasol", "label_binary": 0, "text_1": "Just finished templating Peach's crown and parasol...I'm not really impressed with the parasol but the crown is stellar. 
I'll let y'all be the judge tonight when I post a wip", "token_idx_1": 6, "text_start_1": 43, "text_end_1": 50, "date_1": "2019-09", "text_2": "Crapped it there. Just seen two guys coming outta Weatherspoons with what I though was a body bag. On 2nd glance it tuned out to be a white parasol for the alfresco drinking. \ufffc\ufffc Stay safe out there.", "token_idx_2": 30, "text_start_2": 140, "text_end_2": 147, "date_2": "2020-09", "text_1_tokenized": ["Just", "finished", "templating", "Peach's", "crown", "and", "parasol", "...", "I'm", "not", "really", "impressed", "with", "the", "parasol", "but", "the", "crown", "is", "stellar", ".", "I'll", "let", "y'all", "be", "the", "judge", "tonight", "when", "I", "post", "a", "wip"], "text_2_tokenized": ["Crapped", "it", "there", ".", "Just", "seen", "two", "guys", "coming", "outta", "Weatherspoons", "with", "what", "I", "though", "was", "a", "body", "bag", ".", "On", "2nd", "glance", "it", "tuned", "out", "to", "be", "a", "white", "parasol", "for", "the", "alfresco", "drinking", ".", "\ufffc", "\ufffc", "Stay", "safe", "out", "there", "."]} -{"id": "0388-parasol", "word": "parasol", "label_binary": 0, "text_1": "Hey guys, does anybody have a parasol on rh they don't use? If you do I would love to have it I am willing to pay whatever you want for it as long as it's reasonable for the parasol!", "token_idx_1": 7, "text_start_1": 30, "text_end_1": 37, "date_1": "2019-09", "text_2": "There's a storm going on and my parasol that I had BOLTED DOWN just fucking launched itself onto the roof", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["Hey", "guys", ",", "does", "anybody", "have", "a", "parasol", "on", "rh", "they", "don't", "use", "?", "If", "you", "do", "I", "would", "love", "to", "have", "it", "I", "am", "willing", "to", "pay", "whatever", "you", "want", "for", "it", "as", "long", "as", "it's", "reasonable", "for", "the", "parasol", "!"], "text_2_tokenized": ["There's", "a", "storm", "going", "on", "and", "my", "parasol", "that", "I", "had", "BOLTED", "DOWN", "just", "fucking", "launched", "itself", "onto", "the", "roof"]} -{"id": "0389-parasol", "word": "parasol", "label_binary": 1, "text_1": "This girl in Roblox wanted to be my friend bc of my halos and parasol. How ik? I asked.. Me: \u201care you trying to be my friend?\u201d Her: \u201cyes\u201d Me: \u201cbc I'm rich?\u201d Her: \u201cno. Bc you have pretty halos\u201d Me: \u201cSO YOU WANT ME TO BE YOUR FRIEND BC OF MY HALOS?\u201d Her: \u201cyes. They're beautiful\u201d", "token_idx_1": 14, "text_start_1": 62, "text_end_1": 69, "date_1": "2019-09", "text_2": "Trading cmb ring! 
IA: parasol and se set or sf set (JUST AN IA-)", "token_idx_2": 6, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["This", "girl", "in", "Roblox", "wanted", "to", "be", "my", "friend", "bc", "of", "my", "halos", "and", "parasol", ".", "How", "ik", "?", "I", "asked", "..", "Me", ":", "\u201c", "are", "you", "trying", "to", "be", "my", "friend", "?", "\u201d", "Her", ":", "\u201c", "yes", "\u201d", "Me", ":", "\u201c", "bc", "I'm", "rich", "?", "\u201d", "Her", ":", "\u201c", "no", ".", "Bc", "you", "have", "pretty", "halos", "\u201d", "Me", ":", "\u201c", "SO", "YOU", "WANT", "ME", "TO", "BE", "YOUR", "FRIEND", "BC", "OF", "MY", "HALOS", "?", "\u201d", "Her", ":", "\u201c", "yes", ".", "They're", "beautiful", "\u201d"], "text_2_tokenized": ["Trading", "cmb", "ring", "!", "IA", ":", "parasol", "and", "se", "set", "or", "sf", "set", "(", "JUST", "AN", "IA", "-", ")"]} -{"id": "0390-parasol", "word": "parasol", "label_binary": 0, "text_1": "shoutouts to that one todoroki(?) cosplayer who helped me get my wig untangled from my parasol", "token_idx_1": 18, "text_start_1": 87, "text_end_1": 94, "date_1": "2019-09", "text_2": "Trading parasol, dv corset, thigh high boots, cb sleeves and sf skirt!~ All offers are welcome :3 #royalehigh #royalehighdiamonds #royalehightradings", "token_idx_2": 1, "text_start_2": 8, "text_end_2": 15, "date_2": "2020-09", "text_1_tokenized": ["shoutouts", "to", "that", "one", "todoroki", "(", "?", ")", "cosplayer", "who", "helped", "me", "get", "my", "wig", "untangled", "from", "my", "parasol"], "text_2_tokenized": ["Trading", "parasol", ",", "dv", "corset", ",", "thigh", "high", "boots", ",", "cb", "sleeves", "and", "sf", "skirt", "!", "~", "All", "offers", "are", "welcome", ":", "3", "#royalehigh", "#royalehighdiamonds", "#royalehightradings"]} -{"id": "0391-parasol", "word": "parasol", "label_binary": 0, "text_1": "I just watched a man unwrap a cheeseburger by spinning it on a parasol and then the pickles flew out and then he hacky sacked it and threw it into the crowd I'M", "token_idx_1": 13, "text_start_1": 63, "text_end_1": 70, "date_1": "2019-09", "text_2": "Trading this for light halo: Corrupt, 200k, 2020 parasol, my dolly, ghost costume, lots of other accessories and..thats it! retweets are appreciated <33 #royalehigh #royalehightrading #lighthalo", "token_idx_2": 11, "text_start_2": 49, "text_end_2": 56, "date_2": "2020-09", "text_1_tokenized": ["I", "just", "watched", "a", "man", "unwrap", "a", "cheeseburger", "by", "spinning", "it", "on", "a", "parasol", "and", "then", "the", "pickles", "flew", "out", "and", "then", "he", "hacky", "sacked", "it", "and", "threw", "it", "into", "the", "crowd", "I'M"], "text_2_tokenized": ["Trading", "this", "for", "light", "halo", ":", "Corrupt", ",", "200k", ",", "2020", "parasol", ",", "my", "dolly", ",", "ghost", "costume", ",", "lots", "of", "other", "accessories", "and", "..", "thats", "it", "!", "retweets", "are", "appreciated", "<3", "3", "#royalehigh", "#royalehightrading", "#lighthalo"]} -{"id": "0392-parasol", "word": "parasol", "label_binary": 0, "text_1": "Freshly-picked parasol and field mushrooms for tea, fried in butter with garlic salt, pepper and a few bits of gorgonzola. Dark inky sexy loveliness on toast.", "token_idx_1": 1, "text_start_1": 15, "text_end_1": 22, "date_1": "2019-09", "text_2": "Anyone on ac have an extra petal parasol? 
Earliest pick up is Thursday #acnh", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 40, "date_2": "2020-09", "text_1_tokenized": ["Freshly-picked", "parasol", "and", "field", "mushrooms", "for", "tea", ",", "fried", "in", "butter", "with", "garlic", "salt", ",", "pepper", "and", "a", "few", "bits", "of", "gorgonzola", ".", "Dark", "inky", "sexy", "loveliness", "on", "toast", "."], "text_2_tokenized": ["Anyone", "on", "ac", "have", "an", "extra", "petal", "parasol", "?", "Earliest", "pick", "up", "is", "Thursday", "#acnh"]} -{"id": "0393-parasol", "word": "parasol", "label_binary": 0, "text_1": "PLEASE bring back the parasol I cannot keep applying sunscreen like this", "token_idx_1": 4, "text_start_1": 22, "text_end_1": 29, "date_1": "2019-09", "text_2": "Can someone tell me how to get rich on royale high? Ive tried farming for half an hour and only got 1,295, and I don't have enough robux for the gamepasses. I've been playing for about 2 years now, and have never got a halo or even a parasol. The most diamonds ive had is 270k.", "token_idx_2": 52, "text_start_2": 234, "text_end_2": 241, "date_2": "2020-09", "text_1_tokenized": ["PLEASE", "bring", "back", "the", "parasol", "I", "cannot", "keep", "applying", "sunscreen", "like", "this"], "text_2_tokenized": ["Can", "someone", "tell", "me", "how", "to", "get", "rich", "on", "royale", "high", "?", "Ive", "tried", "farming", "for", "half", "an", "hour", "and", "only", "got", "1,295", ",", "and", "I", "don't", "have", "enough", "robux", "for", "the", "gamepasses", ".", "I've", "been", "playing", "for", "about", "2", "years", "now", ",", "and", "have", "never", "got", "a", "halo", "or", "even", "a", "parasol", ".", "The", "most", "diamonds", "ive", "had", "is", "270k", "."]} -{"id": "0394-parasol", "word": "parasol", "label_binary": 0, "text_1": "Seeing people at the park with a parasol really cheers me up. I do not know exactly WHY, but it does\ud83e\udd14", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-09", "text_2": "Trading 59,000 diamonds, Kitty crossbody bat mo heels, se skirt..and yea!....- for parasol!! #royalhightrading #Royalhightrades", "token_idx_2": 19, "text_start_2": 83, "text_end_2": 90, "date_2": "2020-09", "text_1_tokenized": ["Seeing", "people", "at", "the", "park", "with", "a", "parasol", "really", "cheers", "me", "up", ".", "I", "do", "not", "know", "exactly", "WHY", ",", "but", "it", "does", "\ud83e\udd14"], "text_2_tokenized": ["Trading", "59,000", "diamonds", ",", "Kitty", "crossbody", "bat", "mo", "heels", ",", "se", "skirt", "..", "and", "yea", "!", "...", "-", "for", "parasol", "!", "!", "#royalhightrading", "#Royalhightrades"]} -{"id": "0395-parasol", "word": "parasol", "label_binary": 0, "text_1": "Silent, solitary, I step up the western tower. The moon appears like a hook. The lone parasol tree locks the clear autumn in the deep courtyard. What cannot be cut, nor raveled, is the sorrow of separation: Nothing tastes like that to the heart. Li Yu", "token_idx_1": 20, "text_start_1": 86, "text_end_1": 93, "date_1": "2019-09", "text_2": "Okay once I get the rest of my comms done which shouldn't be too far from now, I am embarrassed to say the parasol is what is slowing me down \ud83d\ude02 I'll be opening three full body comms for 80k. 
So once I'm done with my last one I'll make a post \ud83d\udc9c", "token_idx_2": 24, "text_start_2": 107, "text_end_2": 114, "date_2": "2020-09", "text_1_tokenized": ["Silent", ",", "solitary", ",", "I", "step", "up", "the", "western", "tower", ".", "The", "moon", "appears", "like", "a", "hook", ".", "The", "lone", "parasol", "tree", "locks", "the", "clear", "autumn", "in", "the", "deep", "courtyard", ".", "What", "cannot", "be", "cut", ",", "nor", "raveled", ",", "is", "the", "sorrow", "of", "separation", ":", "Nothing", "tastes", "like", "that", "to", "the", "heart", ".", "Li", "Yu"], "text_2_tokenized": ["Okay", "once", "I", "get", "the", "rest", "of", "my", "comms", "done", "which", "shouldn't", "be", "too", "far", "from", "now", ",", "I", "am", "embarrassed", "to", "say", "the", "parasol", "is", "what", "is", "slowing", "me", "down", "\ud83d\ude02", "I'll", "be", "opening", "three", "full", "body", "comms", "for", "80k", ".", "So", "once", "I'm", "done", "with", "my", "last", "one", "I'll", "make", "a", "post", "\ud83d\udc9c"]} -{"id": "0396-parasol", "word": "parasol", "label_binary": 1, "text_1": "Today On n'a ranger notre set de table avec parasol , le bbq tout et it really hit me que summer is over ... in a few weeks days will be rainy/ cold , leaves from trees will all fall & seasonal depression szn will be back .", "token_idx_1": 9, "text_start_1": 44, "text_end_1": 51, "date_1": "2019-09", "text_2": "I will no longer say umbrella it'll be parasol from now on", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 46, "date_2": "2020-09", "text_1_tokenized": ["Today", "On", "n'a", "ranger", "notre", "set", "de", "table", "avec", "parasol", ",", "le", "bbq", "tout", "et", "it", "really", "hit", "me", "que", "summer", "is", "over", "...", "in", "a", "few", "weeks", "days", "will", "be", "rainy", "/", "cold", ",", "leaves", "from", "trees", "will", "all", "fall", "&", "seasonal", "depression", "szn", "will", "be", "back", "."], "text_2_tokenized": ["I", "will", "no", "longer", "say", "umbrella", "it'll", "be", "parasol", "from", "now", "on"]} -{"id": "0397-parasol", "word": "parasol", "label_binary": 0, "text_1": "My parasol eagerly awaits for the backside of your head, Virion, if you disturb my darling Lissa.", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 10, "date_1": "2019-09", "text_2": "*Trading Old Val Halo* LF: Other halos, high demand items (i can add parasol depending on the halo/ items) IA: Corrupt (if i add parasol and 7Ok) IA 2: New lucky (i can add) Just an IA!! NLF: Robux, onsale items, Diamond offers, am items. 
#royalehightrading #royalehighhalo #Rh", "token_idx_2": 18, "text_start_2": 69, "text_end_2": 76, "date_2": "2020-09", "text_1_tokenized": ["My", "parasol", "eagerly", "awaits", "for", "the", "backside", "of", "your", "head", ",", "Virion", ",", "if", "you", "disturb", "my", "darling", "Lissa", "."], "text_2_tokenized": ["*", "Trading", "Old", "Val", "Halo", "*", "LF", ":", "Other", "halos", ",", "high", "demand", "items", "(", "i", "can", "add", "parasol", "depending", "on", "the", "halo", "/", "items", ")", "IA", ":", "Corrupt", "(", "if", "i", "add", "parasol", "and", "7Ok", ")", "IA", "2", ":", "New", "lucky", "(", "i", "can", "add", ")", "Just", "an", "IA", "!", "!", "NLF", ":", "Robux", ",", "onsale", "items", ",", "Diamond", "offers", ",", "am", "items", ".", "#royalehightrading", "#royalehighhalo", "#Rh"]} -{"id": "0398-parasol", "word": "parasol", "label_binary": 0, "text_1": "Despite being able to speak all these languages at least a little for years, I've only just realised that un paraguas, une parapluie, and a parasol are just \u201cfor water\u201d, \u201cfor rain\u201d, and \u201cfor sun\u201d. What the hell is an umbrella?", "token_idx_1": 28, "text_start_1": 140, "text_end_1": 147, "date_1": "2019-09", "text_2": "hi does anybody have a rt deal for a parasol", "token_idx_2": 9, "text_start_2": 37, "text_end_2": 44, "date_2": "2020-09", "text_1_tokenized": ["Despite", "being", "able", "to", "speak", "all", "these", "languages", "at", "least", "a", "little", "for", "years", ",", "I've", "only", "just", "realised", "that", "un", "paraguas", ",", "une", "parapluie", ",", "and", "a", "parasol", "are", "just", "\u201c", "for", "water", "\u201d", ",", "\u201c", "for", "rain", "\u201d", ",", "and", "\u201c", "for", "sun", "\u201d", ".", "What", "the", "hell", "is", "an", "umbrella", "?"], "text_2_tokenized": ["hi", "does", "anybody", "have", "a", "rt", "deal", "for", "a", "parasol"]} -{"id": "0399-parasol", "word": "parasol", "label_binary": 1, "text_1": "today, the p in austin p. mckenzie stands for parasol", "token_idx_1": 11, "text_start_1": 46, "text_end_1": 53, "date_1": "2019-09", "text_2": "my underwear made into a parasol >lrt", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 32, "date_2": "2020-09", "text_1_tokenized": ["today", ",", "the", "p", "in", "austin", "p", ".", "mckenzie", "stands", "for", "parasol"], "text_2_tokenized": ["my", "underwear", "made", "into", "a", "parasol", ">", "lrt"]} -{"id": "0400-parasol", "word": "parasol", "label_binary": 0, "text_1": "Not long ago I misjudged it I thought only smiles would do. Now I see I need The tears and frowns too. Slowly drenched in drizzle No rubber boots, no parasol. It's not a rollercoaster Without the rise and fall", "token_idx_1": 33, "text_start_1": 150, "text_end_1": 157, "date_1": "2019-09", "text_2": "Trading my acc with 8.5mil 3 new vals new mermaid corrupted new lucky Easter parasol shadow empress boots skates and more + Roblox avatar items such as hair accs clothes and more! There's also bloxburg money and a nice house! Looking for real life money $100+. 
Ngf sry!", "token_idx_2": 15, "text_start_2": 77, "text_end_2": 84, "date_2": "2020-09", "text_1_tokenized": ["Not", "long", "ago", "I", "misjudged", "it", "I", "thought", "only", "smiles", "would", "do", ".", "Now", "I", "see", "I", "need", "The", "tears", "and", "frowns", "too", ".", "Slowly", "drenched", "in", "drizzle", "No", "rubber", "boots", ",", "no", "parasol", ".", "It's", "not", "a", "rollercoaster", "Without", "the", "rise", "and", "fall"], "text_2_tokenized": ["Trading", "my", "acc", "with", "8.5", "mil", "3", "new", "vals", "new", "mermaid", "corrupted", "new", "lucky", "Easter", "parasol", "shadow", "empress", "boots", "skates", "and", "more", "+", "Roblox", "avatar", "items", "such", "as", "hair", "accs", "clothes", "and", "more", "!", "There's", "also", "bloxburg", "money", "and", "a", "nice", "house", "!", "Looking", "for", "real", "life", "money", "$", "100", "+", ".", "Ngf", "sry", "!"]} -{"id": "0401-parasol", "word": "parasol", "label_binary": 1, "text_1": "I'm looking for Miss Lady Rose Skirt,parasol and love me forever handbag dm if you have them!\u2764\ufe0f\u2764\ufe0f\u2764\ufe0f\u2764\ufe0f\u2764\ufe0f#royalehightrades", "token_idx_1": 8, "text_start_1": 37, "text_end_1": 44, "date_1": "2019-09", "text_2": "I miss my parasol and ltbs ;-;", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 17, "date_2": "2020-09", "text_1_tokenized": ["I'm", "looking", "for", "Miss", "Lady", "Rose", "Skirt", ",", "parasol", "and", "love", "me", "forever", "handbag", "dm", "if", "you", "have", "them", "!", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "#royalehightrades"], "text_2_tokenized": ["I", "miss", "my", "parasol", "and", "ltbs", ";", "-", ";"]} -{"id": "0402-parasol", "word": "parasol", "label_binary": 0, "text_1": "But now smiled too angry by the drawing-room were made her and the regiment, whether her parasol, attended himself.", "token_idx_1": 17, "text_start_1": 89, "text_end_1": 96, "date_1": "2019-09", "text_2": "trading val set for parasol (reasons why im not under in thread)", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["But", "now", "smiled", "too", "angry", "by", "the", "drawing-room", "were", "made", "her", "and", "the", "regiment", ",", "whether", "her", "parasol", ",", "attended", "himself", "."], "text_2_tokenized": ["trading", "val", "set", "for", "parasol", "(", "reasons", "why", "im", "not", "under", "in", "thread", ")"]} -{"id": "0403-parasol", "word": "parasol", "label_binary": 1, "text_1": "Mute is looking for her parasol.", "token_idx_1": 5, "text_start_1": 24, "text_end_1": 31, "date_1": "2019-09", "text_2": "i had a cute parasol all along,,,, i am booboo the fool", "token_idx_2": 4, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["Mute", "is", "looking", "for", "her", "parasol", "."], "text_2_tokenized": ["i", "had", "a", "cute", "parasol", "all", "along", ",", ",", ",", "i", "am", "booboo", "the", "fool"]} -{"id": "0404-parasol", "word": "parasol", "label_binary": 0, "text_1": "Me: reads \"A is for Annabelle\" to 3 yo. Me: wouldn't it be cute if there was a real doll with all the like this? I'll google it. Google: Don't do it. Me: What? I'm just looking for a sweet doll with a parasol and-DEAR GOD KILL IT WITH FIRE. Google: Good luck sleeping EVER AGAIN", "token_idx_1": 54, "text_start_1": 201, "text_end_1": 208, "date_1": "2019-09", "text_2": "Trading new mermaid halo! 
Looking for items like teddy z, ltbs, parasol, halos, etc! Ia: 600k!", "token_idx_2": 14, "text_start_2": 64, "text_end_2": 71, "date_2": "2020-09", "text_1_tokenized": ["Me", ":", "reads", "\"", "A", "is", "for", "Annabelle", "\"", "to", "3", "yo", ".", "Me", ":", "wouldn't", "it", "be", "cute", "if", "there", "was", "a", "real", "doll", "with", "all", "the", "like", "this", "?", "I'll", "google", "it", ".", "Google", ":", "Don't", "do", "it", ".", "Me", ":", "What", "?", "I'm", "just", "looking", "for", "a", "sweet", "doll", "with", "a", "parasol", "and-DEAR", "GOD", "KILL", "IT", "WITH", "FIRE", ".", "Google", ":", "Good", "luck", "sleeping", "EVER", "AGAIN"], "text_2_tokenized": ["Trading", "new", "mermaid", "halo", "!", "Looking", "for", "items", "like", "teddy", "z", ",", "ltbs", ",", "parasol", ",", "halos", ",", "etc", "!", "Ia", ":", "600k", "!"]} -{"id": "0405-parasol", "word": "parasol", "label_binary": 1, "text_1": "oh man i'm not sure if i forgot my parasol for my cosplay tomorrow oh fuck it might be in my car but who knows", "token_idx_1": 9, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-09", "text_2": "Neo, but she uses her parasol for shenanigans, like hooking someone off their feet and throwing them to the ground with the handle and then thumping her rear down on them.", "token_idx_2": 6, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["oh", "man", "i'm", "not", "sure", "if", "i", "forgot", "my", "parasol", "for", "my", "cosplay", "tomorrow", "oh", "fuck", "it", "might", "be", "in", "my", "car", "but", "who", "knows"], "text_2_tokenized": ["Neo", ",", "but", "she", "uses", "her", "parasol", "for", "shenanigans", ",", "like", "hooking", "someone", "off", "their", "feet", "and", "throwing", "them", "to", "the", "ground", "with", "the", "handle", "and", "then", "thumping", "her", "rear", "down", "on", "them", "."]} -{"id": "0406-parasol", "word": "parasol", "label_binary": 0, "text_1": "What are your favourite \u201cunique\u201d finishes to matches? Some of mine are: - McManus throws Myers out the ring & into the crowd - Tajiri vs Yano 2010 (mist + parasol) - the time someone lariated Choshu in the middle of a Sasorigatame and the tag partner submitted", "token_idx_1": 35, "text_start_1": 159, "text_end_1": 166, "date_1": "2019-09", "text_2": "I'll post my parasol gw at 920 followers \ud83d\udc40", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["What", "are", "your", "favourite", "\u201c", "unique", "\u201d", "finishes", "to", "matches", "?", "Some", "of", "mine", "are", ":", "-", "McManus", "throws", "Myers", "out", "the", "ring", "&", "into", "the", "crowd", "-", "Tajiri", "vs", "Yano", "2010", "(", "mist", "+", "parasol", ")", "-", "the", "time", "someone", "lariated", "Choshu", "in", "the", "middle", "of", "a", "Sasorigatame", "and", "the", "tag", "partner", "submitted"], "text_2_tokenized": ["I'll", "post", "my", "parasol", "gw", "at", "920", "followers", "\ud83d\udc40"]} -{"id": "0407-parasol", "word": "parasol", "label_binary": 0, "text_1": "Ok so someone said that a parasol is worth 4,000 robux so uh I'm doing a poll. Is the royale high parasol worth 4,000 ROBUX???#royalehightrades", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-09", "text_2": "Pleaseeee tell me if you have a parasol or a large train bow skirt or diamonds ft. 
I'm offering something fair at the very least, but overpaying definitely because my offer is worth 280k-320k and parasols should be about 100k and ltbs should be about 150k", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["Ok", "so", "someone", "said", "that", "a", "parasol", "is", "worth", "4,000", "robux", "so", "uh", "I'm", "doing", "a", "poll", ".", "Is", "the", "royale", "high", "parasol", "worth", "4,000", "ROBUX", "?", "?", "?", "#royalehightrades"], "text_2_tokenized": ["Pleaseeee", "tell", "me", "if", "you", "have", "a", "parasol", "or", "a", "large", "train", "bow", "skirt", "or", "diamonds", "ft", ".", "I'm", "offering", "something", "fair", "at", "the", "very", "least", ",", "but", "overpaying", "definitely", "because", "my", "offer", "is", "worth", "280k", "-", "320k", "and", "parasols", "should", "be", "about", "100k", "and", "ltbs", "should", "be", "about", "150k"]} -{"id": "0408-parasol", "word": "parasol", "label_binary": 0, "text_1": "\u0e42\u0e2b\u0e23\u0e39\u0e1b\u0e19\u0e49\u0e2d\u0e07 woman with a parasol in a garden", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 30, "date_1": "2019-09", "text_2": "RH POLL: do u think the se set will be more than og when it comes back-kinda like parasol or normal price im hoping normal price lol", "token_idx_2": 19, "text_start_2": 82, "text_end_2": 89, "date_2": "2020-09", "text_1_tokenized": ["\u0e42\u0e2b\u0e23\u0e39\u0e1b\u0e19\u0e49\u0e2d\u0e07", "woman", "with", "a", "parasol", "in", "a", "garden"], "text_2_tokenized": ["RH", "POLL", ":", "do", "u", "think", "the", "se", "set", "will", "be", "more", "than", "og", "when", "it", "comes", "back-kinda", "like", "parasol", "or", "normal", "price", "im", "hoping", "normal", "price", "lol"]} -{"id": "0409-parasol", "word": "parasol", "label_binary": 0, "text_1": "seriously considering becoming one of those people that carries a parasol", "token_idx_1": 10, "text_start_1": 66, "text_end_1": 73, "date_1": "2019-09", "text_2": "looking for 2020 parasol and ltbs trading 370k for each / 740k in total", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 24, "date_2": "2020-09", "text_1_tokenized": ["seriously", "considering", "becoming", "one", "of", "those", "people", "that", "carries", "a", "parasol"], "text_2_tokenized": ["looking", "for", "2020", "parasol", "and", "ltbs", "trading", "370k", "for", "each", "/", "740k", "in", "total"]} -{"id": "0410-parasol", "word": "parasol", "label_binary": 0, "text_1": "2hrs! That's how long it took me to wash all the paint off of my garden furniture/lights etc today after the neighbours sprayed their fence. Brand new parasol, stones, slabs, bird feeder, rabbit run all covered. 
Still cant get rid of it all either \ud83d\ude21 \ud83d\ude21\ud83d\ude21", "token_idx_1": 31, "text_start_1": 151, "text_end_1": 158, "date_1": "2019-09", "text_2": "I just gave back the shadow empress set, a light halo, and the parasol 2020 to my friend ^^ It was a birthday present, but I feel really bad for even taking it as a birthday present, so I wanted to give her stuff back \ud83e\udd7a\ud83d\udc95", "token_idx_2": 15, "text_start_2": 63, "text_end_2": 70, "date_2": "2020-09", "text_1_tokenized": ["2hrs", "!", "That's", "how", "long", "it", "took", "me", "to", "wash", "all", "the", "paint", "off", "of", "my", "garden", "furniture", "/", "lights", "etc", "today", "after", "the", "neighbours", "sprayed", "their", "fence", ".", "Brand", "new", "parasol", ",", "stones", ",", "slabs", ",", "bird", "feeder", ",", "rabbit", "run", "all", "covered", ".", "Still", "cant", "get", "rid", "of", "it", "all", "either", "\ud83d\ude21", "\ud83d\ude21", "\ud83d\ude21"], "text_2_tokenized": ["I", "just", "gave", "back", "the", "shadow", "empress", "set", ",", "a", "light", "halo", ",", "and", "the", "parasol", "2020", "to", "my", "friend", "^", "^", "It", "was", "a", "birthday", "present", ",", "but", "I", "feel", "really", "bad", "for", "even", "taking", "it", "as", "a", "birthday", "present", ",", "so", "I", "wanted", "to", "give", "her", "stuff", "back", "\ud83e\udd7a", "\ud83d\udc95"]} -{"id": "0411-parasol", "word": "parasol", "label_binary": 1, "text_1": "I went out into the sun without a parasol once. I looked like a cooked lobster when I came home..", "token_idx_1": 8, "text_start_1": 34, "text_end_1": 41, "date_1": "2019-09", "text_2": "Guide me to them, Puppy. I'd appreciate it if you'd hold up a parasol for me while we're at it.", "token_idx_2": 15, "text_start_2": 62, "text_end_2": 69, "date_2": "2020-09", "text_1_tokenized": ["I", "went", "out", "into", "the", "sun", "without", "a", "parasol", "once", ".", "I", "looked", "like", "a", "cooked", "lobster", "when", "I", "came", "home", ".."], "text_2_tokenized": ["Guide", "me", "to", "them", ",", "Puppy", ".", "I'd", "appreciate", "it", "if", "you'd", "hold", "up", "a", "parasol", "for", "me", "while", "we're", "at", "it", "."]} -{"id": "0412-parasol", "word": "parasol", "label_binary": 0, "text_1": "beach in Vilamoura, 2 sun beds and parasol. Guy charges us \u20ac12.60 Guy goes to next couple and says \u20ac12.60 the couple say we don't want parasol so how much for the 2 beds, guy tells them \u20ac14.00 as it's \u20ac7.00 for each bed!! Obviously chap paid \u20ac12.60 need brexit ASAP please.", "token_idx_1": 8, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-09", "text_2": "Looking for parasol! 
Offering robux <3 Name your price (Don't steal trades until I decline.)", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 19, "date_2": "2020-09", "text_1_tokenized": ["beach", "in", "Vilamoura", ",", "2", "sun", "beds", "and", "parasol", ".", "Guy", "charges", "us", "\u20ac", "12.60", "Guy", "goes", "to", "next", "couple", "and", "says", "\u20ac", "12.60", "the", "couple", "say", "we", "don't", "want", "parasol", "so", "how", "much", "for", "the", "2", "beds", ",", "guy", "tells", "them", "\u20ac", "14.00", "as", "it's", "\u20ac", "7.00", "for", "each", "bed", "!", "!", "Obviously", "chap", "paid", "\u20ac", "12.60", "need", "brexit", "ASAP", "please", "."], "text_2_tokenized": ["Looking", "for", "parasol", "!", "Offering", "robux", "<3", "Name", "your", "price", "(", "Don't", "steal", "trades", "until", "I", "decline", ".", ")"]} -{"id": "0413-parasol", "word": "parasol", "label_binary": 1, "text_1": "Woman with a parasol in a garden \u0e01\u0e31\u0e1a i like me better \u0e2d\u0e48\u0e30\u0e21\u0e36\u0e07\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46\u0e46", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 20, "date_1": "2019-09", "text_2": "i need a parasol but i want it to be dykey as hell how does one go about this", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-09", "text_1_tokenized": ["Woman", "with", "a", "parasol", "in", "a", "garden", "\u0e01\u0e31\u0e1a", "i", "like", "me", "better", "\u0e2d\u0e48\u0e30\u0e21\u0e36\u0e07\u0e46\u0e46\u0e46"], "text_2_tokenized": ["i", "need", "a", "parasol", "but", "i", "want", "it", "to", "be", "dykey", "as", "hell", "how", "does", "one", "go", "about", "this"]} -{"id": "0414-parasol", "word": "parasol", "label_binary": 0, "text_1": "Can't hold my skirt down and hold a parasol at the same time so guess who's getting a tan today", "token_idx_1": 8, "text_start_1": 36, "text_end_1": 43, "date_1": "2019-09", "text_2": "Me: 2020 parasol Them: ltbs and 50k W/l/f QUICKKK", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-09", "text_1_tokenized": ["Can't", "hold", "my", "skirt", "down", "and", "hold", "a", "parasol", "at", "the", "same", "time", "so", "guess", "who's", "getting", "a", "tan", "today"], "text_2_tokenized": ["Me", ":", "2020", "parasol", "Them", ":", "ltbs", "and", "50k", "W", "/", "l", "/", "f", "QUICKKK"]} -{"id": "0415-parasol", "word": "parasol", "label_binary": 0, "text_1": "Hey there beautiful and say... are you available around this time?\" He asked @ThiccQueenV as he encounter her by the beach. 
Seeing laying down on the under the parasol with her back expose really does making him interested to take her out.", "token_idx_1": 32, "text_start_1": 160, "text_end_1": 167, "date_1": "2019-09", "text_2": "Is this a fair trade for Easter halo 1k robux, parasol, mrl set, cb set, 100k diamonds, and I can get like any item in any game you want", "token_idx_2": 11, "text_start_2": 47, "text_end_2": 54, "date_2": "2020-09", "text_1_tokenized": ["Hey", "there", "beautiful", "and", "say", "...", "are", "you", "available", "around", "this", "time", "?", "\"", "He", "asked", "@ThiccQueenV", "as", "he", "encounter", "her", "by", "the", "beach", ".", "Seeing", "laying", "down", "on", "the", "under", "the", "parasol", "with", "her", "back", "expose", "really", "does", "making", "him", "interested", "to", "take", "her", "out", "."], "text_2_tokenized": ["Is", "this", "a", "fair", "trade", "for", "Easter", "halo", "1k", "robux", ",", "parasol", ",", "mrl", "set", ",", "cb", "set", ",", "100k", "diamonds", ",", "and", "I", "can", "get", "like", "any", "item", "in", "any", "game", "you", "want"]} -{"id": "0416-parasol", "word": "parasol", "label_binary": 0, "text_1": "#NP : Queens of the stone age - Monsters in the parasol", "token_idx_1": 11, "text_start_1": 48, "text_end_1": 55, "date_1": "2019-09", "text_2": "marine border parasol is sooooooo cute one of my favorite songs", "token_idx_2": 2, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["#NP", ":", "Queens", "of", "the", "stone", "age", "-", "Monsters", "in", "the", "parasol"], "text_2_tokenized": ["marine", "border", "parasol", "is", "sooooooo", "cute", "one", "of", "my", "favorite", "songs"]} -{"id": "0417-parasol", "word": "parasol", "label_binary": 0, "text_1": "I just got beaten by a 1% health parasol... :)) I'm calm", "token_idx_1": 9, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-09", "text_2": "alr so I play royal high so who wants to offer for my parasol 2020 orrrrrr Val halo 2020", "token_idx_2": 13, "text_start_2": 54, "text_end_2": 61, "date_2": "2020-09", "text_1_tokenized": ["I", "just", "got", "beaten", "by", "a", "1", "%", "health", "parasol", "...", ":)", ")", "I'm", "calm"], "text_2_tokenized": ["alr", "so", "I", "play", "royal", "high", "so", "who", "wants", "to", "offer", "for", "my", "parasol", "2020", "orrrrrr", "Val", "halo", "2020"]} -{"id": "0418-parasol", "word": "parasol", "label_binary": 0, "text_1": "Imagine, you have 3 sunbeds around the pool, with a parasol next to one, but down as you want to be in the sun not the shade. A foreign family arrive and sit on the sun beds next to you- so the parasol is in between you... 1/3", "token_idx_1": 12, "text_start_1": 52, "text_end_1": 59, "date_1": "2019-09", "text_2": "I'm certified parasol secondary when I get the game", "token_idx_2": 2, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["Imagine", ",", "you", "have", "3", "sunbeds", "around", "the", "pool", ",", "with", "a", "parasol", "next", "to", "one", ",", "but", "down", "as", "you", "want", "to", "be", "in", "the", "sun", "not", "the", "shade", ".", "A", "foreign", "family", "arrive", "and", "sit", "on", "the", "sun", "beds", "next", "to", "you", "-", "so", "the", "parasol", "is", "in", "between", "you", "...", "1/3"], "text_2_tokenized": ["I'm", "certified", "parasol", "secondary", "when", "I", "get", "the", "game"]} -{"id": "0419-parasol", "word": "parasol", "label_binary": 0, "text_1": "Thank you for this cutie parasol, lola Herminia Dela Rosa... 
\u2602\ufe0f umBEARella :bear +umbrella \ud83d\ude06 #Chloebear", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 32, "date_1": "2019-09", "text_2": "Trading 2020 elegant parasol,shadow empress boots, 332.000 diamonds for a good or middle tier halo! ;w;", "token_idx_2": 3, "text_start_2": 21, "text_end_2": 28, "date_2": "2020-09", "text_1_tokenized": ["Thank", "you", "for", "this", "cutie", "parasol", ",", "lola", "Herminia", "Dela", "Rosa", "...", "\u2602", "\ufe0f", "umBEARella", ":", "bear", "+", "umbrella", "\ud83d\ude06", "#Chloebear"], "text_2_tokenized": ["Trading", "2020", "elegant", "parasol", ",", "shadow", "empress", "boots", ",", "332.000", "diamonds", "for", "a", "good", "or", "middle", "tier", "halo", "!", ";", "w", ";"]} -{"id": "0420-parasol", "word": "parasol", "label_binary": 0, "text_1": "#vss365 Death scowled at the glowing orb, fiercely gripping her scythe-shaped parasol. \"His #soul is worth nothing. After all, he never believed in me.\" Time gasped. \"Daughter, you cannot mean such a thing!\" \"I always do,\" she sighed, \"But you never noticed.\"", "token_idx_1": 12, "text_start_1": 78, "text_end_1": 85, "date_1": "2019-09", "text_2": "Should i do a bid for my 2020 parasol? \ud83e\udd14", "token_idx_2": 8, "text_start_2": 30, "text_end_2": 37, "date_2": "2020-09", "text_1_tokenized": ["#vss365", "Death", "scowled", "at", "the", "glowing", "orb", ",", "fiercely", "gripping", "her", "scythe-shaped", "parasol", ".", "\"", "His", "#soul", "is", "worth", "nothing", ".", "After", "all", ",", "he", "never", "believed", "in", "me", ".", "\"", "Time", "gasped", ".", "\"", "Daughter", ",", "you", "cannot", "mean", "such", "a", "thing", "!", "\"", "\"", "I", "always", "do", ",", "\"", "she", "sighed", ",", "\"", "But", "you", "never", "noticed", ".", "\""], "text_2_tokenized": ["Should", "i", "do", "a", "bid", "for", "my", "2020", "parasol", "?", "\ud83e\udd14"]} -{"id": "0421-parasol", "word": "parasol", "label_binary": 0, "text_1": "things i've blessed hubert with: - additional dad vibes - eating pizza with a fork (no knife) - cursed item collecting and loving - cursed item restoring - taking a dainty goth parasol to the beach", "token_idx_1": 35, "text_start_1": 177, "text_end_1": 184, "date_1": "2019-09", "text_2": "A girl is chasing me for tje halo and parasol that @hoe4izuku gav me and wanting to trade it for diamonds probably 1k", "token_idx_2": 9, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-09", "text_1_tokenized": ["things", "i've", "blessed", "hubert", "with", ":", "-", "additional", "dad", "vibes", "-", "eating", "pizza", "with", "a", "fork", "(", "no", "knife", ")", "-", "cursed", "item", "collecting", "and", "loving", "-", "cursed", "item", "restoring", "-", "taking", "a", "dainty", "goth", "parasol", "to", "the", "beach"], "text_2_tokenized": ["A", "girl", "is", "chasing", "me", "for", "tje", "halo", "and", "parasol", "that", "@hoe4izuku", "gav", "me", "and", "wanting", "to", "trade", "it", "for", "diamonds", "probably", "1k"]} -{"id": "0422-parasol", "word": "parasol", "label_binary": 0, "text_1": "I'm very glad parasol was fixed \u263a\ufe0f", "token_idx_1": 3, "text_start_1": 14, "text_end_1": 21, "date_1": "2019-09", "text_2": "Can we get mido parasol covering izi - emergency room in the next season?", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["I'm", "very", "glad", "parasol", "was", "fixed", "\u263a", "\ufe0f"], "text_2_tokenized": ["Can", "we", "get", "mido", "parasol", "covering", "izi", "-", "emergency", "room", "in", "the", "next", "season", "?"]} -{"id": "0423-parasol", "word": "parasol", "label_binary": 0, "text_1": "He has a parasol and is quite intrigued with it... \"So.... How the fuck is this thing considered a weapon? Its just a normal umbrella?", "token_idx_1": 3, "text_start_1": 9, "text_end_1": 16, "date_1": "2019-09", "text_2": "Scammers are very bad, they should work hard for their items instead of taking them. I remember first joining the trading hub and I found someone who just got scammed for their parasol, and some of the SE set. Y'all don't know how hard people work for their items.", "token_idx_2": 34, "text_start_2": 177, "text_end_2": 184, "date_2": "2020-09", "text_1_tokenized": ["He", "has", "a", "parasol", "and", "is", "quite", "intrigued", "with", "it", "...", "\"", "So", "...", "How", "the", "fuck", "is", "this", "thing", "considered", "a", "weapon", "?", "Its", "just", "a", "normal", "umbrella", "?"], "text_2_tokenized": ["Scammers", "are", "very", "bad", ",", "they", "should", "work", "hard", "for", "their", "items", "instead", "of", "taking", "them", ".", "I", "remember", "first", "joining", "the", "trading", "hub", "and", "I", "found", "someone", "who", "just", "got", "scammed", "for", "their", "parasol", ",", "and", "some", "of", "the", "SE", "set", ".", "Y'all", "don't", "know", "how", "hard", "people", "work", "for", "their", "items", "."]} -{"id": "0424-parasol", "word": "parasol", "label_binary": 0, "text_1": "Checked out, and spending the day pool side. The man next to me has spent the last 20 mins moaning, about how the parasol type dealios don't offer much cover from the sun, how the coffee doesn't taste like Starbucks, and now the curtains. You're on holiday, David, give it a rest!", "token_idx_1": 26, "text_start_1": 114, "text_end_1": 121, "date_1": "2019-09", "text_2": "I'm trading my whole inventory for the parasol! Any parasol is fine! Please I've been looking for it!! 
( I have a good Inventory ) \ud83d\ude1a\ud83d\ude1a", "token_idx_2": 7, "text_start_2": 39, "text_end_2": 46, "date_2": "2020-09", "text_1_tokenized": ["Checked", "out", ",", "and", "spending", "the", "day", "pool", "side", ".", "The", "man", "next", "to", "me", "has", "spent", "the", "last", "20", "mins", "moaning", ",", "about", "how", "the", "parasol", "type", "dealios", "don't", "offer", "much", "cover", "from", "the", "sun", ",", "how", "the", "coffee", "doesn't", "taste", "like", "Starbucks", ",", "and", "now", "the", "curtains", ".", "You're", "on", "holiday", ",", "David", ",", "give", "it", "a", "rest", "!"], "text_2_tokenized": ["I'm", "trading", "my", "whole", "inventory", "for", "the", "parasol", "!", "Any", "parasol", "is", "fine", "!", "Please", "I've", "been", "looking", "for", "it", "!", "!", "(", "I", "have", "a", "good", "Inventory", ")", "\ud83d\ude1a", "\ud83d\ude1a"]} -{"id": "0425-parasol", "word": "parasol", "label_binary": 0, "text_1": "how in the fuk did my child gift me emperor parasol moss when it only Grows outside Tel Mithryn in Raven Rock????", "token_idx_1": 10, "text_start_1": 44, "text_end_1": 51, "date_1": "2019-09", "text_2": "to the girl with the parasol: step on me", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 28, "date_2": "2020-09", "text_1_tokenized": ["how", "in", "the", "fuk", "did", "my", "child", "gift", "me", "emperor", "parasol", "moss", "when", "it", "only", "Grows", "outside", "Tel", "Mithryn", "in", "Raven", "Rock", "?", "?", "?"], "text_2_tokenized": ["to", "the", "girl", "with", "the", "parasol", ":", "step", "on", "me"]} -{"id": "0426-parasol", "word": "parasol", "label_binary": 1, "text_1": "How is that the English came to say \"umbrella\" instead of \"parapluie\" but \"parasol\" is still just \"parasol\"?", "token_idx_1": 18, "text_start_1": 75, "text_end_1": 82, "date_1": "2019-09", "text_2": "someone get Verin Thelyss a parasol STAT so he can go on dates with his sunshine boyfriend :). Darrow gifts Verin sunscreen and when they're prepping to go out and Verin is applying t Darrow always says \u201cdon't forget your ears!\u201d and kisses Verin's ear", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 35, "date_2": "2020-09", "text_1_tokenized": ["How", "is", "that", "the", "English", "came", "to", "say", "\"", "umbrella", "\"", "instead", "of", "\"", "parapluie", "\"", "but", "\"", "parasol", "\"", "is", "still", "just", "\"", "parasol", "\"", "?"], "text_2_tokenized": ["someone", "get", "Verin", "Thelyss", "a", "parasol", "STAT", "so", "he", "can", "go", "on", "dates", "with", "his", "sunshine", "boyfriend", ":)", ".", "Darrow", "gifts", "Verin", "sunscreen", "and", "when", "they're", "prepping", "to", "go", "out", "and", "Verin", "is", "applying", "t", "Darrow", "always", "says", "\u201c", "don't", "forget", "your", "ears", "!", "\u201d", "and", "kisses", "Verin's", "ear"]} -{"id": "0427-parasol", "word": "parasol", "label_binary": 0, "text_1": "Every morning a French Adonis who I've nicknamed 'Parisian Poldark' likes to stand poolside in skin tight Speedos & do his daily workout. Irritated, the husband tutted & announced he was off to get a kebab, but not before asking if I needed a parasol to shade my tongue first. \ud83d\ude02", "token_idx_1": 49, "text_start_1": 251, "text_end_1": 258, "date_1": "2019-09", "text_2": "I'm trading new val halo feel free to offer! 
lf: high demand halo(s) high demand items all sets parasol teddyz nlf: diamonds (Depends) in shop items small adds low demand/easy to get stuff IA: shadow set, sf set, dv set, parasol, light halo, teddy z, goth sleeves, and like 500k", "token_idx_2": 23, "text_start_2": 96, "text_end_2": 103, "date_2": "2020-09", "text_1_tokenized": ["Every", "morning", "a", "French", "Adonis", "who", "I've", "nicknamed", "'", "Parisian", "Poldark", "'", "likes", "to", "stand", "poolside", "in", "skin", "tight", "Speedos", "&", "do", "his", "daily", "workout", ".", "Irritated", ",", "the", "husband", "tutted", "&", "announced", "he", "was", "off", "to", "get", "a", "kebab", ",", "but", "not", "before", "asking", "if", "I", "needed", "a", "parasol", "to", "shade", "my", "tongue", "first", ".", "\ud83d\ude02"], "text_2_tokenized": ["I'm", "trading", "new", "val", "halo", "feel", "free", "to", "offer", "!", "lf", ":", "high", "demand", "halo", "(", "s", ")", "high", "demand", "items", "all", "sets", "parasol", "teddyz", "nlf", ":", "diamonds", "(", "Depends", ")", "in", "shop", "items", "small", "adds", "low", "demand", "/", "easy", "to", "get", "stuff", "IA", ":", "shadow", "set", ",", "sf", "set", ",", "dv", "set", ",", "parasol", ",", "light", "halo", ",", "teddy", "z", ",", "goth", "sleeves", ",", "and", "like", "500k"]} -{"id": "0428-parasol", "word": "parasol", "label_binary": 0, "text_1": "They really tried to fix parasol again when bayo uptilt still doesn't work", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 32, "date_1": "2019-09", "text_2": "hiii I'm trading 230k for parasol. Anyone who wants to do that trade reply ^w^", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 33, "date_2": "2020-09", "text_1_tokenized": ["They", "really", "tried", "to", "fix", "parasol", "again", "when", "bayo", "uptilt", "still", "doesn't", "work"], "text_2_tokenized": ["hiii", "I'm", "trading", "230k", "for", "parasol", ".", "Anyone", "who", "wants", "to", "do", "that", "trade", "reply", "^", "w", "^"]} -{"id": "0429-parasol", "word": "parasol", "label_binary": 1, "text_1": "The students are having a market at school today where they're selling their own stuff and I bought a hand painted parasol from a 9 year old. Guys, my school is amazing.", "token_idx_1": 21, "text_start_1": 115, "text_end_1": 122, "date_1": "2019-09", "text_2": "whenever I go away again and find myself under a parasol in 35 degree weather drinking a sexy cocktail at 2pm, I will remember how I feel right now, and I'll sob tears of joy thinking of how long I waited to be able to do just that. sip sip. little dip in the pool nearby. 
bliss.", "token_idx_2": 10, "text_start_2": 49, "text_end_2": 56, "date_2": "2020-09", "text_1_tokenized": ["The", "students", "are", "having", "a", "market", "at", "school", "today", "where", "they're", "selling", "their", "own", "stuff", "and", "I", "bought", "a", "hand", "painted", "parasol", "from", "a", "9", "year", "old", ".", "Guys", ",", "my", "school", "is", "amazing", "."], "text_2_tokenized": ["whenever", "I", "go", "away", "again", "and", "find", "myself", "under", "a", "parasol", "in", "35", "degree", "weather", "drinking", "a", "sexy", "cocktail", "at", "2pm", ",", "I", "will", "remember", "how", "I", "feel", "right", "now", ",", "and", "I'll", "sob", "tears", "of", "joy", "thinking", "of", "how", "long", "I", "waited", "to", "be", "able", "to", "do", "just", "that", ".", "sip", "sip", ".", "little", "dip", "in", "the", "pool", "nearby", ".", "bliss", "."]} -{"id": "0430-parasol", "word": "parasol", "label_binary": 0, "text_1": "#POETHEME 309 Play with Ideas a child no more Toss them Mind bowl draw Grey-zone word toy Shades distinct Themselves to yearn For coffee, cream Berry fields Fuschia parasol bells Sunflower whizz-heads Nod to furnace divide Delft-sieved light \u00a99.9.2019 #AndreaCConnolly", "token_idx_1": 29, "text_start_1": 165, "text_end_1": 172, "date_1": "2019-09", "text_2": "Wfl? Me: parasol and goth sleeves Them: spring halo #royalhighhalo #royalhightrading #royalhightrader", "token_idx_2": 4, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-09", "text_1_tokenized": ["#POETHEME", "309", "Play", "with", "Ideas", "a", "child", "no", "more", "Toss", "them", "Mind", "bowl", "draw", "Grey-zone", "word", "toy", "Shades", "distinct", "Themselves", "to", "yearn", "For", "coffee", ",", "cream", "Berry", "fields", "Fuschia", "parasol", "bells", "Sunflower", "whizz-heads", "Nod", "to", "furnace", "divide", "Delft-sieved", "light", "\u00a9", "9.9", ".", "2019", "#AndreaCConnolly"], "text_2_tokenized": ["Wfl", "?", "Me", ":", "parasol", "and", "goth", "sleeves", "Them", ":", "spring", "halo", "#royalhighhalo", "#royalhightrading", "#royalhightrader"]} -{"id": "0431-parasol", "word": "parasol", "label_binary": 0, "text_1": "Was here to receive parasol this morning. Aghast to see card saying I have to collect. I was here!! Can't read tracking number and no other details except I'm to collect from Depit. Where is that? This has been a catalogue of failures. Waited last Monday as requested. No delivery", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 27, "date_1": "2019-09", "text_2": "Trading my winter halo 2018 for the new parasol + adds! Lf: -High demand items -Sf items? -Dv items? 
Nlf: -Diamonds -Cb items -Se items -Onsale items IA (let me dream): -Parasol, skates, val skirt, val corset, goth sleeves", "token_idx_2": 8, "text_start_2": 40, "text_end_2": 47, "date_2": "2020-09", "text_1_tokenized": ["Was", "here", "to", "receive", "parasol", "this", "morning", ".", "Aghast", "to", "see", "card", "saying", "I", "have", "to", "collect", ".", "I", "was", "here", "!", "!", "Can't", "read", "tracking", "number", "and", "no", "other", "details", "except", "I'm", "to", "collect", "from", "Depit", ".", "Where", "is", "that", "?", "This", "has", "been", "a", "catalogue", "of", "failures", ".", "Waited", "last", "Monday", "as", "requested", ".", "No", "delivery"], "text_2_tokenized": ["Trading", "my", "winter", "halo", "2018", "for", "the", "new", "parasol", "+", "adds", "!", "Lf", ":", "-", "High", "demand", "items", "-", "Sf", "items", "?", "-", "Dv", "items", "?", "Nlf", ":", "-", "Diamonds", "-", "Cb", "items", "-", "Se", "items", "-", "Onsale", "items", "IA", "(", "let", "me", "dream", "):", "-", "Parasol", ",", "skates", ",", "val", "skirt", ",", "val", "corset", ",", "goth", "sleeves"]} -{"id": "0432-parasol", "word": "parasol", "label_binary": 0, "text_1": "God I would love to recreate my Halloween outfit in rh but I need the parasol...\ud83d\ude14", "token_idx_1": 15, "text_start_1": 70, "text_end_1": 77, "date_1": "2019-09", "text_2": "Trading my whole Fluttering butterfly set and sf bonnet for 75-80 robux! but you can also offer:) Btw I'm NGF unless i know you or traded with you before. #royalehightrade #royalehighoffer #royalehighselling kw: halo, se set, parasol, ltbs", "token_idx_2": 42, "text_start_2": 226, "text_end_2": 233, "date_2": "2020-09", "text_1_tokenized": ["God", "I", "would", "love", "to", "recreate", "my", "Halloween", "outfit", "in", "rh", "but", "I", "need", "the", "parasol", "...", "\ud83d\ude14"], "text_2_tokenized": ["Trading", "my", "whole", "Fluttering", "butterfly", "set", "and", "sf", "bonnet", "for", "75-80", "robux", "!", "but", "you", "can", "also", "offer", ":)", "Btw", "I'm", "NGF", "unless", "i", "know", "you", "or", "traded", "with", "you", "before", ".", "#royalehightrade", "#royalehighoffer", "#royalehighselling", "kw", ":", "halo", ",", "se", "set", ",", "parasol", ",", "ltbs"]} -{"id": "0433-parasol", "word": "parasol", "label_binary": 0, "text_1": "imagine holding up a parasol and not being a character in mario kart.", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 28, "date_1": "2019-09", "text_2": "Level 24 in fighters 2 and i still havent unlocked parasol \ud83d\ude14", "token_idx_2": 10, "text_start_2": 51, "text_end_2": 58, "date_2": "2020-09", "text_1_tokenized": ["imagine", "holding", "up", "a", "parasol", "and", "not", "being", "a", "character", "in", "mario", "kart", "."], "text_2_tokenized": ["Level", "24", "in", "fighters", "2", "and", "i", "still", "havent", "unlocked", "parasol", "\ud83d\ude14"]} -{"id": "0434-parasol", "word": "parasol", "label_binary": 0, "text_1": "Things that rip my nut about people at the poolside: 1. Leave their towel on a lounger at 9am and return at 4pm for 15 minutes then leave. 2. Folk that sit at the pool with the parasol up so they dont get sun and dont go in the pool (why you even here then)", "token_idx_1": 41, "text_start_1": 177, "text_end_1": 184, "date_1": "2019-09", "text_2": "Trading 150 k or so for the 2020 parasol! Anyone? If so comment bellow or dm me^^ Keywords: parasol, 2020parasol, Valentine's Day item, Royal high, rh trading. 
#royalehightrading #royalhigh #parasol2020", "token_idx_2": 8, "text_start_2": 33, "text_end_2": 40, "date_2": "2020-09", "text_1_tokenized": ["Things", "that", "rip", "my", "nut", "about", "people", "at", "the", "poolside", ":", "1", ".", "Leave", "their", "towel", "on", "a", "lounger", "at", "9am", "and", "return", "at", "4pm", "for", "15", "minutes", "then", "leave", ".", "2", ".", "Folk", "that", "sit", "at", "the", "pool", "with", "the", "parasol", "up", "so", "they", "dont", "get", "sun", "and", "dont", "go", "in", "the", "pool", "(", "why", "you", "even", "here", "then", ")"], "text_2_tokenized": ["Trading", "150", "k", "or", "so", "for", "the", "2020", "parasol", "!", "Anyone", "?", "If", "so", "comment", "bellow", "or", "dm", "me", "^", "^", "Keywords", ":", "parasol", ",", "2020parasol", ",", "Valentine's", "Day", "item", ",", "Royal", "high", ",", "rh", "trading", ".", "#royalehightrading", "#royalhigh", "#parasol2020"]} -{"id": "0435-parasol", "word": "parasol", "label_binary": 0, "text_1": "Just watching my blind cat Floppy on the rattan garden chairs. He is going from one to another & wondering why he is getting wet in the rain. I put the banana parasol down bc the wind is picking up. He will leg it inside when he works it out & il be waiting to dry him off", "token_idx_1": 34, "text_start_1": 163, "text_end_1": 170, "date_1": "2019-09", "text_2": "Did a crosstrade w @Gxrly_Shit she gave me 600 robux for my parasol, recomend trading her!", "token_idx_2": 12, "text_start_2": 60, "text_end_2": 67, "date_2": "2020-09", "text_1_tokenized": ["Just", "watching", "my", "blind", "cat", "Floppy", "on", "the", "rattan", "garden", "chairs", ".", "He", "is", "going", "from", "one", "to", "another", "&", "wondering", "why", "he", "is", "getting", "wet", "in", "the", "rain", ".", "I", "put", "the", "banana", "parasol", "down", "bc", "the", "wind", "is", "picking", "up", ".", "He", "will", "leg", "it", "inside", "when", "he", "works", "it", "out", "&", "il", "be", "waiting", "to", "dry", "him", "off"], "text_2_tokenized": ["Did", "a", "crosstrade", "w", "@Gxrly_Shit", "she", "gave", "me", "600", "robux", "for", "my", "parasol", ",", "recomend", "trading", "her", "!"]} -{"id": "0436-parasol", "word": "parasol", "label_binary": 0, "text_1": "Just had to google \"fancy umbrella\" because I forgot the word for parasol.", "token_idx_1": 14, "text_start_1": 66, "text_end_1": 73, "date_1": "2019-09", "text_2": "Trading new val and spring for fall and a parasol <3", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 49, "date_2": "2020-09", "text_1_tokenized": ["Just", "had", "to", "google", "\"", "fancy", "umbrella", "\"", "because", "I", "forgot", "the", "word", "for", "parasol", "."], "text_2_tokenized": ["Trading", "new", "val", "and", "spring", "for", "fall", "and", "a", "parasol", "<3"]} -{"id": "0437-parasol", "word": "parasol", "label_binary": 0, "text_1": "i'm convinced i was born in the wrong generation! i just want 2 go to a world's fair with my parasol and get swept away by a gentle breeze!", "token_idx_1": 21, "text_start_1": 93, "text_end_1": 100, "date_1": "2019-09", "text_2": "I'm offering new parasol and some diamonds for someone's Spring Halo. 
How many diamonds should I add?", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 24, "date_2": "2020-09", "text_1_tokenized": ["i'm", "convinced", "i", "was", "born", "in", "the", "wrong", "generation", "!", "i", "just", "want", "2", "go", "to", "a", "world's", "fair", "with", "my", "parasol", "and", "get", "swept", "away", "by", "a", "gentle", "breeze", "!"], "text_2_tokenized": ["I'm", "offering", "new", "parasol", "and", "some", "diamonds", "for", "someone's", "Spring", "Halo", ".", "How", "many", "diamonds", "should", "I", "add", "?"]} -{"id": "0438-parasol", "word": "parasol", "label_binary": 0, "text_1": "santa: umbrella. x1 is an umbrella/parasol shielding x2 from x3, made of material x4, supported by x5", "token_idx_1": 9, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-09", "text_2": "the main reason i kin parasol from tp is that. i kinda wrote most of her lines in episode one and made her crack a funny joke soooo thats why", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["santa", ":", "umbrella", ".", "x1", "is", "an", "umbrella", "/", "parasol", "shielding", "x2", "from", "x3", ",", "made", "of", "material", "x4", ",", "supported", "by", "x5"], "text_2_tokenized": ["the", "main", "reason", "i", "kin", "parasol", "from", "tp", "is", "that", ".", "i", "kinda", "wrote", "most", "of", "her", "lines", "in", "episode", "one", "and", "made", "her", "crack", "a", "funny", "joke", "soooo", "thats", "why"]} -{"id": "0439-parasol", "word": "parasol", "label_binary": 0, "text_1": "#GBBO I didn't understand Prue's comments about Michelle's cake. Pina coladas are garishly decorated with parasol sticks and other paraphernalia. I \u201cgot\u201d the crazy design.", "token_idx_1": 16, "text_start_1": 106, "text_end_1": 113, "date_1": "2019-09", "text_2": "I am shocked we are not talking more about Thufir Hawat's jaunty parasol", "token_idx_2": 12, "text_start_2": 65, "text_end_2": 72, "date_2": "2020-09", "text_1_tokenized": ["#GBBO", "I", "didn't", "understand", "Prue's", "comments", "about", "Michelle's", "cake", ".", "Pina", "coladas", "are", "garishly", "decorated", "with", "parasol", "sticks", "and", "other", "paraphernalia", ".", "I", "\u201c", "got", "\u201d", "the", "crazy", "design", "."], "text_2_tokenized": ["I", "am", "shocked", "we", "are", "not", "talking", "more", "about", "Thufir", "Hawat's", "jaunty", "parasol"]} -{"id": "0440-parasol", "word": "parasol", "label_binary": 0, "text_1": "#19188 [It had a name?] This is the tip of an umbrella/parasol. This is also the name of the metal tip connecting your pencil and the eraserhead #trivia", "token_idx_1": 16, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-09", "text_2": "There's currently one week left on my parasol giveaway! Check it out on my channel, Royally Dizzy! 
#royalehighgiveaway", "token_idx_2": 7, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-09", "text_1_tokenized": ["#19188", "[", "It", "had", "a", "name", "?", "]", "This", "is", "the", "tip", "of", "an", "umbrella", "/", "parasol", ".", "This", "is", "also", "the", "name", "of", "the", "metal", "tip", "connecting", "your", "pencil", "and", "the", "eraserhead", "#trivia"], "text_2_tokenized": ["There's", "currently", "one", "week", "left", "on", "my", "parasol", "giveaway", "!", "Check", "it", "out", "on", "my", "channel", ",", "Royally", "Dizzy", "!", "#royalehighgiveaway"]} -{"id": "0441-parasol", "word": "parasol", "label_binary": 0, "text_1": "Am I the only one who enjoys walking with my parasol outside while raining ~? \u2602\ufe0f\ud83d\udca6", "token_idx_1": 10, "text_start_1": 45, "text_end_1": 52, "date_1": "2019-09", "text_2": "SHOULD I DO NEW HAL FOR 5mill + parasol + starfrost set or do 2 new Val and an autumn #royalehightrading #RoyaleHighHalo", "token_idx_2": 8, "text_start_2": 32, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["Am", "I", "the", "only", "one", "who", "enjoys", "walking", "with", "my", "parasol", "outside", "while", "raining", "~", "?", "\u2602", "\ufe0f", "\ud83d\udca6"], "text_2_tokenized": ["SHOULD", "I", "DO", "NEW", "HAL", "FOR", "5mill", "+", "parasol", "+", "starfrost", "set", "or", "do", "2", "new", "Val", "and", "an", "autumn", "#royalehightrading", "#RoyaleHighHalo"]} -{"id": "0442-parasol", "word": "parasol", "label_binary": 0, "text_1": "You've got a monster in your parasol.", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 36, "date_1": "2019-09", "text_2": "who can give me a halo/parasol rtd? i want one so badly now ;-; rts appreciated!\ud83c\udf3b", "token_idx_2": 7, "text_start_2": 23, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["You've", "got", "a", "monster", "in", "your", "parasol", "."], "text_2_tokenized": ["who", "can", "give", "me", "a", "halo", "/", "parasol", "rtd", "?", "i", "want", "one", "so", "badly", "now", ";", "-", ";", "rts", "appreciated", "!", "\ud83c\udf3b"]} -{"id": "0443-parasol", "word": "parasol", "label_binary": 1, "text_1": "Saw this old lady walking around campus with a parasol I'm so glad I'm not the only one", "token_idx_1": 9, "text_start_1": 47, "text_end_1": 54, "date_1": "2019-09", "text_2": "Both Morgans playing with the sea while I'm looking them under the parasol because I can't swim \ud83d\ude14", "token_idx_2": 12, "text_start_2": 67, "text_end_2": 74, "date_2": "2020-09", "text_1_tokenized": ["Saw", "this", "old", "lady", "walking", "around", "campus", "with", "a", "parasol", "I'm", "so", "glad", "I'm", "not", "the", "only", "one"], "text_2_tokenized": ["Both", "Morgans", "playing", "with", "the", "sea", "while", "I'm", "looking", "them", "under", "the", "parasol", "because", "I", "can't", "swim", "\ud83d\ude14"]} -{"id": "0444-parasol", "word": "parasol", "label_binary": 0, "text_1": "friend: quick! tape the windows before we're affected by the gas! me, wearing a beret and holding a parasol: [in unplaceable accent] i've been told i'm pretty affected already.", "token_idx_1": 22, "text_start_1": 100, "text_end_1": 107, "date_1": "2019-09", "text_2": "Trading the Easter halo for the light halo,teddy Z,parasol,cozy set, and shadow boots,corset,sleeves and crown. SAY ABC! 
(Lmao)", "token_idx_2": 12, "text_start_2": 51, "text_end_2": 58, "date_2": "2020-09", "text_1_tokenized": ["friend", ":", "quick", "!", "tape", "the", "windows", "before", "we're", "affected", "by", "the", "gas", "!", "me", ",", "wearing", "a", "beret", "and", "holding", "a", "parasol", ":", "[", "in", "unplaceable", "accent", "]", "i've", "been", "told", "i'm", "pretty", "affected", "already", "."], "text_2_tokenized": ["Trading", "the", "Easter", "halo", "for", "the", "light", "halo", ",", "teddy", "Z", ",", "parasol", ",", "cozy", "set", ",", "and", "shadow", "boots", ",", "corset", ",", "sleeves", "and", "crown", ".", "SAY", "ABC", "!", "(", "Lmao", ")"]} -{"id": "0445-parasol", "word": "parasol", "label_binary": 0, "text_1": "The only tan I got all summer was from me burning my hand at anime north cause I spent the entire time as Hua Cheng making sure Chan, my Xie Lian, was fully under my parasol to protect her from the sun. If that wasn't my crowning in character moment I don't know what was", "token_idx_1": 37, "text_start_1": 166, "text_end_1": 173, "date_1": "2019-09", "text_2": "Trading /selling \u2728glimmering halo\u2728 IA: teddy z , og parasol, old Val halo, 300k, and cat ears from Halloween hunt!", "token_idx_2": 13, "text_start_2": 52, "text_end_2": 59, "date_2": "2020-09", "text_1_tokenized": ["The", "only", "tan", "I", "got", "all", "summer", "was", "from", "me", "burning", "my", "hand", "at", "anime", "north", "cause", "I", "spent", "the", "entire", "time", "as", "Hua", "Cheng", "making", "sure", "Chan", ",", "my", "Xie", "Lian", ",", "was", "fully", "under", "my", "parasol", "to", "protect", "her", "from", "the", "sun", ".", "If", "that", "wasn't", "my", "crowning", "in", "character", "moment", "I", "don't", "know", "what", "was"], "text_2_tokenized": ["Trading", "/", "selling", "\u2728", "glimmering", "halo", "\u2728", "IA", ":", "teddy", "z", ",", "og", "parasol", ",", "old", "Val", "halo", ",", "300k", ",", "and", "cat", "ears", "from", "Halloween", "hunt", "!"]} -{"id": "0446-parasol", "word": "parasol", "label_binary": 0, "text_1": "I wasn't planning on dying today but death by parasol would be a funny way to go", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 53, "date_1": "2019-09", "text_2": "just gave my bestfriend a rtd for a parasol FEELING GOOD", "token_idx_2": 8, "text_start_2": 36, "text_end_2": 43, "date_2": "2020-09", "text_1_tokenized": ["I", "wasn't", "planning", "on", "dying", "today", "but", "death", "by", "parasol", "would", "be", "a", "funny", "way", "to", "go"], "text_2_tokenized": ["just", "gave", "my", "bestfriend", "a", "rtd", "for", "a", "parasol", "FEELING", "GOOD"]} -{"id": "0447-parasol", "word": "parasol", "label_binary": 0, "text_1": "A499.7. Goddess of the parasol. Buddhist myth: Malalasekera 421.", "token_idx_1": 7, "text_start_1": 23, "text_end_1": 30, "date_1": "2019-09", "text_2": "Neon uni,dragon, and starfish are now traded for a parasol!!!! 
@robloximbored Cross trade, success \u2705", "token_idx_2": 12, "text_start_2": 51, "text_end_2": 58, "date_2": "2020-09", "text_1_tokenized": ["A499", ".", "7", ".", "Goddess", "of", "the", "parasol", ".", "Buddhist", "myth", ":", "Malalasekera", "421", "."], "text_2_tokenized": ["Neon", "uni", ",", "dragon", ",", "and", "starfish", "are", "now", "traded", "for", "a", "parasol", "!", "!", "!", "@robloximbored", "Cross", "trade", ",", "success", "\u2705"]} -{"id": "0448-parasol", "word": "parasol", "label_binary": 0, "text_1": "Does anyone have a little girl to whom I can gift a small parasol?", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 65, "date_1": "2019-09", "text_2": "Which trade should hurry quick >-< trade 1 me hh19 them : light and corrupt and og parasol and cmb ring and goth sleeves and rose boutineer Trade 2 me hh19 them autumn spring and new mermaid and 150k #royalehightrades #royalehightrade #royalehigh", "token_idx_2": 19, "text_start_2": 89, "text_end_2": 96, "date_2": "2020-09", "text_1_tokenized": ["Does", "anyone", "have", "a", "little", "girl", "to", "whom", "I", "can", "gift", "a", "small", "parasol", "?"], "text_2_tokenized": ["Which", "trade", "should", "hurry", "quick", ">", "-", "<", "trade", "1", "me", "hh19", "them", ":", "light", "and", "corrupt", "and", "og", "parasol", "and", "cmb", "ring", "and", "goth", "sleeves", "and", "rose", "boutineer", "Trade", "2", "me", "hh19", "them", "autumn", "spring", "and", "new", "mermaid", "and", "150k", "#royalehightrades", "#royalehightrade", "#royalehigh"]} -{"id": "0449-parasol", "word": "parasol", "label_binary": 0, "text_1": "I thought I would need my both hands to carry the shopping bags on my way back, so I picked my straw hat instead of parasol, but it was windy out and I had to use a hand to hold the hat ^^;", "token_idx_1": 26, "text_start_1": 116, "text_end_1": 123, "date_1": "2019-09", "text_2": "Trading: corrupt halo, mood changing necklace, dv set. ia's for corrupt: old Val plus any other halo/old Val parasol, goth sleeves. ia for mcn: 200k/ parasol? ia for dv set: 300k diamonds/ parasol?? those are just ia's u can offer! #royalehightrades #royalehigh #rhtrades", "token_idx_2": 25, "text_start_2": 109, "text_end_2": 116, "date_2": "2020-09", "text_1_tokenized": ["I", "thought", "I", "would", "need", "my", "both", "hands", "to", "carry", "the", "shopping", "bags", "on", "my", "way", "back", ",", "so", "I", "picked", "my", "straw", "hat", "instead", "of", "parasol", ",", "but", "it", "was", "windy", "out", "and", "I", "had", "to", "use", "a", "hand", "to", "hold", "the", "hat", "^", "^", ";"], "text_2_tokenized": ["Trading", ":", "corrupt", "halo", ",", "mood", "changing", "necklace", ",", "dv", "set", ".", "ia's", "for", "corrupt", ":", "old", "Val", "plus", "any", "other", "halo", "/", "old", "Val", "parasol", ",", "goth", "sleeves", ".", "ia", "for", "mcn", ":", "200k", "/", "parasol", "?", "ia", "for", "dv", "set", ":", "300k", "diamonds", "/", "parasol", "?", "?", "those", "are", "just", "ia's", "u", "can", "offer", "!", "#royalehightrades", "#royalehigh", "#rhtrades"]} -{"id": "0450-parasol", "word": "parasol", "label_binary": 0, "text_1": "I'm turning 45 at the end of the month, so please don't send a pink parasol. 
I want the new Day of the Dead Barbie doll, please.", "token_idx_1": 16, "text_start_1": 68, "text_end_1": 75, "date_1": "2019-09", "text_2": "First to comment \u201c\ud83c\udf44\u201d gets 2 mush lamps, 1 mush low stool, 1 mush parasol & 30 nmts", "token_idx_2": 18, "text_start_2": 65, "text_end_2": 72, "date_2": "2020-09", "text_1_tokenized": ["I'm", "turning", "45", "at", "the", "end", "of", "the", "month", ",", "so", "please", "don't", "send", "a", "pink", "parasol", ".", "I", "want", "the", "new", "Day", "of", "the", "Dead", "Barbie", "doll", ",", "please", "."], "text_2_tokenized": ["First", "to", "comment", "\u201c", "\ud83c\udf44", "\u201d", "gets", "2", "mush", "lamps", ",", "1", "mush", "low", "stool", ",", "1", "mush", "parasol", "&", "30", "nmts"]} -{"id": "0451-parasol", "word": "parasol", "label_binary": 0, "text_1": "If you've used or drawn a parasol you're either incredibly aesthetic or queer as heck and I'm down to talk", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-09", "text_2": "Trading 100k diamonds, mermaid royalty, fan, cb sleeves, etc for a parasol", "token_idx_2": 15, "text_start_2": 67, "text_end_2": 74, "date_2": "2020-09", "text_1_tokenized": ["If", "you've", "used", "or", "drawn", "a", "parasol", "you're", "either", "incredibly", "aesthetic", "or", "queer", "as", "heck", "and", "I'm", "down", "to", "talk"], "text_2_tokenized": ["Trading", "100k", "diamonds", ",", "mermaid", "royalty", ",", "fan", ",", "cb", "sleeves", ",", "etc", "for", "a", "parasol"]} -{"id": "0452-parasol", "word": "parasol", "label_binary": 0, "text_1": "So annoyed \ud83d\ude21 @Hermesparcels you delivered a parcel to me and left it in my garden as I was out then another delivery driver from #Hermes half an hour later has come to pick up a parcel for return which was a parasol stand and has gone in my garden and took my little parcel back", "token_idx_1": 42, "text_start_1": 208, "text_end_1": 215, "date_1": "2019-09", "text_2": "Any offers? -starfrost set minus scepter -108k -shadow sleeves LF: halo, Teddy z, nocturnal kitty ears, parasol, cozy set, goth sleeves, shadow empress items, darling Valentina items, scepter, idk something in demand #royalehightradings #royalehightrades #royalehightrading", "token_idx_2": 24, "text_start_2": 104, "text_end_2": 111, "date_2": "2020-09", "text_1_tokenized": ["So", "annoyed", "\ud83d\ude21", "@Hermesparcels", "you", "delivered", "a", "parcel", "to", "me", "and", "left", "it", "in", "my", "garden", "as", "I", "was", "out", "then", "another", "delivery", "driver", "from", "#Hermes", "half", "an", "hour", "later", "has", "come", "to", "pick", "up", "a", "parcel", "for", "return", "which", "was", "a", "parasol", "stand", "and", "has", "gone", "in", "my", "garden", "and", "took", "my", "little", "parcel", "back"], "text_2_tokenized": ["Any", "offers", "?", "-", "starfrost", "set", "minus", "scepter", "-", "108k", "-", "shadow", "sleeves", "LF", ":", "halo", ",", "Teddy", "z", ",", "nocturnal", "kitty", "ears", ",", "parasol", ",", "cozy", "set", ",", "goth", "sleeves", ",", "shadow", "empress", "items", ",", "darling", "Valentina", "items", ",", "scepter", ",", "idk", "something", "in", "demand", "#royalehightradings", "#royalehightrades", "#royalehightrading"]} -{"id": "0453-parasol", "word": "parasol", "label_binary": 0, "text_1": "Whatever you think of Lacey Evans, you gotta love that parasol. 
#RAW", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-09", "text_2": "trading dv set LF: new parasol #Royalehightrades #royalehightrading #royalehigh", "token_idx_2": 6, "text_start_2": 23, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["Whatever", "you", "think", "of", "Lacey", "Evans", ",", "you", "gotta", "love", "that", "parasol", ".", "#RAW"], "text_2_tokenized": ["trading", "dv", "set", "LF", ":", "new", "parasol", "#Royalehightrades", "#royalehightrading", "#royalehigh"]} -{"id": "0454-parasol", "word": "parasol", "label_binary": 0, "text_1": "it's parasol weather but i am without \ud83d\ude2a", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 12, "date_1": "2019-09", "text_2": "What are some of you guys farming tips (without multipliers) in the old divinia I saved up to 87k without any multipliers and then I bought a parasol and then never was able to save till that much again.I'm so desperate for diamonds #royalehigh #royalehighdiamonds #royalehighfarm", "token_idx_2": 29, "text_start_2": 142, "text_end_2": 149, "date_2": "2020-09", "text_1_tokenized": ["it's", "parasol", "weather", "but", "i", "am", "without", "\ud83d\ude2a"], "text_2_tokenized": ["What", "are", "some", "of", "you", "guys", "farming", "tips", "(", "without", "multipliers", ")", "in", "the", "old", "divinia", "I", "saved", "up", "to", "87k", "without", "any", "multipliers", "and", "then", "I", "bought", "a", "parasol", "and", "then", "never", "was", "able", "to", "save", "till", "that", "much", "again", ".", "I'm", "so", "desperate", "for", "diamonds", "#royalehigh", "#royalehighdiamonds", "#royalehighfarm"]} -{"id": "0455-parasol", "word": "parasol", "label_binary": 0, "text_1": "Someone buy me a lace parasol.", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 29, "date_1": "2019-09", "text_2": "My friend @patrickstareee is trading his parasol for robux! Looking for 800-1k robux for the 2020 parasol. Dm him your offers or put your offers in the comments!! IA: 1k", "token_idx_2": 6, "text_start_2": 41, "text_end_2": 48, "date_2": "2020-09", "text_1_tokenized": ["Someone", "buy", "me", "a", "lace", "parasol", "."], "text_2_tokenized": ["My", "friend", "@patrickstareee", "is", "trading", "his", "parasol", "for", "robux", "!", "Looking", "for", "800-1", "k", "robux", "for", "the", "2020", "parasol", ".", "Dm", "him", "your", "offers", "or", "put", "your", "offers", "in", "the", "comments", "!", "!", "IA", ":", "1k"]} -{"id": "0456-parasol", "word": "parasol", "label_binary": 0, "text_1": "Hey #StevenUniverseMovie i fucked up and lost a link to the post where someone did a cute ass animation of pearl in the movie holding a parasol making faces as greg was like \u201c???\u201d and i miss it does anyone have a link #StevenUniverseTheMovieSpoilers #stevenuniversefanart", "token_idx_1": 26, "text_start_1": 136, "text_end_1": 143, "date_1": "2019-09", "text_2": "i'm literally so done with rh. they took my halos + my parasol away from me for NOTHING. (no i did not get hacked!) please tag anyone who can help i have proof of ownership of EVERYTHING! and i have dmed everyone! 
#royalehightrades @launcelot92", "token_idx_2": 13, "text_start_2": 55, "text_end_2": 62, "date_2": "2020-09", "text_1_tokenized": ["Hey", "#StevenUniverseMovie", "i", "fucked", "up", "and", "lost", "a", "link", "to", "the", "post", "where", "someone", "did", "a", "cute", "ass", "animation", "of", "pearl", "in", "the", "movie", "holding", "a", "parasol", "making", "faces", "as", "greg", "was", "like", "\u201c", "?", "?", "?", "\u201d", "and", "i", "miss", "it", "does", "anyone", "have", "a", "link", "#StevenUniverseTheMovieSpoilers", "#stevenuniversefanart"], "text_2_tokenized": ["i'm", "literally", "so", "done", "with", "rh", ".", "they", "took", "my", "halos", "+", "my", "parasol", "away", "from", "me", "for", "NOTHING", ".", "(", "no", "i", "did", "not", "get", "hacked", "!", ")", "please", "tag", "anyone", "who", "can", "help", "i", "have", "proof", "of", "ownership", "of", "EVERYTHING", "!", "and", "i", "have", "dmed", "everyone", "!", "#royalehightrades", "@launcelot92"]} -{"id": "0457-parasol", "word": "parasol", "label_binary": 0, "text_1": "I lost my bandana and \u00a279 bottle of water... forgot my parasol but I probably won't be getting it back till Friday I hope", "token_idx_1": 13, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-09", "text_2": "Me: teddyzilla, 2020 parasol, dv corset, goth sleeves, old val and skates Them: glimmering light halo", "token_idx_2": 5, "text_start_2": 21, "text_end_2": 28, "date_2": "2020-09", "text_1_tokenized": ["I", "lost", "my", "bandana", "and", "\u00a2", "79", "bottle", "of", "water", "...", "forgot", "my", "parasol", "but", "I", "probably", "won't", "be", "getting", "it", "back", "till", "Friday", "I", "hope"], "text_2_tokenized": ["Me", ":", "teddyzilla", ",", "2020", "parasol", ",", "dv", "corset", ",", "goth", "sleeves", ",", "old", "val", "and", "skates", "Them", ":", "glimmering", "light", "halo"]} -{"id": "0458-parasol", "word": "parasol", "label_binary": 1, "text_1": "i'll talk to anybody who owns a parasol for any amount of time", "token_idx_1": 7, "text_start_1": 32, "text_end_1": 39, "date_1": "2019-09", "text_2": "In a masked-up table service drinking tavern, like a parasol trying to hold back the tide.", "token_idx_2": 10, "text_start_2": 53, "text_end_2": 60, "date_2": "2020-09", "text_1_tokenized": ["i'll", "talk", "to", "anybody", "who", "owns", "a", "parasol", "for", "any", "amount", "of", "time"], "text_2_tokenized": ["In", "a", "masked-up", "table", "service", "drinking", "tavern", ",", "like", "a", "parasol", "trying", "to", "hold", "back", "the", "tide", "."]} -{"id": "0459-parasol", "word": "parasol", "label_binary": 0, "text_1": "Wedding planning under the parasol with the kids, in the pouring rain. 
How entirely British and perfect \u2764", "token_idx_1": 4, "text_start_1": 27, "text_end_1": 34, "date_1": "2019-09", "text_2": "Trading a parasol for a adoptable like an art and artist character that they're giving away or I could give them diamonds #Digitalart #adoptables #royalehigh", "token_idx_2": 2, "text_start_2": 10, "text_end_2": 17, "date_2": "2020-09", "text_1_tokenized": ["Wedding", "planning", "under", "the", "parasol", "with", "the", "kids", ",", "in", "the", "pouring", "rain", ".", "How", "entirely", "British", "and", "perfect", "\u2764"], "text_2_tokenized": ["Trading", "a", "parasol", "for", "a", "adoptable", "like", "an", "art", "and", "artist", "character", "that", "they're", "giving", "away", "or", "I", "could", "give", "them", "diamonds", "#Digitalart", "#adoptables", "#royalehigh"]} -{"id": "0460-parasol", "word": "parasol", "label_binary": 0, "text_1": "Some selfish prick on the sunbed next to me has just put up a parasol that also puts me in the shade because, I heard her say to her friends, \u201cI need to stay white\u201d. Why the actual fuck would you come somewhere like this then??? \ud83d\ude44\u2600\ufe0f\u26f1\ud83e\udd37\u200d\u2640\ufe0f", "token_idx_1": 14, "text_start_1": 62, "text_end_1": 69, "date_1": "2019-09", "text_2": "trading parasol and diamonds for og parasol! #royalehightradings", "token_idx_2": 1, "text_start_2": 8, "text_end_2": 15, "date_2": "2020-09", "text_1_tokenized": ["Some", "selfish", "prick", "on", "the", "sunbed", "next", "to", "me", "has", "just", "put", "up", "a", "parasol", "that", "also", "puts", "me", "in", "the", "shade", "because", ",", "I", "heard", "her", "say", "to", "her", "friends", ",", "\u201c", "I", "need", "to", "stay", "white", "\u201d", ".", "Why", "the", "actual", "fuck", "would", "you", "come", "somewhere", "like", "this", "then", "?", "?", "?", "\ud83d\ude44", "\u2600", "\ufe0f", "\u26f1", "\ud83e\udd37\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["trading", "parasol", "and", "diamonds", "for", "og", "parasol", "!", "#royalehightradings"]} -{"id": "0461-parasol", "word": "parasol", "label_binary": 0, "text_1": "Is de white parasol!!!!! #MudVolcanoEruptTonight", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 19, "date_1": "2019-09", "text_2": "Me:neon bandicoot neon wolf neon otter and normal bee her: 2020 parasol #royalehightrades", "token_idx_2": 14, "text_start_2": 64, "text_end_2": 71, "date_2": "2020-09", "text_1_tokenized": ["Is", "de", "white", "parasol", "!", "!", "!", "#MudVolcanoEruptTonight"], "text_2_tokenized": ["Me", ":", "neon", "bandicoot", "neon", "wolf", "neon", "otter", "and", "normal", "bee", "her", ":", "2020", "parasol", "#royalehightrades"]} -{"id": "0462-parasol", "word": "parasol", "label_binary": 0, "text_1": "\"Woman with a parasol\" Oh my!!@!!!! \ud83d\ude06\ud83d\ude06\ud83d\ude06\ud83d\ude06", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 21, "date_1": "2019-09", "text_2": "Hey everyone will be ending the parasol 2020 giveaway next week! Best of luck to everyone! 
\u2728\ud83d\udc95", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["\"", "Woman", "with", "a", "parasol", "\"", "Oh", "my", "!", "!", "@", "!", "!", "!", "\ud83d\ude06", "\ud83d\ude06", "\ud83d\ude06"], "text_2_tokenized": ["Hey", "everyone", "will", "be", "ending", "the", "parasol", "2020", "giveaway", "next", "week", "!", "Best", "of", "luck", "to", "everyone", "!", "\u2728", "\ud83d\udc95"]} -{"id": "0463-parasol", "word": "parasol", "label_binary": 0, "text_1": "arasol is cute n all but it IS one letter away from parasol which just means umbrella in polish and i cant let that go", "token_idx_1": 12, "text_start_1": 52, "text_end_1": 59, "date_1": "2019-09", "text_2": "Trading 800 robux for a parasol! (preferably og, but any works!)", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 31, "date_2": "2020-09", "text_1_tokenized": ["arasol", "is", "cute", "n", "all", "but", "it", "IS", "one", "letter", "away", "from", "parasol", "which", "just", "means", "umbrella", "in", "polish", "and", "i", "cant", "let", "that", "go"], "text_2_tokenized": ["Trading", "800", "robux", "for", "a", "parasol", "!", "(", "preferably", "og", ",", "but", "any", "works", "!", ")"]} -{"id": "0464-parasol", "word": "parasol", "label_binary": 0, "text_1": "Someone stop me before I buy a black and green Lolita dress with a matching parasol", "token_idx_1": 15, "text_start_1": 76, "text_end_1": 83, "date_1": "2019-09", "text_2": "Me: Sf set, val set, teddyz, old val, parasol, thigh high boots, goth sleeves and rose corsage Them: Old mermaid halo", "token_idx_2": 13, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-09", "text_1_tokenized": ["Someone", "stop", "me", "before", "I", "buy", "a", "black", "and", "green", "Lolita", "dress", "with", "a", "matching", "parasol"], "text_2_tokenized": ["Me", ":", "Sf", "set", ",", "val", "set", ",", "teddyz", ",", "old", "val", ",", "parasol", ",", "thigh", "high", "boots", ",", "goth", "sleeves", "and", "rose", "corsage", "Them", ":", "Old", "mermaid", "halo"]} -{"id": "0465-parasol", "word": "parasol", "label_binary": 1, "text_1": "Went to pool to sunbathe put parasol up, very hot 30c. Woman next to me moaned I'd taken her sun away, she was still in sun apart from her legs, later she and her hubby were smoking yuk cig smoke in yer face is much worse than a bit of shade I reckon", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 36, "date_1": "2019-09", "text_2": "My two photo shoot goals before we move to Seattle are: 1) Shoot Kali with @kaseydidwhat at the FW Japanese gardens (with my authentic Japanese parasol that I rediscovered during our recent move!) 
2) Shoot Grimm Reaper with @ewide and NOT FORGET MY NECKLACE THIS TIME.", "token_idx_2": 28, "text_start_2": 144, "text_end_2": 151, "date_2": "2020-09", "text_1_tokenized": ["Went", "to", "pool", "to", "sunbathe", "put", "parasol", "up", ",", "very", "hot", "30c", ".", "Woman", "next", "to", "me", "moaned", "I'd", "taken", "her", "sun", "away", ",", "she", "was", "still", "in", "sun", "apart", "from", "her", "legs", ",", "later", "she", "and", "her", "hubby", "were", "smoking", "yuk", "cig", "smoke", "in", "yer", "face", "is", "much", "worse", "than", "a", "bit", "of", "shade", "I", "reckon"], "text_2_tokenized": ["My", "two", "photo", "shoot", "goals", "before", "we", "move", "to", "Seattle", "are", ":", "1", ")", "Shoot", "Kali", "with", "@kaseydidwhat", "at", "the", "FW", "Japanese", "gardens", "(", "with", "my", "authentic", "Japanese", "parasol", "that", "I", "rediscovered", "during", "our", "recent", "move", "!", ")", "2", ")", "Shoot", "Grimm", "Reaper", "with", "@ewide", "and", "NOT", "FORGET", "MY", "NECKLACE", "THIS", "TIME", "."]} -{"id": "0466-parasol", "word": "parasol", "label_binary": 0, "text_1": "The women want the parasol down. The waitress said it won't go down. They haven't stopped discussing said parasol and the fact it won't go down. They are sure there must be a way to do it. How come people can't accept no as an answer ...??", "token_idx_1": 4, "text_start_1": 19, "text_end_1": 26, "date_1": "2019-09", "text_2": "I am trading 2020 parasol , 125 k and bunny slippers for any halo. Will add the graveyard lurker collar and kitchen accident if needed . Ik the values so please don't take advantage. Dm me if its a deal ^^", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 25, "date_2": "2020-09", "text_1_tokenized": ["The", "women", "want", "the", "parasol", "down", ".", "The", "waitress", "said", "it", "won't", "go", "down", ".", "They", "haven't", "stopped", "discussing", "said", "parasol", "and", "the", "fact", "it", "won't", "go", "down", ".", "They", "are", "sure", "there", "must", "be", "a", "way", "to", "do", "it", ".", "How", "come", "people", "can't", "accept", "no", "as", "an", "answer", "...", "?", "?"], "text_2_tokenized": ["I", "am", "trading", "2020", "parasol", ",", "125", "k", "and", "bunny", "slippers", "for", "any", "halo", ".", "Will", "add", "the", "graveyard", "lurker", "collar", "and", "kitchen", "accident", "if", "needed", ".", "Ik", "the", "values", "so", "please", "don't", "take", "advantage", ".", "Dm", "me", "if", "its", "a", "deal", "^", "^"]} -{"id": "0467-parasol", "word": "parasol", "label_binary": 0, "text_1": "FROM LOVE. 
Softly I felt good for the body, in my own boot, Let him take a parasol where he lived, And slow the olive place of spilt on five, And life and the dead mother of ancestor: Since the priest wasted Paton, for I was a kind, That in the counter-straining throng alive,", "token_idx_1": 20, "text_start_1": 75, "text_end_1": 82, "date_1": "2019-09", "text_2": "Me: corrupt, LTBS, OG parasol Them: old mermaid", "token_idx_2": 7, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["FROM", "LOVE", ".", "Softly", "I", "felt", "good", "for", "the", "body", ",", "in", "my", "own", "boot", ",", "Let", "him", "take", "a", "parasol", "where", "he", "lived", ",", "And", "slow", "the", "olive", "place", "of", "spilt", "on", "five", ",", "And", "life", "and", "the", "dead", "mother", "of", "ancestor", ":", "Since", "the", "priest", "wasted", "Paton", ",", "for", "I", "was", "a", "kind", ",", "That", "in", "the", "counter-straining", "throng", "alive", ","], "text_2_tokenized": ["Me", ":", "corrupt", ",", "LTBS", ",", "OG", "parasol", "Them", ":", "old", "mermaid"]} -{"id": "0468-parasol", "word": "parasol", "label_binary": 0, "text_1": "I hate it when you're on patrol Even if you think that you don't Scare me when I've a parasol I'll be hysterical My mummy likes to walk along with me But she won't walk along If she sees you behind her She'll shit herself that I know", "token_idx_1": 19, "text_start_1": 86, "text_end_1": 93, "date_1": "2019-09", "text_2": "Hi! i am trading my new mermaid halo. Looking for: train bow skirt, parasol, sf set, old mermaid, new val, new lucky. I'm willing to add by the offer: Train skirt AND parasol, new val. #roblox #royalehigh #rh", "token_idx_2": 17, "text_start_2": 68, "text_end_2": 75, "date_2": "2020-09", "text_1_tokenized": ["I", "hate", "it", "when", "you're", "on", "patrol", "Even", "if", "you", "think", "that", "you", "don't", "Scare", "me", "when", "I've", "a", "parasol", "I'll", "be", "hysterical", "My", "mummy", "likes", "to", "walk", "along", "with", "me", "But", "she", "won't", "walk", "along", "If", "she", "sees", "you", "behind", "her", "She'll", "shit", "herself", "that", "I", "know"], "text_2_tokenized": ["Hi", "!", "i", "am", "trading", "my", "new", "mermaid", "halo", ".", "Looking", "for", ":", "train", "bow", "skirt", ",", "parasol", ",", "sf", "set", ",", "old", "mermaid", ",", "new", "val", ",", "new", "lucky", ".", "I'm", "willing", "to", "add", "by", "the", "offer", ":", "Train", "skirt", "AND", "parasol", ",", "new", "val", ".", "#roblox", "#royalehigh", "#rh"]} -{"id": "0469-parasol", "word": "parasol", "label_binary": 0, "text_1": "I am really excited about my new parasol \ud83d\ude0d\ud83d\udc95", "token_idx_1": 7, "text_start_1": 33, "text_end_1": 40, "date_1": "2019-09", "text_2": "if anyone has parasol for 124 k please comment ur username #rhtrades #royalehightrades", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["I", "am", "really", "excited", "about", "my", "new", "parasol", "\ud83d\ude0d", "\ud83d\udc95"], "text_2_tokenized": ["if", "anyone", "has", "parasol", "for", "124", "k", "please", "comment", "ur", "username", "#rhtrades", "#royalehightrades"]} -{"id": "0470-parasol", "word": "parasol", "label_binary": 0, "text_1": "After all, I brought this parasol for you in the first place. 
I'm glad that it'll be useful to you...\u266a", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-09", "text_2": "Anyone trading parasol in royal high?", "token_idx_2": 2, "text_start_2": 15, "text_end_2": 22, "date_2": "2020-09", "text_1_tokenized": ["After", "all", ",", "I", "brought", "this", "parasol", "for", "you", "in", "the", "first", "place", ".", "I'm", "glad", "that", "it'll", "be", "useful", "to", "you", "...", "\u266a"], "text_2_tokenized": ["Anyone", "trading", "parasol", "in", "royal", "high", "?"]} -{"id": "0471-parasol", "word": "parasol", "label_binary": 0, "text_1": "some things to note about ten's teaser even tho i'm sure everyone has notice them 1. among a bunch of flowers, he leaned down to pick the only dead one 2. in the painting, there's the woman with the parasol (taemin) and a boy leaning down to pick up a flower (ten)", "token_idx_1": 43, "text_start_1": 199, "text_end_1": 206, "date_1": "2019-09", "text_2": "If you have a parasol 2020 or normal ft can i plzzz offer", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["some", "things", "to", "note", "about", "ten's", "teaser", "even", "tho", "i'm", "sure", "everyone", "has", "notice", "them", "1", ".", "among", "a", "bunch", "of", "flowers", ",", "he", "leaned", "down", "to", "pick", "the", "only", "dead", "one", "2", ".", "in", "the", "painting", ",", "there's", "the", "woman", "with", "the", "parasol", "(", "taemin", ")", "and", "a", "boy", "leaning", "down", "to", "pick", "up", "a", "flower", "(", "ten", ")"], "text_2_tokenized": ["If", "you", "have", "a", "parasol", "2020", "or", "normal", "ft", "can", "i", "plzzz", "offer"]} -{"id": "0472-parasol", "word": "parasol", "label_binary": 1, "text_1": "Anyone be wanna trade the parasol for something? I will trade dear dolly set, miss lady rose or the thigh highs. DM or tweet me if you want any of those and have a parasol!! \u2764\ufe0f\u2764\ufe0f", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-09", "text_2": "My friend is trading ltbs for any parasol! And u don't have to add xx Rts pls and ty", "token_idx_2": 7, "text_start_2": 34, "text_end_2": 41, "date_2": "2020-09", "text_1_tokenized": ["Anyone", "be", "wanna", "trade", "the", "parasol", "for", "something", "?", "I", "will", "trade", "dear", "dolly", "set", ",", "miss", "lady", "rose", "or", "the", "thigh", "highs", ".", "DM", "or", "tweet", "me", "if", "you", "want", "any", "of", "those", "and", "have", "a", "parasol", "!", "!", "\u2764", "\ufe0f", "\u2764", "\ufe0f"], "text_2_tokenized": ["My", "friend", "is", "trading", "ltbs", "for", "any", "parasol", "!", "And", "u", "don't", "have", "to", "add", "xx", "Rts", "pls", "and", "ty"]} -{"id": "0473-parasol", "word": "parasol", "label_binary": 0, "text_1": "carefree parasol waddle dee (his job is to carry a parasol and he gets paid for doing it)", "token_idx_1": 1, "text_start_1": 9, "text_end_1": 16, "date_1": "2019-09", "text_2": "tradin old val halo new parasol old val halo val set (2 cuffs ) 100k not taking diamonds. 
also adding dotd head flowers that is on the side and rose corsage", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 31, "date_2": "2020-09", "text_1_tokenized": ["carefree", "parasol", "waddle", "dee", "(", "his", "job", "is", "to", "carry", "a", "parasol", "and", "he", "gets", "paid", "for", "doing", "it", ")"], "text_2_tokenized": ["tradin", "old", "val", "halo", "new", "parasol", "old", "val", "halo", "val", "set", "(", "2", "cuffs", ")", "100k", "not", "taking", "diamonds", ".", "also", "adding", "dotd", "head", "flowers", "that", "is", "on", "the", "side", "and", "rose", "corsage"]} -{"id": "0474-parasol", "word": "parasol", "label_binary": 1, "text_1": "Good morning from sunny Hyogo! It's cool and comfortable. I cannot wait for autumn. But the weather forecast said it'll rain in this evening. It's so hard to choose which I should take a parasol and umbrella.", "token_idx_1": 38, "text_start_1": 187, "text_end_1": 194, "date_1": "2019-09", "text_2": "finally got a parasol...it's cute", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["Good", "morning", "from", "sunny", "Hyogo", "!", "It's", "cool", "and", "comfortable", ".", "I", "cannot", "wait", "for", "autumn", ".", "But", "the", "weather", "forecast", "said", "it'll", "rain", "in", "this", "evening", ".", "It's", "so", "hard", "to", "choose", "which", "I", "should", "take", "a", "parasol", "and", "umbrella", "."], "text_2_tokenized": ["finally", "got", "a", "parasol", "...", "it's", "cute"]} -{"id": "0475-parasol", "word": "parasol", "label_binary": 0, "text_1": "hanging implied / im going to cosplay dahlia hawthorne but instead of wearing a dress and carrying a parasol im going to be ghost dahlia. im gonna wear a prison uniform (similar to terry fawles since he set the standard for death row in aa) and put rope burn makeup on my neck", "token_idx_1": 18, "text_start_1": 101, "text_end_1": 108, "date_1": "2019-09", "text_2": "doing the halloween halo 2019/hh19 for-old mermaid, goth sleeves, parasol, large train bow, teddzilla, cmb ring, and corrupt halo^^ looking at offers similar to this!", "token_idx_2": 13, "text_start_2": 66, "text_end_2": 73, "date_2": "2020-09", "text_1_tokenized": ["hanging", "implied", "/", "im", "going", "to", "cosplay", "dahlia", "hawthorne", "but", "instead", "of", "wearing", "a", "dress", "and", "carrying", "a", "parasol", "im", "going", "to", "be", "ghost", "dahlia", ".", "im", "gonna", "wear", "a", "prison", "uniform", "(", "similar", "to", "terry", "fawles", "since", "he", "set", "the", "standard", "for", "death", "row", "in", "aa", ")", "and", "put", "rope", "burn", "makeup", "on", "my", "neck"], "text_2_tokenized": ["doing", "the", "halloween", "halo", "2019", "/", "hh19", "for-old", "mermaid", ",", "goth", "sleeves", ",", "parasol", ",", "large", "train", "bow", ",", "teddzilla", ",", "cmb", "ring", ",", "and", "corrupt", "halo", "^", "^", "looking", "at", "offers", "similar", "to", "this", "!"]} -{"id": "0476-parasol", "word": "parasol", "label_binary": 0, "text_1": "The plan is to swallow parasol and drink plenty water hoping this headache will subside. Mans want to kick kung fu today", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 30, "date_1": "2019-09", "text_2": "My friend is trading spring halo, her IA is a parasol and Val corset, she does see offers !NO TRASH ITEMS! 
#royalhigh #royalhighhalo #royalhightrading", "token_idx_2": 11, "text_start_2": 46, "text_end_2": 53, "date_2": "2020-09", "text_1_tokenized": ["The", "plan", "is", "to", "swallow", "parasol", "and", "drink", "plenty", "water", "hoping", "this", "headache", "will", "subside", ".", "Mans", "want", "to", "kick", "kung", "fu", "today"], "text_2_tokenized": ["My", "friend", "is", "trading", "spring", "halo", ",", "her", "IA", "is", "a", "parasol", "and", "Val", "corset", ",", "she", "does", "see", "offers", "!", "NO", "TRASH", "ITEMS", "!", "#royalhigh", "#royalhighhalo", "#royalhightrading"]}
-{"id": "0477-parasol", "word": "parasol", "label_binary": 0, "text_1": "With a parasol in hand of purple lace, the Koumori approaches @magirisque with a smile. \u201cSo you're the new student at the monastery. Professor Rashana said you wished to meet some of us?\u201d", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 14, "date_1": "2019-09", "text_2": "trading val set and bd set for parasol, or 185k thigh high boots, pastel high boots, heirloom corset, and bd set for parasol", "token_idx_2": 7, "text_start_2": 31, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["With", "a", "parasol", "in", "hand", "of", "purple", "lace", ",", "the", "Koumori", "approaches", "@magirisque", "with", "a", "smile", ".", "\u201c", "So", "you're", "the", "new", "student", "at", "the", "monastery", ".", "Professor", "Rashana", "said", "you", "wished", "to", "meet", "some", "of", "us", "?", "\u201d"], "text_2_tokenized": ["trading", "val", "set", "and", "bd", "set", "for", "parasol", ",", "or", "185k", "thigh", "high", "boots", ",", "pastel", "high", "boots", ",", "heirloom", "corset", ",", "and", "bd", "set", "for", "parasol"]}
-{"id": "0478-parasol", "word": "parasol", "label_binary": 0, "text_1": "''Who is yelling about stupid lame pork butts on the beach? Don't you know it's a great time to lay down and relax?'' The bear is smoking a cigarette underneath the parasol while also petting the sleeping Fenrir.", "token_idx_1": 37, "text_start_1": 165, "text_end_1": 172, "date_1": "2019-09", "text_2": "#RoyalehighParasol im offering IOOK for the parasol , comment your username if u will do it", "token_idx_2": 6, "text_start_2": 44, "text_end_2": 51, "date_2": "2020-09", "text_1_tokenized": ["'", "'", "Who", "is", "yelling", "about", "stupid", "lame", "pork", "butts", "on", "the", "beach", "?", "Don't", "you", "know", "it's", "a", "great", "time", "to", "lay", "down", "and", "relax", "?", "'", "'", "The", "bear", "is", "smoking", "a", "cigarette", "underneath", "the", "parasol", "while", "also", "petting", "the", "sleeping", "Fenrir", "."], "text_2_tokenized": ["#RoyalehighParasol", "im", "offering", "IOOK", "for", "the", "parasol", ",", "comment", "your", "username", "if", "u", "will", "do", "it"]}
\ No newline at end of file
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff7b7b23ee1c8b3b7a87b80d460b145beaed7a7c8f1d2722cd56b2072aa141e7
+size 1574854
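
Note on the change above: the three `+` lines are a standard Git LFS pointer file, not data. After this commit, a plain checkout of data/tempo_wic/train.jsonl contains only the pointer (spec version, SHA-256 object id, and payload size in bytes, here 1,574,854); in a clone with Git LFS installed, `git lfs pull` replaces the pointer with the actual JSONL content addressed by the oid.

For reference, each removed `-` line is one word-in-context record pairing two tweets from different periods. The sketch below is a minimal loader and sanity check, assuming only the schema visible in the removed lines (id, word, label_binary, text_1/text_2, token_idx_*, text_start_*/text_end_* as a half-open character span, date_*, and the *_tokenized lists); the function names are illustrative, not part of any official TempoWiC tooling. The raw line breaks shown inside some text fields above would be \n escapes in the actual JSONL file, so one-record-per-line parsing is assumed to hold.

    import json

    def load_tempo_wic(path):
        """Yield one record per non-empty line of a TempoWiC-style JSONL file."""
        with open(path, encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if line:
                    yield json.loads(line)

    def check_record(rec):
        """Verify the character span and token index both point at `word`.

        label_binary appears to be 1 when the target word is used in the
        same sense in both tweets and 0 when the senses differ (inferred
        from the examples above, following the usual WiC convention).
        """
        for i in ("1", "2"):
            # Half-open character span into the raw tweet text.
            span = rec[f"text_{i}"][rec[f"text_start_{i}"]:rec[f"text_end_{i}"]]
            # Position of the target word in the pre-tokenized tweet.
            token = rec[f"text_{i}_tokenized"][rec[f"token_idx_{i}"]]
            assert span.lower() == rec["word"].lower() == token.lower()

    # Example usage, once the real file has been fetched with `git lfs pull`:
    # for rec in load_tempo_wic("data/tempo_wic/train.jsonl"):
    #     check_record(rec)

The case-insensitive comparison matches the records shown here (e.g., "-Parasol" spans still point at a lowercase-matching window); records whose surface form differs further from `word` would need looser matching.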