diff --git "a/data/tempo_wic/validation.jsonl" "b/data/tempo_wic/validation.jsonl" --- "a/data/tempo_wic/validation.jsonl" +++ "b/data/tempo_wic/validation.jsonl" @@ -1,395 +1,3 @@ -{"id": "1671-impostor", "word": "impostor", "label_binary": 1, "text_1": "Had my coffee with a splash of impostor syndrome this am: Manifested as my 3rd grade teacher who watched as my little English language learning self struggled in front of the whole class spelling words like \"friend\" and \"neighbor\" on the blackboard. Those words? Really? So petty.", "token_idx_1": 7, "text_start_1": 31, "text_end_1": 39, "date_1": "2019-09", "text_2": "I have out of this world impostor reads but I never act on them", "token_idx_2": 6, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-09", "text_1_tokenized": ["Had", "my", "coffee", "with", "a", "splash", "of", "impostor", "syndrome", "this", "am", ":", "Manifested", "as", "my", "3rd", "grade", "teacher", "who", "watched", "as", "my", "little", "English", "language", "learning", "self", "struggled", "in", "front", "of", "the", "whole", "class", "spelling", "words", "like", "\"", "friend", "\"", "and", "\"", "neighbor", "\"", "on", "the", "blackboard", ".", "Those", "words", "?", "Really", "?", "So", "petty", "."], "text_2_tokenized": ["I", "have", "out", "of", "this", "world", "impostor", "reads", "but", "I", "never", "act", "on", "them"]} -{"id": "1672-impostor", "word": "impostor", "label_binary": 0, "text_1": "[ Rejected ] (1/6) #phdchat This week, on a day when I was particularly tired, my very first scientific article was rejected. Thanks to @GrumpyReviewer2. \"WHAT A SHAME....\" This week I cried a lot, I thought my impostor syndrome wasn't one and that I was really an impostor.", "token_idx_1": 47, "text_start_1": 211, "text_end_1": 219, "date_1": "2019-09", "text_2": "cries played among us with strangers i was the impostor and one of them believed in me bcs we both like nct \ud83d\ude2d simp", "token_idx_2": 9, "text_start_2": 47, "text_end_2": 55, "date_2": "2020-09", "text_1_tokenized": ["[", "Rejected", "]", "(", "1/6", ")", "#phdchat", "This", "week", ",", "on", "a", "day", "when", "I", "was", "particularly", "tired", ",", "my", "very", "first", "scientific", "article", "was", "rejected", ".", "Thanks", "to", "@GrumpyReviewer2", ".", "\"", "WHAT", "A", "SHAME", "...", "\"", "This", "week", "I", "cried", "a", "lot", ",", "I", "thought", "my", "impostor", "syndrome", "wasn't", "one", "and", "that", "I", "was", "really", "an", "impostor", "."], "text_2_tokenized": ["cries", "played", "among", "us", "with", "strangers", "i", "was", "the", "impostor", "and", "one", "of", "them", "believed", "in", "me", "bcs", "we", "both", "like", "nct", "\ud83d\ude2d", "simp"]} -{"id": "1673-impostor", "word": "impostor", "label_binary": 0, "text_1": "TIL that the \"proper\" way to spell imposter is impostor. Mind = \ud83e\udd2f", "token_idx_1": 11, "text_start_1": 47, "text_end_1": 55, "date_1": "2019-09", "text_2": "1st game as an impostor ever was a fat W\ud83d\ude0e", "token_idx_2": 4, "text_start_2": 15, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["TIL", "that", "the", "\"", "proper", "\"", "way", "to", "spell", "imposter", "is", "impostor", ".", "Mind", "=", "\ud83e\udd2f"], "text_2_tokenized": ["1st", "game", "as", "an", "impostor", "ever", "was", "a", "fat", "W", "\ud83d\ude0e"]} -{"id": "1674-impostor", "word": "impostor", "label_binary": 0, "text_1": "my impostor syndrome around the music building is getting out of control!!! 
im a musician i'm allowed to hang out in the musician's lounge if i want to !!!", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 11, "date_1": "2019-09", "text_2": "it pisses me off when a player rats out his fellow impostor", "token_idx_2": 11, "text_start_2": 51, "text_end_2": 59, "date_2": "2020-09", "text_1_tokenized": ["my", "impostor", "syndrome", "around", "the", "music", "building", "is", "getting", "out", "of", "control", "!", "!", "!", "im", "a", "musician", "i'm", "allowed", "to", "hang", "out", "in", "the", "musician's", "lounge", "if", "i", "want", "to", "!", "!", "!"], "text_2_tokenized": ["it", "pisses", "me", "off", "when", "a", "player", "rats", "out", "his", "fellow", "impostor"]} -{"id": "1675-impostor", "word": "impostor", "label_binary": 1, "text_1": "I'm done with art Hogwarts for this year btw, it was very good and fun, it gets more fun if you're not trying to land a job :p however I did pretend that I don't have impostor syndrome and showed off my prototype and got a bunch of good feedback \u2764\ufe0f", "token_idx_1": 38, "text_start_1": 167, "text_end_1": 175, "date_1": "2019-09", "text_2": "Sayin someone showed my twitter account to my rents...theres fr an impostor among us", "token_idx_2": 13, "text_start_2": 67, "text_end_2": 75, "date_2": "2020-09", "text_1_tokenized": ["I'm", "done", "with", "art", "Hogwarts", "for", "this", "year", "btw", ",", "it", "was", "very", "good", "and", "fun", ",", "it", "gets", "more", "fun", "if", "you're", "not", "trying", "to", "land", "a", "job", ":p", "however", "I", "did", "pretend", "that", "I", "don't", "have", "impostor", "syndrome", "and", "showed", "off", "my", "prototype", "and", "got", "a", "bunch", "of", "good", "feedback", "\u2764", "\ufe0f"], "text_2_tokenized": ["Sayin", "someone", "showed", "my", "twitter", "account", "to", "my", "rents", "...", "theres", "fr", "an", "impostor", "among", "us"]} -{"id": "1676-impostor", "word": "impostor", "label_binary": 0, "text_1": "I embrace impostor syndrome. Heck yes I will fool all of you into thinking I was good at art with my vicious use of reference, frivolous photobashing, generous 3D bases and ruthless studying of masters. I will trick all of you, myself included.", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 18, "date_1": "2019-09", "text_2": "Just had my first games of Among Us. First three rounds were as the impostor and I somehow won each of them? 
@krak_on_gaming what happened?", "token_idx_2": 15, "text_start_2": 68, "text_end_2": 76, "date_2": "2020-09", "text_1_tokenized": ["I", "embrace", "impostor", "syndrome", ".", "Heck", "yes", "I", "will", "fool", "all", "of", "you", "into", "thinking", "I", "was", "good", "at", "art", "with", "my", "vicious", "use", "of", "reference", ",", "frivolous", "photobashing", ",", "generous", "3D", "bases", "and", "ruthless", "studying", "of", "masters", ".", "I", "will", "trick", "all", "of", "you", ",", "myself", "included", "."], "text_2_tokenized": ["Just", "had", "my", "first", "games", "of", "Among", "Us", ".", "First", "three", "rounds", "were", "as", "the", "impostor", "and", "I", "somehow", "won", "each", "of", "them", "?", "@krak_on_gaming", "what", "happened", "?"]} -{"id": "1677-impostor", "word": "impostor", "label_binary": 1, "text_1": "a impostor in your shell but this butt puerile was my reversal oh let me be your hut", "token_idx_1": 1, "text_start_1": 2, "text_end_1": 10, "date_1": "2019-09", "text_2": "Why play among us when you can feel like the impostor everyday in medical school.", "token_idx_2": 10, "text_start_2": 45, "text_end_2": 53, "date_2": "2020-09", "text_1_tokenized": ["a", "impostor", "in", "your", "shell", "but", "this", "butt", "puerile", "was", "my", "reversal", "oh", "let", "me", "be", "your", "hut"], "text_2_tokenized": ["Why", "play", "among", "us", "when", "you", "can", "feel", "like", "the", "impostor", "everyday", "in", "medical", "school", "."]} -{"id": "1678-impostor", "word": "impostor", "label_binary": 0, "text_1": "I can't believe impostor syndrome just tried to equate the tattoo I'm about to get that symbolizes something I studied for 3.5 years + graduated with a 1.7 in to trying a sport once and getting a tattoo about it, just because I'm sort of mediocre at the actual subject", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 24, "date_1": "2019-09", "text_2": "damn I was in a game of among us and I was SURE red did it like I saw red kill pink but hen red wasn't the impostor??? wtf my mother did suspect I am mildly colorblind \ud83d\ude2a", "token_idx_2": 27, "text_start_2": 107, "text_end_2": 115, "date_2": "2020-09", "text_1_tokenized": ["I", "can't", "believe", "impostor", "syndrome", "just", "tried", "to", "equate", "the", "tattoo", "I'm", "about", "to", "get", "that", "symbolizes", "something", "I", "studied", "for", "3.5", "years", "+", "graduated", "with", "a", "1.7", "in", "to", "trying", "a", "sport", "once", "and", "getting", "a", "tattoo", "about", "it", ",", "just", "because", "I'm", "sort", "of", "mediocre", "at", "the", "actual", "subject"], "text_2_tokenized": ["damn", "I", "was", "in", "a", "game", "of", "among", "us", "and", "I", "was", "SURE", "red", "did", "it", "like", "I", "saw", "red", "kill", "pink", "but", "hen", "red", "wasn't", "the", "impostor", "?", "?", "?", "wtf", "my", "mother", "did", "suspect", "I", "am", "mildly", "colorblind", "\ud83d\ude2a"]} -{"id": "1679-impostor", "word": "impostor", "label_binary": 0, "text_1": "I've got 5 glasses of wine in me. 
I'm going to let my fingers rant about content creation and personal growth for a minute, and why impostor syndrome is a &%$^.", "token_idx_1": 28, "text_start_1": 132, "text_end_1": 140, "date_1": "2019-09", "text_2": "impostor: *self report* impostor: where", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 8, "date_2": "2020-09", "text_1_tokenized": ["I've", "got", "5", "glasses", "of", "wine", "in", "me", ".", "I'm", "going", "to", "let", "my", "fingers", "rant", "about", "content", "creation", "and", "personal", "growth", "for", "a", "minute", ",", "and", "why", "impostor", "syndrome", "is", "a", "&", "%", "$", "^", "."], "text_2_tokenized": ["impostor", ":", "*", "self", "report", "*", "impostor", ":", "where"]} -{"id": "1680-impostor", "word": "impostor", "label_binary": 0, "text_1": "What a wonderful word is HUMBUG (\u2018hollowness, pretence, fraud, deception; an impostor'). It's such a polite term, redolent of more civilised times. Let's use it more often, together with humbuggable, humbugger and humbuggery. \u2018That humbugger Bercow!' has a nice ring to it.", "token_idx_1": 17, "text_start_1": 77, "text_end_1": 85, "date_1": "2019-09", "text_2": "I sometimes play Among us with a group, and I always get voted out, whether I'm an impostor or not. \ud83d\ude02", "token_idx_2": 19, "text_start_2": 83, "text_end_2": 91, "date_2": "2020-09", "text_1_tokenized": ["What", "a", "wonderful", "word", "is", "HUMBUG", "(", "\u2018", "hollowness", ",", "pretence", ",", "fraud", ",", "deception", ";", "an", "impostor", "'", ")", ".", "It's", "such", "a", "polite", "term", ",", "redolent", "of", "more", "civilised", "times", ".", "Let's", "use", "it", "more", "often", ",", "together", "with", "humbuggable", ",", "humbugger", "and", "humbuggery", ".", "\u2018", "That", "humbugger", "Bercow", "!", "'", "has", "a", "nice", "ring", "to", "it", "."], "text_2_tokenized": ["I", "sometimes", "play", "Among", "us", "with", "a", "group", ",", "and", "I", "always", "get", "voted", "out", ",", "whether", "I'm", "an", "impostor", "or", "not", ".", "\ud83d\ude02"]} -{"id": "1681-impostor", "word": "impostor", "label_binary": 0, "text_1": "He is such an impostor.", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 22, "date_1": "2019-09", "text_2": "gw crewmate & impostor terOP", "token_idx_2": 3, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-09", "text_1_tokenized": ["He", "is", "such", "an", "impostor", "."], "text_2_tokenized": ["gw", "crewmate", "&", "impostor", "terOP"]} -{"id": "1682-impostor", "word": "impostor", "label_binary": 1, "text_1": "I keep going back and forth between feeling like an impostor and feeling like a goddess. Can't wait to see how I feel in an hour!", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 60, "date_1": "2019-09", "text_2": "When will this our photoshop president address us live,his silence is a confirmation that he is an impostor. 
@Buhari must go", "token_idx_2": 19, "text_start_2": 99, "text_end_2": 107, "date_2": "2020-09", "text_1_tokenized": ["I", "keep", "going", "back", "and", "forth", "between", "feeling", "like", "an", "impostor", "and", "feeling", "like", "a", "goddess", ".", "Can't", "wait", "to", "see", "how", "I", "feel", "in", "an", "hour", "!"], "text_2_tokenized": ["When", "will", "this", "our", "photoshop", "president", "address", "us", "live", ",", "his", "silence", "is", "a", "confirmation", "that", "he", "is", "an", "impostor", ".", "@Buhari", "must", "go"]} -{"id": "1683-impostor", "word": "impostor", "label_binary": 1, "text_1": "I can't wait to see british impostor Bill again", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 36, "date_1": "2019-09", "text_2": "the real impostor is these hands dumb bitch", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-09", "text_1_tokenized": ["I", "can't", "wait", "to", "see", "british", "impostor", "Bill", "again"], "text_2_tokenized": ["the", "real", "impostor", "is", "these", "hands", "dumb", "bitch"]} -{"id": "1684-impostor", "word": "impostor", "label_binary": 0, "text_1": "Have you ever been given more responsibility or start a new role and just kind of... freak out. Like impostor syndrome on 100? Like who am I? How did I get here? Are you suuuuuuure you want ME to do this? Nope? Just me? Ok.", "token_idx_1": 21, "text_start_1": 101, "text_end_1": 109, "date_1": "2019-09", "text_2": "\"well well well, it looks like the student has become the master\" @yiannna proclaimed, thinking that she had caught me red-handed after i ended the life of a crewmate, but what she did not know is that her arrogance would lead to her eventual demise in a flawless impostor victory", "token_idx_2": 53, "text_start_2": 264, "text_end_2": 272, "date_2": "2020-09", "text_1_tokenized": ["Have", "you", "ever", "been", "given", "more", "responsibility", "or", "start", "a", "new", "role", "and", "just", "kind", "of", "...", "freak", "out", ".", "Like", "impostor", "syndrome", "on", "100", "?", "Like", "who", "am", "I", "?", "How", "did", "I", "get", "here", "?", "Are", "you", "suuuuuuure", "you", "want", "ME", "to", "do", "this", "?", "Nope", "?", "Just", "me", "?", "Ok", "."], "text_2_tokenized": ["\"", "well", "well", "well", ",", "it", "looks", "like", "the", "student", "has", "become", "the", "master", "\"", "@yiannna", "proclaimed", ",", "thinking", "that", "she", "had", "caught", "me", "red-handed", "after", "i", "ended", "the", "life", "of", "a", "crewmate", ",", "but", "what", "she", "did", "not", "know", "is", "that", "her", "arrogance", "would", "lead", "to", "her", "eventual", "demise", "in", "a", "flawless", "impostor", "victory"]} -{"id": "1685-impostor", "word": "impostor", "label_binary": 0, "text_1": "Fancy new chair coming this week, fancy speakers coming this week, wife going on business trip... looks like it's finally almost time to enjoy... the crushing impostor syndrome that won't let me use my own music studio to create anything.", "token_idx_1": 30, "text_start_1": 159, "text_end_1": 167, "date_1": "2019-09", "text_2": "watching the superm twitter blue room and Baekhyun was so good at being the S Man/impostor. 
Literally no one suspected him \ud83d\ude06", "token_idx_2": 17, "text_start_2": 82, "text_end_2": 90, "date_2": "2020-09", "text_1_tokenized": ["Fancy", "new", "chair", "coming", "this", "week", ",", "fancy", "speakers", "coming", "this", "week", ",", "wife", "going", "on", "business", "trip", "...", "looks", "like", "it's", "finally", "almost", "time", "to", "enjoy", "...", "the", "crushing", "impostor", "syndrome", "that", "won't", "let", "me", "use", "my", "own", "music", "studio", "to", "create", "anything", "."], "text_2_tokenized": ["watching", "the", "superm", "twitter", "blue", "room", "and", "Baekhyun", "was", "so", "good", "at", "being", "the", "S", "Man", "/", "impostor", ".", "Literally", "no", "one", "suspected", "him", "\ud83d\ude06"]} -{"id": "1686-impostor", "word": "impostor", "label_binary": 0, "text_1": "so frank (my favorite professor, i'm just calling him by his name from now on lol) told me today that my poem selection had the best titles in the history of the class he's been teaching for like 40 years and then my impostor syndrome kicked in. so how's everyone else's day?", "token_idx_1": 46, "text_start_1": 217, "text_end_1": 225, "date_1": "2019-09", "text_2": "I'm never good at being impostor bc my big fat juicy ass never fits in the vents :/", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 32, "date_2": "2020-09", "text_1_tokenized": ["so", "frank", "(", "my", "favorite", "professor", ",", "i'm", "just", "calling", "him", "by", "his", "name", "from", "now", "on", "lol", ")", "told", "me", "today", "that", "my", "poem", "selection", "had", "the", "best", "titles", "in", "the", "history", "of", "the", "class", "he's", "been", "teaching", "for", "like", "40", "years", "and", "then", "my", "impostor", "syndrome", "kicked", "in", ".", "so", "how's", "everyone", "else's", "day", "?"], "text_2_tokenized": ["I'm", "never", "good", "at", "being", "impostor", "bc", "my", "big", "fat", "juicy", "ass", "never", "fits", "in", "the", "vents", ":/"]} -{"id": "1687-impostor", "word": "impostor", "label_binary": 1, "text_1": "The one thing I like about cloud boy is that he can be used by older Eraser and Mic when they suspect the other might be an impostor. Eraser: \"Did you pick up that gift for Shirakumo?\" \"Mic\": \"Oh, ah, no. 
I'll get it on my way to the radio station.\" E: \"Mm.\"", "token_idx_1": 27, "text_start_1": 124, "text_end_1": 132, "date_1": "2019-09", "text_2": "I love being the impostor, it's so fun", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 25, "date_2": "2020-09", "text_1_tokenized": ["The", "one", "thing", "I", "like", "about", "cloud", "boy", "is", "that", "he", "can", "be", "used", "by", "older", "Eraser", "and", "Mic", "when", "they", "suspect", "the", "other", "might", "be", "an", "impostor", ".", "Eraser", ":", "\"", "Did", "you", "pick", "up", "that", "gift", "for", "Shirakumo", "?", "\"", "\"", "Mic", "\"", ":", "\"", "Oh", ",", "ah", ",", "no", ".", "I'll", "get", "it", "on", "my", "way", "to", "the", "radio", "station", ".", "\"", "E", ":", "\"", "Mm", ".", "\""], "text_2_tokenized": ["I", "love", "being", "the", "impostor", ",", "it's", "so", "fun"]} -{"id": "1688-impostor", "word": "impostor", "label_binary": 0, "text_1": "coffee quells my impostor syndrome temporarily", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 25, "date_1": "2019-09", "text_2": "I can't be an impostor in Among Us, because I can't backstab people in real life \ud83d\ude14", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 22, "date_2": "2020-09", "text_1_tokenized": ["coffee", "quells", "my", "impostor", "syndrome", "temporarily"], "text_2_tokenized": ["I", "can't", "be", "an", "impostor", "in", "Among", "Us", ",", "because", "I", "can't", "backstab", "people", "in", "real", "life", "\ud83d\ude14"]} -{"id": "1690-impostor", "word": "impostor", "label_binary": 0, "text_1": "Muz: believes Mo ascended to outer space on a flying donkey Also muz: believes he worships the same god as xtians do the god that admits he fooled ppl for centuries by saying Jesus is his son and was crucified on a cross while the quran says some impostor was crucified instead", "token_idx_1": 50, "text_start_1": 247, "text_end_1": 255, "date_1": "2019-09", "text_2": "playing impostor is stressing af #AmongUs", "token_idx_2": 1, "text_start_2": 8, "text_end_2": 16, "date_2": "2020-09", "text_1_tokenized": ["Muz", ":", "believes", "Mo", "ascended", "to", "outer", "space", "on", "a", "flying", "donkey", "Also", "muz", ":", "believes", "he", "worships", "the", "same", "god", "as", "xtians", "do", "the", "god", "that", "admits", "he", "fooled", "ppl", "for", "centuries", "by", "saying", "Jesus", "is", "his", "son", "and", "was", "crucified", "on", "a", "cross", "while", "the", "quran", "says", "some", "impostor", "was", "crucified", "instead"], "text_2_tokenized": ["playing", "impostor", "is", "stressing", "af", "#AmongUs"]} -{"id": "1691-impostor", "word": "impostor", "label_binary": 1, "text_1": "Lol love writing emails back to people who have submitted their work for my blog but also feel like such an impostor like I'm not a big deal y'all (at least, yet) \ud83d\ude02", "token_idx_1": 21, "text_start_1": 108, "text_end_1": 116, "date_1": "2019-09", "text_2": "Elvia: You're the impostor. 
Me: I'M the impostor?!", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-09", "text_1_tokenized": ["Lol", "love", "writing", "emails", "back", "to", "people", "who", "have", "submitted", "their", "work", "for", "my", "blog", "but", "also", "feel", "like", "such", "an", "impostor", "like", "I'm", "not", "a", "big", "deal", "y'all", "(", "at", "least", ",", "yet", ")", "\ud83d\ude02"], "text_2_tokenized": ["Elvia", ":", "You're", "the", "impostor", ".", "Me", ":", "I'M", "the", "impostor", "?", "!"]} -{"id": "1692-impostor", "word": "impostor", "label_binary": 0, "text_1": "anyone else's front dumpster all stuffed with the bundts you baked for neighbors that you never delivered cuz of impostor syndrome?? #FridayVibes", "token_idx_1": 19, "text_start_1": 113, "text_end_1": 121, "date_1": "2019-09", "text_2": "why do they always vote me out when i'm not the impostor i'm going crazy rn lol", "token_idx_2": 11, "text_start_2": 48, "text_end_2": 56, "date_2": "2020-09", "text_1_tokenized": ["anyone", "else's", "front", "dumpster", "all", "stuffed", "with", "the", "bundts", "you", "baked", "for", "neighbors", "that", "you", "never", "delivered", "cuz", "of", "impostor", "syndrome", "?", "?", "#FridayVibes"], "text_2_tokenized": ["why", "do", "they", "always", "vote", "me", "out", "when", "i'm", "not", "the", "impostor", "i'm", "going", "crazy", "rn", "lol"]} -{"id": "1693-impostor", "word": "impostor", "label_binary": 0, "text_1": "me jopping in a frequency loud enough to be heard: I know romanov facts a normal amount. anyway did you know eugenia smith and an alexei impostor,", "token_idx_1": 28, "text_start_1": 137, "text_end_1": 145, "date_1": "2019-09", "text_2": "My daughter just said \"I have impostor syndrome. But my impostor's not good enough\". 
She's fucking ace.", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["me", "jopping", "in", "a", "frequency", "loud", "enough", "to", "be", "heard", ":", "I", "know", "romanov", "facts", "a", "normal", "amount", ".", "anyway", "did", "you", "know", "eugenia", "smith", "and", "an", "alexei", "impostor", ","], "text_2_tokenized": ["My", "daughter", "just", "said", "\"", "I", "have", "impostor", "syndrome", ".", "But", "my", "impostor's", "not", "good", "enough", "\"", ".", "She's", "fucking", "ace", "."]} -{"id": "1694-impostor", "word": "impostor", "label_binary": 1, "text_1": "Please I need a confirmation about Donald Trump sold military wares to the impostor president in Nigeria, in order to kill Christian and Jews.Nigeria government is a terrorist state for God's sake", "token_idx_1": 13, "text_start_1": 75, "text_end_1": 83, "date_1": "2019-09", "text_2": "It's \u2728impostor\u2728 not imposter", "token_idx_2": 2, "text_start_2": 6, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["Please", "I", "need", "a", "confirmation", "about", "Donald", "Trump", "sold", "military", "wares", "to", "the", "impostor", "president", "in", "Nigeria", ",", "in", "order", "to", "kill", "Christian", "and", "Jews.Nigeria", "government", "is", "a", "terrorist", "state", "for", "God's", "sake"], "text_2_tokenized": ["It's", "\u2728", "impostor", "\u2728", "not", "imposter"]} -{"id": "1695-impostor", "word": "impostor", "label_binary": 0, "text_1": "Maybe we have to vote for Democrats to get the impostor president out, but when the real reckoning comes, the Democrats will have a lot answer for in light of their cowardice and delay in the face of clear-cut crimes.", "token_idx_1": 10, "text_start_1": 47, "text_end_1": 55, "date_1": "2019-09", "text_2": "My talent in Among Us is getting everyone to suspect a certain person, voting them off all to see that they weren't the impostor. If I'm not the impostor, I'll act like one", "token_idx_2": 24, "text_start_2": 120, "text_end_2": 128, "date_2": "2020-09", "text_1_tokenized": ["Maybe", "we", "have", "to", "vote", "for", "Democrats", "to", "get", "the", "impostor", "president", "out", ",", "but", "when", "the", "real", "reckoning", "comes", ",", "the", "Democrats", "will", "have", "a", "lot", "answer", "for", "in", "light", "of", "their", "cowardice", "and", "delay", "in", "the", "face", "of", "clear-cut", "crimes", "."], "text_2_tokenized": ["My", "talent", "in", "Among", "Us", "is", "getting", "everyone", "to", "suspect", "a", "certain", "person", ",", "voting", "them", "off", "all", "to", "see", "that", "they", "weren't", "the", "impostor", ".", "If", "I'm", "not", "the", "impostor", ",", "I'll", "act", "like", "one"]} -{"id": "1696-impostor", "word": "impostor", "label_binary": 0, "text_1": "It's been awhile since I've been on this account. Partly inertia since my other account has so many folks that I haven't followed here yet and refollowing so many people feels overwhelming. Partly some sense of impostor syndrome and confusion applied to my gender identity, too.", "token_idx_1": 38, "text_start_1": 211, "text_end_1": 219, "date_1": "2019-09", "text_2": "Slack is Among Us for engineers. 
\u201cwho killed it?\u201d \u201ci feel like an impostor\u201d \u201cbob has started a poll\u201d", "token_idx_2": 18, "text_start_2": 66, "text_end_2": 74, "date_2": "2020-09", "text_1_tokenized": ["It's", "been", "awhile", "since", "I've", "been", "on", "this", "account", ".", "Partly", "inertia", "since", "my", "other", "account", "has", "so", "many", "folks", "that", "I", "haven't", "followed", "here", "yet", "and", "refollowing", "so", "many", "people", "feels", "overwhelming", ".", "Partly", "some", "sense", "of", "impostor", "syndrome", "and", "confusion", "applied", "to", "my", "gender", "identity", ",", "too", "."], "text_2_tokenized": ["Slack", "is", "Among", "Us", "for", "engineers", ".", "\u201c", "who", "killed", "it", "?", "\u201d", "\u201c", "i", "feel", "like", "an", "impostor", "\u201d", "\u201c", "bob", "has", "started", "a", "poll", "\u201d"]} -{"id": "1697-impostor", "word": "impostor", "label_binary": 0, "text_1": "Having a rough start to my doctorate program in both the student and teacher roles and feel down and ashamed. I spoke to faculty and know how to move forward, but while they believe in me I find it hard to believe in myself. How do you fight impostor syndrome? @AcademicChatter", "token_idx_1": 51, "text_start_1": 242, "text_end_1": 250, "date_1": "2019-09", "text_2": "laughed so hard running from impostor friend around the lab table that I gave myself an headache lmao what a good day", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 37, "date_2": "2020-09", "text_1_tokenized": ["Having", "a", "rough", "start", "to", "my", "doctorate", "program", "in", "both", "the", "student", "and", "teacher", "roles", "and", "feel", "down", "and", "ashamed", ".", "I", "spoke", "to", "faculty", "and", "know", "how", "to", "move", "forward", ",", "but", "while", "they", "believe", "in", "me", "I", "find", "it", "hard", "to", "believe", "in", "myself", ".", "How", "do", "you", "fight", "impostor", "syndrome", "?", "@AcademicChatter"], "text_2_tokenized": ["laughed", "so", "hard", "running", "from", "impostor", "friend", "around", "the", "lab", "table", "that", "I", "gave", "myself", "an", "headache", "lmao", "what", "a", "good", "day"]} -{"id": "1698-impostor", "word": "impostor", "label_binary": 0, "text_1": "] In applying for a passport or a national ID card, all Pakistanis are required to sign an oath declaring Mirza Ghulam Ahmad to be an impostor prophet and all Ahmadis to be non-Muslims.[22] \ud83d\udc40 Seriously ?", "token_idx_1": 27, "text_start_1": 134, "text_end_1": 142, "date_1": "2019-09", "text_2": "played among us for THREE HOURS and that damn game didn't make me the impostor once", "token_idx_2": 14, "text_start_2": 70, "text_end_2": 78, "date_2": "2020-09", "text_1_tokenized": ["]", "In", "applying", "for", "a", "passport", "or", "a", "national", "ID", "card", ",", "all", "Pakistanis", "are", "required", "to", "sign", "an", "oath", "declaring", "Mirza", "Ghulam", "Ahmad", "to", "be", "an", "impostor", "prophet", "and", "all", "Ahmadis", "to", "be", "non-Muslims", ".", "[", "22", "]", "\ud83d\udc40", "Seriously", "?"], "text_2_tokenized": ["played", "among", "us", "for", "THREE", "HOURS", "and", "that", "damn", "game", "didn't", "make", "me", "the", "impostor", "once"]} -{"id": "1699-impostor", "word": "impostor", "label_binary": 1, "text_1": "I had a dream last night that Leroy was an impostor???", "token_idx_1": 10, "text_start_1": 43, "text_end_1": 51, "date_1": "2019-09", "text_2": "i remember trolling everyone i was on call/talking to last night when i 
was the impostor that was a+ content", "token_idx_2": 17, "text_start_2": 80, "text_end_2": 88, "date_2": "2020-09", "text_1_tokenized": ["I", "had", "a", "dream", "last", "night", "that", "Leroy", "was", "an", "impostor", "?", "?", "?"], "text_2_tokenized": ["i", "remember", "trolling", "everyone", "i", "was", "on", "call", "/", "talking", "to", "last", "night", "when", "i", "was", "the", "impostor", "that", "was", "a", "+", "content"]} -{"id": "1700-impostor", "word": "impostor", "label_binary": 0, "text_1": "got a raise impostor syndrome found dead", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 20, "date_1": "2019-09", "text_2": "my brain chemistry may be hopelessly fucked but at least every day i can get on youtube and watch six among us impostor videos in a row, which is better than zoloft", "token_idx_2": 22, "text_start_2": 111, "text_end_2": 119, "date_2": "2020-09", "text_1_tokenized": ["got", "a", "raise", "impostor", "syndrome", "found", "dead"], "text_2_tokenized": ["my", "brain", "chemistry", "may", "be", "hopelessly", "fucked", "but", "at", "least", "every", "day", "i", "can", "get", "on", "youtube", "and", "watch", "six", "among", "us", "impostor", "videos", "in", "a", "row", ",", "which", "is", "better", "than", "zoloft"]} -{"id": "1701-impostor", "word": "impostor", "label_binary": 0, "text_1": "idk im feeling major impostor feelings right now cause i feel like im not smart or good enough for the work my prof picked me out for to do as his assistant lol", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 29, "date_1": "2019-09", "text_2": "i was playing among us as the impostor and on impulse i decided to kill someone in reactor. but then i realized we were the only 2 people there...and i had just reported the body. i was so embarrassed i left the game", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["idk", "im", "feeling", "major", "impostor", "feelings", "right", "now", "cause", "i", "feel", "like", "im", "not", "smart", "or", "good", "enough", "for", "the", "work", "my", "prof", "picked", "me", "out", "for", "to", "do", "as", "his", "assistant", "lol"], "text_2_tokenized": ["i", "was", "playing", "among", "us", "as", "the", "impostor", "and", "on", "impulse", "i", "decided", "to", "kill", "someone", "in", "reactor", ".", "but", "then", "i", "realized", "we", "were", "the", "only", "2", "people", "there", "...", "and", "i", "had", "just", "reported", "the", "body", ".", "i", "was", "so", "embarrassed", "i", "left", "the", "game"]} -{"id": "1702-impostor", "word": "impostor", "label_binary": 0, "text_1": "#NationalComingOutDay I think I've covered everything: I'm a polysexual, polyamorous, furry, former \u2018sister', demisexual guy who deals with impostor syndrome and social claustrophobia", "token_idx_1": 25, "text_start_1": 140, "text_end_1": 148, "date_1": "2019-09", "text_2": "y'all i was playing among us with random people and i was the impostor and i killed this crewmate not knowing that there was someone standing in the corner and they all called me a dumb fuck in the chat i am so fucking embarrassed", "token_idx_2": 13, "text_start_2": 62, "text_end_2": 70, "date_2": "2020-09", "text_1_tokenized": ["#NationalComingOutDay", "I", "think", "I've", "covered", "everything", ":", "I'm", "a", "polysexual", ",", "polyamorous", ",", "furry", ",", "former", "\u2018", "sister", "'", ",", "demisexual", "guy", "who", "deals", "with", "impostor", "syndrome", "and", "social", "claustrophobia"], "text_2_tokenized": 
["y'all", "i", "was", "playing", "among", "us", "with", "random", "people", "and", "i", "was", "the", "impostor", "and", "i", "killed", "this", "crewmate", "not", "knowing", "that", "there", "was", "someone", "standing", "in", "the", "corner", "and", "they", "all", "called", "me", "a", "dumb", "fuck", "in", "the", "chat", "i", "am", "so", "fucking", "embarrassed"]} -{"id": "1703-impostor", "word": "impostor", "label_binary": 1, "text_1": "Donald Trump is not a king, an emperor, an autocrat or a president of any repute. He is an impostor. And that's being kind.", "token_idx_1": 22, "text_start_1": 91, "text_end_1": 99, "date_1": "2019-09", "text_2": "one of my fave overdressed another one of my fave lookin good another one of my fave underdressed.... there are two impostor among us .", "token_idx_2": 22, "text_start_2": 116, "text_end_2": 124, "date_2": "2020-09", "text_1_tokenized": ["Donald", "Trump", "is", "not", "a", "king", ",", "an", "emperor", ",", "an", "autocrat", "or", "a", "president", "of", "any", "repute", ".", "He", "is", "an", "impostor", ".", "And", "that's", "being", "kind", "."], "text_2_tokenized": ["one", "of", "my", "fave", "overdressed", "another", "one", "of", "my", "fave", "lookin", "good", "another", "one", "of", "my", "fave", "underdressed", "...", "there", "are", "two", "impostor", "among", "us", "."]} -{"id": "1704-impostor", "word": "impostor", "label_binary": 1, "text_1": "Rev. Wallis. I believe the anti-Christ is here. He looks like Jesus, sounds like Jesus but isn't Jesus. He's a hate filled, racist, intolerant impostor that millions follow thinking he is Jesus. What say you? @cspanwj", "token_idx_1": 31, "text_start_1": 143, "text_end_1": 151, "date_1": "2019-09", "text_2": "A relationship should be 50/50. He calls me the impostor and I get to vote him off.", "token_idx_2": 10, "text_start_2": 48, "text_end_2": 56, "date_2": "2020-09", "text_1_tokenized": ["Rev", ".", "Wallis", ".", "I", "believe", "the", "anti-Christ", "is", "here", ".", "He", "looks", "like", "Jesus", ",", "sounds", "like", "Jesus", "but", "isn't", "Jesus", ".", "He's", "a", "hate", "filled", ",", "racist", ",", "intolerant", "impostor", "that", "millions", "follow", "thinking", "he", "is", "Jesus", ".", "What", "say", "you", "?", "@cspanwj"], "text_2_tokenized": ["A", "relationship", "should", "be", "50/50", ".", "He", "calls", "me", "the", "impostor", "and", "I", "get", "to", "vote", "him", "off", "."]} -{"id": "1705-impostor", "word": "impostor", "label_binary": 1, "text_1": "I got a very nice email from a reader yesterday telling me how much she liked my story. Nothing too long, just a few sentences. But those little messages really are the best thing. Like, the *best* thing. You read them and... 
just for a moment that impostor syndrome goes away.", "token_idx_1": 56, "text_start_1": 249, "text_end_1": 257, "date_1": "2019-09", "text_2": "it's cool that my impostor syndrome seamlessly swapped into a new gear and now I find myself sobbing that I've already failed at something I haven't started", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 26, "date_2": "2020-09", "text_1_tokenized": ["I", "got", "a", "very", "nice", "email", "from", "a", "reader", "yesterday", "telling", "me", "how", "much", "she", "liked", "my", "story", ".", "Nothing", "too", "long", ",", "just", "a", "few", "sentences", ".", "But", "those", "little", "messages", "really", "are", "the", "best", "thing", ".", "Like", ",", "the", "*", "best", "*", "thing", ".", "You", "read", "them", "and", "...", "just", "for", "a", "moment", "that", "impostor", "syndrome", "goes", "away", "."], "text_2_tokenized": ["it's", "cool", "that", "my", "impostor", "syndrome", "seamlessly", "swapped", "into", "a", "new", "gear", "and", "now", "I", "find", "myself", "sobbing", "that", "I've", "already", "failed", "at", "something", "I", "haven't", "started"]} -{"id": "1706-impostor", "word": "impostor", "label_binary": 0, "text_1": "Looking at #rEDSurrey line up &got major impostor syndrome!Know I am up against tough competition but if you are there please consider coming to see me for some simple, effective teaching strategies you can implement immediately& are widely applicable! @rEDSurrey2019 @GeogMarsh", "token_idx_1": 8, "text_start_1": 45, "text_end_1": 53, "date_1": "2019-09", "text_2": "Can't catch a break in among us when I do the tasks I get killed immediately when I'm an impostor I lose immediately \ud83d\ude43\ud83d\ude43\ud83d\ude43", "token_idx_2": 19, "text_start_2": 89, "text_end_2": 97, "date_2": "2020-09", "text_1_tokenized": ["Looking", "at", "#rEDSurrey", "line", "up", "&", "got", "major", "impostor", "syndrome", "!", "Know", "I", "am", "up", "against", "tough", "competition", "but", "if", "you", "are", "there", "please", "consider", "coming", "to", "see", "me", "for", "some", "simple", ",", "effective", "teaching", "strategies", "you", "can", "implement", "immediately", "&", "are", "widely", "applicable", "!", "@rEDSurrey2019", "@GeogMarsh"], "text_2_tokenized": ["Can't", "catch", "a", "break", "in", "among", "us", "when", "I", "do", "the", "tasks", "I", "get", "killed", "immediately", "when", "I'm", "an", "impostor", "I", "lose", "immediately", "\ud83d\ude43", "\ud83d\ude43", "\ud83d\ude43"]} -{"id": "1707-impostor", "word": "impostor", "label_binary": 0, "text_1": "Twt game (Pisay ver.) 1.) 27 2.) 3.) 5.) huh 6.) marami 7.) k**** 8.) neither 9.) intern 10.) '24 and '25 11.) si ex-crush(?) 12.) MAY (campus) n Unholy Trinity (inter) 13.) AKO 14.) the pisay contract 15.) \"di ako yung impostor\" Like and I'll send the questions :D", "token_idx_1": 86, "text_start_1": 220, "text_end_1": 228, "date_1": "2019-09", "text_2": "col keep predicting the right impostor... 
zzzzzz", "token_idx_2": 5, "text_start_2": 30, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["Twt", "game", "(", "Pisay", "ver", ".", ")", "1", ".", ")", "27", "2", ".", ")", "3", ".", ")", "5", ".", ")", "huh", "6", ".", ")", "marami", "7", ".", ")", "k", "*", "*", "*", "8", ".", ")", "neither", "9", ".", ")", "intern", "10", ".", ")", "'", "24", "and", "'", "25", "11", ".", ")", "si", "ex-crush", "(", "?", ")", "12", ".", ")", "MAY", "(", "campus", ")", "n", "Unholy", "Trinity", "(", "inter", ")", "13", ".", ")", "AKO", "14", ".", ")", "the", "pisay", "contract", "15", ".", ")", "\"", "di", "ako", "yung", "impostor", "\"", "Like", "and", "I'll", "send", "the", "questions", ":D"], "text_2_tokenized": ["col", "keep", "predicting", "the", "right", "impostor", "...", "zzzzzz"]} -{"id": "1708-impostor", "word": "impostor", "label_binary": 0, "text_1": "can u get impostor syndrome from something u havent even done yet,,,,,", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 18, "date_1": "2019-09", "text_2": "I can't believe there are people who play Among Us that don't like being an impostor", "token_idx_2": 15, "text_start_2": 76, "text_end_2": 84, "date_2": "2020-09", "text_1_tokenized": ["can", "u", "get", "impostor", "syndrome", "from", "something", "u", "havent", "even", "done", "yet", ",", ",", ","], "text_2_tokenized": ["I", "can't", "believe", "there", "are", "people", "who", "play", "Among", "Us", "that", "don't", "like", "being", "an", "impostor"]} -{"id": "1709-impostor", "word": "impostor", "label_binary": 0, "text_1": "reject impostor syndrome no one cares you're here at the table & if they do it's temporary everything is", "token_idx_1": 1, "text_start_1": 7, "text_end_1": 15, "date_1": "2019-09", "text_2": "i've played like 9 games of among us and i've not been impostor once what the fuck", "token_idx_2": 12, "text_start_2": 55, "text_end_2": 63, "date_2": "2020-09", "text_1_tokenized": ["reject", "impostor", "syndrome", "no", "one", "cares", "you're", "here", "at", "the", "table", "&", "if", "they", "do", "it's", "temporary", "everything", "is"], "text_2_tokenized": ["i've", "played", "like", "9", "games", "of", "among", "us", "and", "i've", "not", "been", "impostor", "once", "what", "the", "fuck"]} -{"id": "1710-impostor", "word": "impostor", "label_binary": 0, "text_1": "Ausdrucken, einrahmen: \u201eBut far from being the product of a pathology, what seems more likely is that impostor syndrome is a rather natural reaction of anyone from a working-class, disadvantaged or minority background to the various biases they face on a daily basis.\u201c #LastRT", "token_idx_1": 21, "text_start_1": 102, "text_end_1": 110, "date_1": "2019-09", "text_2": "mfs dating a cheater and play among us 24/7 like bro u worried bout the wrong impostor\ud83d\udc80", "token_idx_2": 16, "text_start_2": 78, "text_end_2": 86, "date_2": "2020-09", "text_1_tokenized": ["Ausdrucken", ",", "einrahmen", ":", "\u201e", "But", "far", "from", "being", "the", "product", "of", "a", "pathology", ",", "what", "seems", "more", "likely", "is", "that", "impostor", "syndrome", "is", "a", "rather", "natural", "reaction", "of", "anyone", "from", "a", "working-class", ",", "disadvantaged", "or", "minority", "background", "to", "the", "various", "biases", "they", "face", "on", "a", "daily", "basis", ".", "\u201c", "#LastRT"], "text_2_tokenized": ["mfs", "dating", "a", "cheater", "and", "play", "among", "us", "24/7", "like", "bro", "u", "worried", "bout", "the", "wrong", "impostor", "\ud83d\udc80"]} -{"id": 
"1711-impostor", "word": "impostor", "label_binary": 0, "text_1": "The impostor syndrome is real 2nite", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 12, "date_1": "2019-09", "text_2": "I'm so sorry to the other impostor in Among Us who I had to report after accidentally locking myself in a room where they just killed. I didn't get the chance to apologize after the game. I'm sorrrryyy!!!", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 34, "date_2": "2020-09", "text_1_tokenized": ["The", "impostor", "syndrome", "is", "real", "2nite"], "text_2_tokenized": ["I'm", "so", "sorry", "to", "the", "other", "impostor", "in", "Among", "Us", "who", "I", "had", "to", "report", "after", "accidentally", "locking", "myself", "in", "a", "room", "where", "they", "just", "killed", ".", "I", "didn't", "get", "the", "chance", "to", "apologize", "after", "the", "game", ".", "I'm", "sorrrryyy", "!", "!", "!"]} -{"id": "1712-impostor", "word": "impostor", "label_binary": 1, "text_1": "Something I've learned about me is that more often than not, I have to be in a good/optimistic mood to listen to a lot of Hip Hop. My senses can't vibe with it if the narrative is positive. It's like \"impostor syndrome.\" I ain't got it like that, so those hype songs feel weird.", "token_idx_1": 46, "text_start_1": 201, "text_end_1": 209, "date_1": "2019-09", "text_2": "maybe it's my impostor syndrome but I never get used to people saying they're fans of my work", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 22, "date_2": "2020-09", "text_1_tokenized": ["Something", "I've", "learned", "about", "me", "is", "that", "more", "often", "than", "not", ",", "I", "have", "to", "be", "in", "a", "good", "/", "optimistic", "mood", "to", "listen", "to", "a", "lot", "of", "Hip", "Hop", ".", "My", "senses", "can't", "vibe", "with", "it", "if", "the", "narrative", "is", "positive", ".", "It's", "like", "\"", "impostor", "syndrome", ".", "\"", "I", "ain't", "got", "it", "like", "that", ",", "so", "those", "hype", "songs", "feel", "weird", "."], "text_2_tokenized": ["maybe", "it's", "my", "impostor", "syndrome", "but", "I", "never", "get", "used", "to", "people", "saying", "they're", "fans", "of", "my", "work"]} -{"id": "1713-impostor", "word": "impostor", "label_binary": 0, "text_1": "hey, so like I've been putting this off for months. Anguish, loneliness, breakdowns, impostor syndrome, more suicidal thoughts, fear, introversion, hiding away There's probably never a good time to say it for everyone, but I just need to be honest with myself. 
I'm coming out.", "token_idx_1": 18, "text_start_1": 85, "text_end_1": 93, "date_1": "2019-09", "text_2": "Idk how I managed to get impostor 5 times in a row but I'm 4/5 wins soooooo lmao", "token_idx_2": 6, "text_start_2": 25, "text_end_2": 33, "date_2": "2020-09", "text_1_tokenized": ["hey", ",", "so", "like", "I've", "been", "putting", "this", "off", "for", "months", ".", "Anguish", ",", "loneliness", ",", "breakdowns", ",", "impostor", "syndrome", ",", "more", "suicidal", "thoughts", ",", "fear", ",", "introversion", ",", "hiding", "away", "There's", "probably", "never", "a", "good", "time", "to", "say", "it", "for", "everyone", ",", "but", "I", "just", "need", "to", "be", "honest", "with", "myself", ".", "I'm", "coming", "out", "."], "text_2_tokenized": ["Idk", "how", "I", "managed", "to", "get", "impostor", "5", "times", "in", "a", "row", "but", "I'm", "4/5", "wins", "soooooo", "lmao"]} -{"id": "1714-impostor", "word": "impostor", "label_binary": 0, "text_1": "In one of my classes we have to proofread each other's resumes. Are they just trying to give us impostor syndrome or what?", "token_idx_1": 20, "text_start_1": 96, "text_end_1": 104, "date_1": "2019-09", "text_2": "We played javkbox and i was alien twice ( basically impostor in amoun us ) and i won both time \u2764\ufe0f", "token_idx_2": 10, "text_start_2": 52, "text_end_2": 60, "date_2": "2020-09", "text_1_tokenized": ["In", "one", "of", "my", "classes", "we", "have", "to", "proofread", "each", "other's", "resumes", ".", "Are", "they", "just", "trying", "to", "give", "us", "impostor", "syndrome", "or", "what", "?"], "text_2_tokenized": ["We", "played", "javkbox", "and", "i", "was", "alien", "twice", "(", "basically", "impostor", "in", "amoun", "us", ")", "and", "i", "won", "both", "time", "\u2764", "\ufe0f"]} -{"id": "1715-impostor", "word": "impostor", "label_binary": 0, "text_1": "On a mission to get inspired by 'still' music earlier, I listened to the second half of Stravinsky's Sacre, only to have a full on impostor syndrome attack in the library, so I put on some Schubert piano sonatas to calm back down. Rough times.", "token_idx_1": 29, "text_start_1": 131, "text_end_1": 139, "date_1": "2019-09", "text_2": "Please stop calling that intruder @MBuhari our president... He's and impostor claiming to be our president... 
#EndSARS #EndPoliceBrutalityinNigeraNOW #EndBadGoveranceInNigeria", "token_idx_2": 11, "text_start_2": 69, "text_end_2": 77, "date_2": "2020-09", "text_1_tokenized": ["On", "a", "mission", "to", "get", "inspired", "by", "'", "still", "'", "music", "earlier", ",", "I", "listened", "to", "the", "second", "half", "of", "Stravinsky's", "Sacre", ",", "only", "to", "have", "a", "full", "on", "impostor", "syndrome", "attack", "in", "the", "library", ",", "so", "I", "put", "on", "some", "Schubert", "piano", "sonatas", "to", "calm", "back", "down", ".", "Rough", "times", "."], "text_2_tokenized": ["Please", "stop", "calling", "that", "intruder", "@MBuhari", "our", "president", "...", "He's", "and", "impostor", "claiming", "to", "be", "our", "president", "...", "#EndSARS", "#EndPoliceBrutalityinNigeraNOW", "#EndBadGoveranceInNigeria"]} -{"id": "1716-impostor", "word": "impostor", "label_binary": 1, "text_1": "I might have impostor syndrome but smtms I don't believe it when guys say I give good head..", "token_idx_1": 3, "text_start_1": 13, "text_end_1": 21, "date_1": "2019-09", "text_2": "I have impostor syndrome for being alive", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 15, "date_2": "2020-09", "text_1_tokenized": ["I", "might", "have", "impostor", "syndrome", "but", "smtms", "I", "don't", "believe", "it", "when", "guys", "say", "I", "give", "good", "head", ".."], "text_2_tokenized": ["I", "have", "impostor", "syndrome", "for", "being", "alive"]} -{"id": "1717-impostor", "word": "impostor", "label_binary": 1, "text_1": "Wow!! An impostor right in front if them. # theRookie", "token_idx_1": 4, "text_start_1": 9, "text_end_1": 17, "date_1": "2019-09", "text_2": "so who's the impostor???", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["Wow", "!", "!", "An", "impostor", "right", "in", "front", "if", "them", ".", "#", "theRookie"], "text_2_tokenized": ["so", "who's", "the", "impostor", "?", "?", "?"]} -{"id": "1718-impostor", "word": "impostor", "label_binary": 1, "text_1": "I always feel like an impostor when writing iOS dev related articles, my last held iOS job was 18 months ago (I currently work as backend dev), and my own iOS / Mac apps usually have just 1 view controller with simple functionality\ud83d\ude05", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 30, "date_1": "2019-09", "text_2": "Stop follow the other Anthony I'm the real one the other anything is An impostor", "token_idx_2": 14, "text_start_2": 72, "text_end_2": 80, "date_2": "2020-09", "text_1_tokenized": ["I", "always", "feel", "like", "an", "impostor", "when", "writing", "iOS", "dev", "related", "articles", ",", "my", "last", "held", "iOS", "job", "was", "18", "months", "ago", "(", "I", "currently", "work", "as", "backend", "dev", ")", ",", "and", "my", "own", "iOS", "/", "Mac", "apps", "usually", "have", "just", "1", "view", "controller", "with", "simple", "functionality", "\ud83d\ude05"], "text_2_tokenized": ["Stop", "follow", "the", "other", "Anthony", "I'm", "the", "real", "one", "the", "other", "anything", "is", "An", "impostor"]} -{"id": "1719-impostor", "word": "impostor", "label_binary": 0, "text_1": "Recently came across the idea that impostor syndrome and egomania are closely related. 
They seem different, but each stems from the idea that your value derives from your accomplishments/abilities.", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 43, "date_1": "2019-09", "text_2": "nowadays you can't say the word \"impostor\" without someone making an among us reference", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 41, "date_2": "2020-09", "text_1_tokenized": ["Recently", "came", "across", "the", "idea", "that", "impostor", "syndrome", "and", "egomania", "are", "closely", "related", ".", "They", "seem", "different", ",", "but", "each", "stems", "from", "the", "idea", "that", "your", "value", "derives", "from", "your", "accomplishments", "/", "abilities", "."], "text_2_tokenized": ["nowadays", "you", "can't", "say", "the", "word", "\"", "impostor", "\"", "without", "someone", "making", "an", "among", "us", "reference"]} -{"id": "1720-impostor", "word": "impostor", "label_binary": 0, "text_1": "Hi I hate impostor syndrome and it can suck my dick thanks bye", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 18, "date_1": "2019-09", "text_2": "being impostor in among us is nice until someone has a hack and ruins the only time you're impostor", "token_idx_2": 1, "text_start_2": 6, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["Hi", "I", "hate", "impostor", "syndrome", "and", "it", "can", "suck", "my", "dick", "thanks", "bye"], "text_2_tokenized": ["being", "impostor", "in", "among", "us", "is", "nice", "until", "someone", "has", "a", "hack", "and", "ruins", "the", "only", "time", "you're", "impostor"]} -{"id": "1721-impostor", "word": "impostor", "label_binary": 0, "text_1": "Is it impostor syndrome when you believe you belong at a tech job but everyone else thinks you don't?", "token_idx_1": 2, "text_start_1": 6, "text_end_1": 14, "date_1": "2019-09", "text_2": "itni dair baad impostor banni and i accidentally closed my app :))))", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["Is", "it", "impostor", "syndrome", "when", "you", "believe", "you", "belong", "at", "a", "tech", "job", "but", "everyone", "else", "thinks", "you", "don't", "?"], "text_2_tokenized": ["itni", "dair", "baad", "impostor", "banni", "and", "i", "accidentally", "closed", "my", "app", ":)", ")", ")"]} -{"id": "1722-impostor", "word": "impostor", "label_binary": 0, "text_1": "me and my impostor syndrome when i get complimented for my cooking: it's not me, it's the pinterest recipe", "token_idx_1": 3, "text_start_1": 10, "text_end_1": 18, "date_1": "2019-09", "text_2": "i wish i was recording the impostor game i had last night. I FOOLED EVERYONE HAHA", "token_idx_2": 6, "text_start_2": 27, "text_end_2": 35, "date_2": "2020-09", "text_1_tokenized": ["me", "and", "my", "impostor", "syndrome", "when", "i", "get", "complimented", "for", "my", "cooking", ":", "it's", "not", "me", ",", "it's", "the", "pinterest", "recipe"], "text_2_tokenized": ["i", "wish", "i", "was", "recording", "the", "impostor", "game", "i", "had", "last", "night", ".", "I", "FOOLED", "EVERYONE", "HAHA"]} -{"id": "1723-impostor", "word": "impostor", "label_binary": 0, "text_1": "was really feeling that impostor syndrome today, ladies.. 
but then i remembered when karlie 'I take programming classes at nyu' kloss did the 72 vogue questions and described an algorithm as \"a problem waiting to be solved\"", "token_idx_1": 4, "text_start_1": 24, "text_end_1": 32, "date_1": "2019-09", "text_2": "I wonder if u can spell imposter with 2 Os (like impostor)", "token_idx_2": 12, "text_start_2": 49, "text_end_2": 57, "date_2": "2020-09", "text_1_tokenized": ["was", "really", "feeling", "that", "impostor", "syndrome", "today", ",", "ladies", "..", "but", "then", "i", "remembered", "when", "karlie", "'", "I", "take", "programming", "classes", "at", "nyu", "'", "kloss", "did", "the", "72", "vogue", "questions", "and", "described", "an", "algorithm", "as", "\"", "a", "problem", "waiting", "to", "be", "solved", "\""], "text_2_tokenized": ["I", "wonder", "if", "u", "can", "spell", "imposter", "with", "2", "Os", "(", "like", "impostor", ")"]} -{"id": "1724-impostor", "word": "impostor", "label_binary": 0, "text_1": "Can someone tell me who replaced Andy Robertson with this impostor and where the real one is please?!", "token_idx_1": 10, "text_start_1": 58, "text_end_1": 66, "date_1": "2019-09", "text_2": "Facebook writing groups are bad for learning, but great for curing impostor syndrome! \ud83e\udd13", "token_idx_2": 12, "text_start_2": 67, "text_end_2": 75, "date_2": "2020-09", "text_1_tokenized": ["Can", "someone", "tell", "me", "who", "replaced", "Andy", "Robertson", "with", "this", "impostor", "and", "where", "the", "real", "one", "is", "please", "?", "!"], "text_2_tokenized": ["Facebook", "writing", "groups", "are", "bad", "for", "learning", ",", "but", "great", "for", "curing", "impostor", "syndrome", "!", "\ud83e\udd13"]} -{"id": "1725-impostor", "word": "impostor", "label_binary": 0, "text_1": "we might have a blackout and it's a big trigger for me... 
could you guys please send pictures of: izuru hajime komaeda kirigiri byakuya (impostor or thh both work ;D) makoto", "token_idx_1": 27, "text_start_1": 137, "text_end_1": 145, "date_1": "2019-09", "text_2": "era 3 white diamond would be interesting to play among us with id bet she'd just talk about steven and cool space stuff during the emergency meetings and just vibe when she's the impostor instead of actually playing as an impostor", "token_idx_2": 33, "text_start_2": 179, "text_end_2": 187, "date_2": "2020-09", "text_1_tokenized": ["we", "might", "have", "a", "blackout", "and", "it's", "a", "big", "trigger", "for", "me", "...", "could", "you", "guys", "please", "send", "pictures", "of", ":", "izuru", "hajime", "komaeda", "kirigiri", "byakuya", "(", "impostor", "or", "thh", "both", "work", ";D", ")", "makoto"], "text_2_tokenized": ["era", "3", "white", "diamond", "would", "be", "interesting", "to", "play", "among", "us", "with", "id", "bet", "she'd", "just", "talk", "about", "steven", "and", "cool", "space", "stuff", "during", "the", "emergency", "meetings", "and", "just", "vibe", "when", "she's", "the", "impostor", "instead", "of", "actually", "playing", "as", "an", "impostor"]} -{"id": "1726-impostor", "word": "impostor", "label_binary": 0, "text_1": "I don't mean to brag but i put the impostor in the impostor syndrome", "token_idx_1": 9, "text_start_1": 35, "text_end_1": 43, "date_1": "2019-09", "text_2": "just joined a public lobby in among us and some guy called me 3 different slurs then kicked me for calling him out when he was impostor", "token_idx_2": 26, "text_start_2": 127, "text_end_2": 135, "date_2": "2020-09", "text_1_tokenized": ["I", "don't", "mean", "to", "brag", "but", "i", "put", "the", "impostor", "in", "the", "impostor", "syndrome"], "text_2_tokenized": ["just", "joined", "a", "public", "lobby", "in", "among", "us", "and", "some", "guy", "called", "me", "3", "different", "slurs", "then", "kicked", "me", "for", "calling", "him", "out", "when", "he", "was", "impostor"]} -{"id": "1727-impostor", "word": "impostor", "label_binary": 0, "text_1": "Thing is, I don't really know where to start, after all that time! Because there's my thesis and also my impostor syndrome and also teaching and yet again my impostor syndrome... And also translating Old English into French (A. Night. Mare. And I'm not talking about horses)", "token_idx_1": 23, "text_start_1": 105, "text_end_1": 113, "date_1": "2019-09", "text_2": "Am I the impostor in among us? Yes? Do I get offended when someone suggests so? Also yes", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 17, "date_2": "2020-09", "text_1_tokenized": ["Thing", "is", ",", "I", "don't", "really", "know", "where", "to", "start", ",", "after", "all", "that", "time", "!", "Because", "there's", "my", "thesis", "and", "also", "my", "impostor", "syndrome", "and", "also", "teaching", "and", "yet", "again", "my", "impostor", "syndrome", "...", "And", "also", "translating", "Old", "English", "into", "French", "(", "A", ".", "Night", ".", "Mare", ".", "And", "I'm", "not", "talking", "about", "horses", ")"], "text_2_tokenized": ["Am", "I", "the", "impostor", "in", "among", "us", "?", "Yes", "?", "Do", "I", "get", "offended", "when", "someone", "suggests", "so", "?", "Also", "yes"]} -{"id": "1728-impostor", "word": "impostor", "label_binary": 0, "text_1": "Hey @shanselman, I'm brainstorming ideas for a quasi-'soft skills' session specifically on 'impostor syndrome' to offer at local user groups. 
Any tips from when you thought about your SXSW '15 proposal prepwork?", "token_idx_1": 15, "text_start_1": 92, "text_end_1": 100, "date_1": "2019-09", "text_2": "i've been the impostor 5 times in a row \ud83d\udc41\ud83d\udc44\ud83d\udc41", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 22, "date_2": "2020-09", "text_1_tokenized": ["Hey", "@shanselman", ",", "I'm", "brainstorming", "ideas", "for", "a", "quasi-'soft", "skills", "'", "session", "specifically", "on", "'", "impostor", "syndrome", "'", "to", "offer", "at", "local", "user", "groups", ".", "Any", "tips", "from", "when", "you", "thought", "about", "your", "SXSW", "'", "15", "proposal", "prepwork", "?"], "text_2_tokenized": ["i've", "been", "the", "impostor", "5", "times", "in", "a", "row", "\ud83d\udc41", "\ud83d\udc44", "\ud83d\udc41"]} -{"id": "1729-impostor", "word": "impostor", "label_binary": 0, "text_1": "Do any other music theorists get a big spoonful of impostor syndrome from the thought of having to present at a non-joint AMS?", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-09", "text_2": "played among us for the first time today and i was an impostor once i had no idea what to do", "token_idx_2": 12, "text_start_2": 54, "text_end_2": 62, "date_2": "2020-09", "text_1_tokenized": ["Do", "any", "other", "music", "theorists", "get", "a", "big", "spoonful", "of", "impostor", "syndrome", "from", "the", "thought", "of", "having", "to", "present", "at", "a", "non-joint", "AMS", "?"], "text_2_tokenized": ["played", "among", "us", "for", "the", "first", "time", "today", "and", "i", "was", "an", "impostor", "once", "i", "had", "no", "idea", "what", "to", "do"]} -{"id": "1730-impostor", "word": "impostor", "label_binary": 0, "text_1": "is impostor syndrome about your damn degree a thing because i just caught myself thinking i'm not qualified enough for jobs that literally ask for a biologist", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 11, "date_1": "2019-09", "text_2": "I haven't gotten impostor in 4fucking days", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 25, "date_2": "2020-09", "text_1_tokenized": ["is", "impostor", "syndrome", "about", "your", "damn", "degree", "a", "thing", "because", "i", "just", "caught", "myself", "thinking", "i'm", "not", "qualified", "enough", "for", "jobs", "that", "literally", "ask", "for", "a", "biologist"], "text_2_tokenized": ["I", "haven't", "gotten", "impostor", "in", "4fucking", "days"]} -{"id": "1731-impostor", "word": "impostor", "label_binary": 0, "text_1": "when will I stop feeling impostor syndrome !! \ud83d\ude43\ud83d\ude43\ud83d\ude43\ud83d\ude43\ud83d\ude43 ( do not actually vote on this poll )", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 33, "date_1": "2019-09", "text_2": "new day, another chance to be the impostor", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 42, "date_2": "2020-09", "text_1_tokenized": ["when", "will", "I", "stop", "feeling", "impostor", "syndrome", "!", "!", "\ud83d\ude43", "\ud83d\ude43", "\ud83d\ude43", "(", "do", "not", "actually", "vote", "on", "this", "poll", ")"], "text_2_tokenized": ["new", "day", ",", "another", "chance", "to", "be", "the", "impostor"]} -{"id": "1732-impostor", "word": "impostor", "label_binary": 0, "text_1": "Who else has friend impostor syndrome? Like.. Do they even like me? Do people actually think about me from time to time? 
Have I done something wrong so they won't talk to me anymore?", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 28, "date_1": "2019-09", "text_2": "/ hi guys we're playing hide and seek on among us <3 rules are: - no venting - only report body to get away from the seeker - the impostor is the seeker (obviously) CODE IS HXQFXQ", "token_idx_2": 30, "text_start_2": 133, "text_end_2": 141, "date_2": "2020-09", "text_1_tokenized": ["Who", "else", "has", "friend", "impostor", "syndrome", "?", "Like", "..", "Do", "they", "even", "like", "me", "?", "Do", "people", "actually", "think", "about", "me", "from", "time", "to", "time", "?", "Have", "I", "done", "something", "wrong", "so", "they", "won't", "talk", "to", "me", "anymore", "?"], "text_2_tokenized": ["/", "hi", "guys", "we're", "playing", "hide", "and", "seek", "on", "among", "us", "<3", "rules", "are", ":", "-", "no", "venting", "-", "only", "report", "body", "to", "get", "away", "from", "the", "seeker", "-", "the", "impostor", "is", "the", "seeker", "(", "obviously", ")", "CODE", "IS", "HXQFXQ"]} -{"id": "1733-impostor", "word": "impostor", "label_binary": 0, "text_1": "Q: What can institutions do to combat impostor syndrome? A: from @erichjarvis Institutions must take steps that helps folks feel a greater sense of belonging #sfn19", "token_idx_1": 8, "text_start_1": 38, "text_end_1": 46, "date_1": "2019-09", "text_2": "anyway if i played among us with bts id protect joonie from the impostor", "token_idx_2": 13, "text_start_2": 64, "text_end_2": 72, "date_2": "2020-09", "text_1_tokenized": ["Q", ":", "What", "can", "institutions", "do", "to", "combat", "impostor", "syndrome", "?", "A", ":", "from", "@erichjarvis", "Institutions", "must", "take", "steps", "that", "helps", "folks", "feel", "a", "greater", "sense", "of", "belonging", "#sfn19"], "text_2_tokenized": ["anyway", "if", "i", "played", "among", "us", "with", "bts", "id", "protect", "joonie", "from", "the", "impostor"]} -{"id": "1734-impostor", "word": "impostor", "label_binary": 0, "text_1": "**me, panicking** \u201cWait! Is it imposter syndrome or impostor syndrome??!\u201d", "token_idx_1": 15, "text_start_1": 52, "text_end_1": 60, "date_1": "2019-09", "text_2": "If you quit after being voted off as impostor you are the lowest of the low", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 45, "date_2": "2020-09", "text_1_tokenized": ["*", "*", "me", ",", "panicking", "*", "*", "\u201c", "Wait", "!", "Is", "it", "imposter", "syndrome", "or", "impostor", "syndrome", "?", "?", "!", "\u201d"], "text_2_tokenized": ["If", "you", "quit", "after", "being", "voted", "off", "as", "impostor", "you", "are", "the", "lowest", "of", "the", "low"]} -{"id": "1735-impostor", "word": "impostor", "label_binary": 0, "text_1": "Starting a new job next week and I can already feel the impostor syndrome creeping in.", "token_idx_1": 12, "text_start_1": 56, "text_end_1": 64, "date_1": "2019-09", "text_2": "Hate being the impostor Itna stress \ud83d\ude2d", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["Starting", "a", "new", "job", "next", "week", "and", "I", "can", "already", "feel", "the", "impostor", "syndrome", "creeping", "in", "."], "text_2_tokenized": ["Hate", "being", "the", "impostor", "Itna", "stress", "\ud83d\ude2d"]} -{"id": "1736-impostor", "word": "impostor", "label_binary": 0, "text_1": "Mindset is everything...it can mean the difference between succeeding and being held back. 
If I've been invited into the room, I belong there.- Marcella Barri\u00e9re of @Google responding to questions of how to manage impostor syndrome at today's Women of Influence #naiopmaevent", "token_idx_1": 40, "text_start_1": 214, "text_end_1": 222, "date_1": "2019-09", "text_2": "wait i accidently deleted my kyungho impostor tweet \ud83d\ude33\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_2": 6, "text_start_2": 37, "text_end_2": 45, "date_2": "2020-09", "text_1_tokenized": ["Mindset", "is", "everything", "...", "it", "can", "mean", "the", "difference", "between", "succeeding", "and", "being", "held", "back", ".", "If", "I've", "been", "invited", "into", "the", "room", ",", "I", "belong", "there", ".", "-", "Marcella", "Barri\u00e9re", "of", "@Google", "responding", "to", "questions", "of", "how", "to", "manage", "impostor", "syndrome", "at", "today's", "Women", "of", "Influence", "#naiopmaevent"], "text_2_tokenized": ["wait", "i", "accidently", "deleted", "my", "kyungho", "impostor", "tweet", "\ud83d\ude33", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "1737-impostor", "word": "impostor", "label_binary": 0, "text_1": "I would like to thank internet, social media and afrofeminism for showing me i didn't need to go to a fancy french school to be an artist. My impostor syndrome is slowly reclining and y'all not ready for when it's completely gone fatherfuckers\ud83d\ude0f", "token_idx_1": 30, "text_start_1": 142, "text_end_1": 150, "date_1": "2019-09", "text_2": "Got into an Among Us lobby today where some 8 year old who was playing for the first time kept following me because I was nice to them. Felt bad when I was eventually the impostor and had to betray that trust.", "token_idx_2": 36, "text_start_2": 171, "text_end_2": 179, "date_2": "2020-09", "text_1_tokenized": ["I", "would", "like", "to", "thank", "internet", ",", "social", "media", "and", "afrofeminism", "for", "showing", "me", "i", "didn't", "need", "to", "go", "to", "a", "fancy", "french", "school", "to", "be", "an", "artist", ".", "My", "impostor", "syndrome", "is", "slowly", "reclining", "and", "y'all", "not", "ready", "for", "when", "it's", "completely", "gone", "fatherfuckers", "\ud83d\ude0f"], "text_2_tokenized": ["Got", "into", "an", "Among", "Us", "lobby", "today", "where", "some", "8", "year", "old", "who", "was", "playing", "for", "the", "first", "time", "kept", "following", "me", "because", "I", "was", "nice", "to", "them", ".", "Felt", "bad", "when", "I", "was", "eventually", "the", "impostor", "and", "had", "to", "betray", "that", "trust", "."]} -{"id": "1738-impostor", "word": "impostor", "label_binary": 0, "text_1": "Third week of start-up life. Running on adrenaline, knackered, filled with impostor syndrome but also the most motivated and fulfilled I have felt in years, so I'll take that. Keep on trucking.", "token_idx_1": 14, "text_start_1": 75, "text_end_1": 83, "date_1": "2019-09", "text_2": "You're a sneaky little impostor, (Aren't you, aren't you?) You're a sneaky little impostor, (Aren't you, aren't you?) 
But you're among us, I can feel it, I can feel it in my bones, So why don't you show yourself, And leave us all alone?", "token_idx_2": 4, "text_start_2": 23, "text_end_2": 31, "date_2": "2020-09", "text_1_tokenized": ["Third", "week", "of", "start-up", "life", ".", "Running", "on", "adrenaline", ",", "knackered", ",", "filled", "with", "impostor", "syndrome", "but", "also", "the", "most", "motivated", "and", "fulfilled", "I", "have", "felt", "in", "years", ",", "so", "I'll", "take", "that", ".", "Keep", "on", "trucking", "."], "text_2_tokenized": ["You're", "a", "sneaky", "little", "impostor", ",", "(", "Aren't", "you", ",", "aren't", "you", "?", ")", "You're", "a", "sneaky", "little", "impostor", ",", "(", "Aren't", "you", ",", "aren't", "you", "?", ")", "But", "you're", "among", "us", ",", "I", "can", "feel", "it", ",", "I", "can", "feel", "it", "in", "my", "bones", ",", "So", "why", "don't", "you", "show", "yourself", ",", "And", "leave", "us", "all", "alone", "?"]} -{"id": "1739-impostor", "word": "impostor", "label_binary": 0, "text_1": "Why doesn't Justin Trudeau have impostor syndrome?", "token_idx_1": 5, "text_start_1": 32, "text_end_1": 40, "date_1": "2019-09", "text_2": "If Buhari is truly dead, Nigeria has even bigger issues. Who is running the country? An impostor? Why was this not made known to the public? What the actual fuck? These politicians really have fooled us all", "token_idx_2": 19, "text_start_2": 88, "text_end_2": 96, "date_2": "2020-09", "text_1_tokenized": ["Why", "doesn't", "Justin", "Trudeau", "have", "impostor", "syndrome", "?"], "text_2_tokenized": ["If", "Buhari", "is", "truly", "dead", ",", "Nigeria", "has", "even", "bigger", "issues", ".", "Who", "is", "running", "the", "country", "?", "An", "impostor", "?", "Why", "was", "this", "not", "made", "known", "to", "the", "public", "?", "What", "the", "actual", "fuck", "?", "These", "politicians", "really", "have", "fooled", "us", "all"]} -{"id": "1740-impostor", "word": "impostor", "label_binary": 0, "text_1": "ah yes the impostor syndrome", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 19, "date_1": "2019-09", "text_2": "i just a tiktok impostor (hyunjin) k words crewmate (felix) by breaking his neck you undertsand the rest right", "token_idx_2": 4, "text_start_2": 16, "text_end_2": 24, "date_2": "2020-09", "text_1_tokenized": ["ah", "yes", "the", "impostor", "syndrome"], "text_2_tokenized": ["i", "just", "a", "tiktok", "impostor", "(", "hyunjin", ")", "k", "words", "crewmate", "(", "felix", ")", "by", "breaking", "his", "neck", "you", "undertsand", "the", "rest", "right"]} -{"id": "1741-impostor", "word": "impostor", "label_binary": 0, "text_1": "Who am I? Who's that impostor you been following I think you should ask has she got a real photo of this in Seth or just a piddle? I have the account keys how about that?", "token_idx_1": 6, "text_start_1": 21, "text_end_1": 29, "date_1": "2019-09", "text_2": "im playing among us and i got impostor 4 times in a row? 
that's like a 0.0016% chance of happening according to google", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["Who", "am", "I", "?", "Who's", "that", "impostor", "you", "been", "following", "I", "think", "you", "should", "ask", "has", "she", "got", "a", "real", "photo", "of", "this", "in", "Seth", "or", "just", "a", "piddle", "?", "I", "have", "the", "account", "keys", "how", "about", "that", "?"], "text_2_tokenized": ["im", "playing", "among", "us", "and", "i", "got", "impostor", "4", "times", "in", "a", "row", "?", "that's", "like", "a", "0.0016", "%", "chance", "of", "happening", "according", "to", "google"]} -{"id": "1742-impostor", "word": "impostor", "label_binary": 0, "text_1": "I feel so drained... impostor syndrome, being outspoken, feeling invisible, betrayal feeling.... results of today's mtg.... #loneliness too", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 29, "date_1": "2019-09", "text_2": "i'm not the impostor how could i fit my big ass in the vent? lol", "token_idx_2": 3, "text_start_2": 12, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["I", "feel", "so", "drained", "...", "impostor", "syndrome", ",", "being", "outspoken", ",", "feeling", "invisible", ",", "betrayal", "feeling", "...", "results", "of", "today's", "mtg", "...", "#loneliness", "too"], "text_2_tokenized": ["i'm", "not", "the", "impostor", "how", "could", "i", "fit", "my", "big", "ass", "in", "the", "vent", "?", "lol"]} -{"id": "1743-impostor", "word": "impostor", "label_binary": 0, "text_1": "what is happeningggggg, my whole tl is filled with impostor sonicfox accnts", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 59, "date_1": "2019-09", "text_2": "1 impostor vs 6 crewmates hm takdahal ez kill je semua. What a clutch by Reiner", "token_idx_2": 1, "text_start_2": 2, "text_end_2": 10, "date_2": "2020-09", "text_1_tokenized": ["what", "is", "happeningggggg", ",", "my", "whole", "tl", "is", "filled", "with", "impostor", "sonicfox", "accnts"], "text_2_tokenized": ["1", "impostor", "vs", "6", "crewmates", "hm", "takdahal", "ez", "kill", "je", "semua", ".", "What", "a", "clutch", "by", "Reiner"]} -{"id": "1744-impostor", "word": "impostor", "label_binary": 0, "text_1": "You are not an impostor. You are deserving of the good life \ud83c\udf39", "token_idx_1": 4, "text_start_1": 15, "text_end_1": 23, "date_1": "2019-09", "text_2": "1. Join Among Us game 2. Am impostor 3. 
Four people immediately quit because they're not impostor It's not even fun when you literally have to just fine two people to kill", "token_idx_2": 9, "text_start_2": 28, "text_end_2": 36, "date_2": "2020-09", "text_1_tokenized": ["You", "are", "not", "an", "impostor", ".", "You", "are", "deserving", "of", "the", "good", "life", "\ud83c\udf39"], "text_2_tokenized": ["1", ".", "Join", "Among", "Us", "game", "2", ".", "Am", "impostor", "3", ".", "Four", "people", "immediately", "quit", "because", "they're", "not", "impostor", "It's", "not", "even", "fun", "when", "you", "literally", "have", "to", "just", "fine", "two", "people", "to", "kill"]} -{"id": "1745-impostor", "word": "impostor", "label_binary": 0, "text_1": "Feeling a bit of relief from impostor syndrome today as the woman hired into a role instead of me was fired for being incompetent, and I received a call praising my contributions from someone who does not give praise.", "token_idx_1": 6, "text_start_1": 29, "text_end_1": 37, "date_1": "2019-09", "text_2": "zed comic be like shen: red is pretty sus zed: what no im not [red has been ejected from the kinkou] [red was not the impostor] [one imposter remains]", "token_idx_2": 30, "text_start_2": 118, "text_end_2": 126, "date_2": "2020-09", "text_1_tokenized": ["Feeling", "a", "bit", "of", "relief", "from", "impostor", "syndrome", "today", "as", "the", "woman", "hired", "into", "a", "role", "instead", "of", "me", "was", "fired", "for", "being", "incompetent", ",", "and", "I", "received", "a", "call", "praising", "my", "contributions", "from", "someone", "who", "does", "not", "give", "praise", "."], "text_2_tokenized": ["zed", "comic", "be", "like", "shen", ":", "red", "is", "pretty", "sus", "zed", ":", "what", "no", "im", "not", "[", "red", "has", "been", "ejected", "from", "the", "kinkou", "]", "[", "red", "was", "not", "the", "impostor", "]", "[", "one", "imposter", "remains", "]"]} -{"id": "1746-impostor", "word": "impostor", "label_binary": 0, "text_1": "Finally started The Good Place season 4 and I'm gonna CALL IT right now that at the end of ep 1 when Michael puts the demon on the train, the Michael that comes back is actually the Bad Place Michael skin suit impostor \ud83d\udc40 Bad Janet's gloating about how they're too stupid to >>", "token_idx_1": 43, "text_start_1": 210, "text_end_1": 218, "date_1": "2019-09", "text_2": "U think ur unlucky, imagine playing among us for three hours and getting impostor twice", "token_idx_2": 14, "text_start_2": 73, "text_end_2": 81, "date_2": "2020-09", "text_1_tokenized": ["Finally", "started", "The", "Good", "Place", "season", "4", "and", "I'm", "gonna", "CALL", "IT", "right", "now", "that", "at", "the", "end", "of", "ep", "1", "when", "Michael", "puts", "the", "demon", "on", "the", "train", ",", "the", "Michael", "that", "comes", "back", "is", "actually", "the", "Bad", "Place", "Michael", "skin", "suit", "impostor", "\ud83d\udc40", "Bad", "Janet's", "gloating", "about", "how", "they're", "too", "stupid", "to", ">", ">"], "text_2_tokenized": ["U", "think", "ur", "unlucky", ",", "imagine", "playing", "among", "us", "for", "three", "hours", "and", "getting", "impostor", "twice"]} -{"id": "1747-impostor", "word": "impostor", "label_binary": 0, "text_1": "How to live with impostor syndrome; asking for a friend of course.", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 25, "date_1": "2019-09", "text_2": "\u201cYeah and then you killed me and I wasn't the impostor.\u201d", "token_idx_2": 11, "text_start_2": 46, "text_end_2": 54, "date_2": 
"2020-09", "text_1_tokenized": ["How", "to", "live", "with", "impostor", "syndrome", ";", "asking", "for", "a", "friend", "of", "course", "."], "text_2_tokenized": ["\u201c", "Yeah", "and", "then", "you", "killed", "me", "and", "I", "wasn't", "the", "impostor", ".", "\u201d"]} -{"id": "1748-impostor", "word": "impostor", "label_binary": 0, "text_1": "This certain rotomdex episode, I wanna talk about it! The episode where rotom gets separated from his pokedex. He encounters another rotom and rotemdex(less) Gets stuck in this conveniently shaped washing machine, whoops. Now the impostor is in the dex. (continue in thread)", "token_idx_1": 43, "text_start_1": 230, "text_end_1": 238, "date_1": "2019-09", "text_2": "worst impostor play ever @claudeandclair", "token_idx_2": 1, "text_start_2": 6, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["This", "certain", "rotomdex", "episode", ",", "I", "wanna", "talk", "about", "it", "!", "The", "episode", "where", "rotom", "gets", "separated", "from", "his", "pokedex", ".", "He", "encounters", "another", "rotom", "and", "rotemdex", "(", "less", ")", "Gets", "stuck", "in", "this", "conveniently", "shaped", "washing", "machine", ",", "whoops", ".", "Now", "the", "impostor", "is", "in", "the", "dex", ".", "(", "continue", "in", "thread", ")"], "text_2_tokenized": ["worst", "impostor", "play", "ever", "@claudeandclair"]} -{"id": "1749-impostor", "word": "impostor", "label_binary": 1, "text_1": "Today's #modwrite is being served with a large side-serving of impostor syndrome.", "token_idx_1": 10, "text_start_1": 63, "text_end_1": 71, "date_1": "2019-09", "text_2": "Every time I have to apply for a job or scholarship my impostor syndrome kicks in heavy. like, I know I am a bad bitch but sometimes I forget.", "token_idx_2": 12, "text_start_2": 55, "text_end_2": 63, "date_2": "2020-09", "text_1_tokenized": ["Today's", "#modwrite", "is", "being", "served", "with", "a", "large", "side-serving", "of", "impostor", "syndrome", "."], "text_2_tokenized": ["Every", "time", "I", "have", "to", "apply", "for", "a", "job", "or", "scholarship", "my", "impostor", "syndrome", "kicks", "in", "heavy", ".", "like", ",", "I", "know", "I", "am", "a", "bad", "bitch", "but", "sometimes", "I", "forget", "."]} -{"id": "1750-impostor", "word": "impostor", "label_binary": 0, "text_1": "I don't know if it's impostor syndrome or what but sometimes when I remember I'm a college professor I get a good laugh", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 29, "date_1": "2019-09", "text_2": "In among us im an actual god at impostor but as a crewmate I only focus on my tasks and never find bodies", "token_idx_2": 8, "text_start_2": 32, "text_end_2": 40, "date_2": "2020-09", "text_1_tokenized": ["I", "don't", "know", "if", "it's", "impostor", "syndrome", "or", "what", "but", "sometimes", "when", "I", "remember", "I'm", "a", "college", "professor", "I", "get", "a", "good", "laugh"], "text_2_tokenized": ["In", "among", "us", "im", "an", "actual", "god", "at", "impostor", "but", "as", "a", "crewmate", "I", "only", "focus", "on", "my", "tasks", "and", "never", "find", "bodies"]} -{"id": "1751-impostor", "word": "impostor", "label_binary": 1, "text_1": "Well today has been a thoroughly humbling experience as a software engineer. It's not impostor syndrome if people literally think you're a shitty engineer right?", "token_idx_1": 15, "text_start_1": 86, "text_end_1": 94, "date_1": "2019-09", "text_2": "Most days I have impostor syndrome about my anxiety being a real disability. 
Like, somebody obviously made a mistake and I should just suck it up. Then I remember I tried to check the mail last night and somebody was out walking their dog so I still haven't been outside since.", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 25, "date_2": "2020-09", "text_1_tokenized": ["Well", "today", "has", "been", "a", "thoroughly", "humbling", "experience", "as", "a", "software", "engineer", ".", "It's", "not", "impostor", "syndrome", "if", "people", "literally", "think", "you're", "a", "shitty", "engineer", "right", "?"], "text_2_tokenized": ["Most", "days", "I", "have", "impostor", "syndrome", "about", "my", "anxiety", "being", "a", "real", "disability", ".", "Like", ",", "somebody", "obviously", "made", "a", "mistake", "and", "I", "should", "just", "suck", "it", "up", ".", "Then", "I", "remember", "I", "tried", "to", "check", "the", "mail", "last", "night", "and", "somebody", "was", "out", "walking", "their", "dog", "so", "I", "still", "haven't", "been", "outside", "since", "."]} -{"id": "1752-impostor", "word": "impostor", "label_binary": 0, "text_1": "it's a silly, small thing, but something that gives me big ADHD impostor syndrome is that I listen to a LOT of audiobooks and pocasts and I'm always seeing people say \"oh I can't listen to those because ADHD\" and I need to keep reminding myself not all ADHD people work the same", "token_idx_1": 14, "text_start_1": 64, "text_end_1": 72, "date_1": "2019-09", "text_2": "holy fuck i'm such a good impostor it's starting to scare me,, also i'm gon make an anubis impostor fancam \ud83d\udd7a\ud83c\udffd", "token_idx_2": 6, "text_start_2": 26, "text_end_2": 34, "date_2": "2020-09", "text_1_tokenized": ["it's", "a", "silly", ",", "small", "thing", ",", "but", "something", "that", "gives", "me", "big", "ADHD", "impostor", "syndrome", "is", "that", "I", "listen", "to", "a", "LOT", "of", "audiobooks", "and", "pocasts", "and", "I'm", "always", "seeing", "people", "say", "\"", "oh", "I", "can't", "listen", "to", "those", "because", "ADHD", "\"", "and", "I", "need", "to", "keep", "reminding", "myself", "not", "all", "ADHD", "people", "work", "the", "same"], "text_2_tokenized": ["holy", "fuck", "i'm", "such", "a", "good", "impostor", "it's", "starting", "to", "scare", "me", ",", ",", "also", "i'm", "gon", "make", "an", "anubis", "impostor", "fancam", "\ud83d\udd7a\ud83c\udffd"]} -{"id": "1753-impostor", "word": "impostor", "label_binary": 1, "text_1": "Why will u not believe Buhari is getting married tomorrow, when u took your bath with salt water against Ebola. You even went further to believe Jubril El Sudani is the impostor President. I expect u to be shouting Jubril is getting married and not Buhari. 
Clowns everywhere\ud83d\ude02", "token_idx_1": 33, "text_start_1": 169, "text_end_1": 177, "date_1": "2019-09", "text_2": "it's always \u201cwho is the impostor\u201d never \u201chow is the impostor\u201d\ud83d\ude14", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 32, "date_2": "2020-09", "text_1_tokenized": ["Why", "will", "u", "not", "believe", "Buhari", "is", "getting", "married", "tomorrow", ",", "when", "u", "took", "your", "bath", "with", "salt", "water", "against", "Ebola", ".", "You", "even", "went", "further", "to", "believe", "Jubril", "El", "Sudani", "is", "the", "impostor", "President", ".", "I", "expect", "u", "to", "be", "shouting", "Jubril", "is", "getting", "married", "and", "not", "Buhari", ".", "Clowns", "everywhere", "\ud83d\ude02"], "text_2_tokenized": ["it's", "always", "\u201c", "who", "is", "the", "impostor", "\u201d", "never", "\u201c", "how", "is", "the", "impostor", "\u201d", "\ud83d\ude14"]} -{"id": "1754-impostor", "word": "impostor", "label_binary": 0, "text_1": "Trivia at Elation tonight. Come on out at 6:30. The theme is 90s impostor movies.", "token_idx_1": 15, "text_start_1": 65, "text_end_1": 73, "date_1": "2019-09", "text_2": "When you get impostor 5 times in a row and then no one believes you when you're a crewmate because you are, and I quote, \"something of a serial killer\" \ud83d\ude14", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 21, "date_2": "2020-09", "text_1_tokenized": ["Trivia", "at", "Elation", "tonight", ".", "Come", "on", "out", "at", "6:30", ".", "The", "theme", "is", "90s", "impostor", "movies", "."], "text_2_tokenized": ["When", "you", "get", "impostor", "5", "times", "in", "a", "row", "and", "then", "no", "one", "believes", "you", "when", "you're", "a", "crewmate", "because", "you", "are", ",", "and", "I", "quote", ",", "\"", "something", "of", "a", "serial", "killer", "\"", "\ud83d\ude14"]} -{"id": "1755-impostor", "word": "impostor", "label_binary": 0, "text_1": "Homies gonna say they have impostor syndrome. But they're not actually good enough to have impostor syndrome. Ya, it's me. I'm Sherlock Holmes.", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 35, "date_1": "2019-09", "text_2": "these impostor videos are actually getting millions of views literally just lie", "token_idx_2": 1, "text_start_2": 6, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["Homies", "gonna", "say", "they", "have", "impostor", "syndrome", ".", "But", "they're", "not", "actually", "good", "enough", "to", "have", "impostor", "syndrome", ".", "Ya", ",", "it's", "me", ".", "I'm", "Sherlock", "Holmes", "."], "text_2_tokenized": ["these", "impostor", "videos", "are", "actually", "getting", "millions", "of", "views", "literally", "just", "lie"]} -{"id": "1756-impostor", "word": "impostor", "label_binary": 1, "text_1": "my first session as a DM involved exposing an impostor, a cult and political assassination. 
I'm excited.", "token_idx_1": 9, "text_start_1": 46, "text_end_1": 54, "date_1": "2019-09", "text_2": "man i ain't watch the debate who was the impostor?", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 49, "date_2": "2020-09", "text_1_tokenized": ["my", "first", "session", "as", "a", "DM", "involved", "exposing", "an", "impostor", ",", "a", "cult", "and", "political", "assassination", ".", "I'm", "excited", "."], "text_2_tokenized": ["man", "i", "ain't", "watch", "the", "debate", "who", "was", "the", "impostor", "?"]} -{"id": "1757-impostor", "word": "impostor", "label_binary": 0, "text_1": "Indians and impostor from different legs of first man.", "token_idx_1": 2, "text_start_1": 12, "text_end_1": 20, "date_1": "2019-09", "text_2": "if im impostor i simply kill cano", "token_idx_2": 2, "text_start_2": 6, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["Indians", "and", "impostor", "from", "different", "legs", "of", "first", "man", "."], "text_2_tokenized": ["if", "im", "impostor", "i", "simply", "kill", "cano"]} -{"id": "1758-impostor", "word": "impostor", "label_binary": 0, "text_1": "I received a chorus of \u201cbravos\u201d after my reading tonight and my impostor syndrome is so deep-seared that my first thought was was being mocked. #callthetherapist", "token_idx_1": 14, "text_start_1": 64, "text_end_1": 72, "date_1": "2019-09", "text_2": "not the fan accs i follow on ig still following that hc impostor acc eugh", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 64, "date_2": "2020-09", "text_1_tokenized": ["I", "received", "a", "chorus", "of", "\u201c", "bravos", "\u201d", "after", "my", "reading", "tonight", "and", "my", "impostor", "syndrome", "is", "so", "deep-seared", "that", "my", "first", "thought", "was", "was", "being", "mocked", ".", "#callthetherapist"], "text_2_tokenized": ["not", "the", "fan", "accs", "i", "follow", "on", "ig", "still", "following", "that", "hc", "impostor", "acc", "eugh"]} -{"id": "1759-impostor", "word": "impostor", "label_binary": 0, "text_1": "People who have me in my timeline probably think an impostor has taken over my account given how much praise I've been giving Sanders for last night \ud83d\ude02", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 60, "date_1": "2019-09", "text_2": "Played my first Among Us games today. Didn't realize my heart would implode when \u201cimpostor\u201d popped on the screen. #AmongUs", "token_idx_2": 16, "text_start_2": 82, "text_end_2": 90, "date_2": "2020-09", "text_1_tokenized": ["People", "who", "have", "me", "in", "my", "timeline", "probably", "think", "an", "impostor", "has", "taken", "over", "my", "account", "given", "how", "much", "praise", "I've", "been", "giving", "Sanders", "for", "last", "night", "\ud83d\ude02"], "text_2_tokenized": ["Played", "my", "first", "Among", "Us", "games", "today", ".", "Didn't", "realize", "my", "heart", "would", "implode", "when", "\u201c", "impostor", "\u201d", "popped", "on", "the", "screen", ".", "#AmongUs"]} -{"id": "1760-impostor", "word": "impostor", "label_binary": 0, "text_1": "\u201cI made it a goal in my career to make more women come along with me.\u201d In this episode of #HowIGotHere, Emily Graham talks about breaking the glass ceiling, contending with the gender pay gap, and overcoming impostor syndrome.", "token_idx_1": 43, "text_start_1": 208, "text_end_1": 216, "date_1": "2019-09", "text_2": "who's the best at being the impostor in among us? would you play with orbits sometime? 
@loonatheworld #LOONA #\uc774\ub2ec\uc758\uc18c\ub140 #ASK_LOONA", "token_idx_2": 6, "text_start_2": 28, "text_end_2": 36, "date_2": "2020-09", "text_1_tokenized": ["\u201c", "I", "made", "it", "a", "goal", "in", "my", "career", "to", "make", "more", "women", "come", "along", "with", "me", ".", "\u201d", "In", "this", "episode", "of", "#HowIGotHere", ",", "Emily", "Graham", "talks", "about", "breaking", "the", "glass", "ceiling", ",", "contending", "with", "the", "gender", "pay", "gap", ",", "and", "overcoming", "impostor", "syndrome", "."], "text_2_tokenized": ["who's", "the", "best", "at", "being", "the", "impostor", "in", "among", "us", "?", "would", "you", "play", "with", "orbits", "sometime", "?", "@loonatheworld", "#LOONA", "#\uc774\ub2ec\uc758\uc18c\ub140", "#ASK_LOONA"]} -{"id": "1761-impostor", "word": "impostor", "label_binary": 0, "text_1": "Waiting for something epic to materialize can be missed if you forget that \"epic\" things usually happen as the accumulation of the mundane. Reminding myself to enjoy the journey, or rather, the meetings, the Slacks, the Zooms, the uncertainty, the peek-a-boo impostor syndrome...", "token_idx_1": 50, "text_start_1": 259, "text_end_1": 267, "date_1": "2019-09", "text_2": "if your name is \u201c13\u201d in among us and you were the color red and we played 5 rounds together and you were the one who lied for me the entire game and let me get away with being the impostor HI BESTIE COME BACK I MISS YOU", "token_idx_2": 42, "text_start_2": 180, "text_end_2": 188, "date_2": "2020-09", "text_1_tokenized": ["Waiting", "for", "something", "epic", "to", "materialize", "can", "be", "missed", "if", "you", "forget", "that", "\"", "epic", "\"", "things", "usually", "happen", "as", "the", "accumulation", "of", "the", "mundane", ".", "Reminding", "myself", "to", "enjoy", "the", "journey", ",", "or", "rather", ",", "the", "meetings", ",", "the", "Slacks", ",", "the", "Zooms", ",", "the", "uncertainty", ",", "the", "peek-a-boo", "impostor", "syndrome", "..."], "text_2_tokenized": ["if", "your", "name", "is", "\u201c", "13", "\u201d", "in", "among", "us", "and", "you", "were", "the", "color", "red", "and", "we", "played", "5", "rounds", "together", "and", "you", "were", "the", "one", "who", "lied", "for", "me", "the", "entire", "game", "and", "let", "me", "get", "away", "with", "being", "the", "impostor", "HI", "BESTIE", "COME", "BACK", "I", "MISS", "YOU"]} -{"id": "1762-impostor", "word": "impostor", "label_binary": 0, "text_1": "I unfortunately couldn't afford a ticket to PAX this year (and felt too impostor syndrome-y to apply for media sjkldfhjskhdf), but I might come on one of the days with candy to say hi to people outside (: lemme know if you wanna cross paths~", "token_idx_1": 14, "text_start_1": 72, "text_end_1": 80, "date_1": "2019-09", "text_2": "Sis: I saw yellow vent Everyone:*voting yellow* Yellow: that's fair *votes on themselves * Yellow was the impostor lmao", "token_idx_2": 24, "text_start_2": 106, "text_end_2": 114, "date_2": "2020-09", "text_1_tokenized": ["I", "unfortunately", "couldn't", "afford", "a", "ticket", "to", "PAX", "this", "year", "(", "and", "felt", "too", "impostor", "syndrome-y", "to", "apply", "for", "media", "sjkldfhjskhdf", ")", ",", "but", "I", "might", "come", "on", "one", "of", "the", "days", "with", "candy", "to", "say", "hi", "to", "people", "outside", "(:", "lemme", "know", "if", "you", "wanna", "cross", "paths", "~"], "text_2_tokenized": ["Sis", ":", "I", "saw", "yellow", "vent", "Everyone", ":", "*", "voting", 
"yellow", "*", "Yellow", ":", "that's", "fair", "*", "votes", "on", "themselves", "*", "Yellow", "was", "the", "impostor", "lmao"]} -{"id": "1763-impostor", "word": "impostor", "label_binary": 1, "text_1": "Cure for impostor syndrome: realising that a big part of expertise is knowing that you don't know the answer, instead of thinking that you do.", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 17, "date_1": "2019-09", "text_2": "fuck fuck fuck impostor syndrome is here. goddammit why can't i draw without reference i hate it", "token_idx_2": 3, "text_start_2": 15, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["Cure", "for", "impostor", "syndrome", ":", "realising", "that", "a", "big", "part", "of", "expertise", "is", "knowing", "that", "you", "don't", "know", "the", "answer", ",", "instead", "of", "thinking", "that", "you", "do", "."], "text_2_tokenized": ["fuck", "fuck", "fuck", "impostor", "syndrome", "is", "here", ".", "goddammit", "why", "can't", "i", "draw", "without", "reference", "i", "hate", "it"]} -{"id": "1764-impostor", "word": "impostor", "label_binary": 0, "text_1": "What if it's not impostor syndrome and I just suck \ud83d\ude10", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 25, "date_1": "2019-09", "text_2": "Dudut the greatest impostor amp", "token_idx_2": 3, "text_start_2": 19, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["What", "if", "it's", "not", "impostor", "syndrome", "and", "I", "just", "suck", "\ud83d\ude10"], "text_2_tokenized": ["Dudut", "the", "greatest", "impostor", "amp"]} -{"id": "1765-impostor", "word": "impostor", "label_binary": 0, "text_1": "\u201cI graduated from library school not even knowing if I belonged in librarianship let alone in academic libraries.\u201d - Ebony McDonald on impostor syndrome and her role as the first diversity resident at LSU Libraries #louisuc19", "token_idx_1": 25, "text_start_1": 135, "text_end_1": 143, "date_1": "2019-09", "text_2": "I've played like 10 plus games and still haven't gotten impostor bro", "token_idx_2": 10, "text_start_2": 56, "text_end_2": 64, "date_2": "2020-09", "text_1_tokenized": ["\u201c", "I", "graduated", "from", "library", "school", "not", "even", "knowing", "if", "I", "belonged", "in", "librarianship", "let", "alone", "in", "academic", "libraries", ".", "\u201d", "-", "Ebony", "McDonald", "on", "impostor", "syndrome", "and", "her", "role", "as", "the", "first", "diversity", "resident", "at", "LSU", "Libraries", "#louisuc19"], "text_2_tokenized": ["I've", "played", "like", "10", "plus", "games", "and", "still", "haven't", "gotten", "impostor", "bro"]} -{"id": "1766-impostor", "word": "impostor", "label_binary": 0, "text_1": "You may wonder why I, an experienced editor, am worried about taking an editing test. The answer is, what if I'm actually not good at this thing that I chose to do for a living specifically bc I was good at it? 
That's impostor syndrome, baby!!", "token_idx_1": 48, "text_start_1": 218, "text_end_1": 226, "date_1": "2019-09", "text_2": "There should be a hide and seek mode in among us You can't report a body or sabotage anything Set impostor view distance really low (high for crewmate) The impostor has to find/kill everyone The last person alive will wins", "token_idx_2": 20, "text_start_2": 98, "text_end_2": 106, "date_2": "2020-09", "text_1_tokenized": ["You", "may", "wonder", "why", "I", ",", "an", "experienced", "editor", ",", "am", "worried", "about", "taking", "an", "editing", "test", ".", "The", "answer", "is", ",", "what", "if", "I'm", "actually", "not", "good", "at", "this", "thing", "that", "I", "chose", "to", "do", "for", "a", "living", "specifically", "bc", "I", "was", "good", "at", "it", "?", "That's", "impostor", "syndrome", ",", "baby", "!", "!"], "text_2_tokenized": ["There", "should", "be", "a", "hide", "and", "seek", "mode", "in", "among", "us", "You", "can't", "report", "a", "body", "or", "sabotage", "anything", "Set", "impostor", "view", "distance", "really", "low", "(", "high", "for", "crewmate", ")", "The", "impostor", "has", "to", "find", "/", "kill", "everyone", "The", "last", "person", "alive", "will", "wins"]} -{"id": "1767-impostor", "word": "impostor", "label_binary": 0, "text_1": "you know what would be great? to not have crippling impostor syndrome in every single aspect of my life", "token_idx_1": 11, "text_start_1": 52, "text_end_1": 60, "date_1": "2019-09", "text_2": "I can't be the only one who doesn't mind being a crew mate right being impostor is stressful", "token_idx_2": 15, "text_start_2": 71, "text_end_2": 79, "date_2": "2020-09", "text_1_tokenized": ["you", "know", "what", "would", "be", "great", "?", "to", "not", "have", "crippling", "impostor", "syndrome", "in", "every", "single", "aspect", "of", "my", "life"], "text_2_tokenized": ["I", "can't", "be", "the", "only", "one", "who", "doesn't", "mind", "being", "a", "crew", "mate", "right", "being", "impostor", "is", "stressful"]} -{"id": "1768-impostor", "word": "impostor", "label_binary": 1, "text_1": "\ud83d\udcd6 50: Another impostor assuming name of Philip appears in Macedonia but is vanquished by quaestor Lucius Tremellius", "token_idx_1": 4, "text_start_1": 14, "text_end_1": 22, "date_1": "2019-09", "text_2": "after doing like 8 hours of homework yesterday, a fellow impostor ratted me out (they locked the doors after i killed someone and we were stuck together) and after playing dumb until everyone voted me, i got so mad that i", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 65, "date_2": "2020-09", "text_1_tokenized": ["\ud83d\udcd6", "50", ":", "Another", "impostor", "assuming", "name", "of", "Philip", "appears", "in", "Macedonia", "but", "is", "vanquished", "by", "quaestor", "Lucius", "Tremellius"], "text_2_tokenized": ["after", "doing", "like", "8", "hours", "of", "homework", "yesterday", ",", "a", "fellow", "impostor", "ratted", "me", "out", "(", "they", "locked", "the", "doors", "after", "i", "killed", "someone", "and", "we", "were", "stuck", "together", ")", "and", "after", "playing", "dumb", "until", "everyone", "voted", "me", ",", "i", "got", "so", "mad", "that", "i"]} -{"id": "3053-lotte", "word": "lotte", "label_binary": 1, "text_1": "litr me and my sister were talking abt this the other day but his lotte performance hair was the peak", "token_idx_1": 14, "text_start_1": 66, "text_end_1": 71, "date_1": "2019-09", "text_2": "i'm soo sad i couldn't watch the lotte duty free concert bc of 
school \ud83d\ude14\ud83d\ude20but i have watched the bangbangcon, bangbangcon the live and map of the soul on:e", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["litr", "me", "and", "my", "sister", "were", "talking", "abt", "this", "the", "other", "day", "but", "his", "lotte", "performance", "hair", "was", "the", "peak"], "text_2_tokenized": ["i'm", "soo", "sad", "i", "couldn't", "watch", "the", "lotte", "duty", "free", "concert", "bc", "of", "school", "\ud83d\ude14", "\ud83d\ude20", "but", "i", "have", "watched", "the", "bangbangcon", ",", "bangbangcon", "the", "live", "and", "map", "of", "the", "soul", "on", ":", "e"]} -{"id": "3054-lotte", "word": "lotte", "label_binary": 1, "text_1": "I still need to booked between lotte world or everland but my lil sis refused to get the Halloween special event ticket \ud83e\udd23\ud83e\udd23\ud83e\udd23\ud83e\udd23\ud83e\udd23\ud83e\udd23 She said it's a BIG NO \ud83e\udd23\ud83e\udd23\ud83e\udd23\ud83e\udd23 just a regular ticket is OK", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-09", "text_2": "oh my god when jungkook showed up to the lotte concert with his cherry hair i lost it i remember it like it was yesterday", "token_idx_2": 9, "text_start_2": 41, "text_end_2": 46, "date_2": "2020-09", "text_1_tokenized": ["I", "still", "need", "to", "booked", "between", "lotte", "world", "or", "everland", "but", "my", "lil", "sis", "refused", "to", "get", "the", "Halloween", "special", "event", "ticket", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23", "She", "said", "it's", "a", "BIG", "NO", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23", "just", "a", "regular", "ticket", "is", "OK"], "text_2_tokenized": ["oh", "my", "god", "when", "jungkook", "showed", "up", "to", "the", "lotte", "concert", "with", "his", "cherry", "hair", "i", "lost", "it", "i", "remember", "it", "like", "it", "was", "yesterday"]} -{"id": "3055-lotte", "word": "lotte", "label_binary": 0, "text_1": "Iced mocha latte I smoke a lotte", "token_idx_1": 6, "text_start_1": 27, "text_end_1": 32, "date_1": "2019-09", "text_2": "I\ufe0f could've watched the lotte performance but I\ufe0f had the absolute worst migraine last night... 
when I\ufe0f get really mad I\ufe0f get migraines now EMBARRASSING", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["Iced", "mocha", "latte", "I", "smoke", "a", "lotte"], "text_2_tokenized": ["I\ufe0f", "could've", "watched", "the", "lotte", "performance", "but", "I\ufe0f", "had", "the", "absolute", "worst", "migraine", "last", "night", "...", "when", "I\ufe0f", "get", "really", "mad", "I\ufe0f", "get", "migraines", "now", "EMBARRASSING"]} -{"id": "3056-lotte", "word": "lotte", "label_binary": 0, "text_1": "Best bodo lotte world ni", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-09", "text_2": "taehyun was NOT playing at lotte family concert-- he was hitting notes LEFT and RIGHT !!", "token_idx_2": 5, "text_start_2": 27, "text_end_2": 32, "date_2": "2020-09", "text_1_tokenized": ["Best", "bodo", "lotte", "world", "ni"], "text_2_tokenized": ["taehyun", "was", "NOT", "playing", "at", "lotte", "family", "concert", "-", "-", "he", "was", "hitting", "notes", "LEFT", "and", "RIGHT", "!", "!"]} -{"id": "3057-lotte", "word": "lotte", "label_binary": 1, "text_1": "Hope lotte open beside my workplace ~June 2020 in Changi airport!", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 10, "date_1": "2019-09", "text_2": "I'm patiently waiting for that lotte duty free concert photos to be uploaded on @bts_bighit and am still waiting for idol performance photo.... \ud83d\ude42", "token_idx_2": 5, "text_start_2": 31, "text_end_2": 36, "date_2": "2020-09", "text_1_tokenized": ["Hope", "lotte", "open", "beside", "my", "workplace", "~", "June", "2020", "in", "Changi", "airport", "!"], "text_2_tokenized": ["I'm", "patiently", "waiting", "for", "that", "lotte", "duty", "free", "concert", "photos", "to", "be", "uploaded", "on", "@bts_bighit", "and", "am", "still", "waiting", "for", "idol", "performance", "photo", "...", "\ud83d\ude42"]} -{"id": "3058-lotte", "word": "lotte", "label_binary": 0, "text_1": "i still think about lotte duty free concert jungkook everyday \ud83d\ude14", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 25, "date_1": "2019-09", "text_2": "no one was gonna tell us that lotte happened and I MISSED IT ?? 
YALL LACKING", "token_idx_2": 7, "text_start_2": 30, "text_end_2": 35, "date_2": "2020-09", "text_1_tokenized": ["i", "still", "think", "about", "lotte", "duty", "free", "concert", "jungkook", "everyday", "\ud83d\ude14"], "text_2_tokenized": ["no", "one", "was", "gonna", "tell", "us", "that", "lotte", "happened", "and", "I", "MISSED", "IT", "?", "?", "YALL", "LACKING"]} -{"id": "3059-lotte", "word": "lotte", "label_binary": 0, "text_1": "If y'all ever go to korea pls know that lotte isn't just lotte world \ud83d\ude2d", "token_idx_1": 9, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-09", "text_2": "oh to cosplay lotte and akko from little witch academia", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-09", "text_1_tokenized": ["If", "y'all", "ever", "go", "to", "korea", "pls", "know", "that", "lotte", "isn't", "just", "lotte", "world", "\ud83d\ude2d"], "text_2_tokenized": ["oh", "to", "cosplay", "lotte", "and", "akko", "from", "little", "witch", "academia"]} -{"id": "3060-lotte", "word": "lotte", "label_binary": 1, "text_1": "lotte mart, walmart -> lontemart, wholemart", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "La performance de bangtan en el lotte duty free family concert > toda.", "token_idx_2": 6, "text_start_2": 32, "text_end_2": 37, "date_2": "2020-09", "text_1_tokenized": ["lotte", "mart", ",", "walmart", "->", "lontemart", ",", "wholemart"], "text_2_tokenized": ["La", "performance", "de", "bangtan", "en", "el", "lotte", "duty", "free", "family", "concert", ">", "toda", "."]} -{"id": "3061-lotte", "word": "lotte", "label_binary": 0, "text_1": "I bought massaman curry paste at lotte so you know what that means \ud83d\ude08", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 38, "date_1": "2019-09", "text_2": "wtf i just noticed I missed the lotte concert ... i forgot how tired I was yesterday \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_2": 7, "text_start_2": 32, "text_end_2": 37, "date_2": "2020-09", "text_1_tokenized": ["I", "bought", "massaman", "curry", "paste", "at", "lotte", "so", "you", "know", "what", "that", "means", "\ud83d\ude08"], "text_2_tokenized": ["wtf", "i", "just", "noticed", "I", "missed", "the", "lotte", "concert", "...", "i", "forgot", "how", "tired", "I", "was", "yesterday", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "3062-lotte", "word": "lotte", "label_binary": 0, "text_1": "lotte is snoring so loud. 
baby", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "I dont think lotte found my face hehehe", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-09", "text_1_tokenized": ["lotte", "is", "snoring", "so", "loud", ".", "baby"], "text_2_tokenized": ["I", "dont", "think", "lotte", "found", "my", "face", "hehehe"]} -{"id": "3063-lotte", "word": "lotte", "label_binary": 0, "text_1": "\u201cBoxy\u201d is never a word I thought of to describe yuzu but lotte is pushing the boundaries today", "token_idx_1": 14, "text_start_1": 57, "text_end_1": 62, "date_1": "2019-09", "text_2": "okay im finally home time to watch the lotte concert", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 44, "date_2": "2020-09", "text_1_tokenized": ["\u201c", "Boxy", "\u201d", "is", "never", "a", "word", "I", "thought", "of", "to", "describe", "yuzu", "but", "lotte", "is", "pushing", "the", "boundaries", "today"], "text_2_tokenized": ["okay", "im", "finally", "home", "time", "to", "watch", "the", "lotte", "concert"]} -{"id": "3064-lotte", "word": "lotte", "label_binary": 0, "text_1": "Mina was really in the lotte cf sis was out here working in Korea", "token_idx_1": 5, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-09", "text_2": "nsfw mention???? everyday i log onto twt. everyday i read lotte tweeting abt how 4 kagepro characters have canonically canoodled. everyday i die a little more", "token_idx_2": 14, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-09", "text_1_tokenized": ["Mina", "was", "really", "in", "the", "lotte", "cf", "sis", "was", "out", "here", "working", "in", "Korea"], "text_2_tokenized": ["nsfw", "mention", "?", "?", "?", "everyday", "i", "log", "onto", "twt", ".", "everyday", "i", "read", "lotte", "tweeting", "abt", "how", "4", "kagepro", "characters", "have", "canonically", "canoodled", ".", "everyday", "i", "die", "a", "little", "more"]} -{"id": "3065-lotte", "word": "lotte", "label_binary": 1, "text_1": "I recently watched the bts run episode where they were running around lotte and omg seokjin actor", "token_idx_1": 12, "text_start_1": 70, "text_end_1": 75, "date_1": "2019-09", "text_2": "i'm finally watching the lotte performances and the harmonies in make it right \ud83d\ude2d", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["I", "recently", "watched", "the", "bts", "run", "episode", "where", "they", "were", "running", "around", "lotte", "and", "omg", "seokjin", "actor"], "text_2_tokenized": ["i'm", "finally", "watching", "the", "lotte", "performances", "and", "the", "harmonies", "in", "make", "it", "right", "\ud83d\ude2d"]} -{"id": "3066-lotte", "word": "lotte", "label_binary": 0, "text_1": "dont tell lotte but might change my sf9 bias \ud83d\ude17", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-09", "text_2": "I said I wasn't going to get into more groups for awhile but I've been listening to txt since their lotte performance I'm a \ud83e\udd21", "token_idx_2": 20, "text_start_2": 100, "text_end_2": 105, "date_2": "2020-09", "text_1_tokenized": ["dont", "tell", "lotte", "but", "might", "change", "my", "sf9", "bias", "\ud83d\ude17"], "text_2_tokenized": ["I", "said", "I", "wasn't", "going", "to", "get", "into", "more", "groups", "for", "awhile", "but", "I've", "been", "listening", "to", "txt", "since", "their", "lotte", "performance", "I'm", "a", "\ud83e\udd21"]} -{"id": "3067-lotte", "word": "lotte", "label_binary": 0, "text_1": "jinyoung 
carrying his laundry in a plastic bag to a coin laundry when the man could 10000000% afford hotel laundry service is honestly just another positive data point for my theory that he got those massive thick-strapped dust masks on bulk sale at like lotte mart", "token_idx_1": 45, "text_start_1": 255, "text_end_1": 260, "date_1": "2019-09", "text_2": "me and. lotte r getting leggings 4 school help", "token_idx_2": 3, "text_start_2": 8, "text_end_2": 13, "date_2": "2020-09", "text_1_tokenized": ["jinyoung", "carrying", "his", "laundry", "in", "a", "plastic", "bag", "to", "a", "coin", "laundry", "when", "the", "man", "could", "10000000", "%", "afford", "hotel", "laundry", "service", "is", "honestly", "just", "another", "positive", "data", "point", "for", "my", "theory", "that", "he", "got", "those", "massive", "thick-strapped", "dust", "masks", "on", "bulk", "sale", "at", "like", "lotte", "mart"], "text_2_tokenized": ["me", "and", ".", "lotte", "r", "getting", "leggings", "4", "school", "help"]} -{"id": "3068-lotte", "word": "lotte", "label_binary": 0, "text_1": "So far guesses for album cover are purple, green and blue. Blue from bts merch shop.. Green from bangtantv logo from vlog announcemnet\u2026. And that one leaked photo of album covers or sumn\u2026. Purple from lotte family concert vcr\u2026 Soo which one is it going to be?", "token_idx_1": 42, "text_start_1": 201, "text_end_1": 206, "date_1": "2019-09", "text_2": "toukenranbu flamima clear file collab more like how many lotte almond chocolate crisp can one nyannyan eat without perishing from this world.....", "token_idx_2": 9, "text_start_2": 57, "text_end_2": 62, "date_2": "2020-09", "text_1_tokenized": ["So", "far", "guesses", "for", "album", "cover", "are", "purple", ",", "green", "and", "blue", ".", "Blue", "from", "bts", "merch", "shop", "..", "Green", "from", "bangtantv", "logo", "from", "vlog", "announcemnet", "\u2026", ".", "And", "that", "one", "leaked", "photo", "of", "album", "covers", "or", "sumn", "\u2026", ".", "Purple", "from", "lotte", "family", "concert", "vcr", "\u2026", "Soo", "which", "one", "is", "it", "going", "to", "be", "?"], "text_2_tokenized": ["toukenranbu", "flamima", "clear", "file", "collab", "more", "like", "how", "many", "lotte", "almond", "chocolate", "crisp", "can", "one", "nyannyan", "eat", "without", "perishing", "from", "this", "world", "..."]} -{"id": "3069-lotte", "word": "lotte", "label_binary": 1, "text_1": "to be completely honest i haven't watched a full run episode in one sitting since lotte world", "token_idx_1": 15, "text_start_1": 82, "text_end_1": 87, "date_1": "2019-09", "text_2": "rewatching the lotte family concert and i cant get over txt \u2018s performance", "token_idx_2": 2, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["to", "be", "completely", "honest", "i", "haven't", "watched", "a", "full", "run", "episode", "in", "one", "sitting", "since", "lotte", "world"], "text_2_tokenized": ["rewatching", "the", "lotte", "family", "concert", "and", "i", "cant", "get", "over", "txt", "\u2018", "s", "performance"]} -{"id": "3070-lotte", "word": "lotte", "label_binary": 0, "text_1": "what the fuck is a little lotte", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 31, "date_1": "2019-09", "text_2": "stolen judge me based off some of my kins luz, kipo, asher, marcy, anne, akko, diana, amity, lotte, lapis lazuli, connie (su), gwen (spiderverse), willow, skara", "token_idx_2": 25, "text_start_2": 93, "text_end_2": 98, "date_2": "2020-09", "text_1_tokenized": 
["what", "the", "fuck", "is", "a", "little", "lotte"], "text_2_tokenized": ["stolen", "judge", "me", "based", "off", "some", "of", "my", "kins", "luz", ",", "kipo", ",", "asher", ",", "marcy", ",", "anne", ",", "akko", ",", "diana", ",", "amity", ",", "lotte", ",", "lapis", "lazuli", ",", "connie", "(", "su", ")", ",", "gwen", "(", "spiderverse", ")", ",", "willow", ",", "skara"]} -{"id": "3071-lotte", "word": "lotte", "label_binary": 0, "text_1": "so Paramore isnt coming back and lotte just needs to learn to accept it", "token_idx_1": 6, "text_start_1": 33, "text_end_1": 38, "date_1": "2019-09", "text_2": "also i fell asleep before bts came on the lotte concert thingy\ud83d\ude14", "token_idx_2": 9, "text_start_2": 42, "text_end_2": 47, "date_2": "2020-09", "text_1_tokenized": ["so", "Paramore", "isnt", "coming", "back", "and", "lotte", "just", "needs", "to", "learn", "to", "accept", "it"], "text_2_tokenized": ["also", "i", "fell", "asleep", "before", "bts", "came", "on", "the", "lotte", "concert", "thingy", "\ud83d\ude14"]} -{"id": "3072-lotte", "word": "lotte", "label_binary": 1, "text_1": "Going lotte world instead of airport for iKON We call this progress", "token_idx_1": 1, "text_start_1": 6, "text_end_1": 11, "date_1": "2019-09", "text_2": "ive just seen the lotte duty free bts add and,,, so cute :((", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["Going", "lotte", "world", "instead", "of", "airport", "for", "iKON", "We", "call", "this", "progress"], "text_2_tokenized": ["ive", "just", "seen", "the", "lotte", "duty", "free", "bts", "add", "and", ",", ",", ",", "so", "cute", ":(", "("]} -{"id": "3073-lotte", "word": "lotte", "label_binary": 1, "text_1": "Friendly reminder that their vcr at the lotte concert said something coming Oct 4 and now all this shit happening", "token_idx_1": 7, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-09", "text_2": "i can't believe i didn't wake up for lotte concert....i hate myself", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 42, "date_2": "2020-09", "text_1_tokenized": ["Friendly", "reminder", "that", "their", "vcr", "at", "the", "lotte", "concert", "said", "something", "coming", "Oct", "4", "and", "now", "all", "this", "shit", "happening"], "text_2_tokenized": ["i", "can't", "believe", "i", "didn't", "wake", "up", "for", "lotte", "concert", "...", "i", "hate", "myself"]} -{"id": "3074-lotte", "word": "lotte", "label_binary": 1, "text_1": "Watching the lotte family concert and having a breakdown haha don't mind me", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 18, "date_1": "2019-09", "text_2": "i finally saw the lotte duty free performance and now i watched the little ads of each member and excuse me but yoongi's was so wholesome omggg", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["Watching", "the", "lotte", "family", "concert", "and", "having", "a", "breakdown", "haha", "don't", "mind", "me"], "text_2_tokenized": ["i", "finally", "saw", "the", "lotte", "duty", "free", "performance", "and", "now", "i", "watched", "the", "little", "ads", "of", "each", "member", "and", "excuse", "me", "but", "yoongi's", "was", "so", "wholesome", "omggg"]} -{"id": "3075-lotte", "word": "lotte", "label_binary": 1, "text_1": "Just watched a 13 min video on how to make your jk mattel doll into jk's lotte concert look slsjdhdkld it was so satisfying \ud83d\ude0d\ud83e\udd2d", "token_idx_1": 16, "text_start_1": 73, "text_end_1": 78, "date_1": 
"2019-09", "text_2": "I join first concert in bangbangcon! But if we count lotte world and tiny desk concert and todays concert this is gonna be my 4th concert!! \ud83e\udd27\ud83d\udc9c", "token_idx_2": 11, "text_start_2": 53, "text_end_2": 58, "date_2": "2020-09", "text_1_tokenized": ["Just", "watched", "a", "13", "min", "video", "on", "how", "to", "make", "your", "jk", "mattel", "doll", "into", "jk's", "lotte", "concert", "look", "slsjdhdkld", "it", "was", "so", "satisfying", "\ud83d\ude0d", "\ud83e\udd2d"], "text_2_tokenized": ["I", "join", "first", "concert", "in", "bangbangcon", "!", "But", "if", "we", "count", "lotte", "world", "and", "tiny", "desk", "concert", "and", "todays", "concert", "this", "is", "gonna", "be", "my", "4th", "concert", "!", "!", "\ud83e\udd27", "\ud83d\udc9c"]} -{"id": "3076-lotte", "word": "lotte", "label_binary": 0, "text_1": "In lotte park and it feels weird to see the carousell and viking where BTS stood", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-09", "text_2": "Nah I'm not believe bts are reading thirst tweets. Y'all lied about there being 3 hours of that lotte concert", "token_idx_2": 19, "text_start_2": 96, "text_end_2": 101, "date_2": "2020-09", "text_1_tokenized": ["In", "lotte", "park", "and", "it", "feels", "weird", "to", "see", "the", "carousell", "and", "viking", "where", "BTS", "stood"], "text_2_tokenized": ["Nah", "I'm", "not", "believe", "bts", "are", "reading", "thirst", "tweets", ".", "Y'all", "lied", "about", "there", "being", "3", "hours", "of", "that", "lotte", "concert"]} -{"id": "3077-lotte", "word": "lotte", "label_binary": 0, "text_1": "Someone bring bts lotte duty free song on my tl", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-09", "text_2": "Wholesome memory, but I remember jisoo mc-ing with v at sbs gayo, and jisoo waved and interacted with armys and she was so shocked when k-armys screamed for her after \ud83d\ude2d and how when bp's perf was cut off at lotte, karmys waved at jisooagain and she waved back eye-", "token_idx_2": 42, "text_start_2": 207, "text_end_2": 212, "date_2": "2020-09", "text_1_tokenized": ["Someone", "bring", "bts", "lotte", "duty", "free", "song", "on", "my", "tl"], "text_2_tokenized": ["Wholesome", "memory", ",", "but", "I", "remember", "jisoo", "mc-ing", "with", "v", "at", "sbs", "gayo", ",", "and", "jisoo", "waved", "and", "interacted", "with", "armys", "and", "she", "was", "so", "shocked", "when", "k-armys", "screamed", "for", "her", "after", "\ud83d\ude2d", "and", "how", "when", "bp's", "perf", "was", "cut", "off", "at", "lotte", ",", "karmys", "waved", "at", "jisooagain", "and", "she", "waved", "back", "eye", "-"]} -{"id": "3078-lotte", "word": "lotte", "label_binary": 0, "text_1": "I'll cry I love char lotte :(", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-09", "text_2": "me when me hayley n jay go to lotte world and everland", "token_idx_2": 8, "text_start_2": 30, "text_end_2": 35, "date_2": "2020-09", "text_1_tokenized": ["I'll", "cry", "I", "love", "char", "lotte", ":("], "text_2_tokenized": ["me", "when", "me", "hayley", "n", "jay", "go", "to", "lotte", "world", "and", "everland"]} -{"id": "3079-lotte", "word": "lotte", "label_binary": 1, "text_1": "I'm stuck between going to lotte world tomorrow and living it up, or yolo'ing to Busan this weekend and see if I can get a ticket for Princess Aya, lol. 
Don't have money to do both \ud83e\udd27", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 32, "date_1": "2019-09", "text_2": "wait omg the lotte concert is bh fam concert???", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-09", "text_1_tokenized": ["I'm", "stuck", "between", "going", "to", "lotte", "world", "tomorrow", "and", "living", "it", "up", ",", "or", "yolo'ing", "to", "Busan", "this", "weekend", "and", "see", "if", "I", "can", "get", "a", "ticket", "for", "Princess", "Aya", ",", "lol", ".", "Don't", "have", "money", "to", "do", "both", "\ud83e\udd27"], "text_2_tokenized": ["wait", "omg", "the", "lotte", "concert", "is", "bh", "fam", "concert", "?", "?", "?"]} -{"id": "3080-lotte", "word": "lotte", "label_binary": 0, "text_1": "kinda wonder what next major ad gig loona will have we had lotte app, heejin lg and app, 1/3 innis free (we need ot12), that gaming app (heejin to kim lip), yeojin mylive, heejin ft hyunjin avajar.. what else \ud83e\udd14 I wanna see a @ChipsAhoy cf next (please get them one) or fashion", "token_idx_1": 12, "text_start_1": 59, "text_end_1": 64, "date_1": "2019-09", "text_2": "the 2019 lotte concert jungkook lives in my head rent free", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["kinda", "wonder", "what", "next", "major", "ad", "gig", "loona", "will", "have", "we", "had", "lotte", "app", ",", "heejin", "lg", "and", "app", ",", "1/3", "innis", "free", "(", "we", "need", "ot12", ")", ",", "that", "gaming", "app", "(", "heejin", "to", "kim", "lip", ")", ",", "yeojin", "mylive", ",", "heejin", "ft", "hyunjin", "avajar", "..", "what", "else", "\ud83e\udd14", "I", "wanna", "see", "a", "@ChipsAhoy", "cf", "next", "(", "please", "get", "them", "one", ")", "or", "fashion"], "text_2_tokenized": ["the", "2019", "lotte", "concert", "jungkook", "lives", "in", "my", "head", "rent", "free"]} -{"id": "3081-lotte", "word": "lotte", "label_binary": 1, "text_1": "Jungkook from the lotte family concert still haunts me", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-09", "text_2": "soo lotte concert, jason derulo and album release man we feasting a lot in a day", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-09", "text_1_tokenized": ["Jungkook", "from", "the", "lotte", "family", "concert", "still", "haunts", "me"], "text_2_tokenized": ["soo", "lotte", "concert", ",", "jason", "derulo", "and", "album", "release", "man", "we", "feasting", "a", "lot", "in", "a", "day"]} -{"id": "3082-lotte", "word": "lotte", "label_binary": 1, "text_1": "kpop groups rly make lotte duty free look great it was. Underwhelming", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-09", "text_2": "uhm? 
was the lotte concert a bh family concert with hwang chiyeul?\ud83e\udd78", "token_idx_2": 4, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-09", "text_1_tokenized": ["kpop", "groups", "rly", "make", "lotte", "duty", "free", "look", "great", "it", "was", ".", "Underwhelming"], "text_2_tokenized": ["uhm", "?", "was", "the", "lotte", "concert", "a", "bh", "family", "concert", "with", "hwang", "chiyeul", "?", "\ud83e\udd78"]} -{"id": "3083-lotte", "word": "lotte", "label_binary": 0, "text_1": "lotte stop beep beeping me challenge", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "i need to vent bc holy shit txt are literally monster roookies wtf i'm watching runaway from the lotte family concert and you can tell their mics are fucking ON, ESPECIALLY TAEHYUN like, you can hear the inhales and the exhales as they sing, and you can hear near the +", "token_idx_2": 18, "text_start_2": 97, "text_end_2": 102, "date_2": "2020-09", "text_1_tokenized": ["lotte", "stop", "beep", "beeping", "me", "challenge"], "text_2_tokenized": ["i", "need", "to", "vent", "bc", "holy", "shit", "txt", "are", "literally", "monster", "roookies", "wtf", "i'm", "watching", "runaway", "from", "the", "lotte", "family", "concert", "and", "you", "can", "tell", "their", "mics", "are", "fucking", "ON", ",", "ESPECIALLY", "TAEHYUN", "like", ",", "you", "can", "hear", "the", "inhales", "and", "the", "exhales", "as", "they", "sing", ",", "and", "you", "can", "hear", "near", "the", "+"]} -{"id": "3084-lotte", "word": "lotte", "label_binary": 1, "text_1": "is lotte world worth it??", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-09", "text_2": "not at how i'm only running on 2 hours of sleep bc i stayed up watching the lotte concert\ud83d\ude13", "token_idx_2": 17, "text_start_2": 76, "text_end_2": 81, "date_2": "2020-09", "text_1_tokenized": ["is", "lotte", "world", "worth", "it", "?", "?"], "text_2_tokenized": ["not", "at", "how", "i'm", "only", "running", "on", "2", "hours", "of", "sleep", "bc", "i", "stayed", "up", "watching", "the", "lotte", "concert", "\ud83d\ude13"]} -{"id": "3085-lotte", "word": "lotte", "label_binary": 1, "text_1": "i just hope they do the shows fine and for it not to be like the lotte concert when they were so tired and exhausted", "token_idx_1": 16, "text_start_1": 65, "text_end_1": 70, "date_1": "2019-09", "text_2": "ive been ded today here huh \ud83d\ude2d\ud83d\ude2d didnt even get to watch the lotte concert,,,, maybe ill be ia for a while, i have to rest \ud83e\udd17", "token_idx_2": 14, "text_start_2": 59, "text_end_2": 64, "date_2": "2020-09", "text_1_tokenized": ["i", "just", "hope", "they", "do", "the", "shows", "fine", "and", "for", "it", "not", "to", "be", "like", "the", "lotte", "concert", "when", "they", "were", "so", "tired", "and", "exhausted"], "text_2_tokenized": ["ive", "been", "ded", "today", "here", "huh", "\ud83d\ude2d", "\ud83d\ude2d", "didnt", "even", "get", "to", "watch", "the", "lotte", "concert", ",", ",", ",", "maybe", "ill", "be", "ia", "for", "a", "while", ",", "i", "have", "to", "rest", "\ud83e\udd17"]} -{"id": "3086-lotte", "word": "lotte", "label_binary": 1, "text_1": "aw i wanna go ice skating! i miss ice skating in lotte world \ud83d\ude14", "token_idx_1": 12, "text_start_1": 49, "text_end_1": 54, "date_1": "2019-09", "text_2": "yo i miss lotte chicken nuggets from when i was in korea...... 
those hit different", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 15, "date_2": "2020-09", "text_1_tokenized": ["aw", "i", "wanna", "go", "ice", "skating", "!", "i", "miss", "ice", "skating", "in", "lotte", "world", "\ud83d\ude14"], "text_2_tokenized": ["yo", "i", "miss", "lotte", "chicken", "nuggets", "from", "when", "i", "was", "in", "korea", "...", "those", "hit", "different"]} -{"id": "3087-lotte", "word": "lotte", "label_binary": 1, "text_1": "i bought the latelier perfume last week when they had the pop up in lotte and i won a free cica foundation on a lucky draw but i dont think the foundation shade matches me so hmmm do i do a giveaway or sell it", "token_idx_1": 14, "text_start_1": 68, "text_end_1": 73, "date_1": "2019-09", "text_2": "does anyone have that pic of no air kevin in a green jacket??? its an airport pic,, not the lotte world one", "token_idx_2": 24, "text_start_2": 92, "text_end_2": 97, "date_2": "2020-09", "text_1_tokenized": ["i", "bought", "the", "latelier", "perfume", "last", "week", "when", "they", "had", "the", "pop", "up", "in", "lotte", "and", "i", "won", "a", "free", "cica", "foundation", "on", "a", "lucky", "draw", "but", "i", "dont", "think", "the", "foundation", "shade", "matches", "me", "so", "hmmm", "do", "i", "do", "a", "giveaway", "or", "sell", "it"], "text_2_tokenized": ["does", "anyone", "have", "that", "pic", "of", "no", "air", "kevin", "in", "a", "green", "jacket", "?", "?", "?", "its", "an", "airport", "pic", ",", ",", "not", "the", "lotte", "world", "one"]} -{"id": "3088-lotte", "word": "lotte", "label_binary": 1, "text_1": "are we ready to discuss how jungkooks best hair length was at the lotte concert", "token_idx_1": 13, "text_start_1": 66, "text_end_1": 71, "date_1": "2019-09", "text_2": "I can't believe I skipped lotte family concert.. I mean, I didn't even know that bts will perform today, I feel so strange.", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-09", "text_1_tokenized": ["are", "we", "ready", "to", "discuss", "how", "jungkooks", "best", "hair", "length", "was", "at", "the", "lotte", "concert"], "text_2_tokenized": ["I", "can't", "believe", "I", "skipped", "lotte", "family", "concert", "..", "I", "mean", ",", "I", "didn't", "even", "know", "that", "bts", "will", "perform", "today", ",", "I", "feel", "so", "strange", "."]} -{"id": "3089-lotte", "word": "lotte", "label_binary": 0, "text_1": "lost in japan shawn x lotte concert jungkook... 
we wouldn't be ready for the TASTE", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 27, "date_1": "2019-09", "text_2": "mots:one's my time made 2019 lotte's idol look like a playground game", "token_idx_2": 7, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-09", "text_1_tokenized": ["lost", "in", "japan", "shawn", "x", "lotte", "concert", "jungkook", "...", "we", "wouldn't", "be", "ready", "for", "the", "TASTE"], "text_2_tokenized": ["mots", ":", "one's", "my", "time", "made", "2019", "lotte's", "idol", "look", "like", "a", "playground", "game"]} -{"id": "3090-lotte", "word": "lotte", "label_binary": 1, "text_1": "My friend bought me tickets to lotte world \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\udc95", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-09", "text_2": "I'm watching lotte family concert with my mom and she ask me whose my bias, i dont know what to sayyy im at the point of my life that i dont what to choose", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-09", "text_1_tokenized": ["My", "friend", "bought", "me", "tickets", "to", "lotte", "world", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\udc95"], "text_2_tokenized": ["I'm", "watching", "lotte", "family", "concert", "with", "my", "mom", "and", "she", "ask", "me", "whose", "my", "bias", ",", "i", "dont", "know", "what", "to", "sayyy", "im", "at", "the", "point", "of", "my", "life", "that", "i", "dont", "what", "to", "choose"]} -{"id": "3091-lotte", "word": "lotte", "label_binary": 1, "text_1": "there's a whole lot of pointing going on in the concept trailer, combined with the lotte commercial it's kinda sus \ud83e\udd14 i might make a compilation @TXT_members @TXT_bighit", "token_idx_1": 16, "text_start_1": 83, "text_end_1": 88, "date_1": "2019-09", "text_2": "txt na lotte duty free mv de any album do bts oq esta acontecendo", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 12, "date_2": "2020-09", "text_1_tokenized": ["there's", "a", "whole", "lot", "of", "pointing", "going", "on", "in", "the", "concept", "trailer", ",", "combined", "with", "the", "lotte", "commercial", "it's", "kinda", "sus", "\ud83e\udd14", "i", "might", "make", "a", "compilation", "@TXT_members", "@TXT_bighit"], "text_2_tokenized": ["txt", "na", "lotte", "duty", "free", "mv", "de", "any", "album", "do", "bts", "oq", "esta", "acontecendo"]} -{"id": "3092-lotte", "word": "lotte", "label_binary": 1, "text_1": "first it was Interpark fkning up now it's lotte \ud83e\udd21", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-09", "text_2": "Omg i just watched an ep. of the 9th when they went to lotte world and Minho was sooo excited, he had a big smile on his face the whole time. 
I haven\u2018t seen him like this in forever \u2639\ufe0f", "token_idx_2": 14, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-09", "text_1_tokenized": ["first", "it", "was", "Interpark", "fkning", "up", "now", "it's", "lotte", "\ud83e\udd21"], "text_2_tokenized": ["Omg", "i", "just", "watched", "an", "ep", ".", "of", "the", "9th", "when", "they", "went", "to", "lotte", "world", "and", "Minho", "was", "sooo", "excited", ",", "he", "had", "a", "big", "smile", "on", "his", "face", "the", "whole", "time", ".", "I", "haven", "\u2018", "t", "seen", "him", "like", "this", "in", "forever", "\u2639", "\ufe0f"]} -{"id": "3093-lotte", "word": "lotte", "label_binary": 1, "text_1": "Today run bts was epic as the Christmas one or the one in lotte world, the boys competitiveness is..\ud83d\ude28\ud83d\ude01 @BTS_twt", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 63, "date_1": "2019-09", "text_2": "so we've got namjoon's live on youtube, cns 1 year anniversary, the lotte online concert, tiktok ot7, and the new album announcement today \ud83e\udd29\ud83e\udd29\ud83e\udd29", "token_idx_2": 14, "text_start_2": 68, "text_end_2": 73, "date_2": "2020-09", "text_1_tokenized": ["Today", "run", "bts", "was", "epic", "as", "the", "Christmas", "one", "or", "the", "one", "in", "lotte", "world", ",", "the", "boys", "competitiveness", "is", "..", "\ud83d\ude28", "\ud83d\ude01", "@BTS_twt"], "text_2_tokenized": ["so", "we've", "got", "namjoon's", "live", "on", "youtube", ",", "cns", "1", "year", "anniversary", ",", "the", "lotte", "online", "concert", ",", "tiktok", "ot7", ",", "and", "the", "new", "album", "announcement", "today", "\ud83e\udd29", "\ud83e\udd29", "\ud83e\udd29"]} -{"id": "3094-lotte", "word": "lotte", "label_binary": 1, "text_1": "please omg does anyone remember the crowd at the lotte duty concert this year?? like the crowd went \ud83d\udca4\ud83d\udca4 until twice an bts performed that was HILARIOUS", "token_idx_1": 9, "text_start_1": 49, "text_end_1": 54, "date_1": "2019-09", "text_2": "dang, lol look at @BTS_twt flexing those live vocals on lotte", "token_idx_2": 11, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-09", "text_1_tokenized": ["please", "omg", "does", "anyone", "remember", "the", "crowd", "at", "the", "lotte", "duty", "concert", "this", "year", "?", "?", "like", "the", "crowd", "went", "\ud83d\udca4", "\ud83d\udca4", "until", "twice", "an", "bts", "performed", "that", "was", "HILARIOUS"], "text_2_tokenized": ["dang", ",", "lol", "look", "at", "@BTS_twt", "flexing", "those", "live", "vocals", "on", "lotte"]} -{"id": "3095-lotte", "word": "lotte", "label_binary": 0, "text_1": "also rp tl ugly cause lotte is plastered all over it so yoinks! cant go anywhere . 
might actually do my homework before midnight for once instead", "token_idx_1": 5, "text_start_1": 22, "text_end_1": 27, "date_1": "2019-09", "text_2": "watch it be for some damn fila or lotte duty free commercial thats not shown until march 2021", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["also", "rp", "tl", "ugly", "cause", "lotte", "is", "plastered", "all", "over", "it", "so", "yoinks", "!", "cant", "go", "anywhere", ".", "might", "actually", "do", "my", "homework", "before", "midnight", "for", "once", "instead"], "text_2_tokenized": ["watch", "it", "be", "for", "some", "damn", "fila", "or", "lotte", "duty", "free", "commercial", "thats", "not", "shown", "until", "march", "2021"]} -{"id": "3096-lotte", "word": "lotte", "label_binary": 1, "text_1": "im going to lotte world on saturday im so exciteeeeeed", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 17, "date_1": "2019-09", "text_2": "lotte performance, yoongi in That fit earlier, the savage love dance thingy nd remix(??? \ud83d\udc40) nd the be pre orders suddenly dropping SERIOUSLY LET ME BREATHEYFHGHJGVURRTGCDTGD", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 5, "date_2": "2020-09", "text_1_tokenized": ["im", "going", "to", "lotte", "world", "on", "saturday", "im", "so", "exciteeeeeed"], "text_2_tokenized": ["lotte", "performance", ",", "yoongi", "in", "That", "fit", "earlier", ",", "the", "savage", "love", "dance", "thingy", "nd", "remix", "(", "?", "?", "?", "\ud83d\udc40", ")", "nd", "the", "be", "pre", "orders", "suddenly", "dropping", "SERIOUSLY", "LET", "ME", "BREATHEYFHGHJGVURRTGCDTGD"]} -{"id": "3097-lotte", "word": "lotte", "label_binary": 0, "text_1": "don't lie \u2022 - like 2/3 years ago \u2022 - a few months ago \u2022 - nah \u2022 - never \u270c \u2022 - like one \u2022 - c \u2022 - w \u2022 - nah \u2022 - lotte sjdh hi love \u2022 - nope \u2022 - nah hun \u2022 - on the cheek yes ofc \ud83d\ude14\u270c drop an emoji and i'll send you the questions", "token_idx_1": 36, "text_start_1": 111, "text_end_1": 116, "date_1": "2019-09", "text_2": "i still haven't watched lotte stages \ud83d\ude43", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["don't", "lie", "\u2022", "-", "like", "2/3", "years", "ago", "\u2022", "-", "a", "few", "months", "ago", "\u2022", "-", "nah", "\u2022", "-", "never", "\u270c", "\u2022", "-", "like", "one", "\u2022", "-", "c", "\u2022", "-", "w", "\u2022", "-", "nah", "\u2022", "-", "lotte", "sjdh", "hi", "love", "\u2022", "-", "nope", "\u2022", "-", "nah", "hun", "\u2022", "-", "on", "the", "cheek", "yes", "ofc", "\ud83d\ude14", "\u270c", "drop", "an", "emoji", "and", "i'll", "send", "you", "the", "questions"], "text_2_tokenized": ["i", "still", "haven't", "watched", "lotte", "stages", "\ud83d\ude43"]} -{"id": "3098-lotte", "word": "lotte", "label_binary": 1, "text_1": "finally went to see the skz handprints at lotte department and my hands are bigger than all of them as i expected my friend was dying laughing # embarassing", "token_idx_1": 8, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-09", "text_2": "200929 jungkook put 2019 lotte family concert jungkook to shame", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["finally", "went", "to", "see", "the", "skz", "handprints", "at", "lotte", "department", "and", "my", "hands", "are", "bigger", "than", "all", "of", "them", "as", "i", "expected", "my", "friend", "was", "dying", 
"laughing", "#", "embarassing"], "text_2_tokenized": ["200929", "jungkook", "put", "2019", "lotte", "family", "concert", "jungkook", "to", "shame"]} -{"id": "3099-lotte", "word": "lotte", "label_binary": 1, "text_1": "EXO 'Have you ever...?' is literally all around of lotte avenue in here", "token_idx_1": 13, "text_start_1": 51, "text_end_1": 56, "date_1": "2019-09", "text_2": "thinking about how i cried when txt performed run away at the lotte family concert gosh they make me feel so many happy emotions i love them the absolute most :((( i cannot wait for blue hour", "token_idx_2": 12, "text_start_2": 62, "text_end_2": 67, "date_2": "2020-09", "text_1_tokenized": ["EXO", "'", "Have", "you", "ever", "...", "?", "'", "is", "literally", "all", "around", "of", "lotte", "avenue", "in", "here"], "text_2_tokenized": ["thinking", "about", "how", "i", "cried", "when", "txt", "performed", "run", "away", "at", "the", "lotte", "family", "concert", "gosh", "they", "make", "me", "feel", "so", "many", "happy", "emotions", "i", "love", "them", "the", "absolute", "most", ":(", "(", "(", "i", "cannot", "wait", "for", "blue", "hour"]} -{"id": "3100-lotte", "word": "lotte", "label_binary": 1, "text_1": "Judging from jk hair length in the VCR they probably film the videos before the lotte concert and before their official break", "token_idx_1": 15, "text_start_1": 80, "text_end_1": 85, "date_1": "2019-09", "text_2": "I slept through my alarm for the lotte concert LMAOOOOO", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["Judging", "from", "jk", "hair", "length", "in", "the", "VCR", "they", "probably", "film", "the", "videos", "before", "the", "lotte", "concert", "and", "before", "their", "official", "break"], "text_2_tokenized": ["I", "slept", "through", "my", "alarm", "for", "the", "lotte", "concert", "LMAOOOOO"]} -{"id": "3101-lotte", "word": "lotte", "label_binary": 1, "text_1": "pupunta bang lotte world blackpink today", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 18, "date_1": "2019-09", "text_2": "So you're telling me that i missed the lotte family concert...haha....okay..", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 44, "date_2": "2020-09", "text_1_tokenized": ["pupunta", "bang", "lotte", "world", "blackpink", "today"], "text_2_tokenized": ["So", "you're", "telling", "me", "that", "i", "missed", "the", "lotte", "family", "concert", "...", "haha", "...", "okay", ".."]} -{"id": "3102-lotte", "word": "lotte", "label_binary": 0, "text_1": "I bet they were watching that lotte episode before shooting this bdkssbsu", "token_idx_1": 6, "text_start_1": 30, "text_end_1": 35, "date_1": "2019-09", "text_2": "I've already loved that song since release, but ever since the lotte duty free family con, I've never stopped humming to @GFRDofficial's Apple \ud83d\ude05 (not really a stan and I'm not familiar with most of the lyrics yet lol)", "token_idx_2": 12, "text_start_2": 63, "text_end_2": 68, "date_2": "2020-09", "text_1_tokenized": ["I", "bet", "they", "were", "watching", "that", "lotte", "episode", "before", "shooting", "this", "bdkssbsu"], "text_2_tokenized": ["I've", "already", "loved", "that", "song", "since", "release", ",", "but", "ever", "since", "the", "lotte", "duty", "free", "family", "con", ",", "I've", "never", "stopped", "humming", "to", "@GFRDofficial", "'", "s", "Apple", "\ud83d\ude05", "(", "not", "really", "a", "stan", "and", "I'm", "not", "familiar", "with", "most", "of", "the", "lyrics", "yet", "lol", ")"]} -{"id": "3103-lotte", 
"word": "lotte", "label_binary": 0, "text_1": "it's october so do yall think... it's time to change to pumpkin spice lotte again", "token_idx_1": 14, "text_start_1": 70, "text_end_1": 75, "date_1": "2019-09", "text_2": "okay so I missed a lot while I was sleeping so does anyone wanna direct me to where I can see the lotte duty free concert?", "token_idx_2": 22, "text_start_2": 98, "text_end_2": 103, "date_2": "2020-09", "text_1_tokenized": ["it's", "october", "so", "do", "yall", "think", "...", "it's", "time", "to", "change", "to", "pumpkin", "spice", "lotte", "again"], "text_2_tokenized": ["okay", "so", "I", "missed", "a", "lot", "while", "I", "was", "sleeping", "so", "does", "anyone", "wanna", "direct", "me", "to", "where", "I", "can", "see", "the", "lotte", "duty", "free", "concert", "?"]} -{"id": "3104-lotte", "word": "lotte", "label_binary": 1, "text_1": "is chanyeol decorating his snoopy statue for lotte museum???", "token_idx_1": 7, "text_start_1": 45, "text_end_1": 50, "date_1": "2019-09", "text_2": "where can i watch the lotte concert \ud83e\udd7a\ud83d\udc97", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["is", "chanyeol", "decorating", "his", "snoopy", "statue", "for", "lotte", "museum", "?", "?", "?"], "text_2_tokenized": ["where", "can", "i", "watch", "the", "lotte", "concert", "\ud83e\udd7a", "\ud83d\udc97"]} -{"id": "3105-lotte", "word": "lotte", "label_binary": 1, "text_1": "Anyone selling Bts Speak Yourself Tour: the final Live viewing cinema tickets in Seoul? I'm willing to pay someone that can offer a reasonable price, pls dm me! Cgv, megabox, or lotte cinema is okay and i only need 1 ticket, Ty #SpeakYourselfTour", "token_idx_1": 37, "text_start_1": 178, "text_end_1": 183, "date_1": "2019-09", "text_2": "oh shit did i miss the lotte concert last night", "token_idx_2": 6, "text_start_2": 23, "text_end_2": 28, "date_2": "2020-09", "text_1_tokenized": ["Anyone", "selling", "Bts", "Speak", "Yourself", "Tour", ":", "the", "final", "Live", "viewing", "cinema", "tickets", "in", "Seoul", "?", "I'm", "willing", "to", "pay", "someone", "that", "can", "offer", "a", "reasonable", "price", ",", "pls", "dm", "me", "!", "Cgv", ",", "megabox", ",", "or", "lotte", "cinema", "is", "okay", "and", "i", "only", "need", "1", "ticket", ",", "Ty", "#SpeakYourselfTour"], "text_2_tokenized": ["oh", "shit", "did", "i", "miss", "the", "lotte", "concert", "last", "night"]} -{"id": "3106-lotte", "word": "lotte", "label_binary": 0, "text_1": "As a Peanuts' fan, I think I need to go to lotte", "token_idx_1": 13, "text_start_1": 43, "text_end_1": 48, "date_1": "2019-09", "text_2": "lowkey looks like lotte world jk?? his hair is wet \ud83d\ude27", "token_idx_2": 3, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-09", "text_1_tokenized": ["As", "a", "Peanuts", "'", "fan", ",", "I", "think", "I", "need", "to", "go", "to", "lotte"], "text_2_tokenized": ["lowkey", "looks", "like", "lotte", "world", "jk", "?", "?", "his", "hair", "is", "wet", "\ud83d\ude27"]} -{"id": "3107-lotte", "word": "lotte", "label_binary": 1, "text_1": "kesetrum pencetan lift lotte mart bgzd", "token_idx_1": 3, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-09", "text_2": "lets be honest... jungkook knows what makes us horny and he's using it against us..... pied piper... lotte 2019.... 
mots on:e...", "token_idx_2": 20, "text_start_2": 101, "text_end_2": 106, "date_2": "2020-09", "text_1_tokenized": ["kesetrum", "pencetan", "lift", "lotte", "mart", "bgzd"], "text_2_tokenized": ["lets", "be", "honest", "...", "jungkook", "knows", "what", "makes", "us", "horny", "and", "he's", "using", "it", "against", "us", "...", "pied", "piper", "...", "lotte", "2019", "...", "mots", "on", ":", "e", "..."]} -{"id": "3108-lotte", "word": "lotte", "label_binary": 0, "text_1": "i see lotte has committed a crime once again", "token_idx_1": 2, "text_start_1": 6, "text_end_1": 11, "date_1": "2019-09", "text_2": "Okay wait a damn minute just let me breath for a sec, a lot happened today first the lotte duty free concert 2nd the tiktok video and Armys finding out what was the reason why they post that video on tiktok and 3rd the announcement of the Album..now... How am i going to sleep?", "token_idx_2": 19, "text_start_2": 85, "text_end_2": 90, "date_2": "2020-09", "text_1_tokenized": ["i", "see", "lotte", "has", "committed", "a", "crime", "once", "again"], "text_2_tokenized": ["Okay", "wait", "a", "damn", "minute", "just", "let", "me", "breath", "for", "a", "sec", ",", "a", "lot", "happened", "today", "first", "the", "lotte", "duty", "free", "concert", "2nd", "the", "tiktok", "video", "and", "Armys", "finding", "out", "what", "was", "the", "reason", "why", "they", "post", "that", "video", "on", "tiktok", "and", "3rd", "the", "announcement", "of", "the", "Album", "..", "now", "...", "How", "am", "i", "going", "to", "sleep", "?"]} -{"id": "3109-lotte", "word": "lotte", "label_binary": 1, "text_1": "My classmate is so sweet because she gave me a lotte duty free brochure of bangtan \ud83e\udd70\ud83e\udd7a", "token_idx_1": 10, "text_start_1": 47, "text_end_1": 52, "date_1": "2019-09", "text_2": "anyone got a link 4 the full lotte concert?", "token_idx_2": 7, "text_start_2": 29, "text_end_2": 34, "date_2": "2020-09", "text_1_tokenized": ["My", "classmate", "is", "so", "sweet", "because", "she", "gave", "me", "a", "lotte", "duty", "free", "brochure", "of", "bangtan", "\ud83e\udd70", "\ud83e\udd7a"], "text_2_tokenized": ["anyone", "got", "a", "link", "4", "the", "full", "lotte", "concert", "?"]} -{"id": "3110-lotte", "word": "lotte", "label_binary": 0, "text_1": "lotte is finnish n her mom appears to b saami im gonna cry \ud83e\udd7a\ud83e\udd7a\ud83e\udd7a\ud83e\udd7a\ud83e\udd7a\u2764\ufe0f\u2764\ufe0f\u2764\ufe0f\u2764\ufe0f", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "I feel so stupid because I forgot that lotte concert was yesterday \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d\ud83d\ude2d This happens when I'm absent from twitter \ud83d\ude22\ud83d\ude22\ud83d\ude22", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 44, "date_2": "2020-09", "text_1_tokenized": ["lotte", "is", "finnish", "n", "her", "mom", "appears", "to", "b", "saami", "im", "gonna", "cry", "\ud83e\udd7a", "\ud83e\udd7a", "\ud83e\udd7a", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f", "\u2764", "\ufe0f"], "text_2_tokenized": ["I", "feel", "so", "stupid", "because", "I", "forgot", "that", "lotte", "concert", "was", "yesterday", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d", "This", "happens", "when", "I'm", "absent", "from", "twitter", "\ud83d\ude22", "\ud83d\ude22", "\ud83d\ude22"]} -{"id": "3111-lotte", "word": "lotte", "label_binary": 1, "text_1": "October is the month of being horny for vampires on main and now i can't stop thinking about the vampires 
in lotte world", "token_idx_1": 21, "text_start_1": 109, "text_end_1": 114, "date_1": "2019-09", "text_2": "The lotte duty free concert was so short\ud83d\udc94", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-09", "text_1_tokenized": ["October", "is", "the", "month", "of", "being", "horny", "for", "vampires", "on", "main", "and", "now", "i", "can't", "stop", "thinking", "about", "the", "vampires", "in", "lotte", "world"], "text_2_tokenized": ["The", "lotte", "duty", "free", "concert", "was", "so", "short", "\ud83d\udc94"]} -{"id": "3112-lotte", "word": "lotte", "label_binary": 1, "text_1": "going through the whole 9 floors of lotte departement store to get stamps so i get free bts samples i-", "token_idx_1": 7, "text_start_1": 36, "text_end_1": 41, "date_1": "2019-09", "text_2": "so who is yeonjun not too into txt yet but he was neat during the lotte concert and now i'm learning he is very Gender", "token_idx_2": 15, "text_start_2": 66, "text_end_2": 71, "date_2": "2020-09", "text_1_tokenized": ["going", "through", "the", "whole", "9", "floors", "of", "lotte", "departement", "store", "to", "get", "stamps", "so", "i", "get", "free", "bts", "samples", "i", "-"], "text_2_tokenized": ["so", "who", "is", "yeonjun", "not", "too", "into", "txt", "yet", "but", "he", "was", "neat", "during", "the", "lotte", "concert", "and", "now", "i'm", "learning", "he", "is", "very", "Gender"]} -{"id": "3113-lotte", "word": "lotte", "label_binary": 0, "text_1": "best of me hits differently since the lotte concert", "token_idx_1": 7, "text_start_1": 38, "text_end_1": 43, "date_1": "2019-09", "text_2": "han dong hee's turn. 3 run HR and lotte up 15-0 in the third \ud83e\udd29", "token_idx_2": 9, "text_start_2": 34, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["best", "of", "me", "hits", "differently", "since", "the", "lotte", "concert"], "text_2_tokenized": ["han", "dong", "hee's", "turn", ".", "3", "run", "HR", "and", "lotte", "up", "15-0", "in", "the", "third", "\ud83e\udd29"]} -{"id": "3114-lotte", "word": "lotte", "label_binary": 1, "text_1": "people saying their harrie mutuals are unfollowing them rn for live tweeting the show takes me back to the lotte show when one of my fav mutuals softblocked :/", "token_idx_1": 19, "text_start_1": 107, "text_end_1": 112, "date_1": "2019-09", "text_2": "So the boys performed bwl, mir, and black swan at the lotte concert and i missed every bit of it \ud83d\ude1e", "token_idx_2": 13, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-09", "text_1_tokenized": ["people", "saying", "their", "harrie", "mutuals", "are", "unfollowing", "them", "rn", "for", "live", "tweeting", "the", "show", "takes", "me", "back", "to", "the", "lotte", "show", "when", "one", "of", "my", "fav", "mutuals", "softblocked", ":/"], "text_2_tokenized": ["So", "the", "boys", "performed", "bwl", ",", "mir", ",", "and", "black", "swan", "at", "the", "lotte", "concert", "and", "i", "missed", "every", "bit", "of", "it", "\ud83d\ude1e"]} -{"id": "3115-lotte", "word": "lotte", "label_binary": 0, "text_1": "worm update: galil is just zoomin' around the nougat bar. 
lotte seems to be building a cocoon of sorts?", "token_idx_1": 13, "text_start_1": 58, "text_end_1": 63, "date_1": "2019-09", "text_2": "Why is my tl just tweets from when the lotte concert was happening", "token_idx_2": 9, "text_start_2": 39, "text_end_2": 44, "date_2": "2020-09", "text_1_tokenized": ["worm", "update", ":", "galil", "is", "just", "zoomin", "'", "around", "the", "nougat", "bar", ".", "lotte", "seems", "to", "be", "building", "a", "cocoon", "of", "sorts", "?"], "text_2_tokenized": ["Why", "is", "my", "tl", "just", "tweets", "from", "when", "the", "lotte", "concert", "was", "happening"]} -{"id": "3116-lotte", "word": "lotte", "label_binary": 1, "text_1": "I really thought we were past the phase where we rode at dawn over screen time or line distribution \ud83e\udd74\ud83e\udd74 what is this fvcking lotte commercial all over again or smth", "token_idx_1": 25, "text_start_1": 124, "text_end_1": 129, "date_1": "2019-09", "text_2": "think about it if this year went to plan most of of us would have only been to like 2 bts concerts but no we got to go to bang bang con part one and 2 and then bang bang con the live, lotte concert and now on:e concert for 2 days so in conclusion @BTS_twt said fuck 2020", "token_idx_2": 44, "text_start_2": 184, "text_end_2": 189, "date_2": "2020-09", "text_1_tokenized": ["I", "really", "thought", "we", "were", "past", "the", "phase", "where", "we", "rode", "at", "dawn", "over", "screen", "time", "or", "line", "distribution", "\ud83e\udd74", "\ud83e\udd74", "what", "is", "this", "fvcking", "lotte", "commercial", "all", "over", "again", "or", "smth"], "text_2_tokenized": ["think", "about", "it", "if", "this", "year", "went", "to", "plan", "most", "of", "of", "us", "would", "have", "only", "been", "to", "like", "2", "bts", "concerts", "but", "no", "we", "got", "to", "go", "to", "bang", "bang", "con", "part", "one", "and", "2", "and", "then", "bang", "bang", "con", "the", "live", ",", "lotte", "concert", "and", "now", "on", ":", "e", "concert", "for", "2", "days", "so", "in", "conclusion", "@BTS_twt", "said", "fuck", "2020"]} -{"id": "3117-lotte", "word": "lotte", "label_binary": 0, "text_1": "Hoping lotte gets fired tonight on #TheApprentice2019", "token_idx_1": 1, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-09", "text_2": "I stayed up to watch the lotte duty concert and I couldn't sleep after. 
I was completely amazed by their performance \ud83e\udd70", "token_idx_2": 6, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-09", "text_1_tokenized": ["Hoping", "lotte", "gets", "fired", "tonight", "on", "#TheApprentice2019"], "text_2_tokenized": ["I", "stayed", "up", "to", "watch", "the", "lotte", "duty", "concert", "and", "I", "couldn't", "sleep", "after", ".", "I", "was", "completely", "amazed", "by", "their", "performance", "\ud83e\udd70"]} -{"id": "3118-lotte", "word": "lotte", "label_binary": 0, "text_1": "wayment in that lotte duty free commerical thingy they did they were LITERALLY DOING MAGIC #TXTOBER", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 21, "date_1": "2019-09", "text_2": "not at how the lotte concert, album announcement, and preorder happened all in the same day for me, IM OVERWHELMED TO SAY THE LEAST", "token_idx_2": 4, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-09", "text_1_tokenized": ["wayment", "in", "that", "lotte", "duty", "free", "commerical", "thingy", "they", "did", "they", "were", "LITERALLY", "DOING", "MAGIC", "#TXTOBER"], "text_2_tokenized": ["not", "at", "how", "the", "lotte", "concert", ",", "album", "announcement", ",", "and", "preorder", "happened", "all", "in", "the", "same", "day", "for", "me", ",", "IM", "OVERWHELMED", "TO", "SAY", "THE", "LEAST"]} -{"id": "3119-lotte", "word": "lotte", "label_binary": 1, "text_1": "does the lotte cinema site not workfor anyone else?", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 14, "date_1": "2019-09", "text_2": "todays jungkook gave me a lot of lotte duty family concert vibes yknow from like when they performed best of me #MapOfTheSoulOne_D1 #MAP_OF_THE_SOUL_ONE", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 38, "date_2": "2020-09", "text_1_tokenized": ["does", "the", "lotte", "cinema", "site", "not", "workfor", "anyone", "else", "?"], "text_2_tokenized": ["todays", "jungkook", "gave", "me", "a", "lot", "of", "lotte", "duty", "family", "concert", "vibes", "yknow", "from", "like", "when", "they", "performed", "best", "of", "me", "#MapOfTheSoulOne_D1", "#MAP_OF_THE_SOUL_ONE"]} -{"id": "3120-lotte", "word": "lotte", "label_binary": 0, "text_1": "CRYING my mom told me i wasnt even a month old when she took me to lotte world \ud83d\ude2d who let that happen!!!", "token_idx_1": 16, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-09", "text_2": "berarti ntar ada lah ya lotte concert behind special clips\ud83e\udd21", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-09", "text_1_tokenized": ["CRYING", "my", "mom", "told", "me", "i", "wasnt", "even", "a", "month", "old", "when", "she", "took", "me", "to", "lotte", "world", "\ud83d\ude2d", "who", "let", "that", "happen", "!", "!", "!"], "text_2_tokenized": ["berarti", "ntar", "ada", "lah", "ya", "lotte", "concert", "behind", "special", "clips", "\ud83e\udd21"]} -{"id": "3121-lotte", "word": "lotte", "label_binary": 0, "text_1": "\ud83d\udc23/ friendly reminder lotte is korea uses roaming data ($$$). it's hard to update, post, or even generally load a page using her wifi egg. mb she'll find a free time on her vacation to try and reply to messages but she won't be back until nov. 
4th^^", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-09", "text_2": "Ateez in lotte world with the cute headbands PLSSSSS", "token_idx_2": 2, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-09", "text_1_tokenized": ["\ud83d\udc23", "/", "friendly", "reminder", "lotte", "is", "korea", "uses", "roaming", "data", "(", "$", "$", "$", ")", ".", "it's", "hard", "to", "update", ",", "post", ",", "or", "even", "generally", "load", "a", "page", "using", "her", "wifi", "egg", ".", "mb", "she'll", "find", "a", "free", "time", "on", "her", "vacation", "to", "try", "and", "reply", "to", "messages", "but", "she", "won't", "be", "back", "until", "nov", ".", "4th", "^", "^"], "text_2_tokenized": ["Ateez", "in", "lotte", "world", "with", "the", "cute", "headbands", "PLSSSSS"]} -{"id": "3122-lotte", "word": "lotte", "label_binary": 1, "text_1": "Does anyone know what floor of lotte plaza I can find ikon goods?", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-09", "text_2": "ive only been to everland but i wanna go to lotte world too", "token_idx_2": 10, "text_start_2": 44, "text_end_2": 49, "date_2": "2020-09", "text_1_tokenized": ["Does", "anyone", "know", "what", "floor", "of", "lotte", "plaza", "I", "can", "find", "ikon", "goods", "?"], "text_2_tokenized": ["ive", "only", "been", "to", "everland", "but", "i", "wanna", "go", "to", "lotte", "world", "too"]} -{"id": "3123-lotte", "word": "lotte", "label_binary": 0, "text_1": "so what is this tingly feeling i get in my tummy when i watch videos of koo from the lotte family concert?", "token_idx_1": 19, "text_start_1": 85, "text_end_1": 90, "date_1": "2019-09", "text_2": "so...nct world 2.0 at lotte tower sky bridge huh :o", "token_idx_2": 6, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["so", "what", "is", "this", "tingly", "feeling", "i", "get", "in", "my", "tummy", "when", "i", "watch", "videos", "of", "koo", "from", "the", "lotte", "family", "concert", "?"], "text_2_tokenized": ["so", "...", "nct", "world", "2.0", "at", "lotte", "tower", "sky", "bridge", "huh", ":", "o"]} -{"id": "3124-lotte", "word": "lotte", "label_binary": 0, "text_1": "Hey , i like you a lotte . Its like a lot but a little \ud83d\ude15", "token_idx_1": 6, "text_start_1": 19, "text_end_1": 24, "date_1": "2019-09", "text_2": "if you've been my oomf for a while now you'd know that i watched the lotte concert by the ocean \ud83d\ude02 guess what? Im at the cemetery right now", "token_idx_2": 15, "text_start_2": 69, "text_end_2": 74, "date_2": "2020-09", "text_1_tokenized": ["Hey", ",", "i", "like", "you", "a", "lotte", ".", "Its", "like", "a", "lot", "but", "a", "little", "\ud83d\ude15"], "text_2_tokenized": ["if", "you've", "been", "my", "oomf", "for", "a", "while", "now", "you'd", "know", "that", "i", "watched", "the", "lotte", "concert", "by", "the", "ocean", "\ud83d\ude02", "guess", "what", "?", "Im", "at", "the", "cemetery", "right", "now"]} -{"id": "3125-lotte", "word": "lotte", "label_binary": 1, "text_1": "lotte concert &bts never let me down", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "was watching bbc news with my relatives and the bts lotte duty free ad played. 
i had to keep my as/s calm.", "token_idx_2": 10, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-09", "text_1_tokenized": ["lotte", "concert", "&", "bts", "never", "let", "me", "down"], "text_2_tokenized": ["was", "watching", "bbc", "news", "with", "my", "relatives", "and", "the", "bts", "lotte", "duty", "free", "ad", "played", ".", "i", "had", "to", "keep", "my", "as", "/", "s", "calm", "."]} -{"id": "3126-lotte", "word": "lotte", "label_binary": 1, "text_1": "ok but idk if we should go to everland or lotte world im torn \ud83d\ude16", "token_idx_1": 10, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-09", "text_2": "im so upset i want to watch lotte concert", "token_idx_2": 7, "text_start_2": 28, "text_end_2": 33, "date_2": "2020-09", "text_1_tokenized": ["ok", "but", "idk", "if", "we", "should", "go", "to", "everland", "or", "lotte", "world", "im", "torn", "\ud83d\ude16"], "text_2_tokenized": ["im", "so", "upset", "i", "want", "to", "watch", "lotte", "concert"]} -{"id": "3127-lotte", "word": "lotte", "label_binary": 0, "text_1": "oh so monopoly is around lotte and it closes by 6pm and weekend too? ohhh", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-09", "text_2": "why does it feel like a whole week has passed since the lotte duty free concert when it was literally this morning-", "token_idx_2": 12, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-09", "text_1_tokenized": ["oh", "so", "monopoly", "is", "around", "lotte", "and", "it", "closes", "by", "6pm", "and", "weekend", "too", "?", "ohhh"], "text_2_tokenized": ["why", "does", "it", "feel", "like", "a", "whole", "week", "has", "passed", "since", "the", "lotte", "duty", "free", "concert", "when", "it", "was", "literally", "this", "morning", "-"]} -{"id": "3128-lotte", "word": "lotte", "label_binary": 0, "text_1": "cant stop thinking about jaws the lotte iced drink", "token_idx_1": 6, "text_start_1": 34, "text_end_1": 39, "date_1": "2019-09", "text_2": "how jennie endorses a lotte product but they don't perform in their concerts \ud83e\udd28", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["cant", "stop", "thinking", "about", "jaws", "the", "lotte", "iced", "drink"], "text_2_tokenized": ["how", "jennie", "endorses", "a", "lotte", "product", "but", "they", "don't", "perform", "in", "their", "concerts", "\ud83e\udd28"]} -{"id": "3129-lotte", "word": "lotte", "label_binary": 1, "text_1": "i want on spotify the song bts did for lotte duty free it's been in my head lately i need to listen to it like 6 times every day for selfcare", "token_idx_1": 9, "text_start_1": 39, "text_end_1": 44, "date_1": "2019-09", "text_2": "wait i jus realized i wasn't blessed w the lotte concert pcs \ud83e\uddcd\ud83c\udffb\u200d\u2640\ufe0f", "token_idx_2": 9, "text_start_2": 43, "text_end_2": 48, "date_2": "2020-09", "text_1_tokenized": ["i", "want", "on", "spotify", "the", "song", "bts", "did", "for", "lotte", "duty", "free", "it's", "been", "in", "my", "head", "lately", "i", "need", "to", "listen", "to", "it", "like", "6", "times", "every", "day", "for", "selfcare"], "text_2_tokenized": ["wait", "i", "jus", "realized", "i", "wasn't", "blessed", "w", "the", "lotte", "concert", "pcs", "\ud83e\uddcd\ud83c\udffb\u200d\u2640", "\ufe0f"]} -{"id": "3130-lotte", "word": "lotte", "label_binary": 1, "text_1": "finally home after a day at lotte world with my kids and i'm so dead hhhhhhh", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 33, "date_1": "2019-09", "text_2": 
"the lotte duty free concert was last night??? on god idk what's going on anymore i give up", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-09", "text_1_tokenized": ["finally", "home", "after", "a", "day", "at", "lotte", "world", "with", "my", "kids", "and", "i'm", "so", "dead", "hhhhhhh"], "text_2_tokenized": ["the", "lotte", "duty", "free", "concert", "was", "last", "night", "?", "?", "?", "on", "god", "idk", "what's", "going", "on", "anymore", "i", "give", "up"]} -{"id": "3131-lotte", "word": "lotte", "label_binary": 1, "text_1": "ahhh kook's hair from muster, lotte concert, until japan concert was the best aaaahhhhh", "token_idx_1": 6, "text_start_1": 30, "text_end_1": 35, "date_1": "2019-09", "text_2": "watched gfriend's stage in lotte fam concert 'cos why not. im so proud of them. really.", "token_idx_2": 4, "text_start_2": 27, "text_end_2": 32, "date_2": "2020-09", "text_1_tokenized": ["ahhh", "kook's", "hair", "from", "muster", ",", "lotte", "concert", ",", "until", "japan", "concert", "was", "the", "best", "aaaahhhhh"], "text_2_tokenized": ["watched", "gfriend's", "stage", "in", "lotte", "fam", "concert", "'", "cos", "why", "not", ".", "im", "so", "proud", "of", "them", ".", "really", "."]} -{"id": "3132-lotte", "word": "lotte", "label_binary": 1, "text_1": "i also watched their lotte world run bts ep sdkskdk im jimin when it comes to riding that pirate ship \ud83d\udc80", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-09", "text_2": "OMG i just recently watched @BTS_twt lotte duttt free family concert and im totally loving their white and blue clothes, they're so freacking gorg \ud83d\ude0d", "token_idx_2": 6, "text_start_2": 37, "text_end_2": 42, "date_2": "2020-09", "text_1_tokenized": ["i", "also", "watched", "their", "lotte", "world", "run", "bts", "ep", "sdkskdk", "im", "jimin", "when", "it", "comes", "to", "riding", "that", "pirate", "ship", "\ud83d\udc80"], "text_2_tokenized": ["OMG", "i", "just", "recently", "watched", "@BTS_twt", "lotte", "duttt", "free", "family", "concert", "and", "im", "totally", "loving", "their", "white", "and", "blue", "clothes", ",", "they're", "so", "freacking", "gorg", "\ud83d\ude0d"]} -{"id": "3133-lotte", "word": "lotte", "label_binary": 1, "text_1": "mfs callin taeil tinie clearly havent seen the lotte duty free mv \ud83d\ude44", "token_idx_1": 8, "text_start_1": 47, "text_end_1": 52, "date_1": "2019-09", "text_2": "wait i also missed the lotte concert \ud83d\ude2d\ud83d\ude2d", "token_idx_2": 5, "text_start_2": 23, "text_end_2": 28, "date_2": "2020-09", "text_1_tokenized": ["mfs", "callin", "taeil", "tinie", "clearly", "havent", "seen", "the", "lotte", "duty", "free", "mv", "\ud83d\ude44"], "text_2_tokenized": ["wait", "i", "also", "missed", "the", "lotte", "concert", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "3134-lotte", "word": "lotte", "label_binary": 1, "text_1": "the lotte advertisement that they did and the short dance part does look like magic themed tho", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 9, "date_1": "2019-09", "text_2": "that sounds like a lotte or chilsung commercial haha", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 24, "date_2": "2020-09", "text_1_tokenized": ["the", "lotte", "advertisement", "that", "they", "did", "and", "the", "short", "dance", "part", "does", "look", "like", "magic", "themed", "tho"], "text_2_tokenized": ["that", "sounds", "like", "a", "lotte", "or", "chilsung", "commercial", "haha"]} -{"id": "3135-lotte", "word": "lotte", 
"label_binary": 1, "text_1": "going to lotte world tomorrow!! can't wait to experience all the halloween stuff eeeee :D", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 14, "date_1": "2019-09", "text_2": "finallyyy we're back in the city, I can now watch the lotte concert \ud83d\ude2d", "token_idx_2": 12, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-09", "text_1_tokenized": ["going", "to", "lotte", "world", "tomorrow", "!", "!", "can't", "wait", "to", "experience", "all", "the", "halloween", "stuff", "eeeee", ":D"], "text_2_tokenized": ["finallyyy", "we're", "back", "in", "the", "city", ",", "I", "can", "now", "watch", "the", "lotte", "concert", "\ud83d\ude2d"]} -{"id": "3136-lotte", "word": "lotte", "label_binary": 0, "text_1": "wts haechan lotte duty free + star avenue pcs qyop ($40+) dm if interested~", "token_idx_1": 2, "text_start_1": 12, "text_end_1": 17, "date_1": "2019-09", "text_2": "lotte and sucy tru ride or dies \ud83d\ude22\ud83e\udd1d", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 5, "date_2": "2020-09", "text_1_tokenized": ["wts", "haechan", "lotte", "duty", "free", "+", "star", "avenue", "pcs", "qyop", "(", "$", "40", "+", ")", "dm", "if", "interested", "~"], "text_2_tokenized": ["lotte", "and", "sucy", "tru", "ride", "or", "dies", "\ud83d\ude22", "\ud83e\udd1d"]} -{"id": "3137-lotte", "word": "lotte", "label_binary": 0, "text_1": "i'm at lotte world for halloween and there are alot of couples here erm kdrama max \ud83d\ude02", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-09", "text_2": "lotte really decided to make me cry today i love lotte so much pls @GOLDENXAVERY \ud83e\udd7a\ud83e\udd7a\ud83e\udd7a", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 5, "date_2": "2020-09", "text_1_tokenized": ["i'm", "at", "lotte", "world", "for", "halloween", "and", "there", "are", "alot", "of", "couples", "here", "erm", "kdrama", "max", "\ud83d\ude02"], "text_2_tokenized": ["lotte", "really", "decided", "to", "make", "me", "cry", "today", "i", "love", "lotte", "so", "much", "pls", "@GOLDENXAVERY", "\ud83e\udd7a", "\ud83e\udd7a", "\ud83e\udd7a"]} -{"id": "3138-lotte", "word": "lotte", "label_binary": 0, "text_1": "lotte bottled barley tea is strong as fuck lol", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-09", "text_2": "when's the lotte family concert?", "token_idx_2": 2, "text_start_2": 11, "text_end_2": 16, "date_2": "2020-09", "text_1_tokenized": ["lotte", "bottled", "barley", "tea", "is", "strong", "as", "fuck", "lol"], "text_2_tokenized": ["when's", "the", "lotte", "family", "concert", "?"]} -{"id": "3139-lotte", "word": "lotte", "label_binary": 1, "text_1": "Me: goes to lotte mall and spends 5,000 won at lotteria \ud83c\udf54\ud83c\udf5f Jin: goes to lotte mall to visit Balenciaga \ud83d\udcb0\ud83d\udcb0 Me & Jin: visits Line Friends \ud83e\udd1d \ud83e\udd23\ud83d\ude02", "token_idx_1": 4, "text_start_1": 12, "text_end_1": 17, "date_1": "2019-09", "text_2": "i couldn't watch lotte family concert and i'm sick and i feel like i've missed so much and now i feel extra sad and \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 22, "date_2": "2020-09", "text_1_tokenized": ["Me", ":", "goes", "to", "lotte", "mall", "and", "spends", "5,000", "won", "at", "lotteria", "\ud83c\udf54", "\ud83c\udf5f", "Jin", ":", "goes", "to", "lotte", "mall", "to", "visit", "Balenciaga", "\ud83d\udcb0", "\ud83d\udcb0", "Me", "&", "Jin", ":", "visits", "Line", "Friends", "\ud83e\udd1d", 
"\ud83e\udd23", "\ud83d\ude02"], "text_2_tokenized": ["i", "couldn't", "watch", "lotte", "family", "concert", "and", "i'm", "sick", "and", "i", "feel", "like", "i've", "missed", "so", "much", "and", "now", "i", "feel", "extra", "sad", "and", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "3140-lotte", "word": "lotte", "label_binary": 0, "text_1": "i just want to share a story when we visited korea (jype bldg),,,, we first went to lotte world bcoz it is our first time in korea so ofc we have an itinerary, our tour guide mr. park (he is rlly the best tour guide and has the greatest services) said that the new jype bldg is\u2013", "token_idx_1": 22, "text_start_1": 84, "text_end_1": 89, "date_1": "2019-09", "text_2": "Hi!! my names lotte and i'm new to dttwt! -my favs are wilbur sapnap george dream tommy tubbo skeppy dream", "token_idx_2": 5, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-09", "text_1_tokenized": ["i", "just", "want", "to", "share", "a", "story", "when", "we", "visited", "korea", "(", "jype", "bldg", ")", ",", ",", ",", "we", "first", "went", "to", "lotte", "world", "bcoz", "it", "is", "our", "first", "time", "in", "korea", "so", "ofc", "we", "have", "an", "itinerary", ",", "our", "tour", "guide", "mr", ".", "park", "(", "he", "is", "rlly", "the", "best", "tour", "guide", "and", "has", "the", "greatest", "services", ")", "said", "that", "the", "new", "jype", "bldg", "is", "\u2013"], "text_2_tokenized": ["Hi", "!", "!", "my", "names", "lotte", "and", "i'm", "new", "to", "dttwt", "!", "-", "my", "favs", "are", "wilbur", "sapnap", "george", "dream", "tommy", "tubbo", "skeppy", "dream"]} -{"id": "3141-lotte", "word": "lotte", "label_binary": 1, "text_1": "27th oct gonna be a very hectic day bcs we're gonna queue for merch and gonna enter event zone and i need to get ready to enter the stadium and they'll be going to cinema. and the next day we're going to lotte world oh god let me breathe", "token_idx_1": 43, "text_start_1": 204, "text_end_1": 209, "date_1": "2019-09", "text_2": "yoongi and jk harmonizing the best lotte of make it right live", "token_idx_2": 6, "text_start_2": 35, "text_end_2": 40, "date_2": "2020-09", "text_1_tokenized": ["27th", "oct", "gonna", "be", "a", "very", "hectic", "day", "bcs", "we're", "gonna", "queue", "for", "merch", "and", "gonna", "enter", "event", "zone", "and", "i", "need", "to", "get", "ready", "to", "enter", "the", "stadium", "and", "they'll", "be", "going", "to", "cinema", ".", "and", "the", "next", "day", "we're", "going", "to", "lotte", "world", "oh", "god", "let", "me", "breathe"], "text_2_tokenized": ["yoongi", "and", "jk", "harmonizing", "the", "best", "lotte", "of", "make", "it", "right", "live"]} -{"id": "3142-lotte", "word": "lotte", "label_binary": 0, "text_1": "uhhh pfp change time because lotte makes me feel like tessa", "token_idx_1": 5, "text_start_1": 29, "text_end_1": 34, "date_1": "2019-09", "text_2": "watching the lotte concert in a few minutes leave me alone. 
I need to feel like im caught up", "token_idx_2": 2, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-09", "text_1_tokenized": ["uhhh", "pfp", "change", "time", "because", "lotte", "makes", "me", "feel", "like", "tessa"], "text_2_tokenized": ["watching", "the", "lotte", "concert", "in", "a", "few", "minutes", "leave", "me", "alone", ".", "I", "need", "to", "feel", "like", "im", "caught", "up"]} -{"id": "3143-lotte", "word": "lotte", "label_binary": 1, "text_1": "That same betrayal as that one episode the boys running around the lotte mall collecting cards \ud83d\ude02 everyone lied to each other", "token_idx_1": 12, "text_start_1": 67, "text_end_1": 72, "date_1": "2019-09", "text_2": "does anyone have any hd bts pics from the family lotte concert? if so please reply with some", "token_idx_2": 10, "text_start_2": 49, "text_end_2": 54, "date_2": "2020-09", "text_1_tokenized": ["That", "same", "betrayal", "as", "that", "one", "episode", "the", "boys", "running", "around", "the", "lotte", "mall", "collecting", "cards", "\ud83d\ude02", "everyone", "lied", "to", "each", "other"], "text_2_tokenized": ["does", "anyone", "have", "any", "hd", "bts", "pics", "from", "the", "family", "lotte", "concert", "?", "if", "so", "please", "reply", "with", "some"]} -{"id": "3144-lotte", "word": "lotte", "label_binary": 1, "text_1": "Hello anyone here pernah beli lotte world theme park ticket dari klook?", "token_idx_1": 5, "text_start_1": 30, "text_end_1": 35, "date_1": "2019-09", "text_2": "binbar really went from morse code to a water show in lotte world \ud83d\ude02", "token_idx_2": 11, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-09", "text_1_tokenized": ["Hello", "anyone", "here", "pernah", "beli", "lotte", "world", "theme", "park", "ticket", "dari", "klook", "?"], "text_2_tokenized": ["binbar", "really", "went", "from", "morse", "code", "to", "a", "water", "show", "in", "lotte", "world", "\ud83d\ude02"]} -{"id": "3145-lotte", "word": "lotte", "label_binary": 1, "text_1": "i watched that lotte world run episode again and now i want to go back and ride french revolution it was so much fun :(", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-09", "text_2": "Dude I still haven't recovered from lotte family I'm half asleep how do I even react to be?", "token_idx_2": 6, "text_start_2": 36, "text_end_2": 41, "date_2": "2020-09", "text_1_tokenized": ["i", "watched", "that", "lotte", "world", "run", "episode", "again", "and", "now", "i", "want", "to", "go", "back", "and", "ride", "french", "revolution", "it", "was", "so", "much", "fun", ":("], "text_2_tokenized": ["Dude", "I", "still", "haven't", "recovered", "from", "lotte", "family", "I'm", "half", "asleep", "how", "do", "I", "even", "react", "to", "be", "?"]} -{"id": "3146-lotte", "word": "lotte", "label_binary": 0, "text_1": "in lotte market and orange caramel is playing . 
Thank u", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-09", "text_2": "lrt HAHAHAHAHAH jihoon you genius lotte fan", "token_idx_2": 5, "text_start_2": 34, "text_end_2": 39, "date_2": "2020-09", "text_1_tokenized": ["in", "lotte", "market", "and", "orange", "caramel", "is", "playing", ".", "Thank", "u"], "text_2_tokenized": ["lrt", "HAHAHAHAHAH", "jihoon", "you", "genius", "lotte", "fan"]} -{"id": "3147-lotte", "word": "lotte", "label_binary": 1, "text_1": "i think lotte family concert is the best visuals so far", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 13, "date_1": "2019-09", "text_2": "bts tocando black swan no lotte duty free concert oooooooh que sabor", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-09", "text_1_tokenized": ["i", "think", "lotte", "family", "concert", "is", "the", "best", "visuals", "so", "far"], "text_2_tokenized": ["bts", "tocando", "black", "swan", "no", "lotte", "duty", "free", "concert", "oooooooh", "que", "sabor"]} -{"id": "3148-lotte", "word": "lotte", "label_binary": 1, "text_1": "I wanngo ggcon but in that day for buy tickets 16 I have to go lotte world with my friends \ud83d\ude2d .. maybe i will not go? A fan meet...", "token_idx_1": 15, "text_start_1": 63, "text_end_1": 68, "date_1": "2019-09", "text_2": "wait lotte concert happened alreasy???", "token_idx_2": 1, "text_start_2": 5, "text_end_2": 10, "date_2": "2020-09", "text_1_tokenized": ["I", "wanngo", "ggcon", "but", "in", "that", "day", "for", "buy", "tickets", "16", "I", "have", "to", "go", "lotte", "world", "with", "my", "friends", "\ud83d\ude2d", "..", "maybe", "i", "will", "not", "go", "?", "A", "fan", "meet", "..."], "text_2_tokenized": ["wait", "lotte", "concert", "happened", "alreasy", "?", "?", "?"]} -{"id": "3149-lotte", "word": "lotte", "label_binary": 1, "text_1": "...myeongdong shopping, caf\u00e9 tajagi, yongsan district, chicken and beer, rooftop hangs, americano x manjoo, lotte duty free, kangol, tacobell, SUGAR by brockhampton, dean ig live...", "token_idx_1": 21, "text_start_1": 108, "text_end_1": 113, "date_1": "2019-09", "text_2": "let me go rewatch the lotte performances I was little too delirious when I watched it at 4am", "token_idx_2": 5, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-09", "text_1_tokenized": ["...", "myeongdong", "shopping", ",", "caf\u00e9", "tajagi", ",", "yongsan", "district", ",", "chicken", "and", "beer", ",", "rooftop", "hangs", ",", "americano", "x", "manjoo", ",", "lotte", "duty", "free", ",", "kangol", ",", "tacobell", ",", "SUGAR", "by", "brockhampton", ",", "dean", "ig", "live", "..."], "text_2_tokenized": ["let", "me", "go", "rewatch", "the", "lotte", "performances", "I", "was", "little", "too", "delirious", "when", "I", "watched", "it", "at", "4am"]} -{"id": "3150-lotte", "word": "lotte", "label_binary": 0, "text_1": "This episode is like the lotte episode \ud83d\ude02 jimin whining and yoongi teasing", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-09", "text_2": "i can't believe i fucking fall asleep during the lotte concert", "token_idx_2": 9, "text_start_2": 49, "text_end_2": 54, "date_2": "2020-09", "text_1_tokenized": ["This", "episode", "is", "like", "the", "lotte", "episode", "\ud83d\ude02", "jimin", "whining", "and", "yoongi", "teasing"], "text_2_tokenized": ["i", "can't", "believe", "i", "fucking", "fall", "asleep", "during", "the", "lotte", "concert"]} -{"id": "1174-recount", "word": "recount", "label_binary": 1, "text_1": "\u201cWe have no other 
option before us but to canvass, or certify, the recount.\u201d", "token_idx_1": 16, "text_start_1": 67, "text_end_1": 74, "date_1": "2019-11", "text_2": "DISTRICT 42: Prelim recount results are in. Democratic challenger Ed Cardillo Jr. adds two votes to his total and Frank Ricci loses one vote. Cardillo's lead stands at 3,512 to 3,399, and he looks to be the winner of the vacant @stephen_ucci seat.", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-11", "text_1_tokenized": ["\u201c", "We", "have", "no", "other", "option", "before", "us", "but", "to", "canvass", ",", "or", "certify", ",", "the", "recount", ".", "\u201d"], "text_2_tokenized": ["DISTRICT", "42", ":", "Prelim", "recount", "results", "are", "in", ".", "Democratic", "challenger", "Ed", "Cardillo", "Jr", ".", "adds", "two", "votes", "to", "his", "total", "and", "Frank", "Ricci", "loses", "one", "vote", ".", "Cardillo's", "lead", "stands", "at", "3,512", "to", "3,399", ",", "and", "he", "looks", "to", "be", "the", "winner", "of", "the", "vacant", "@stephen_ucci", "seat", "."]} -{"id": "1175-recount", "word": "recount", "label_binary": 1, "text_1": "I'm a precinct judge with our county Board of Elections. We use paper ballots which are sent through a M100 (scantron like device) for counting. All of our ballots are preserved. If there's an issue, we go get the paper ballots and recount. Our counts are exact - every time.", "token_idx_1": 48, "text_start_1": 232, "text_end_1": 239, "date_1": "2019-11", "text_2": "So the Trump campaign paid $3 million for a recount in Milwaukee and Madison. The Milwaukee recount just concluded... and Joe Biden's lead *grew* by 132 votes, per @PostRoz.", "token_idx_2": 10, "text_start_2": 44, "text_end_2": 51, "date_2": "2020-11", "text_1_tokenized": ["I'm", "a", "precinct", "judge", "with", "our", "county", "Board", "of", "Elections", ".", "We", "use", "paper", "ballots", "which", "are", "sent", "through", "a", "M100", "(", "scantron", "like", "device", ")", "for", "counting", ".", "All", "of", "our", "ballots", "are", "preserved", ".", "If", "there's", "an", "issue", ",", "we", "go", "get", "the", "paper", "ballots", "and", "recount", ".", "Our", "counts", "are", "exact", "-", "every", "time", "."], "text_2_tokenized": ["So", "the", "Trump", "campaign", "paid", "$", "3", "million", "for", "a", "recount", "in", "Milwaukee", "and", "Madison", ".", "The", "Milwaukee", "recount", "just", "concluded", "...", "and", "Joe", "Biden's", "lead", "*", "grew", "*", "by", "132", "votes", ",", "per", "@PostRoz", "."]} -{"id": "1176-recount", "word": "recount", "label_binary": 0, "text_1": "Mrs B asked me last night what @mrjamesob's story was. I was able to recount his entire life, from birth & adoption to schooling, early career in tailoring to the celeb pages, gaming, early work on TV, to his recent successes on LBC and HTBRIAWGW. I need to lay off the @LBC.", "token_idx_1": 17, "text_start_1": 69, "text_end_1": 76, "date_1": "2019-11", "text_2": "Imagine having to recount millions of ballots by hand, during a pandemic, and you can't lick your thumb. Maddening! God bless those poll workers!!! 
#GeorgiaVotes #CountAllVotes", "token_idx_2": 3, "text_start_2": 18, "text_end_2": 25, "date_2": "2020-11", "text_1_tokenized": ["Mrs", "B", "asked", "me", "last", "night", "what", "@mrjamesob", "'", "s", "story", "was", ".", "I", "was", "able", "to", "recount", "his", "entire", "life", ",", "from", "birth", "&", "adoption", "to", "schooling", ",", "early", "career", "in", "tailoring", "to", "the", "celeb", "pages", ",", "gaming", ",", "early", "work", "on", "TV", ",", "to", "his", "recent", "successes", "on", "LBC", "and", "HTBRIAWGW", ".", "I", "need", "to", "lay", "off", "the", "@LBC", "."], "text_2_tokenized": ["Imagine", "having", "to", "recount", "millions", "of", "ballots", "by", "hand", ",", "during", "a", "pandemic", ",", "and", "you", "can't", "lick", "your", "thumb", ".", "Maddening", "!", "God", "bless", "those", "poll", "workers", "!", "!", "!", "#GeorgiaVotes", "#CountAllVotes"]} -{"id": "1177-recount", "word": "recount", "label_binary": 0, "text_1": "A recount of me watching awae s3 ep 10: #renewannewithane", "token_idx_1": 1, "text_start_1": 2, "text_end_1": 9, "date_1": "2019-11", "text_2": "I keep hearing that Trump was denied a peaceful transfer of power. I missed the news stories when they had to drag Obama out of the WH? I missed the +35 law suits Clinton filed with states she lost demanding a recount? I missed Clinton refusing to concede for weeks? Helluva nap.", "token_idx_2": 44, "text_start_2": 210, "text_end_2": 217, "date_2": "2020-11", "text_1_tokenized": ["A", "recount", "of", "me", "watching", "awae", "s3", "ep", "10", ":", "#renewannewithane"], "text_2_tokenized": ["I", "keep", "hearing", "that", "Trump", "was", "denied", "a", "peaceful", "transfer", "of", "power", ".", "I", "missed", "the", "news", "stories", "when", "they", "had", "to", "drag", "Obama", "out", "of", "the", "WH", "?", "I", "missed", "the", "+", "35", "law", "suits", "Clinton", "filed", "with", "states", "she", "lost", "demanding", "a", "recount", "?", "I", "missed", "Clinton", "refusing", "to", "concede", "for", "weeks", "?", "Helluva", "nap", "."]} -{"id": "1178-recount", "word": "recount", "label_binary": 1, "text_1": "The fact Matt Bevin is demeaning a recount is ridiculous.... buddy, you lost", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-11", "text_2": "Biden is not the President-elect. Until states recount, verify and certify their vote counts, it's still undecided #electionfraud", "token_idx_2": 8, "text_start_2": 47, "text_end_2": 54, "date_2": "2020-11", "text_1_tokenized": ["The", "fact", "Matt", "Bevin", "is", "demeaning", "a", "recount", "is", "ridiculous", "...", "buddy", ",", "you", "lost"], "text_2_tokenized": ["Biden", "is", "not", "the", "President-elect", ".", "Until", "states", "recount", ",", "verify", "and", "certify", "their", "vote", "counts", ",", "it's", "still", "undecided", "#electionfraud"]} -{"id": "1179-recount", "word": "recount", "label_binary": 1, "text_1": "Per @JimKinney413, Mike McCabe has conceded in #WestfieldMA mayor's race. Apparently, McCabe thought a recount was automatic (it's not) and opted against forcing one.", "token_idx_1": 17, "text_start_1": 103, "text_end_1": 110, "date_1": "2019-11", "text_2": "Al Gore won the popular vote and lost the EC. He demanded a recount in one state for cause and lost. Hillary won the PV and lost the EC. She conceded graciously the next day. Trump lost BOTH the PV and the EC. No evidence of fraud in 6 states. 
Explain why he has a shot.", "token_idx_2": 14, "text_start_2": 60, "text_end_2": 67, "date_2": "2020-11", "text_1_tokenized": ["Per", "@JimKinney413", ",", "Mike", "McCabe", "has", "conceded", "in", "#WestfieldMA", "mayor's", "race", ".", "Apparently", ",", "McCabe", "thought", "a", "recount", "was", "automatic", "(", "it's", "not", ")", "and", "opted", "against", "forcing", "one", "."], "text_2_tokenized": ["Al", "Gore", "won", "the", "popular", "vote", "and", "lost", "the", "EC", ".", "He", "demanded", "a", "recount", "in", "one", "state", "for", "cause", "and", "lost", ".", "Hillary", "won", "the", "PV", "and", "lost", "the", "EC", ".", "She", "conceded", "graciously", "the", "next", "day", ".", "Trump", "lost", "BOTH", "the", "PV", "and", "the", "EC", ".", "No", "evidence", "of", "fraud", "in", "6", "states", ".", "Explain", "why", "he", "has", "a", "shot", "."]} -{"id": "1180-recount", "word": "recount", "label_binary": 1, "text_1": "st. guillen reverses course, calls for recount as she trails mejia by 10 votes #bospoli #mapoli", "token_idx_1": 8, "text_start_1": 39, "text_end_1": 46, "date_1": "2019-11", "text_2": "These poor swing states being harassed by this president and cost taxpayers $ for his none sense recount , can he just concede, ppl are trying to live normal life from his insanity", "token_idx_2": 17, "text_start_2": 97, "text_end_2": 104, "date_2": "2020-11", "text_1_tokenized": ["st", ".", "guillen", "reverses", "course", ",", "calls", "for", "recount", "as", "she", "trails", "mejia", "by", "10", "votes", "#bospoli", "#mapoli"], "text_2_tokenized": ["These", "poor", "swing", "states", "being", "harassed", "by", "this", "president", "and", "cost", "taxpayers", "$", "for", "his", "none", "sense", "recount", ",", "can", "he", "just", "concede", ",", "ppl", "are", "trying", "to", "live", "normal", "life", "from", "his", "insanity"]} -{"id": "1181-recount", "word": "recount", "label_binary": 1, "text_1": "How did y'all let @Iamkelmitchell lose on #dwts. I demand a recount!", "token_idx_1": 12, "text_start_1": 60, "text_end_1": 67, "date_1": "2019-11", "text_2": "I got a feeling they left Manhattan boarded up cuz of this recount \ud83d\udca9", "token_idx_2": 12, "text_start_2": 59, "text_end_2": 66, "date_2": "2020-11", "text_1_tokenized": ["How", "did", "y'all", "let", "@Iamkelmitchell", "lose", "on", "#dwts", ".", "I", "demand", "a", "recount", "!"], "text_2_tokenized": ["I", "got", "a", "feeling", "they", "left", "Manhattan", "boarded", "up", "cuz", "of", "this", "recount", "\ud83d\udca9"]} -{"id": "1182-recount", "word": "recount", "label_binary": 1, "text_1": "They robbed Rayshard Ashby... 2nd team my ass... All ACC linebacker 5 weeks... You 3rd in votes for defensive player of year but don't make first team all ACC... I demand a recount...", "token_idx_1": 36, "text_start_1": 173, "text_end_1": 180, "date_1": "2019-11", "text_2": "Tax payers should not have to pay for this pointless recount in Georgia, especially with so many in need of stimulus funds. 
Disgusting.", "token_idx_2": 10, "text_start_2": 53, "text_end_2": 60, "date_2": "2020-11", "text_1_tokenized": ["They", "robbed", "Rayshard", "Ashby", "...", "2nd", "team", "my", "ass", "...", "All", "ACC", "linebacker", "5", "weeks", "...", "You", "3rd", "in", "votes", "for", "defensive", "player", "of", "year", "but", "don't", "make", "first", "team", "all", "ACC", "...", "I", "demand", "a", "recount", "..."], "text_2_tokenized": ["Tax", "payers", "should", "not", "have", "to", "pay", "for", "this", "pointless", "recount", "in", "Georgia", ",", "especially", "with", "so", "many", "in", "need", "of", "stimulus", "funds", ".", "Disgusting", "."]} -{"id": "1183-recount", "word": "recount", "label_binary": 0, "text_1": "We give thanks to you, O God; we give thanks, for your name is near. We recount your wondrous deeds. Psalms 75:1 ESV", "token_idx_1": 20, "text_start_1": 72, "text_end_1": 79, "date_1": "2019-11", "text_2": "Can we please recount South Carolina now???", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 21, "date_2": "2020-11", "text_1_tokenized": ["We", "give", "thanks", "to", "you", ",", "O", "God", ";", "we", "give", "thanks", ",", "for", "your", "name", "is", "near", ".", "We", "recount", "your", "wondrous", "deeds", ".", "Psalms", "75:1", "ESV"], "text_2_tokenized": ["Can", "we", "please", "recount", "South", "Carolina", "now", "?", "?", "?"]} -{"id": "1184-recount", "word": "recount", "label_binary": 1, "text_1": "Trump challenges his loss in Kentucky and Bevin does not accept his defeat. GOP will pay for the recount.", "token_idx_1": 19, "text_start_1": 97, "text_end_1": 104, "date_1": "2019-11", "text_2": "The whole ballot recount and fraud thing: Biden is going to win Period. That's that. But I'm all for exposing corruption and I agree everything should be examined. It's not a who wins and who loses thing it's an integrity thing. It's about making the next one in four years better", "token_idx_2": 3, "text_start_2": 17, "text_end_2": 24, "date_2": "2020-11", "text_1_tokenized": ["Trump", "challenges", "his", "loss", "in", "Kentucky", "and", "Bevin", "does", "not", "accept", "his", "defeat", ".", "GOP", "will", "pay", "for", "the", "recount", "."], "text_2_tokenized": ["The", "whole", "ballot", "recount", "and", "fraud", "thing", ":", "Biden", "is", "going", "to", "win", "Period", ".", "That's", "that", ".", "But", "I'm", "all", "for", "exposing", "corruption", "and", "I", "agree", "everything", "should", "be", "examined", ".", "It's", "not", "a", "who", "wins", "and", "who", "loses", "thing", "it's", "an", "integrity", "thing", ".", "It's", "about", "making", "the", "next", "one", "in", "four", "years", "better"]} -{"id": "1185-recount", "word": "recount", "label_binary": 1, "text_1": "Based on the results of a manual hand recount, the votes for Ocean Shores Mayor have not changed. 
Crystal Dingler holds a 3 point lead over Susan Conniry stands, pending certification, and will continue as Mayor.", "token_idx_1": 8, "text_start_1": 38, "text_end_1": 45, "date_1": "2019-11", "text_2": "Trump really paid $3 million for a recount, only to give Biden 132 more votes \ud83d\ude2d give it up", "token_idx_2": 8, "text_start_2": 35, "text_end_2": 42, "date_2": "2020-11", "text_1_tokenized": ["Based", "on", "the", "results", "of", "a", "manual", "hand", "recount", ",", "the", "votes", "for", "Ocean", "Shores", "Mayor", "have", "not", "changed", ".", "Crystal", "Dingler", "holds", "a", "3", "point", "lead", "over", "Susan", "Conniry", "stands", ",", "pending", "certification", ",", "and", "will", "continue", "as", "Mayor", "."], "text_2_tokenized": ["Trump", "really", "paid", "$", "3", "million", "for", "a", "recount", ",", "only", "to", "give", "Biden", "132", "more", "votes", "\ud83d\ude2d", "give", "it", "up"]} -{"id": "1186-recount", "word": "recount", "label_binary": 1, "text_1": "That's it. Auburn can't win this game if they can't make a tackle on a 12 yard completion. Best tackling secondary in the country? They may need to recount those votes. #ALVSAU", "token_idx_1": 31, "text_start_1": 148, "text_end_1": 155, "date_1": "2019-11", "text_2": "Trump spent 3 million to recount a partial vote and they found thousands more votes for Biden lmaoo", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 32, "date_2": "2020-11", "text_1_tokenized": ["That's", "it", ".", "Auburn", "can't", "win", "this", "game", "if", "they", "can't", "make", "a", "tackle", "on", "a", "12", "yard", "completion", ".", "Best", "tackling", "secondary", "in", "the", "country", "?", "They", "may", "need", "to", "recount", "those", "votes", ".", "#ALVSAU"], "text_2_tokenized": ["Trump", "spent", "3", "million", "to", "recount", "a", "partial", "vote", "and", "they", "found", "thousands", "more", "votes", "for", "Biden", "lmaoo"]} -{"id": "1187-recount", "word": "recount", "label_binary": 0, "text_1": "Is Putin's puppet going down? Thank you Mr. Holmes for your clear and precise recount of these events. Do Republicans and Trumpers truly want to live in an oligarchy-type regime as they have in Russia. Washington and all the fathers of Confederation must be turning in their grave", "token_idx_1": 16, "text_start_1": 78, "text_end_1": 85, "date_1": "2019-11", "text_2": "Ok ok but consider this: if we have a recount, we get to see Turnip lose twice.", "token_idx_2": 10, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-11", "text_1_tokenized": ["Is", "Putin's", "puppet", "going", "down", "?", "Thank", "you", "Mr", ".", "Holmes", "for", "your", "clear", "and", "precise", "recount", "of", "these", "events", ".", "Do", "Republicans", "and", "Trumpers", "truly", "want", "to", "live", "in", "an", "oligarchy-type", "regime", "as", "they", "have", "in", "Russia", ".", "Washington", "and", "all", "the", "fathers", "of", "Confederation", "must", "be", "turning", "in", "their", "grave"], "text_2_tokenized": ["Ok", "ok", "but", "consider", "this", ":", "if", "we", "have", "a", "recount", ",", "we", "get", "to", "see", "Turnip", "lose", "twice", "."]} -{"id": "1188-recount", "word": "recount", "label_binary": 1, "text_1": "I mean, I KNOW there's gonna be a recount...", "token_idx_1": 9, "text_start_1": 34, "text_end_1": 41, "date_1": "2019-11", "text_2": "This irregular mail in ballot is the culprit, we Trump supporters dam and a recount. 
Trump stays in the white house until new results.", "token_idx_2": 15, "text_start_2": 76, "text_end_2": 83, "date_2": "2020-11", "text_1_tokenized": ["I", "mean", ",", "I", "KNOW", "there's", "gonna", "be", "a", "recount", "..."], "text_2_tokenized": ["This", "irregular", "mail", "in", "ballot", "is", "the", "culprit", ",", "we", "Trump", "supporters", "dam", "and", "a", "recount", ".", "Trump", "stays", "in", "the", "white", "house", "until", "new", "results", "."]} -{"id": "1189-recount", "word": "recount", "label_binary": 1, "text_1": ".@CherylTurpinVB's campaign just sent over a note saying she conceded to @JenKiggans. The Virginia Beach senate race was tight, and on early, unverified results looked like Turpin would be able to ask for a recount.", "token_idx_1": 40, "text_start_1": 207, "text_end_1": 214, "date_1": "2019-11", "text_2": "Pennsylvania's top elections official confirmed Friday she will not be ordering a recount or recanvass of her state's election results as Republicans rail against officials there over groundless voter fraud claims.", "token_idx_2": 12, "text_start_2": 82, "text_end_2": 89, "date_2": "2020-11", "text_1_tokenized": [".", "@CherylTurpinVB", "'", "s", "campaign", "just", "sent", "over", "a", "note", "saying", "she", "conceded", "to", "@JenKiggans", ".", "The", "Virginia", "Beach", "senate", "race", "was", "tight", ",", "and", "on", "early", ",", "unverified", "results", "looked", "like", "Turpin", "would", "be", "able", "to", "ask", "for", "a", "recount", "."], "text_2_tokenized": ["Pennsylvania's", "top", "elections", "official", "confirmed", "Friday", "she", "will", "not", "be", "ordering", "a", "recount", "or", "recanvass", "of", "her", "state's", "election", "results", "as", "Republicans", "rail", "against", "officials", "there", "over", "groundless", "voter", "fraud", "claims", "."]} -{"id": "1190-recount", "word": "recount", "label_binary": 1, "text_1": "I want a recount @BETAwards Lizzo shouldn't have won", "token_idx_1": 3, "text_start_1": 9, "text_end_1": 16, "date_1": "2019-11", "text_2": "Enough potential fraudulent ballots identified to change outcome in WI. AZ margin is now only 400-500 votes above automatic recount, and there's a lawsuit there over 180 ballots that state admitted was wrong. If we could flip WI and AZ, throw out secret PA votes, DJT wins", "token_idx_2": 20, "text_start_2": 124, "text_end_2": 131, "date_2": "2020-11", "text_1_tokenized": ["I", "want", "a", "recount", "@BETAwards", "Lizzo", "shouldn't", "have", "won"], "text_2_tokenized": ["Enough", "potential", "fraudulent", "ballots", "identified", "to", "change", "outcome", "in", "WI", ".", "AZ", "margin", "is", "now", "only", "400-500", "votes", "above", "automatic", "recount", ",", "and", "there's", "a", "lawsuit", "there", "over", "180", "ballots", "that", "state", "admitted", "was", "wrong", ".", "If", "we", "could", "flip", "WI", "and", "AZ", ",", "throw", "out", "secret", "PA", "votes", ",", "DJT", "wins"]} -{"id": "1191-recount", "word": "recount", "label_binary": 1, "text_1": "The word game I'm playing didn't accept \u201cgank\u201d as a word. I want a recount.", "token_idx_1": 17, "text_start_1": 67, "text_end_1": 74, "date_1": "2019-11", "text_2": "Trump would have to pay $8mil, up front, to get a recount in Wisconsin. So, that's not happening. He's never paid \u2018up front', for anything... Well, maybe the porn stars... 
\ud83e\udd14", "token_idx_2": 14, "text_start_2": 50, "text_end_2": 57, "date_2": "2020-11", "text_1_tokenized": ["The", "word", "game", "I'm", "playing", "didn't", "accept", "\u201c", "gank", "\u201d", "as", "a", "word", ".", "I", "want", "a", "recount", "."], "text_2_tokenized": ["Trump", "would", "have", "to", "pay", "$", "8mil", ",", "up", "front", ",", "to", "get", "a", "recount", "in", "Wisconsin", ".", "So", ",", "that's", "not", "happening", ".", "He's", "never", "paid", "\u2018", "up", "front", "'", ",", "for", "anything", "...", "Well", ",", "maybe", "the", "porn", "stars", "...", "\ud83e\udd14"]} -{"id": "1192-recount", "word": "recount", "label_binary": 1, "text_1": "Pittsfield recount update: Mayor Tyer says the recount has validated the results of the November 5th election, and dismissed the Mazzeo campaign's claims as nonsense. She said the results were mostly flat with slight gains for her numbers. @WAMCNews", "token_idx_1": 1, "text_start_1": 11, "text_end_1": 18, "date_1": "2019-11", "text_2": "Why is no one calling for a recount of the Senate vote for #LeningradLindsey ?", "token_idx_2": 7, "text_start_2": 28, "text_end_2": 35, "date_2": "2020-11", "text_1_tokenized": ["Pittsfield", "recount", "update", ":", "Mayor", "Tyer", "says", "the", "recount", "has", "validated", "the", "results", "of", "the", "November", "5th", "election", ",", "and", "dismissed", "the", "Mazzeo", "campaign's", "claims", "as", "nonsense", ".", "She", "said", "the", "results", "were", "mostly", "flat", "with", "slight", "gains", "for", "her", "numbers", ".", "@WAMCNews"], "text_2_tokenized": ["Why", "is", "no", "one", "calling", "for", "a", "recount", "of", "the", "Senate", "vote", "for", "#LeningradLindsey", "?"]} -{"id": "1193-recount", "word": "recount", "label_binary": 1, "text_1": "Dear Jo Swinson, If you achieve 52% of the vote share in a particular constituency, will you regard that as a victory ? Or would you welcome a recount ? No, thought not.", "token_idx_1": 31, "text_start_1": 143, "text_end_1": 150, "date_1": "2019-11", "text_2": "If there's nothing to hide and the results will end the same why is the left afraid of a recount in these key states??\ud83e\udd14 @POTUS @realDonaldTrump", "token_idx_2": 19, "text_start_2": 89, "text_end_2": 96, "date_2": "2020-11", "text_1_tokenized": ["Dear", "Jo", "Swinson", ",", "If", "you", "achieve", "52", "%", "of", "the", "vote", "share", "in", "a", "particular", "constituency", ",", "will", "you", "regard", "that", "as", "a", "victory", "?", "Or", "would", "you", "welcome", "a", "recount", "?", "No", ",", "thought", "not", "."], "text_2_tokenized": ["If", "there's", "nothing", "to", "hide", "and", "the", "results", "will", "end", "the", "same", "why", "is", "the", "left", "afraid", "of", "a", "recount", "in", "these", "key", "states", "?", "?", "\ud83e\udd14", "@POTUS", "@realDonaldTrump"]} -{"id": "1194-recount", "word": "recount", "label_binary": 1, "text_1": "Every vote counts as demonstrated by the Mt. 
Clemens Commission results which may be headed for a recount http://18.221.153.194/m27/17.html", "token_idx_1": 18, "text_start_1": 98, "text_end_1": 105, "date_1": "2019-11", "text_2": "What is the point in a second Georgia recount if Biden has 270 electoral college votes with/without it, asking for a friend", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 45, "date_2": "2020-11", "text_1_tokenized": ["Every", "vote", "counts", "as", "demonstrated", "by", "the", "Mt", ".", "Clemens", "Commission", "results", "which", "may", "be", "headed", "for", "a", "recount", "http://18.221.153.194/m27/17.html"], "text_2_tokenized": ["What", "is", "the", "point", "in", "a", "second", "Georgia", "recount", "if", "Biden", "has", "270", "electoral", "college", "votes", "with", "/", "without", "it", ",", "asking", "for", "a", "friend"]} -{"id": "1195-recount", "word": "recount", "label_binary": 1, "text_1": "#Boston we do not fully have the results for City Councilor At-large election. Please stop by JP office (Jackson Sq) or Dudley Sq office to sign the petition for recount. please understand this was 10 (!!!) votes difference, so every single vote counts!!! #BosPoli @astguillen", "token_idx_1": 32, "text_start_1": 162, "text_end_1": 169, "date_1": "2019-11", "text_2": "GA recount: Getting reports from observers in our county elections office that local GOP has been reinforcing their brand as Entitled A**holes. Start time supposed to be 10 AM. Elections manager rearranged room to make it easier on monitors to see process.", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 10, "date_2": "2020-11", "text_1_tokenized": ["#Boston", "we", "do", "not", "fully", "have", "the", "results", "for", "City", "Councilor", "At-large", "election", ".", "Please", "stop", "by", "JP", "office", "(", "Jackson", "Sq", ")", "or", "Dudley", "Sq", "office", "to", "sign", "the", "petition", "for", "recount", ".", "please", "understand", "this", "was", "10", "(", "!", "!", "!", ")", "votes", "difference", ",", "so", "every", "single", "vote", "counts", "!", "!", "!", "#BosPoli", "@astguillen"], "text_2_tokenized": ["GA", "recount", ":", "Getting", "reports", "from", "observers", "in", "our", "county", "elections", "office", "that", "local", "GOP", "has", "been", "reinforcing", "their", "brand", "as", "Entitled", "A", "*", "*", "holes", ".", "Start", "time", "supposed", "to", "be", "10", "AM", ".", "Elections", "manager", "rearranged", "room", "to", "make", "it", "easier", "on", "monitors", "to", "see", "process", "."]} -{"id": "1196-recount", "word": "recount", "label_binary": 1, "text_1": "I'm mad as hell.. so the Republican candidate for Governor loses, and instead of conceding, he's crying like a baby, and no recount .. 
the Kentucky general Assembly might pick the winner\ud83e\udd2c\ud83e\udd2cyou know someone will get paid large sums to say Bevin is the winner,nothing is sacred", "token_idx_1": 26, "text_start_1": 124, "text_end_1": 131, "date_1": "2019-11", "text_2": "I want a recount of the so-called million racist March.", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-11", "text_1_tokenized": ["I'm", "mad", "as", "hell", "..", "so", "the", "Republican", "candidate", "for", "Governor", "loses", ",", "and", "instead", "of", "conceding", ",", "he's", "crying", "like", "a", "baby", ",", "and", "no", "recount", "..", "the", "Kentucky", "general", "Assembly", "might", "pick", "the", "winner", "\ud83e\udd2c", "\ud83e\udd2c", "you", "know", "someone", "will", "get", "paid", "large", "sums", "to", "say", "Bevin", "is", "the", "winner", ",", "nothing", "is", "sacred"], "text_2_tokenized": ["I", "want", "a", "recount", "of", "the", "so-called", "million", "racist", "March", "."]} -{"id": "1197-recount", "word": "recount", "label_binary": 1, "text_1": "Peak 2019: The same people who haven't accepted that Hillary lost in 2016 are pissed about Bevin wanting a recount on a 4,000 vote difference. Interesting. Recount won't change the outcome most likely. But can you imagine their meltdown if it did? This could be hilarious.", "token_idx_1": 20, "text_start_1": 107, "text_end_1": 114, "date_1": "2019-11", "text_2": "Looking at my instructor feedback, out of 20 Students in the 2 courses that did the survey, I have one student who either HATED me and the course... or got confused if 1 was good or bad on the Likert scale. I demand a recount, I will not accept these results.", "token_idx_2": 48, "text_start_2": 218, "text_end_2": 225, "date_2": "2020-11", "text_1_tokenized": ["Peak", "2019", ":", "The", "same", "people", "who", "haven't", "accepted", "that", "Hillary", "lost", "in", "2016", "are", "pissed", "about", "Bevin", "wanting", "a", "recount", "on", "a", "4,000", "vote", "difference", ".", "Interesting", ".", "Recount", "won't", "change", "the", "outcome", "most", "likely", ".", "But", "can", "you", "imagine", "their", "meltdown", "if", "it", "did", "?", "This", "could", "be", "hilarious", "."], "text_2_tokenized": ["Looking", "at", "my", "instructor", "feedback", ",", "out", "of", "20", "Students", "in", "the", "2", "courses", "that", "did", "the", "survey", ",", "I", "have", "one", "student", "who", "either", "HATED", "me", "and", "the", "course", "...", "or", "got", "confused", "if", "1", "was", "good", "or", "bad", "on", "the", "Likert", "scale", ".", "I", "demand", "a", "recount", ",", "I", "will", "not", "accept", "these", "results", "."]} -{"id": "1198-recount", "word": "recount", "label_binary": 0, "text_1": "#popmaster recount 36,nooooo.Didnt get year", "token_idx_1": 1, "text_start_1": 11, "text_end_1": 18, "date_1": "2019-11", "text_2": "I would love love love to see a big fat \ud83d\udd95 in the Wisconsin audit/recount that turns this ship around and gets the maggots in a frenzy. 
How about you?", "token_idx_2": 16, "text_start_2": 65, "text_end_2": 72, "date_2": "2020-11", "text_1_tokenized": ["#popmaster", "recount", "36", ",", "nooooo.Didnt", "get", "year"], "text_2_tokenized": ["I", "would", "love", "love", "love", "to", "see", "a", "big", "fat", "\ud83d\udd95", "in", "the", "Wisconsin", "audit", "/", "recount", "that", "turns", "this", "ship", "around", "and", "gets", "the", "maggots", "in", "a", "frenzy", ".", "How", "about", "you", "?"]} -{"id": "1199-recount", "word": "recount", "label_binary": 1, "text_1": "Cries of \"shame\" at #vicalpconf19 as @HutchHussein announces the result of the recount, conceding she counted wrongly last time. The motion passes 314 to 227. Hussein says she accidentally counted those for as 214 last time.", "token_idx_1": 14, "text_start_1": 79, "text_end_1": 86, "date_1": "2019-11", "text_2": "The GA recount is stinky, stinky, stinky. An audit is required immediately.", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 14, "date_2": "2020-11", "text_1_tokenized": ["Cries", "of", "\"", "shame", "\"", "at", "#vicalpconf19", "as", "@HutchHussein", "announces", "the", "result", "of", "the", "recount", ",", "conceding", "she", "counted", "wrongly", "last", "time", ".", "The", "motion", "passes", "314", "to", "227", ".", "Hussein", "says", "she", "accidentally", "counted", "those", "for", "as", "214", "last", "time", "."], "text_2_tokenized": ["The", "GA", "recount", "is", "stinky", ",", "stinky", ",", "stinky", ".", "An", "audit", "is", "required", "immediately", "."]} -{"id": "1200-recount", "word": "recount", "label_binary": 0, "text_1": "collapsing into the booth at the restaurant as my deaf dad and loud mom yell in recount of the cum scene from there's something about mary", "token_idx_1": 16, "text_start_1": 80, "text_end_1": 87, "date_1": "2019-11", "text_2": "1/2 \ud83d\udea8BREAKING: A third recount in Georgia begins today. Poll workers will have until 11:59 p.m. on Dec. 2 to finish counting all votes. All counties will give a public notice of when they will be counting in order to allow...", "token_idx_2": 6, "text_start_2": 23, "text_end_2": 30, "date_2": "2020-11", "text_1_tokenized": ["collapsing", "into", "the", "booth", "at", "the", "restaurant", "as", "my", "deaf", "dad", "and", "loud", "mom", "yell", "in", "recount", "of", "the", "cum", "scene", "from", "there's", "something", "about", "mary"], "text_2_tokenized": ["1/2", "\ud83d\udea8", "BREAKING", ":", "A", "third", "recount", "in", "Georgia", "begins", "today", ".", "Poll", "workers", "will", "have", "until", "11:59", "p", ".", "m", ".", "on", "Dec", ".", "2", "to", "finish", "counting", "all", "votes", ".", "All", "counties", "will", "give", "a", "public", "notice", "of", "when", "they", "will", "be", "counting", "in", "order", "to", "allow", "..."]} -{"id": "1201-recount", "word": "recount", "label_binary": 0, "text_1": "I mean, I'd participate in the whole #academicshaming thing, but as an #AcademicCasualty (& TBH just a person who's been through grad school), I've been publicly shamed more times than I care to recount. 
(I get that the # is fun & for people revealing their own weak pts, OK)", "token_idx_1": 38, "text_start_1": 199, "text_end_1": 206, "date_1": "2019-11", "text_2": "I demand a recount on @realDonaldTrump's golf score from today.", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 18, "date_2": "2020-11", "text_1_tokenized": ["I", "mean", ",", "I'd", "participate", "in", "the", "whole", "#academicshaming", "thing", ",", "but", "as", "an", "#AcademicCasualty", "(", "&", "TBH", "just", "a", "person", "who's", "been", "through", "grad", "school", ")", ",", "I've", "been", "publicly", "shamed", "more", "times", "than", "I", "care", "to", "recount", ".", "(", "I", "get", "that", "the", "#", "is", "fun", "&", "for", "people", "revealing", "their", "own", "weak", "pts", ",", "OK", ")"], "text_2_tokenized": ["I", "demand", "a", "recount", "on", "@realDonaldTrump", "'", "s", "golf", "score", "from", "today", "."]} -{"id": "1202-recount", "word": "recount", "label_binary": 0, "text_1": "Hey @Sarah_Montague, longtime listener, first time tweeter - there's probably a better way to verify recent examples of anti-Semitism linked to Labour leadership than asking a victim of racial abuse to recount it live on air and potentially retraumatise herself #bbcwato", "token_idx_1": 33, "text_start_1": 202, "text_end_1": 209, "date_1": "2019-11", "text_2": "So let me get this straight...Blue = massive fraud, we must recount. Red = All is fine, carry on. #TrumpIsLosing #BidenHarris2020 #Resist", "token_idx_2": 14, "text_start_2": 60, "text_end_2": 67, "date_2": "2020-11", "text_1_tokenized": ["Hey", "@Sarah_Montague", ",", "longtime", "listener", ",", "first", "time", "tweeter", "-", "there's", "probably", "a", "better", "way", "to", "verify", "recent", "examples", "of", "anti-Semitism", "linked", "to", "Labour", "leadership", "than", "asking", "a", "victim", "of", "racial", "abuse", "to", "recount", "it", "live", "on", "air", "and", "potentially", "retraumatise", "herself", "#bbcwato"], "text_2_tokenized": ["So", "let", "me", "get", "this", "straight", "...", "Blue", "=", "massive", "fraud", ",", "we", "must", "recount", ".", "Red", "=", "All", "is", "fine", ",", "carry", "on", ".", "#TrumpIsLosing", "#BidenHarris2020", "#Resist"]} -{"id": "1203-recount", "word": "recount", "label_binary": 1, "text_1": "I wonder how many petitions we will see for a recount of the #BallonDor2019 from the gobshite bin dippers", "token_idx_1": 10, "text_start_1": 46, "text_end_1": 53, "date_1": "2019-11", "text_2": "my tik tok was blowin up and then it just stopped overnight maybe these recount people are on to something", "token_idx_2": 14, "text_start_2": 72, "text_end_2": 79, "date_2": "2020-11", "text_1_tokenized": ["I", "wonder", "how", "many", "petitions", "we", "will", "see", "for", "a", "recount", "of", "the", "#BallonDor2019", "from", "the", "gobshite", "bin", "dippers"], "text_2_tokenized": ["my", "tik", "tok", "was", "blowin", "up", "and", "then", "it", "just", "stopped", "overnight", "maybe", "these", "recount", "people", "are", "on", "to", "something"]} -{"id": "1204-recount", "word": "recount", "label_binary": 1, "text_1": "\ud83e\udd2e Matt Bevin is not conceding the election and wants a recount. He as to go to a Judge to get one. Final results 49.2% Beshear to 48.8% Bevin. Al; votes have been counted!\ud83e\udd2e", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-11", "text_2": "Macklemore trending for all the right reasons. 
If anything merited a recount, it's the voting for his Grammy.", "token_idx_2": 12, "text_start_2": 69, "text_end_2": 76, "date_2": "2020-11", "text_1_tokenized": ["\ud83e\udd2e", "Matt", "Bevin", "is", "not", "conceding", "the", "election", "and", "wants", "a", "recount", ".", "He", "as", "to", "go", "to", "a", "Judge", "to", "get", "one", ".", "Final", "results", "49.2", "%", "Beshear", "to", "48.8", "%", "Bevin", ".", "Al", ";", "votes", "have", "been", "counted", "!", "\ud83e\udd2e"], "text_2_tokenized": ["Macklemore", "trending", "for", "all", "the", "right", "reasons", ".", "If", "anything", "merited", "a", "recount", ",", "it's", "the", "voting", "for", "his", "Grammy", "."]} -{"id": "1205-recount", "word": "recount", "label_binary": 0, "text_1": "That's bullshit. I demand a recount! #DWTS", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 35, "date_1": "2019-11", "text_2": "FUCK!! Shes literally 17! SEVENTEEN!!! The characters perspective of things/how we at that age process things so differently, how warped and extreme we experience life/recount memories.... + whatever effect the drugs are doing. It's fucking mindblowing #Euphoria #EuphoriaHBO", "token_idx_2": 35, "text_start_2": 168, "text_end_2": 175, "date_2": "2020-11", "text_1_tokenized": ["That's", "bullshit", ".", "I", "demand", "a", "recount", "!", "#DWTS"], "text_2_tokenized": ["FUCK", "!", "!", "Shes", "literally", "17", "!", "SEVENTEEN", "!", "!", "!", "The", "characters", "perspective", "of", "things", "/", "how", "we", "at", "that", "age", "process", "things", "so", "differently", ",", "how", "warped", "and", "extreme", "we", "experience", "life", "/", "recount", "memories", "...", "+", "whatever", "effect", "the", "drugs", "are", "doing", ".", "It's", "fucking", "mindblowing", "#Euphoria", "#EuphoriaHBO"]} -{"id": "1206-recount", "word": "recount", "label_binary": 1, "text_1": "Does Kentucky trigger an automatic recount for close elections?", "token_idx_1": 5, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-11", "text_2": "Monica Palmer isn't going to be satisfied with any counts unless she alone is allowed to perform the recount of MI in its entirety - herself. Evidently everyone else is incompetent.", "token_idx_2": 18, "text_start_2": 101, "text_end_2": 108, "date_2": "2020-11", "text_1_tokenized": ["Does", "Kentucky", "trigger", "an", "automatic", "recount", "for", "close", "elections", "?"], "text_2_tokenized": ["Monica", "Palmer", "isn't", "going", "to", "be", "satisfied", "with", "any", "counts", "unless", "she", "alone", "is", "allowed", "to", "perform", "the", "recount", "of", "MI", "in", "its", "entirety", "-", "herself", ".", "Evidently", "everyone", "else", "is", "incompetent", "."]} -{"id": "1207-recount", "word": "recount", "label_binary": 1, "text_1": "I was obviously around durning the Florida recount of the 2000 election, but I had never heard about the \u201cbrooks brothers riot\u201d, where a group of gop members stopped a recount of rejected computer votes. 
If you don't know about it, look it up, very interesting and disturbing.", "token_idx_1": 7, "text_start_1": 43, "text_end_1": 50, "date_1": "2019-11", "text_2": "Calling the recount for bitcoin at 21 million.", "token_idx_2": 2, "text_start_2": 12, "text_end_2": 19, "date_2": "2020-11", "text_1_tokenized": ["I", "was", "obviously", "around", "durning", "the", "Florida", "recount", "of", "the", "2000", "election", ",", "but", "I", "had", "never", "heard", "about", "the", "\u201c", "brooks", "brothers", "riot", "\u201d", ",", "where", "a", "group", "of", "gop", "members", "stopped", "a", "recount", "of", "rejected", "computer", "votes", ".", "If", "you", "don't", "know", "about", "it", ",", "look", "it", "up", ",", "very", "interesting", "and", "disturbing", "."], "text_2_tokenized": ["Calling", "the", "recount", "for", "bitcoin", "at", "21", "million", "."]} -{"id": "1208-recount", "word": "recount", "label_binary": 0, "text_1": "fun challenge: recount the plot of Pirates of the Caribbean 3, from memory, without losing your mind. if you're still alive after your attempt you win", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 22, "date_1": "2019-11", "text_2": "Man I hope to god they do recount the votes and Biden wins by even more than he did the first time\ud83d\ude02\ud83d\ude02 fuck off you hypocritical idiots", "token_idx_2": 7, "text_start_2": 26, "text_end_2": 33, "date_2": "2020-11", "text_1_tokenized": ["fun", "challenge", ":", "recount", "the", "plot", "of", "Pirates", "of", "the", "Caribbean", "3", ",", "from", "memory", ",", "without", "losing", "your", "mind", ".", "if", "you're", "still", "alive", "after", "your", "attempt", "you", "win"], "text_2_tokenized": ["Man", "I", "hope", "to", "god", "they", "do", "recount", "the", "votes", "and", "Biden", "wins", "by", "even", "more", "than", "he", "did", "the", "first", "time", "\ud83d\ude02", "\ud83d\ude02", "fuck", "off", "you", "hypocritical", "idiots"]} -{"id": "1209-recount", "word": "recount", "label_binary": 1, "text_1": "If Bevin wants a recount he should hire a couple math teachers \ud83e\udd37\ud83c\udffd\u200d\u2642\ufe0f", "token_idx_1": 4, "text_start_1": 17, "text_end_1": 24, "date_1": "2019-11", "text_2": "All of the ballots from this past election should be archived. And stored in a secret location in case anyone need another recount.", "token_idx_2": 23, "text_start_2": 123, "text_end_2": 130, "date_2": "2020-11", "text_1_tokenized": ["If", "Bevin", "wants", "a", "recount", "he", "should", "hire", "a", "couple", "math", "teachers", "\ud83e\udd37\ud83c\udffd\u200d\u2642", "\ufe0f"], "text_2_tokenized": ["All", "of", "the", "ballots", "from", "this", "past", "election", "should", "be", "archived", ".", "And", "stored", "in", "a", "secret", "location", "in", "case", "anyone", "need", "another", "recount", "."]} -{"id": "1210-recount", "word": "recount", "label_binary": 1, "text_1": "FWIW: The campaign of Daniel Davis Clayton, who missed the #HD100 special election runoff by five votes Tuesday, says he's still considering whether to ask for a recount. #txlege", "token_idx_1": 30, "text_start_1": 162, "text_end_1": 169, "date_1": "2019-11", "text_2": "Forget the recount. 
A good old fashioned 3 legged race will do", "token_idx_2": 2, "text_start_2": 11, "text_end_2": 18, "date_2": "2020-11", "text_1_tokenized": ["FWIW", ":", "The", "campaign", "of", "Daniel", "Davis", "Clayton", ",", "who", "missed", "the", "#HD100", "special", "election", "runoff", "by", "five", "votes", "Tuesday", ",", "says", "he's", "still", "considering", "whether", "to", "ask", "for", "a", "recount", ".", "#txlege"], "text_2_tokenized": ["Forget", "the", "recount", ".", "A", "good", "old", "fashioned", "3", "legged", "race", "will", "do"]} -{"id": "1211-recount", "word": "recount", "label_binary": 0, "text_1": "Woweeee I'm embarrassed by my Spotify recount \ud83d\udc80\ud83d\ude02", "token_idx_1": 6, "text_start_1": 38, "text_end_1": 45, "date_1": "2019-11", "text_2": "The media does not have the authority to call the election this is not over the illegal ballots will be found the recount will continue", "token_idx_2": 22, "text_start_2": 114, "text_end_2": 121, "date_2": "2020-11", "text_1_tokenized": ["Woweeee", "I'm", "embarrassed", "by", "my", "Spotify", "recount", "\ud83d\udc80", "\ud83d\ude02"], "text_2_tokenized": ["The", "media", "does", "not", "have", "the", "authority", "to", "call", "the", "election", "this", "is", "not", "over", "the", "illegal", "ballots", "will", "be", "found", "the", "recount", "will", "continue"]} -{"id": "1212-recount", "word": "recount", "label_binary": 0, "text_1": "When life leaves you in a dark pit, don't forget about the goodness of the Lord. It will be your source of strength and joy. When recount what God did for you in the past, it encourages you to move on. You know that sooner or later, God will change your story.", "token_idx_1": 29, "text_start_1": 130, "text_end_1": 137, "date_1": "2019-11", "text_2": "Trump denies penalty shoot out win. Demands recount.", "token_idx_2": 8, "text_start_2": 44, "text_end_2": 51, "date_2": "2020-11", "text_1_tokenized": ["When", "life", "leaves", "you", "in", "a", "dark", "pit", ",", "don't", "forget", "about", "the", "goodness", "of", "the", "Lord", ".", "It", "will", "be", "your", "source", "of", "strength", "and", "joy", ".", "When", "recount", "what", "God", "did", "for", "you", "in", "the", "past", ",", "it", "encourages", "you", "to", "move", "on", ".", "You", "know", "that", "sooner", "or", "later", ",", "God", "will", "change", "your", "story", "."], "text_2_tokenized": ["Trump", "denies", "penalty", "shoot", "out", "win", ".", "Demands", "recount", "."]} -{"id": "1213-recount", "word": "recount", "label_binary": 1, "text_1": "Really hope this recount that Matt Bevin is insisting on shows even more votes for Beshear", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 24, "date_1": "2019-11", "text_2": "Trump's PAC spent $3m on his Wisconsin recount. Result? 132 more votes for Biden. That works out to $22,727.27 per vote. 
Well done, donors!", "token_idx_2": 8, "text_start_2": 39, "text_end_2": 46, "date_2": "2020-11", "text_1_tokenized": ["Really", "hope", "this", "recount", "that", "Matt", "Bevin", "is", "insisting", "on", "shows", "even", "more", "votes", "for", "Beshear"], "text_2_tokenized": ["Trump's", "PAC", "spent", "$", "3m", "on", "his", "Wisconsin", "recount", ".", "Result", "?", "132", "more", "votes", "for", "Biden", ".", "That", "works", "out", "to", "$", "22,727", ".", "27", "per", "vote", ".", "Well", "done", ",", "donors", "!"]} -{"id": "1214-recount", "word": "recount", "label_binary": 1, "text_1": "Everyone celebrating Kentucky governor race when you know the Republican won't concede even after a recount. Expect the most repugnant strategy, tactics, and messaging that they're already planning.", "token_idx_1": 15, "text_start_1": 100, "text_end_1": 107, "date_1": "2019-11", "text_2": "Still waiting for @GovKemp to explain why taxpayers have to pay for another recount....", "token_idx_2": 13, "text_start_2": 76, "text_end_2": 83, "date_2": "2020-11", "text_1_tokenized": ["Everyone", "celebrating", "Kentucky", "governor", "race", "when", "you", "know", "the", "Republican", "won't", "concede", "even", "after", "a", "recount", ".", "Expect", "the", "most", "repugnant", "strategy", ",", "tactics", ",", "and", "messaging", "that", "they're", "already", "planning", "."], "text_2_tokenized": ["Still", "waiting", "for", "@GovKemp", "to", "explain", "why", "taxpayers", "have", "to", "pay", "for", "another", "recount", "..."]} -{"id": "1215-recount", "word": "recount", "label_binary": 1, "text_1": "If the legislature really does decide the KY governor's election and gives it to Bevin even though he CLEARLY LOST, we all have to raise hell Unless a recount changes it, Andy Bashear WON THE ELECTION. He had MORE VOTES. If something stops that we can't just be like \u201cthat sucks\u201d", "token_idx_1": 29, "text_start_1": 151, "text_end_1": 158, "date_1": "2019-11", "text_2": "I love Georgia being all \u201cyea I can get my manager but he's just gonna tell you the same thing\u201d while agreeing to a recount knowing the current number is THE number no matter what.", "token_idx_2": 26, "text_start_2": 116, "text_end_2": 123, "date_2": "2020-11", "text_1_tokenized": ["If", "the", "legislature", "really", "does", "decide", "the", "KY", "governor's", "election", "and", "gives", "it", "to", "Bevin", "even", "though", "he", "CLEARLY", "LOST", ",", "we", "all", "have", "to", "raise", "hell", "Unless", "a", "recount", "changes", "it", ",", "Andy", "Bashear", "WON", "THE", "ELECTION", ".", "He", "had", "MORE", "VOTES", ".", "If", "something", "stops", "that", "we", "can't", "just", "be", "like", "\u201c", "that", "sucks", "\u201d"], "text_2_tokenized": ["I", "love", "Georgia", "being", "all", "\u201c", "yea", "I", "can", "get", "my", "manager", "but", "he's", "just", "gonna", "tell", "you", "the", "same", "thing", "\u201d", "while", "agreeing", "to", "a", "recount", "knowing", "the", "current", "number", "is", "THE", "number", "no", "matter", "what", "."]} -{"id": "1216-recount", "word": "recount", "label_binary": 1, "text_1": "Even tho @AndyBeshearKY won KY Govenor race tonight @MattBevin will \"request\" a recount and try to \"dismiss\" votes that went to Beshear", "token_idx_1": 14, "text_start_1": 80, "text_end_1": 87, "date_1": "2019-11", "text_2": "Inbox: President Donald J. 
Trump's re-election campaign filed a lawsuit to the Wisconsin Supreme Court following the campaign's requested recount to uncover fraud and abuse that irrefutably altered the outcome of this election.", "token_idx_2": 21, "text_start_2": 138, "text_end_2": 145, "date_2": "2020-11", "text_1_tokenized": ["Even", "tho", "@AndyBeshearKY", "won", "KY", "Govenor", "race", "tonight", "@MattBevin", "will", "\"", "request", "\"", "a", "recount", "and", "try", "to", "\"", "dismiss", "\"", "votes", "that", "went", "to", "Beshear"], "text_2_tokenized": ["Inbox", ":", "President", "Donald", "J", ".", "Trump's", "re-election", "campaign", "filed", "a", "lawsuit", "to", "the", "Wisconsin", "Supreme", "Court", "following", "the", "campaign's", "requested", "recount", "to", "uncover", "fraud", "and", "abuse", "that", "irrefutably", "altered", "the", "outcome", "of", "this", "election", "."]} -{"id": "1217-recount", "word": "recount", "label_binary": 1, "text_1": "also can we discuss how like i saw a very pretty boy tonight and if he doesn't turn out to be my soul mate i want a recount and a refund all in one transaction pls and thank u sir and madam", "token_idx_1": 27, "text_start_1": 116, "text_end_1": 123, "date_1": "2019-11", "text_2": "So Donald Trump lost GA on election day. Then had the vote audited and still lost. Now he wants a recount that every expert says will still result in him losing. I swear, GA going blue is like waiting for a bus. You wait for ages for one to come along, then three show up at once.", "token_idx_2": 22, "text_start_2": 98, "text_end_2": 105, "date_2": "2020-11", "text_1_tokenized": ["also", "can", "we", "discuss", "how", "like", "i", "saw", "a", "very", "pretty", "boy", "tonight", "and", "if", "he", "doesn't", "turn", "out", "to", "be", "my", "soul", "mate", "i", "want", "a", "recount", "and", "a", "refund", "all", "in", "one", "transaction", "pls", "and", "thank", "u", "sir", "and", "madam"], "text_2_tokenized": ["So", "Donald", "Trump", "lost", "GA", "on", "election", "day", ".", "Then", "had", "the", "vote", "audited", "and", "still", "lost", ".", "Now", "he", "wants", "a", "recount", "that", "every", "expert", "says", "will", "still", "result", "in", "him", "losing", ".", "I", "swear", ",", "GA", "going", "blue", "is", "like", "waiting", "for", "a", "bus", ".", "You", "wait", "for", "ages", "for", "one", "to", "come", "along", ",", "then", "three", "show", "up", "at", "once", "."]} -{"id": "1218-recount", "word": "recount", "label_binary": 1, "text_1": "count the money then recount it cause it bring a lot a joy", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 28, "date_1": "2019-11", "text_2": "imagine firing several people because they claimed there was no voting fraud so an ignorant toddler asked to recount some states in order to receive a different outcome in the election.. 
oh wait @realDonaldTrump", "token_idx_2": 18, "text_start_2": 109, "text_end_2": 116, "date_2": "2020-11", "text_1_tokenized": ["count", "the", "money", "then", "recount", "it", "cause", "it", "bring", "a", "lot", "a", "joy"], "text_2_tokenized": ["imagine", "firing", "several", "people", "because", "they", "claimed", "there", "was", "no", "voting", "fraud", "so", "an", "ignorant", "toddler", "asked", "to", "recount", "some", "states", "in", "order", "to", "receive", "a", "different", "outcome", "in", "the", "election", "..", "oh", "wait", "@realDonaldTrump"]} -{"id": "1219-recount", "word": "recount", "label_binary": 1, "text_1": "BREAKING: Vanderburgh County Republican Chair Wayne Parke just now says no decision on recount has been made but he says it is likely.", "token_idx_1": 14, "text_start_1": 87, "text_end_1": 94, "date_1": "2019-11", "text_2": "And they're trying it again in Wisconsin the exactly the same approach that they've lost in every other state they've tried! The recount showed Biden one by nearly 20,700 votes, gaining 87 votes in the recount that Trump paid for.", "token_idx_2": 23, "text_start_2": 129, "text_end_2": 136, "date_2": "2020-11", "text_1_tokenized": ["BREAKING", ":", "Vanderburgh", "County", "Republican", "Chair", "Wayne", "Parke", "just", "now", "says", "no", "decision", "on", "recount", "has", "been", "made", "but", "he", "says", "it", "is", "likely", "."], "text_2_tokenized": ["And", "they're", "trying", "it", "again", "in", "Wisconsin", "the", "exactly", "the", "same", "approach", "that", "they've", "lost", "in", "every", "other", "state", "they've", "tried", "!", "The", "recount", "showed", "Biden", "one", "by", "nearly", "20,700", "votes", ",", "gaining", "87", "votes", "in", "the", "recount", "that", "Trump", "paid", "for", "."]} -{"id": "1220-recount", "word": "recount", "label_binary": 1, "text_1": "So FMA is the number 1 anime but SDS ain't even make the list? I needs a recount of votes", "token_idx_1": 18, "text_start_1": 73, "text_end_1": 80, "date_1": "2019-11", "text_2": "As the American electoral drama plays out, I become increasingly suspicious that a monumental swindle is in progress. The only reassurance I and other Americans can have is that a radically transparent recount is conducted.", "token_idx_2": 34, "text_start_2": 202, "text_end_2": 209, "date_2": "2020-11", "text_1_tokenized": ["So", "FMA", "is", "the", "number", "1", "anime", "but", "SDS", "ain't", "even", "make", "the", "list", "?", "I", "needs", "a", "recount", "of", "votes"], "text_2_tokenized": ["As", "the", "American", "electoral", "drama", "plays", "out", ",", "I", "become", "increasingly", "suspicious", "that", "a", "monumental", "swindle", "is", "in", "progress", ".", "The", "only", "reassurance", "I", "and", "other", "Americans", "can", "have", "is", "that", "a", "radically", "transparent", "recount", "is", "conducted", "."]} -{"id": "1221-recount", "word": "recount", "label_binary": 1, "text_1": "The real question...is Matt Bevin going to demand a recount after last night's Kentucky game? 
#BBN #kentuckyelections", "token_idx_1": 11, "text_start_1": 52, "text_end_1": 59, "date_1": "2019-11", "text_2": "Trump spent $3M on Wisconsin recount but Biden picked up 132 more votes.", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-11", "text_1_tokenized": ["The", "real", "question", "...", "is", "Matt", "Bevin", "going", "to", "demand", "a", "recount", "after", "last", "night's", "Kentucky", "game", "?", "#BBN", "#kentuckyelections"], "text_2_tokenized": ["Trump", "spent", "$", "3M", "on", "Wisconsin", "recount", "but", "Biden", "picked", "up", "132", "more", "votes", "."]} -{"id": "1222-recount", "word": "recount", "label_binary": 1, "text_1": "Its amazing, when Dems call for a recount, we're called snowflakes who refuse to accept election results but when Republicans lose, they shout recount and dont expect any push back.\ud83e\udd26\u200d\u2640\ufe0f", "token_idx_1": 8, "text_start_1": 34, "text_end_1": 41, "date_1": "2019-11", "text_2": "It's tough being @realDonaldTrump ! Pay for a recount in Milwaukee County! Spend lots of money! AND they find 132 miscounted votes.........................for president-elect @JoeBiden ! #trumplost #TrumpConcede", "token_idx_2": 8, "text_start_2": 46, "text_end_2": 53, "date_2": "2020-11", "text_1_tokenized": ["Its", "amazing", ",", "when", "Dems", "call", "for", "a", "recount", ",", "we're", "called", "snowflakes", "who", "refuse", "to", "accept", "election", "results", "but", "when", "Republicans", "lose", ",", "they", "shout", "recount", "and", "dont", "expect", "any", "push", "back", ".", "\ud83e\udd26\u200d\u2640", "\ufe0f"], "text_2_tokenized": ["It's", "tough", "being", "@realDonaldTrump", "!", "Pay", "for", "a", "recount", "in", "Milwaukee", "County", "!", "Spend", "lots", "of", "money", "!", "AND", "they", "find", "132", "miscounted", "votes", "...", "for", "president-elect", "@JoeBiden", "!", "#trumplost", "#TrumpConcede"]} -{"id": "1223-recount", "word": "recount", "label_binary": 1, "text_1": "The people have spoken, and they have demanded a recount. Game Devs: Who do you think would give the best head?", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 56, "date_1": "2019-11", "text_2": "I went to bed with an undeniable cache of containers full of leftovers. Today my DOMINION Refrigerator is completely empty except for some droppings of Cranberry sauce on the middle shelf. What now, a recount? #StopTheSteaI2020 #ElectionResults2020", "token_idx_2": 37, "text_start_2": 201, "text_end_2": 208, "date_2": "2020-11", "text_1_tokenized": ["The", "people", "have", "spoken", ",", "and", "they", "have", "demanded", "a", "recount", ".", "Game", "Devs", ":", "Who", "do", "you", "think", "would", "give", "the", "best", "head", "?"], "text_2_tokenized": ["I", "went", "to", "bed", "with", "an", "undeniable", "cache", "of", "containers", "full", "of", "leftovers", ".", "Today", "my", "DOMINION", "Refrigerator", "is", "completely", "empty", "except", "for", "some", "droppings", "of", "Cranberry", "sauce", "on", "the", "middle", "shelf", ".", "What", "now", ",", "a", "recount", "?", "#StopTheSteaI2020", "#ElectionResults2020"]} -{"id": "1224-recount", "word": "recount", "label_binary": 0, "text_1": "November at its tail..what will you say entering in last 4 weeks of 2019 ? I don't know about you.. \u201cI WILL praise You, O Lord, with my whole heart; I will show forth (recount and tell aloud) all Your marvelous works and wonderful deeds! 
-Psalm 9:1-AMPC \ud83d\ude4f\ud83c\udffe\ud83d\ude4f\ud83c\udffe", "token_idx_1": 42, "text_start_1": 168, "text_end_1": 175, "date_1": "2019-11", "text_2": "In Wisconsin, 64 of 72 counties completed canvassing, resulting in Biden gaining 167 votes, Trump 26. If Trump pays for a recount he will lose again by more. He is about to lose Georgia again, so spreading lies. It is over. America has spoke. Our democracy works.", "token_idx_2": 25, "text_start_2": 122, "text_end_2": 129, "date_2": "2020-11", "text_1_tokenized": ["November", "at", "its", "tail", "..", "what", "will", "you", "say", "entering", "in", "last", "4", "weeks", "of", "2019", "?", "I", "don't", "know", "about", "you", "..", "\u201c", "I", "WILL", "praise", "You", ",", "O", "Lord", ",", "with", "my", "whole", "heart", ";", "I", "will", "show", "forth", "(", "recount", "and", "tell", "aloud", ")", "all", "Your", "marvelous", "works", "and", "wonderful", "deeds", "!", "-", "Psalm", "9:1-", "AMPC", "\ud83d\ude4f\ud83c\udffe", "\ud83d\ude4f\ud83c\udffe"], "text_2_tokenized": ["In", "Wisconsin", ",", "64", "of", "72", "counties", "completed", "canvassing", ",", "resulting", "in", "Biden", "gaining", "167", "votes", ",", "Trump", "26", ".", "If", "Trump", "pays", "for", "a", "recount", "he", "will", "lose", "again", "by", "more", ".", "He", "is", "about", "to", "lose", "Georgia", "again", ",", "so", "spreading", "lies", ".", "It", "is", "over", ".", "America", "has", "spoke", ".", "Our", "democracy", "works", "."]} -{"id": "1225-recount", "word": "recount", "label_binary": 1, "text_1": "BREAKING: Trump demands a recount of the 2016 Election. \u201cMaybe Crooked Hillary won. I don't need this shit!\u201d #LockHimUp", "token_idx_1": 5, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-11", "text_2": "Trump is demanding a 3rd recount in Georgia. Hiw many times can he demand recounts?", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 32, "date_2": "2020-11", "text_1_tokenized": ["BREAKING", ":", "Trump", "demands", "a", "recount", "of", "the", "2016", "Election", ".", "\u201c", "Maybe", "Crooked", "Hillary", "won", ".", "I", "don't", "need", "this", "shit", "!", "\u201d", "#LockHimUp"], "text_2_tokenized": ["Trump", "is", "demanding", "a", "3rd", "recount", "in", "Georgia", ".", "Hiw", "many", "times", "can", "he", "demand", "recounts", "?"]} -{"id": "1226-recount", "word": "recount", "label_binary": 1, "text_1": "Some old farts: Impeaching a president for bad behavior and breaking laws is trying to undo an election. Same old farts: We don't like results showing democratic win. We won't concede, we want a recount, or state to decide.", "token_idx_1": 39, "text_start_1": 195, "text_end_1": 202, "date_1": "2019-11", "text_2": "A recount in Georgia?!? 
Fucking great", "token_idx_2": 1, "text_start_2": 2, "text_end_2": 9, "date_2": "2020-11", "text_1_tokenized": ["Some", "old", "farts", ":", "Impeaching", "a", "president", "for", "bad", "behavior", "and", "breaking", "laws", "is", "trying", "to", "undo", "an", "election", ".", "Same", "old", "farts", ":", "We", "don't", "like", "results", "showing", "democratic", "win", ".", "We", "won't", "concede", ",", "we", "want", "a", "recount", ",", "or", "state", "to", "decide", "."], "text_2_tokenized": ["A", "recount", "in", "Georgia", "?", "!", "?", "Fucking", "great"]} -{"id": "1227-recount", "word": "recount", "label_binary": 0, "text_1": "posting a video recount on Youtube now", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 23, "date_1": "2019-11", "text_2": ".@LindseyGrahamSC we need a recount in South Carolina and Texas. Every vote must be counted. Don't you agree?", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 35, "date_2": "2020-11", "text_1_tokenized": ["posting", "a", "video", "recount", "on", "Youtube", "now"], "text_2_tokenized": [".", "@LindseyGrahamSC", "we", "need", "a", "recount", "in", "South", "Carolina", "and", "Texas", ".", "Every", "vote", "must", "be", "counted", ".", "Don't", "you", "agree", "?"]} -{"id": "1228-recount", "word": "recount", "label_binary": 1, "text_1": "Nunes, The potus wasn't elected. He was \"selected\" By Russia MSM Comey Lots of white guys. I still believe a recount was in order. If it was a man, it would have been done.", "token_idx_1": 25, "text_start_1": 109, "text_end_1": 116, "date_1": "2019-11", "text_2": "Many people are shocked at Trump and his supporters fighting for a recount, audit, investigation, etc. Did these people sleep for the last 4 years when OUR $ was spent on investigating Trump, lying on FISA applications? Rip Van Winkle syndrome, perhaps.", "token_idx_2": 12, "text_start_2": 67, "text_end_2": 74, "date_2": "2020-11", "text_1_tokenized": ["Nunes", ",", "The", "potus", "wasn't", "elected", ".", "He", "was", "\"", "selected", "\"", "By", "Russia", "MSM", "Comey", "Lots", "of", "white", "guys", ".", "I", "still", "believe", "a", "recount", "was", "in", "order", ".", "If", "it", "was", "a", "man", ",", "it", "would", "have", "been", "done", "."], "text_2_tokenized": ["Many", "people", "are", "shocked", "at", "Trump", "and", "his", "supporters", "fighting", "for", "a", "recount", ",", "audit", ",", "investigation", ",", "etc", ".", "Did", "these", "people", "sleep", "for", "the", "last", "4", "years", "when", "OUR", "$", "was", "spent", "on", "investigating", "Trump", ",", "lying", "on", "FISA", "applications", "?", "Rip", "Van", "Winkle", "syndrome", ",", "perhaps", "."]} -{"id": "1229-recount", "word": "recount", "label_binary": 0, "text_1": "I demand a recount @Spotify! 
\ud83e\udd23\ud83d\ude48 \ud83c\udfb6 \ud83c\udfb6\ud83e\udd37\ud83c\udffd\u200d\u2640\ufe0f #spotifywrapped2019", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 18, "date_1": "2019-11", "text_2": "I've been assigned to monitor vote recount in Milwaukee WI Nov 21-25 6 hour shifts", "token_idx_2": 6, "text_start_2": 35, "text_end_2": 42, "date_2": "2020-11", "text_1_tokenized": ["I", "demand", "a", "recount", "@Spotify", "!", "\ud83e\udd23", "\ud83d\ude48", "\ud83c\udfb6", "\ud83c\udfb6", "\ud83e\udd37\ud83c\udffd\u200d\u2640", "\ufe0f", "#spotifywrapped2019"], "text_2_tokenized": ["I've", "been", "assigned", "to", "monitor", "vote", "recount", "in", "Milwaukee", "WI", "Nov", "21-25", "6", "hour", "shifts"]} -{"id": "1230-recount", "word": "recount", "label_binary": 1, "text_1": "Someone just told me that KY law says if Bevin wants a recount? HE has to pay for it! Now ain't THAT some fun news for the little whiner! He lost...show some dignity dude.", "token_idx_1": 12, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-11", "text_2": "Go ahead and recount. There was probably voter fraud so go ahead and catch people who committed it. It is a crime. I, however, do not believe it was widespread enough to effect the results of the election we have now.", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 20, "date_2": "2020-11", "text_1_tokenized": ["Someone", "just", "told", "me", "that", "KY", "law", "says", "if", "Bevin", "wants", "a", "recount", "?", "HE", "has", "to", "pay", "for", "it", "!", "Now", "ain't", "THAT", "some", "fun", "news", "for", "the", "little", "whiner", "!", "He", "lost", "...", "show", "some", "dignity", "dude", "."], "text_2_tokenized": ["Go", "ahead", "and", "recount", ".", "There", "was", "probably", "voter", "fraud", "so", "go", "ahead", "and", "catch", "people", "who", "committed", "it", ".", "It", "is", "a", "crime", ".", "I", ",", "however", ",", "do", "not", "believe", "it", "was", "widespread", "enough", "to", "effect", "the", "results", "of", "the", "election", "we", "have", "now", "."]} -{"id": "1231-recount", "word": "recount", "label_binary": 1, "text_1": "Someone fill me in please. In Kentucky is the gubernatorial race decided already, or will there be a recount? Usually elections this close get recounts.", "token_idx_1": 20, "text_start_1": 101, "text_end_1": 108, "date_1": "2019-11", "text_2": "The Milwaukee County Sheriff's Office is proud to work alongside our partners at the Milwaukee County Clerk's Office and Election Commission to ensure a safe and secure recount. MCSO personnel will secure both the recount proceedings and the county's ballots 24/7.", "token_idx_2": 27, "text_start_2": 169, "text_end_2": 176, "date_2": "2020-11", "text_1_tokenized": ["Someone", "fill", "me", "in", "please", ".", "In", "Kentucky", "is", "the", "gubernatorial", "race", "decided", "already", ",", "or", "will", "there", "be", "a", "recount", "?", "Usually", "elections", "this", "close", "get", "recounts", "."], "text_2_tokenized": ["The", "Milwaukee", "County", "Sheriff's", "Office", "is", "proud", "to", "work", "alongside", "our", "partners", "at", "the", "Milwaukee", "County", "Clerk's", "Office", "and", "Election", "Commission", "to", "ensure", "a", "safe", "and", "secure", "recount", ".", "MCSO", "personnel", "will", "secure", "both", "the", "recount", "proceedings", "and", "the", "county's", "ballots", "24/7", "."]} -{"id": "1232-recount", "word": "recount", "label_binary": 1, "text_1": "A historic night! 
Democratic Governor in Kentucky (will be a recount, no doubt) and both Houses in Virginia flipped red to BLUE. Huge support in Virginia from @MomsDemand who outspent the freaking NRA!", "token_idx_1": 12, "text_start_1": 61, "text_end_1": 68, "date_1": "2019-11", "text_2": "Where is defeated @realDonaldTrump? Hardly any tweets. Is he saving money on his Twitter account to finance another recount failure. Why Is Dangerous Don hiding in the basement of OUR white house while $20k-a-day Rudy does his dirty work? C'mon Don, give it up!", "token_idx_2": 20, "text_start_2": 116, "text_end_2": 123, "date_2": "2020-11", "text_1_tokenized": ["A", "historic", "night", "!", "Democratic", "Governor", "in", "Kentucky", "(", "will", "be", "a", "recount", ",", "no", "doubt", ")", "and", "both", "Houses", "in", "Virginia", "flipped", "red", "to", "BLUE", ".", "Huge", "support", "in", "Virginia", "from", "@MomsDemand", "who", "outspent", "the", "freaking", "NRA", "!"], "text_2_tokenized": ["Where", "is", "defeated", "@realDonaldTrump", "?", "Hardly", "any", "tweets", ".", "Is", "he", "saving", "money", "on", "his", "Twitter", "account", "to", "finance", "another", "recount", "failure", ".", "Why", "Is", "Dangerous", "Don", "hiding", "in", "the", "basement", "of", "OUR", "white", "house", "while", "$", "20k", "-", "a-day", "Rudy", "does", "his", "dirty", "work", "?", "C'mon", "Don", ",", "give", "it", "up", "!"]} -{"id": "1233-recount", "word": "recount", "label_binary": 0, "text_1": "On @HowardStern today, James Taylor's recount of meeting Mark David Chapman the day before Lennon was murdered was chilling. Great interview", "token_idx_1": 6, "text_start_1": 38, "text_end_1": 45, "date_1": "2019-11", "text_2": "Lol Donald Lame Duck now shouting to stop the recount in GA. Stop the count! Count the votes! Stop the count! Recount! Stop the recount!", "token_idx_2": 9, "text_start_2": 46, "text_end_2": 53, "date_2": "2020-11", "text_1_tokenized": ["On", "@HowardStern", "today", ",", "James", "Taylor's", "recount", "of", "meeting", "Mark", "David", "Chapman", "the", "day", "before", "Lennon", "was", "murdered", "was", "chilling", ".", "Great", "interview"], "text_2_tokenized": ["Lol", "Donald", "Lame", "Duck", "now", "shouting", "to", "stop", "the", "recount", "in", "GA", ".", "Stop", "the", "count", "!", "Count", "the", "votes", "!", "Stop", "the", "count", "!", "Recount", "!", "Stop", "the", "recount", "!"]} -{"id": "1234-recount", "word": "recount", "label_binary": 1, "text_1": "Verona Murphy has congratulated George Lawlor and confirms she won't be seeking a recount in #Wexford after just 71 votes separated the two candidates #iestaff via @Elaine_Loughlin", "token_idx_1": 13, "text_start_1": 82, "text_end_1": 89, "date_1": "2019-11", "text_2": "Trump don't need a recount he just has to prove the fraud! That's the easy part! 
@realDonaldTrump", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 26, "date_2": "2020-11", "text_1_tokenized": ["Verona", "Murphy", "has", "congratulated", "George", "Lawlor", "and", "confirms", "she", "won't", "be", "seeking", "a", "recount", "in", "#Wexford", "after", "just", "71", "votes", "separated", "the", "two", "candidates", "#iestaff", "via", "@Elaine_Loughlin"], "text_2_tokenized": ["Trump", "don't", "need", "a", "recount", "he", "just", "has", "to", "prove", "the", "fraud", "!", "That's", "the", "easy", "part", "!", "@realDonaldTrump"]} -{"id": "1235-recount", "word": "recount", "label_binary": 1, "text_1": "Spent an eternity kwa barabara and it's now that I'm passing Pangani? Demanding a recount", "token_idx_1": 15, "text_start_1": 82, "text_end_1": 89, "date_1": "2019-11", "text_2": "Hang on... The fact that @JoeBiden got more votes after recount actually proves that THERE WAS a problem with counting It doesn't matter who they're going to, this still means U.S. democracy IS flawed Like we didn't already know that", "token_idx_2": 11, "text_start_2": 56, "text_end_2": 63, "date_2": "2020-11", "text_1_tokenized": ["Spent", "an", "eternity", "kwa", "barabara", "and", "it's", "now", "that", "I'm", "passing", "Pangani", "?", "Demanding", "a", "recount"], "text_2_tokenized": ["Hang", "on", "...", "The", "fact", "that", "@JoeBiden", "got", "more", "votes", "after", "recount", "actually", "proves", "that", "THERE", "WAS", "a", "problem", "with", "counting", "It", "doesn't", "matter", "who", "they're", "going", "to", ",", "this", "still", "means", "U", ".", "S", ".", "democracy", "IS", "flawed", "Like", "we", "didn't", "already", "know", "that"]} -{"id": "1236-recount", "word": "recount", "label_binary": 0, "text_1": "If you express gratitude to God for His perceived works in your life more of what to express gratitude for happen. Like from time to time sit back and recount things, events, achievement sofar within the week or month, happenings for which you are grateful to God.", "token_idx_1": 30, "text_start_1": 151, "text_end_1": 158, "date_1": "2019-11", "text_2": "IDK why they want a recount. Knowing damn well Trump's ego cannot take another defeat like that \ud83e\udd75", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-11", "text_1_tokenized": ["If", "you", "express", "gratitude", "to", "God", "for", "His", "perceived", "works", "in", "your", "life", "more", "of", "what", "to", "express", "gratitude", "for", "happen", ".", "Like", "from", "time", "to", "time", "sit", "back", "and", "recount", "things", ",", "events", ",", "achievement", "sofar", "within", "the", "week", "or", "month", ",", "happenings", "for", "which", "you", "are", "grateful", "to", "God", "."], "text_2_tokenized": ["IDK", "why", "they", "want", "a", "recount", ".", "Knowing", "damn", "well", "Trump's", "ego", "cannot", "take", "another", "defeat", "like", "that", "\ud83e\udd75"]} -{"id": "1237-recount", "word": "recount", "label_binary": 1, "text_1": "The Acting Minister of Defense Asadullah Khalid on Saturday warned that Afghan military forces will not allow individuals to erupt violence due to their disagreements over vote recount process.", "token_idx_1": 27, "text_start_1": 177, "text_end_1": 184, "date_1": "2019-11", "text_2": "Shit, at this point I'd consider chipping in for another recount just to see his fucking ass lose again. 
#SoMuchLosing #ByeDon #YoureFired45", "token_idx_2": 11, "text_start_2": 57, "text_end_2": 64, "date_2": "2020-11", "text_1_tokenized": ["The", "Acting", "Minister", "of", "Defense", "Asadullah", "Khalid", "on", "Saturday", "warned", "that", "Afghan", "military", "forces", "will", "not", "allow", "individuals", "to", "erupt", "violence", "due", "to", "their", "disagreements", "over", "vote", "recount", "process", "."], "text_2_tokenized": ["Shit", ",", "at", "this", "point", "I'd", "consider", "chipping", "in", "for", "another", "recount", "just", "to", "see", "his", "fucking", "ass", "lose", "again", ".", "#SoMuchLosing", "#ByeDon", "#YoureFired45"]} -{"id": "1238-recount", "word": "recount", "label_binary": 1, "text_1": "Florida Fact #2: It is tradition of the State of Gatorland to somehow botch every initial polling result on Election Day, thus demanding a recount. I'm not saying it's dumb and pointless, but...", "token_idx_1": 27, "text_start_1": 139, "text_end_1": 146, "date_1": "2019-11", "text_2": "Georgia completed the recount and president Biden is still the winner!! Sorry @realDonaldTrump @POTUS, you've been told twice now you are the LOSER, HOW embarrassing", "token_idx_2": 3, "text_start_2": 22, "text_end_2": 29, "date_2": "2020-11", "text_1_tokenized": ["Florida", "Fact", "#", "2", ":", "It", "is", "tradition", "of", "the", "State", "of", "Gatorland", "to", "somehow", "botch", "every", "initial", "polling", "result", "on", "Election", "Day", ",", "thus", "demanding", "a", "recount", ".", "I'm", "not", "saying", "it's", "dumb", "and", "pointless", ",", "but", "..."], "text_2_tokenized": ["Georgia", "completed", "the", "recount", "and", "president", "Biden", "is", "still", "the", "winner", "!", "!", "Sorry", "@realDonaldTrump", "@POTUS", ",", "you've", "been", "told", "twice", "now", "you", "are", "the", "LOSER", ",", "HOW", "embarrassing"]} -{"id": "1239-recount", "word": "recount", "label_binary": 1, "text_1": "Even if the reports of election fraud were true, given that Bolivia is a democracy that holds elections, why do you think the only option here was a military coup. Why wouldn't their lawyers go through their legal system, call for a recount?", "token_idx_1": 46, "text_start_1": 233, "text_end_1": 240, "date_1": "2019-11", "text_2": "Am I wrong about this? But I think if Trump wins all the states that are demanding a recount Biden will still have over 270 votes.", "token_idx_2": 19, "text_start_2": 85, "text_end_2": 92, "date_2": "2020-11", "text_1_tokenized": ["Even", "if", "the", "reports", "of", "election", "fraud", "were", "true", ",", "given", "that", "Bolivia", "is", "a", "democracy", "that", "holds", "elections", ",", "why", "do", "you", "think", "the", "only", "option", "here", "was", "a", "military", "coup", ".", "Why", "wouldn't", "their", "lawyers", "go", "through", "their", "legal", "system", ",", "call", "for", "a", "recount", "?"], "text_2_tokenized": ["Am", "I", "wrong", "about", "this", "?", "But", "I", "think", "if", "Trump", "wins", "all", "the", "states", "that", "are", "demanding", "a", "recount", "Biden", "will", "still", "have", "over", "270", "votes", "."]} -{"id": "1240-recount", "word": "recount", "label_binary": 1, "text_1": "Can you imagine? Your vote is the one that causes recount after recount and a majority of 1 in your constituency. You don't vote because it is raining or icy? Your one single vote could be THE vote that seals our future. Friends please do it however you vote. 
I", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 57, "date_1": "2019-11", "text_2": "Matt Morgan, Trump 2020 campaign general counsel, says that the campaign is trying to slow down the vote-counting process in PA in order to force a recount. There will be a hand recount of all votes in GA & possible recount in WI.", "token_idx_2": 28, "text_start_2": 148, "text_end_2": 155, "date_2": "2020-11", "text_1_tokenized": ["Can", "you", "imagine", "?", "Your", "vote", "is", "the", "one", "that", "causes", "recount", "after", "recount", "and", "a", "majority", "of", "1", "in", "your", "constituency", ".", "You", "don't", "vote", "because", "it", "is", "raining", "or", "icy", "?", "Your", "one", "single", "vote", "could", "be", "THE", "vote", "that", "seals", "our", "future", ".", "Friends", "please", "do", "it", "however", "you", "vote", ".", "I"], "text_2_tokenized": ["Matt", "Morgan", ",", "Trump", "2020", "campaign", "general", "counsel", ",", "says", "that", "the", "campaign", "is", "trying", "to", "slow", "down", "the", "vote-counting", "process", "in", "PA", "in", "order", "to", "force", "a", "recount", ".", "There", "will", "be", "a", "hand", "recount", "of", "all", "votes", "in", "GA", "&", "possible", "recount", "in", "WI", "."]} -{"id": "1241-recount", "word": "recount", "label_binary": 1, "text_1": "Thanksgiving Day preparations: \u201cI will give thanks to the LORD with my whole heart; I will recount all of your wonderful deeds.\u201d Psalm 9:1 \u201cI will give to the LORD the thanks due to his righteousness, and I will sing praise to the name of the LORD, the Most High.\u201d Psalm 7:17", "token_idx_1": 19, "text_start_1": 91, "text_end_1": 98, "date_1": "2019-11", "text_2": "A woman I went to grade school with is using her FB page to recount every year of her K-12 experience. She is dragging teachers and students by name and by the hair. It's a real journey. I don't recommend you do this. Also, I'm reading every long post.", "token_idx_2": 14, "text_start_2": 60, "text_end_2": 67, "date_2": "2020-11", "text_1_tokenized": ["Thanksgiving", "Day", "preparations", ":", "\u201c", "I", "will", "give", "thanks", "to", "the", "LORD", "with", "my", "whole", "heart", ";", "I", "will", "recount", "all", "of", "your", "wonderful", "deeds", ".", "\u201d", "Psalm", "9:1", "\u201c", "I", "will", "give", "to", "the", "LORD", "the", "thanks", "due", "to", "his", "righteousness", ",", "and", "I", "will", "sing", "praise", "to", "the", "name", "of", "the", "LORD", ",", "the", "Most", "High", ".", "\u201d", "Psalm", "7:17"], "text_2_tokenized": ["A", "woman", "I", "went", "to", "grade", "school", "with", "is", "using", "her", "FB", "page", "to", "recount", "every", "year", "of", "her", "K", "-", "12", "experience", ".", "She", "is", "dragging", "teachers", "and", "students", "by", "name", "and", "by", "the", "hair", ".", "It's", "a", "real", "journey", ".", "I", "don't", "recommend", "you", "do", "this", ".", "Also", ",", "I'm", "reading", "every", "long", "post", "."]} -{"id": "1242-recount", "word": "recount", "label_binary": 0, "text_1": "Currently listening to @ryanogs recount the food that he ate during his @WaltDisneyWorld Honeymoon on today's episode of @ThemeParkTrader Podcast, and it is making me SO hungry! \ud83d\ude02", "token_idx_1": 4, "text_start_1": 32, "text_end_1": 39, "date_1": "2019-11", "text_2": "Stay focus on the votes recount all eyes & ears keep focus on news from all states where recounts are underway. 
If there are not enough volunteers them all steps of the recounts can't be verified THOSE bastards supporting the LEFT will do anything to get total counts for Biden.", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 31, "date_2": "2020-11", "text_1_tokenized": ["Currently", "listening", "to", "@ryanogs", "recount", "the", "food", "that", "he", "ate", "during", "his", "@WaltDisneyWorld", "Honeymoon", "on", "today's", "episode", "of", "@ThemeParkTrader", "Podcast", ",", "and", "it", "is", "making", "me", "SO", "hungry", "!", "\ud83d\ude02"], "text_2_tokenized": ["Stay", "focus", "on", "the", "votes", "recount", "all", "eyes", "&", "ears", "keep", "focus", "on", "news", "from", "all", "states", "where", "recounts", "are", "underway", ".", "If", "there", "are", "not", "enough", "volunteers", "them", "all", "steps", "of", "the", "recounts", "can't", "be", "verified", "THOSE", "bastards", "supporting", "the", "LEFT", "will", "do", "anything", "to", "get", "total", "counts", "for", "Biden", "."]} -{"id": "1243-recount", "word": "recount", "label_binary": 0, "text_1": "My partner has epic dreams, which is fine...until I hear them in a painstakingly detailed recount. I haven't had my second coffee yet, babe, and almon milk just doesn't tide me over enough", "token_idx_1": 18, "text_start_1": 90, "text_end_1": 97, "date_1": "2019-11", "text_2": "When you demand a recount of GA because you're a sore fucking loser... and they find more Biden votes. So you're more of a loser.", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 25, "date_2": "2020-11", "text_1_tokenized": ["My", "partner", "has", "epic", "dreams", ",", "which", "is", "fine", "...", "until", "I", "hear", "them", "in", "a", "painstakingly", "detailed", "recount", ".", "I", "haven't", "had", "my", "second", "coffee", "yet", ",", "babe", ",", "and", "almon", "milk", "just", "doesn't", "tide", "me", "over", "enough"], "text_2_tokenized": ["When", "you", "demand", "a", "recount", "of", "GA", "because", "you're", "a", "sore", "fucking", "loser", "...", "and", "they", "find", "more", "Biden", "votes", ".", "So", "you're", "more", "of", "a", "loser", "."]} -{"id": "1244-recount", "word": "recount", "label_binary": 1, "text_1": "Rep. in my neighborhood FB group keep spouting off about how they will get up today and not expect their participation trophy as they go to work their 2 jobs...then are like, but recanvas/recount/contest! So they do want a participation trophy? #KYGov #suckitupbuttercup", "token_idx_1": 39, "text_start_1": 188, "text_end_1": 195, "date_1": "2019-11", "text_2": "Call Secretary of State Brad Raffensperger the GA sec of state. He's not letting Republicans observe the recount!! (404) 656-2817. 
Put pressure on him!!", "token_idx_2": 18, "text_start_2": 105, "text_end_2": 112, "date_2": "2020-11", "text_1_tokenized": ["Rep", ".", "in", "my", "neighborhood", "FB", "group", "keep", "spouting", "off", "about", "how", "they", "will", "get", "up", "today", "and", "not", "expect", "their", "participation", "trophy", "as", "they", "go", "to", "work", "their", "2", "jobs", "...", "then", "are", "like", ",", "but", "recanvas", "/", "recount", "/", "contest", "!", "So", "they", "do", "want", "a", "participation", "trophy", "?", "#KYGov", "#suckitupbuttercup"], "text_2_tokenized": ["Call", "Secretary", "of", "State", "Brad", "Raffensperger", "the", "GA", "sec", "of", "state", ".", "He's", "not", "letting", "Republicans", "observe", "the", "recount", "!", "!", "(404) 656-2817", ".", "Put", "pressure", "on", "him", "!", "!"]} -{"id": "1245-recount", "word": "recount", "label_binary": 1, "text_1": "I'm sure a recount is in order, but I have Michigan as 8 of 10 from the floor during that 19-0 deluge. Which is still ongoing, technically.", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 18, "date_1": "2019-11", "text_2": "BREAKING NEWS: Fulton County, Georgia says they have completed their recount in the presidential election. Poll counters official say the recount of 528,000 ballots was completed by midnight as required; results will be released by the Secretary of State's Office", "token_idx_2": 12, "text_start_2": 69, "text_end_2": 76, "date_2": "2020-11", "text_1_tokenized": ["I'm", "sure", "a", "recount", "is", "in", "order", ",", "but", "I", "have", "Michigan", "as", "8", "of", "10", "from", "the", "floor", "during", "that", "19-0", "deluge", ".", "Which", "is", "still", "ongoing", ",", "technically", "."], "text_2_tokenized": ["BREAKING", "NEWS", ":", "Fulton", "County", ",", "Georgia", "says", "they", "have", "completed", "their", "recount", "in", "the", "presidential", "election", ".", "Poll", "counters", "official", "say", "the", "recount", "of", "528,000", "ballots", "was", "completed", "by", "midnight", "as", "required", ";", "results", "will", "be", "released", "by", "the", "Secretary", "of", "State's", "Office"]} -{"id": "1246-recount", "word": "recount", "label_binary": 1, "text_1": "How in the hell the tree win? Who's voting? I demand a recount #TheMaskedSinger", "token_idx_1": 14, "text_start_1": 55, "text_end_1": 62, "date_1": "2019-11", "text_2": "I'll say this one last damn time....must be a god damn audit!!!!!!!!!!!! No recount. Must be an audit.", "token_idx_2": 18, "text_start_2": 76, "text_end_2": 83, "date_2": "2020-11", "text_1_tokenized": ["How", "in", "the", "hell", "the", "tree", "win", "?", "Who's", "voting", "?", "I", "demand", "a", "recount", "#TheMaskedSinger"], "text_2_tokenized": ["I'll", "say", "this", "one", "last", "damn", "time", "...", "must", "be", "a", "god", "damn", "audit", "!", "!", "!", "No", "recount", ".", "Must", "be", "an", "audit", "."]} -{"id": "1247-recount", "word": "recount", "label_binary": 1, "text_1": "Matt Bevin may ask for a recount.", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 32, "date_1": "2019-11", "text_2": "Wisconsin recount just found almost 400 uncounted ballots!!!!! But it's ok, only 57 were for Trump, so trump paid $3 million to increase Biden's lead!! Superb!!! 
\ud83d\ude02\ud83e\udd23\ud83d\ude02\ud83d\ude48\ud83d\ude48\ud83e\udd23\ud83e\udd23\u2665\ufe0f", "token_idx_2": 1, "text_start_2": 10, "text_end_2": 17, "date_2": "2020-11", "text_1_tokenized": ["Matt", "Bevin", "may", "ask", "for", "a", "recount", "."], "text_2_tokenized": ["Wisconsin", "recount", "just", "found", "almost", "400", "uncounted", "ballots", "!", "!", "!", "But", "it's", "ok", ",", "only", "57", "were", "for", "Trump", ",", "so", "trump", "paid", "$", "3", "million", "to", "increase", "Biden's", "lead", "!", "!", "Superb", "!", "!", "!", "\ud83d\ude02", "\ud83e\udd23", "\ud83d\ude02", "\ud83d\ude48", "\ud83d\ude48", "\ud83e\udd23", "\ud83e\udd23", "\u2665", "\ufe0f"]} -{"id": "1248-recount", "word": "recount", "label_binary": 0, "text_1": ".@finneas did you get extra points for singing more than one song for your word association for Elle, if not I demand a recount. That's such a Gryffindor thing to do.", "token_idx_1": 25, "text_start_1": 120, "text_end_1": 127, "date_1": "2019-11", "text_2": "i want a recount on my spotify wrapped", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-11", "text_1_tokenized": [".", "@finneas", "did", "you", "get", "extra", "points", "for", "singing", "more", "than", "one", "song", "for", "your", "word", "association", "for", "Elle", ",", "if", "not", "I", "demand", "a", "recount", ".", "That's", "such", "a", "Gryffindor", "thing", "to", "do", "."], "text_2_tokenized": ["i", "want", "a", "recount", "on", "my", "spotify", "wrapped"]} -{"id": "1249-recount", "word": "recount", "label_binary": 1, "text_1": "We demand a recount @Yankees @astros @MLB", "token_idx_1": 3, "text_start_1": 12, "text_end_1": 19, "date_1": "2019-11", "text_2": "Oh algorithm wise algorithm why show me only stories for WI counties where Biden's count decreased when that (a) doesn't reflect the overall recount and (b) is barely news as it doesn't change anything?", "token_idx_2": 25, "text_start_2": 141, "text_end_2": 148, "date_2": "2020-11", "text_1_tokenized": ["We", "demand", "a", "recount", "@Yankees", "@astros", "@MLB"], "text_2_tokenized": ["Oh", "algorithm", "wise", "algorithm", "why", "show", "me", "only", "stories", "for", "WI", "counties", "where", "Biden's", "count", "decreased", "when", "that", "(", "a", ")", "doesn't", "reflect", "the", "overall", "recount", "and", "(", "b", ")", "is", "barely", "news", "as", "it", "doesn't", "change", "anything", "?"]} -{"id": "1250-recount", "word": "recount", "label_binary": 1, "text_1": "She think that I'm the one, I beg to differ recount it please.. I'm everybody's one.", "token_idx_1": 11, "text_start_1": 44, "text_end_1": 51, "date_1": "2019-11", "text_2": "Count every LEGAL ballot. Audit, recount, sue the ba$+a*ds! Throw the corruptocrats in jail (after a speedy fair trial of course!).", "token_idx_2": 7, "text_start_2": 33, "text_end_2": 40, "date_2": "2020-11", "text_1_tokenized": ["She", "think", "that", "I'm", "the", "one", ",", "I", "beg", "to", "differ", "recount", "it", "please", "..", "I'm", "everybody's", "one", "."], "text_2_tokenized": ["Count", "every", "LEGAL", "ballot", ".", "Audit", ",", "recount", ",", "sue", "the", "ba", "$", "+", "a", "*", "ds", "!", "Throw", "the", "corruptocrats", "in", "jail", "(", "after", "a", "speedy", "fair", "trial", "of", "course", "!", ")", "."]} -{"id": "1251-recount", "word": "recount", "label_binary": 1, "text_1": "Ever since the Florida recount, it's been obvious the Republicans have been tactically two steps ahead of the Democrats. 
Until this month, when the Dems suddenly began running circles around them. Thank you, @SpeakerPelosi. Thank you, @AdamSchiff.", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 30, "date_1": "2019-11", "text_2": "I wonder if they're having a recount in Philadelphia? \ud83d\ude0e\ud83d\ude0e", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-11", "text_1_tokenized": ["Ever", "since", "the", "Florida", "recount", ",", "it's", "been", "obvious", "the", "Republicans", "have", "been", "tactically", "two", "steps", "ahead", "of", "the", "Democrats", ".", "Until", "this", "month", ",", "when", "the", "Dems", "suddenly", "began", "running", "circles", "around", "them", ".", "Thank", "you", ",", "@SpeakerPelosi", ".", "Thank", "you", ",", "@AdamSchiff", "."], "text_2_tokenized": ["I", "wonder", "if", "they're", "having", "a", "recount", "in", "Philadelphia", "?", "\ud83d\ude0e", "\ud83d\ude0e"]} -{"id": "1252-recount", "word": "recount", "label_binary": 0, "text_1": "i realized the decade is ending in less than a month and was trying to sit and reflect on it and realized that if someone held a gun to my head and told me to recount a specific memory from middle school in detail, i would be shot point blank", "token_idx_1": 35, "text_start_1": 159, "text_end_1": 166, "date_1": "2019-11", "text_2": "If they're going to recount the blue states, I demand they recount the red states. And please start with FLORIDA.", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 27, "date_2": "2020-11", "text_1_tokenized": ["i", "realized", "the", "decade", "is", "ending", "in", "less", "than", "a", "month", "and", "was", "trying", "to", "sit", "and", "reflect", "on", "it", "and", "realized", "that", "if", "someone", "held", "a", "gun", "to", "my", "head", "and", "told", "me", "to", "recount", "a", "specific", "memory", "from", "middle", "school", "in", "detail", ",", "i", "would", "be", "shot", "point", "blank"], "text_2_tokenized": ["If", "they're", "going", "to", "recount", "the", "blue", "states", ",", "I", "demand", "they", "recount", "the", "red", "states", ".", "And", "please", "start", "with", "FLORIDA", "."]} -{"id": "1253-recount", "word": "recount", "label_binary": 1, "text_1": "Any unilateral decision 2 begin recount process irrespective of #StabilityAndPartnership legitimate concerns will ignite d flames of crisis. #StateBuilders & its minions in @AfghanistanIEC should know that d days of entrapping are gone. We scrutinize every step of fraudsters.", "token_idx_1": 5, "text_start_1": 32, "text_end_1": 39, "date_1": "2019-11", "text_2": "Damn so Trump finessed his taxes and he got the long money to keep the recount going.... 
jfc this man moving different", "token_idx_2": 15, "text_start_2": 71, "text_end_2": 78, "date_2": "2020-11", "text_1_tokenized": ["Any", "unilateral", "decision", "2", "begin", "recount", "process", "irrespective", "of", "#StabilityAndPartnership", "legitimate", "concerns", "will", "ignite", "d", "flames", "of", "crisis", ".", "#StateBuilders", "&", "its", "minions", "in", "@AfghanistanIEC", "should", "know", "that", "d", "days", "of", "entrapping", "are", "gone", ".", "We", "scrutinize", "every", "step", "of", "fraudsters", "."], "text_2_tokenized": ["Damn", "so", "Trump", "finessed", "his", "taxes", "and", "he", "got", "the", "long", "money", "to", "keep", "the", "recount", "going", "...", "jfc", "this", "man", "moving", "different"]} -{"id": "1254-recount", "word": "recount", "label_binary": 0, "text_1": "I love watching @Jim_Jordan play baffled when he has to recount a story that involves like 4 people and a couple conversations. I mean, if you get confused by that chain of events, you probably shouldn't be responsible for legislation.", "token_idx_1": 10, "text_start_1": 56, "text_end_1": 63, "date_1": "2019-11", "text_2": "Burst into tears hearing @ABlinken recount his grandfather's Holocaust experience. I'd read it. But it was enormously powerful to hear him tell it in this enormous moment. #BuildBackBetter", "token_idx_2": 5, "text_start_2": 35, "text_end_2": 42, "date_2": "2020-11", "text_1_tokenized": ["I", "love", "watching", "@Jim_Jordan", "play", "baffled", "when", "he", "has", "to", "recount", "a", "story", "that", "involves", "like", "4", "people", "and", "a", "couple", "conversations", ".", "I", "mean", ",", "if", "you", "get", "confused", "by", "that", "chain", "of", "events", ",", "you", "probably", "shouldn't", "be", "responsible", "for", "legislation", "."], "text_2_tokenized": ["Burst", "into", "tears", "hearing", "@ABlinken", "recount", "his", "grandfather's", "Holocaust", "experience", ".", "I'd", "read", "it", ".", "But", "it", "was", "enormously", "powerful", "to", "hear", "him", "tell", "it", "in", "this", "enormous", "moment", ".", "#BuildBackBetter"]} -{"id": "1255-recount", "word": "recount", "label_binary": 1, "text_1": "Hearing the Counsel recount tings Trump says in a monotone and formal voice is so funny and sad. His diction is so unpresidential and idiotic my god.", "token_idx_1": 3, "text_start_1": 20, "text_end_1": 27, "date_1": "2019-11", "text_2": "REPUBLICANS Senate & House reps. had best get behind President Trump and the recount he's the reason you all are in your jobs #TRUMP2024", "token_idx_2": 14, "text_start_2": 81, "text_end_2": 88, "date_2": "2020-11", "text_1_tokenized": ["Hearing", "the", "Counsel", "recount", "tings", "Trump", "says", "in", "a", "monotone", "and", "formal", "voice", "is", "so", "funny", "and", "sad", ".", "His", "diction", "is", "so", "unpresidential", "and", "idiotic", "my", "god", "."], "text_2_tokenized": ["REPUBLICANS", "Senate", "&", "House", "reps", ".", "had", "best", "get", "behind", "President", "Trump", "and", "the", "recount", "he's", "the", "reason", "you", "all", "are", "in", "your", "jobs", "#TRUMP2024"]} -{"id": "1256-recount", "word": "recount", "label_binary": 1, "text_1": "Remember when Jill Stein took millions of dollars from people to recount votes and then kept it and didn't do anything with it? She truly went sicko mode", "token_idx_1": 11, "text_start_1": 65, "text_end_1": 72, "date_1": "2019-11", "text_2": "Just saw a chitterlings egg roll. 
Maybe we do need a recount.", "token_idx_2": 12, "text_start_2": 53, "text_end_2": 60, "date_2": "2020-11", "text_1_tokenized": ["Remember", "when", "Jill", "Stein", "took", "millions", "of", "dollars", "from", "people", "to", "recount", "votes", "and", "then", "kept", "it", "and", "didn't", "do", "anything", "with", "it", "?", "She", "truly", "went", "sicko", "mode"], "text_2_tokenized": ["Just", "saw", "a", "chitterlings", "egg", "roll", ".", "Maybe", "we", "do", "need", "a", "recount", "."]} -{"id": "1257-recount", "word": "recount", "label_binary": 0, "text_1": "After some initial concern we've realised the reason our kid is getting so sinewy is because she can't shut up. Every mouthful of food is interrupted by a Billy Connolly style recount of her day that has about seven offshoots (minus the satisfying pun).", "token_idx_1": 32, "text_start_1": 176, "text_end_1": 183, "date_1": "2019-11", "text_2": "I just don't get it if Biden did win like it says he did then whys it matter if they do a recount? You shouldn't have anything to worry about.", "token_idx_2": 22, "text_start_2": 90, "text_end_2": 97, "date_2": "2020-11", "text_1_tokenized": ["After", "some", "initial", "concern", "we've", "realised", "the", "reason", "our", "kid", "is", "getting", "so", "sinewy", "is", "because", "she", "can't", "shut", "up", ".", "Every", "mouthful", "of", "food", "is", "interrupted", "by", "a", "Billy", "Connolly", "style", "recount", "of", "her", "day", "that", "has", "about", "seven", "offshoots", "(", "minus", "the", "satisfying", "pun", ")", "."], "text_2_tokenized": ["I", "just", "don't", "get", "it", "if", "Biden", "did", "win", "like", "it", "says", "he", "did", "then", "whys", "it", "matter", "if", "they", "do", "a", "recount", "?", "You", "shouldn't", "have", "anything", "to", "worry", "about", "."]} -{"id": "1258-recount", "word": "recount", "label_binary": 1, "text_1": "So when we gonna find out about this \u201crecount\u201d or whatever bevin is doing today", "token_idx_1": 9, "text_start_1": 38, "text_end_1": 45, "date_1": "2019-11", "text_2": "We need a recount in Lindsey Graham's race. He seems to know a lot about election obstruction", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 17, "date_2": "2020-11", "text_1_tokenized": ["So", "when", "we", "gonna", "find", "out", "about", "this", "\u201c", "recount", "\u201d", "or", "whatever", "bevin", "is", "doing", "today"], "text_2_tokenized": ["We", "need", "a", "recount", "in", "Lindsey", "Graham's", "race", ".", "He", "seems", "to", "know", "a", "lot", "about", "election", "obstruction"]} -{"id": "1259-recount", "word": "recount", "label_binary": 0, "text_1": "How y'all gon give me a temporary phone with issues too? I demand a recount", "token_idx_1": 15, "text_start_1": 68, "text_end_1": 75, "date_1": "2019-11", "text_2": "With NC and Alaska coming in for Trump, that brings the total to 306-232 in favor of Biden, assuming Georgia doesn't flip to Trump in a recount. 
If that's the total, that is the exact electoral college margin by which Trump defeated Clinton in 2016 (ignoring faithless electors)", "token_idx_2": 28, "text_start_2": 136, "text_end_2": 143, "date_2": "2020-11", "text_1_tokenized": ["How", "y'all", "gon", "give", "me", "a", "temporary", "phone", "with", "issues", "too", "?", "I", "demand", "a", "recount"], "text_2_tokenized": ["With", "NC", "and", "Alaska", "coming", "in", "for", "Trump", ",", "that", "brings", "the", "total", "to", "306-232", "in", "favor", "of", "Biden", ",", "assuming", "Georgia", "doesn't", "flip", "to", "Trump", "in", "a", "recount", ".", "If", "that's", "the", "total", ",", "that", "is", "the", "exact", "electoral", "college", "margin", "by", "which", "Trump", "defeated", "Clinton", "in", "2016", "(", "ignoring", "faithless", "electors", ")"]} -{"id": "1260-recount", "word": "recount", "label_binary": 1, "text_1": "BREAKING NEWS: Bevins supporters demand recount after seeing numerous people wearing Mariachi suits and sombreros in line at voting sites. #KentuckyElections", "token_idx_1": 6, "text_start_1": 40, "text_end_1": 47, "date_1": "2019-11", "text_2": "Appears Biden would win any recount so end signature voting with video for a recount uniting America and proving the process pure as a babies butt for future elections setting the example for a messed up world as usual doing it first for honest elections and 90% participation ...", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 35, "date_2": "2020-11", "text_1_tokenized": ["BREAKING", "NEWS", ":", "Bevins", "supporters", "demand", "recount", "after", "seeing", "numerous", "people", "wearing", "Mariachi", "suits", "and", "sombreros", "in", "line", "at", "voting", "sites", ".", "#KentuckyElections"], "text_2_tokenized": ["Appears", "Biden", "would", "win", "any", "recount", "so", "end", "signature", "voting", "with", "video", "for", "a", "recount", "uniting", "America", "and", "proving", "the", "process", "pure", "as", "a", "babies", "butt", "for", "future", "elections", "setting", "the", "example", "for", "a", "messed", "up", "world", "as", "usual", "doing", "it", "first", "for", "honest", "elections", "and", "90", "%", "participation", "..."]} -{"id": "1261-recount", "word": "recount", "label_binary": 0, "text_1": "Keely Galvin - Adolescents with Autism Spectrum Disorder (ASD) are over-represented in the Criminal Justice System and are often required to recount events, a challenging task for people with ASD. #CurtinOTSWSP @CurtinUni #Alliedhealthhons #Speechies", "token_idx_1": 23, "text_start_1": 141, "text_end_1": 148, "date_1": "2019-11", "text_2": "Wait.... there's another recount in Georgia?!?! Are you freaking kidding me? 
Am in watching an old newscast?", "token_idx_2": 4, "text_start_2": 25, "text_end_2": 32, "date_2": "2020-11", "text_1_tokenized": ["Keely", "Galvin", "-", "Adolescents", "with", "Autism", "Spectrum", "Disorder", "(", "ASD", ")", "are", "over-represented", "in", "the", "Criminal", "Justice", "System", "and", "are", "often", "required", "to", "recount", "events", ",", "a", "challenging", "task", "for", "people", "with", "ASD", ".", "#CurtinOTSWSP", "@CurtinUni", "#Alliedhealthhons", "#Speechies"], "text_2_tokenized": ["Wait", "...", "there's", "another", "recount", "in", "Georgia", "?", "!", "?", "!", "Are", "you", "freaking", "kidding", "me", "?", "Am", "in", "watching", "an", "old", "newscast", "?"]} -{"id": "1262-recount", "word": "recount", "label_binary": 1, "text_1": "oneits asking for a recount saying that there anger is towards mma like you aren't essentially saying you want to take t*ts award from them or invalidate it", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 27, "date_1": "2019-11", "text_2": "The two who don't win should refuse to leave the white tent, demand a recount and then pardon a turkey #GBBO", "token_idx_2": 15, "text_start_2": 70, "text_end_2": 77, "date_2": "2020-11", "text_1_tokenized": ["oneits", "asking", "for", "a", "recount", "saying", "that", "there", "anger", "is", "towards", "mma", "like", "you", "aren't", "essentially", "saying", "you", "want", "to", "take", "t", "*", "ts", "award", "from", "them", "or", "invalidate", "it"], "text_2_tokenized": ["The", "two", "who", "don't", "win", "should", "refuse", "to", "leave", "the", "white", "tent", ",", "demand", "a", "recount", "and", "then", "pardon", "a", "turkey", "#GBBO"]} -{"id": "1263-recount", "word": "recount", "label_binary": 0, "text_1": "I miss not having to do a recount of group pictures #BeLoudForMX7", "token_idx_1": 7, "text_start_1": 26, "text_end_1": 33, "date_1": "2019-11", "text_2": "Hey, fun recount guys...looks like Biden still wins Georgia though! 
\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40\ud83d\udc40", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 16, "date_2": "2020-11", "text_1_tokenized": ["I", "miss", "not", "having", "to", "do", "a", "recount", "of", "group", "pictures", "#BeLoudForMX7"], "text_2_tokenized": ["Hey", ",", "fun", "recount", "guys", "...", "looks", "like", "Biden", "still", "wins", "Georgia", "though", "!", "\ud83d\udc40", "\ud83d\udc40", "\ud83d\udc40"]} -{"id": "1264-recount", "word": "recount", "label_binary": 1, "text_1": "If Andy Beshear hangs on through a recount, it will almost certainly turn out to be because unapologetically biased Beshear voters rigged the election by voting for him.", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 42, "date_1": "2019-11", "text_2": "All the extra hoops Georgia Republicans put into the voting process is the thing that is making it hard for Trump to invalidate the recount.", "token_idx_2": 24, "text_start_2": 132, "text_end_2": 139, "date_2": "2020-11", "text_1_tokenized": ["If", "Andy", "Beshear", "hangs", "on", "through", "a", "recount", ",", "it", "will", "almost", "certainly", "turn", "out", "to", "be", "because", "unapologetically", "biased", "Beshear", "voters", "rigged", "the", "election", "by", "voting", "for", "him", "."], "text_2_tokenized": ["All", "the", "extra", "hoops", "Georgia", "Republicans", "put", "into", "the", "voting", "process", "is", "the", "thing", "that", "is", "making", "it", "hard", "for", "Trump", "to", "invalidate", "the", "recount", "."]} -{"id": "1265-recount", "word": "recount", "label_binary": 0, "text_1": "Will Chu, Deliveroo's founder said \u201cPeople are fundamentally lazy. And given he opportunity to be lazier, they will be.\u201d Some of my more experienced drivers recount orders were \u201cpeople pretty much live upstairs from the restaurant.\u201d This laziness is at our expense", "token_idx_1": 31, "text_start_1": 157, "text_end_1": 164, "date_1": "2019-11", "text_2": "WAPO Trump lost by >6MM votes He lost >30 legal rulings in multi states He failed in every recount challenge He lost tries to block certification He's losing tries to get states to throw out the results He's losing GOP allies He's losing lawyers as they quit SO MUCH LOSING", "token_idx_2": 20, "text_start_2": 97, "text_end_2": 104, "date_2": "2020-11", "text_1_tokenized": ["Will", "Chu", ",", "Deliveroo's", "founder", "said", "\u201c", "People", "are", "fundamentally", "lazy", ".", "And", "given", "he", "opportunity", "to", "be", "lazier", ",", "they", "will", "be", ".", "\u201d", "Some", "of", "my", "more", "experienced", "drivers", "recount", "orders", "were", "\u201c", "people", "pretty", "much", "live", "upstairs", "from", "the", "restaurant", ".", "\u201d", "This", "laziness", "is", "at", "our", "expense"], "text_2_tokenized": ["WAPO", "Trump", "lost", "by", ">", "6MM", "votes", "He", "lost", ">", "30", "legal", "rulings", "in", "multi", "states", "He", "failed", "in", "every", "recount", "challenge", "He", "lost", "tries", "to", "block", "certification", "He's", "losing", "tries", "to", "get", "states", "to", "throw", "out", "the", "results", "He's", "losing", "GOP", "allies", "He's", "losing", "lawyers", "as", "they", "quit", "SO", "MUCH", "LOSING"]} -{"id": "1266-recount", "word": "recount", "label_binary": 1, "text_1": "How can Bevin call for a recount without paper ballots? 
#KYGov", "token_idx_1": 6, "text_start_1": 25, "text_end_1": 32, "date_1": "2019-11", "text_2": "My Spotify Wrapped this year SUCKED. How is it going to tell me that a song I streamed 11 times my top song of the YEAR!?!?!? There needs to be a recount", "token_idx_2": 38, "text_start_2": 146, "text_end_2": 153, "date_2": "2020-11", "text_1_tokenized": ["How", "can", "Bevin", "call", "for", "a", "recount", "without", "paper", "ballots", "?", "#KYGov"], "text_2_tokenized": ["My", "Spotify", "Wrapped", "this", "year", "SUCKED", ".", "How", "is", "it", "going", "to", "tell", "me", "that", "a", "song", "I", "streamed", "11", "times", "my", "top", "song", "of", "the", "YEAR", "!", "?", "!", "?", "!", "?", "There", "needs", "to", "be", "a", "recount"]} -{"id": "1267-recount", "word": "recount", "label_binary": 0, "text_1": "about to collect recount signatures at harrys in allston come thru if youre a ward 21 voter", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 24, "date_1": "2019-11", "text_2": "Starting paragraph one of a recount tomorrow! What would you provide for the children apart from word banks", "token_idx_2": 5, "text_start_2": 28, "text_end_2": 35, "date_2": "2020-11", "text_1_tokenized": ["about", "to", "collect", "recount", "signatures", "at", "harrys", "in", "allston", "come", "thru", "if", "youre", "a", "ward", "21", "voter"], "text_2_tokenized": ["Starting", "paragraph", "one", "of", "a", "recount", "tomorrow", "!", "What", "would", "you", "provide", "for", "the", "children", "apart", "from", "word", "banks"]} -{"id": "1268-recount", "word": "recount", "label_binary": 1, "text_1": "If Pitbull isn't on this list then I want a recount @Toucherandrich", "token_idx_1": 10, "text_start_1": 44, "text_end_1": 51, "date_1": "2019-11", "text_2": "Local lawyer on TV today ran a losing campaign for Congressman. My thought: He hasn't chained himself to his desk, not leaving until we coddle him with a recount. Don't have to bribe him with trip to Disney. He lost and moved on. Y R we permitting the infantile behavior of Dummy", "token_idx_2": 31, "text_start_2": 154, "text_end_2": 161, "date_2": "2020-11", "text_1_tokenized": ["If", "Pitbull", "isn't", "on", "this", "list", "then", "I", "want", "a", "recount", "@Toucherandrich"], "text_2_tokenized": ["Local", "lawyer", "on", "TV", "today", "ran", "a", "losing", "campaign", "for", "Congressman", ".", "My", "thought", ":", "He", "hasn't", "chained", "himself", "to", "his", "desk", ",", "not", "leaving", "until", "we", "coddle", "him", "with", "a", "recount", ".", "Don't", "have", "to", "bribe", "him", "with", "trip", "to", "Disney", ".", "He", "lost", "and", "moved", "on", ".", "Y", "R", "we", "permitting", "the", "infantile", "behavior", "of", "Dummy"]} -{"id": "1269-recount", "word": "recount", "label_binary": 1, "text_1": "We were robbed! @Andy should have won! Wait, I'm from Kentucky, should I demand a recount! 
#byebyebevin", "token_idx_1": 19, "text_start_1": 82, "text_end_1": 89, "date_1": "2019-11", "text_2": "Sooo Georgia did a recount and Trump still lost \ud83e\udd23\ud83e\udd23\ud83e\udd23\ud83e\udd23", "token_idx_2": 4, "text_start_2": 19, "text_end_2": 26, "date_2": "2020-11", "text_1_tokenized": ["We", "were", "robbed", "!", "@Andy", "should", "have", "won", "!", "Wait", ",", "I'm", "from", "Kentucky", ",", "should", "I", "demand", "a", "recount", "!", "#byebyebevin"], "text_2_tokenized": ["Sooo", "Georgia", "did", "a", "recount", "and", "Trump", "still", "lost", "\ud83e\udd23", "\ud83e\udd23", "\ud83e\udd23"]} -{"id": "1270-recount", "word": "recount", "label_binary": 1, "text_1": "What is the difference between: Burning ballots to make a recount impossible & Using electronic voting devices that leave no paper trail, record or mechanism for a recount with any integrity? Do we really need the optics of an open fire to know our system is SCREWED?", "token_idx_1": 11, "text_start_1": 58, "text_end_1": 65, "date_1": "2019-11", "text_2": "If ever we want a chance to get behind our president, no matter what side, we need a fully open recount.", "token_idx_2": 22, "text_start_2": 96, "text_end_2": 103, "date_2": "2020-11", "text_1_tokenized": ["What", "is", "the", "difference", "between", ":", "Burning", "ballots", "to", "make", "a", "recount", "impossible", "&", "Using", "electronic", "voting", "devices", "that", "leave", "no", "paper", "trail", ",", "record", "or", "mechanism", "for", "a", "recount", "with", "any", "integrity", "?", "Do", "we", "really", "need", "the", "optics", "of", "an", "open", "fire", "to", "know", "our", "system", "is", "SCREWED", "?"], "text_2_tokenized": ["If", "ever", "we", "want", "a", "chance", "to", "get", "behind", "our", "president", ",", "no", "matter", "what", "side", ",", "we", "need", "a", "fully", "open", "recount", "."]} -{"id": "1271-recount", "word": "recount", "label_binary": 1, "text_1": "In 2016 Sheila Dixon wanted a recount, Pugh had no integrity. She talked greasy about Sheila. Karma is a real bitch huh Pugh?", "token_idx_1": 6, "text_start_1": 30, "text_end_1": 37, "date_1": "2019-11", "text_2": "Delaying the transition only affects the suffering of Americans during Covid. All thank to Trump's false claims. Can't wait to see #Georgia recount show Trump losing. #TrumpConcede", "token_idx_2": 24, "text_start_2": 140, "text_end_2": 147, "date_2": "2020-11", "text_1_tokenized": ["In", "2016", "Sheila", "Dixon", "wanted", "a", "recount", ",", "Pugh", "had", "no", "integrity", ".", "She", "talked", "greasy", "about", "Sheila", ".", "Karma", "is", "a", "real", "bitch", "huh", "Pugh", "?"], "text_2_tokenized": ["Delaying", "the", "transition", "only", "affects", "the", "suffering", "of", "Americans", "during", "Covid", ".", "All", "thank", "to", "Trump's", "false", "claims", ".", "Can't", "wait", "to", "see", "#Georgia", "recount", "show", "Trump", "losing", ".", "#TrumpConcede"]} -{"id": "1272-recount", "word": "recount", "label_binary": 0, "text_1": "as ppl go home to see families, just want to remind white ppl that 1) its not POCs job to listen to u recount all the racist shit your family said at dinner, 2) it's not their job to pat u on the back for calling them out, and 3) if u don't call them out, ur part of the problem", "token_idx_1": 25, "text_start_1": 102, "text_end_1": 109, "date_1": "2019-11", "text_2": "Trump campaign files for new recount in Georgia. 
Its already been done a second time, that is enough.", "token_idx_2": 5, "text_start_2": 29, "text_end_2": 36, "date_2": "2020-11", "text_1_tokenized": ["as", "ppl", "go", "home", "to", "see", "families", ",", "just", "want", "to", "remind", "white", "ppl", "that", "1", ")", "its", "not", "POCs", "job", "to", "listen", "to", "u", "recount", "all", "the", "racist", "shit", "your", "family", "said", "at", "dinner", ",", "2", ")", "it's", "not", "their", "job", "to", "pat", "u", "on", "the", "back", "for", "calling", "them", "out", ",", "and", "3", ")", "if", "u", "don't", "call", "them", "out", ",", "ur", "part", "of", "the", "problem"], "text_2_tokenized": ["Trump", "campaign", "files", "for", "new", "recount", "in", "Georgia", ".", "Its", "already", "been", "done", "a", "second", "time", ",", "that", "is", "enough", "."]} -{"id": "1273-recount", "word": "recount", "label_binary": 1, "text_1": "I know the election has been over for a little while now, but I'm still basking in the loss of FORMER (yaaas!) KY governor #MattBevin, who only asked for a recount because he hates teachers and never learned how to properly count", "token_idx_1": 35, "text_start_1": 156, "text_end_1": 163, "date_1": "2019-11", "text_2": "Word to the wise, you can demand fair elections and a thorough audit & recount of votes and still not be a conspiracy theorist. It's called being an honest American \ud83c\uddfa\ud83c\uddf8 Everyone should demand #ElectionIntegrity whether you win or lose.", "token_idx_2": 15, "text_start_2": 75, "text_end_2": 82, "date_2": "2020-11", "text_1_tokenized": ["I", "know", "the", "election", "has", "been", "over", "for", "a", "little", "while", "now", ",", "but", "I'm", "still", "basking", "in", "the", "loss", "of", "FORMER", "(", "yaaas", "!", ")", "KY", "governor", "#MattBevin", ",", "who", "only", "asked", "for", "a", "recount", "because", "he", "hates", "teachers", "and", "never", "learned", "how", "to", "properly", "count"], "text_2_tokenized": ["Word", "to", "the", "wise", ",", "you", "can", "demand", "fair", "elections", "and", "a", "thorough", "audit", "&", "recount", "of", "votes", "and", "still", "not", "be", "a", "conspiracy", "theorist", ".", "It's", "called", "being", "an", "honest", "American", "\ud83c\uddfa", "\ud83c\uddf8", "Everyone", "should", "demand", "#ElectionIntegrity", "whether", "you", "win", "or", "lose", "."]} -{"id": "3845-primo", "word": "primo", "label_binary": 0, "text_1": "My primo and his girl had no idea my dad was actually coming for their wedding ,my dad wanted to surprise him since his dad is in Mexico and couldn't make it she legit cried when she saw my dad had actually made it... 
that moment I realized he was marrying the perfect girl \ud83d\ude22\ud83e\udd7a", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-11", "text_2": "k, now i gotta save more primo for zhongli", "token_idx_2": 7, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-11", "text_1_tokenized": ["My", "primo", "and", "his", "girl", "had", "no", "idea", "my", "dad", "was", "actually", "coming", "for", "their", "wedding", ",", "my", "dad", "wanted", "to", "surprise", "him", "since", "his", "dad", "is", "in", "Mexico", "and", "couldn't", "make", "it", "she", "legit", "cried", "when", "she", "saw", "my", "dad", "had", "actually", "made", "it", "...", "that", "moment", "I", "realized", "he", "was", "marrying", "the", "perfect", "girl", "\ud83d\ude22", "\ud83e\udd7a"], "text_2_tokenized": ["k", ",", "now", "i", "gotta", "save", "more", "primo", "for", "zhongli"]} -{"id": "3846-primo", "word": "primo", "label_binary": 0, "text_1": "Wow!!! Jus had to say bye to my primo Marcos! So fucking sad!! \ud83d\ude25\ud83d\ude25 hopefully it won't be another 11 years before I see him!!", "token_idx_1": 11, "text_start_1": 32, "text_end_1": 37, "date_1": "2019-11", "text_2": "Sad no primo for doing the event multiple times. Though it seems like the event isn't finish yet since the story hasn't concluded yet unless they will continue it for another event....", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 12, "date_2": "2020-11", "text_1_tokenized": ["Wow", "!", "!", "!", "Jus", "had", "to", "say", "bye", "to", "my", "primo", "Marcos", "!", "So", "fucking", "sad", "!", "!", "\ud83d\ude25", "\ud83d\ude25", "hopefully", "it", "won't", "be", "another", "11", "years", "before", "I", "see", "him", "!", "!"], "text_2_tokenized": ["Sad", "no", "primo", "for", "doing", "the", "event", "multiple", "times", ".", "Though", "it", "seems", "like", "the", "event", "isn't", "finish", "yet", "since", "the", "story", "hasn't", "concluded", "yet", "unless", "they", "will", "continue", "it", "for", "another", "event", "..."]} -{"id": "3847-primo", "word": "primo", "label_binary": 0, "text_1": "Follow my sheezyart page please yall are missing out on some primo content", "token_idx_1": 11, "text_start_1": 61, "text_end_1": 66, "date_1": "2019-11", "text_2": "I love my primo so much besties 4lyyyyfe", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 15, "date_2": "2020-11", "text_1_tokenized": ["Follow", "my", "sheezyart", "page", "please", "yall", "are", "missing", "out", "on", "some", "primo", "content"], "text_2_tokenized": ["I", "love", "my", "primo", "so", "much", "besties", "4lyyyyfe"]} -{"id": "3848-primo", "word": "primo", "label_binary": 0, "text_1": "whens el primo cody coming over here tho?", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 14, "date_1": "2019-11", "text_2": "Craving for sea ranch pizza mn primo's \ud83e\udd7a\ud83e\udd7a\ud83e\udd7a", "token_idx_2": 6, "text_start_2": 31, "text_end_2": 38, "date_2": "2020-11", "text_1_tokenized": ["whens", "el", "primo", "cody", "coming", "over", "here", "tho", "?"], "text_2_tokenized": ["Craving", "for", "sea", "ranch", "pizza", "mn", "primo's", "\ud83e\udd7a", "\ud83e\udd7a", "\ud83e\udd7a"]} -{"id": "3849-primo", "word": "primo", "label_binary": 0, "text_1": "\ud83c\udf20 My top 10 JESC winners of the decade \ud83c\udf20 1 \ud83c\uddf5\ud83c\uddf1 Superhero 2 \ud83c\uddfa\ud83c\udde6 Nebo 3 \ud83c\uddf5\ud83c\uddf1 Anyone I Want To Be 4 \ud83c\udde6\ud83c\uddf2 Mama 5 \ud83c\uddee\ud83c\uddf9 Tu primo grande amore 6 \ud83c\uddf7\ud83c\uddfa 
Wings 7 \ud83c\uddec\ud83c\uddea Mzeo 8 \ud83c\uddf2\ud83c\uddf9 The Start 9 \ud83c\uddec\ud83c\uddea Candy Music 10 \ud83c\uddf2\ud83c\uddf9 Not My Soul", "token_idx_1": 34, "text_start_1": 109, "text_end_1": 114, "date_1": "2019-11", "text_2": "5k primo gems... i hope i can pull zhongli. if not..then albedo it is.", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["\ud83c\udf20", "My", "top", "10", "JESC", "winners", "of", "the", "decade", "\ud83c\udf20", "1", "\ud83c\uddf5", "\ud83c\uddf1", "Superhero", "2", "\ud83c\uddfa", "\ud83c\udde6", "Nebo", "3", "\ud83c\uddf5", "\ud83c\uddf1", "Anyone", "I", "Want", "To", "Be", "4", "\ud83c\udde6", "\ud83c\uddf2", "Mama", "5", "\ud83c\uddee", "\ud83c\uddf9", "Tu", "primo", "grande", "amore", "6", "\ud83c\uddf7", "\ud83c\uddfa", "Wings", "7", "\ud83c\uddec", "\ud83c\uddea", "Mzeo", "8", "\ud83c\uddf2", "\ud83c\uddf9", "The", "Start", "9", "\ud83c\uddec", "\ud83c\uddea", "Candy", "Music", "10", "\ud83c\uddf2", "\ud83c\uddf9", "Not", "My", "Soul"], "text_2_tokenized": ["5k", "primo", "gems", "...", "i", "hope", "i", "can", "pull", "zhongli", ".", "if", "not", "..", "then", "albedo", "it", "is", "."]} -{"id": "3850-primo", "word": "primo", "label_binary": 0, "text_1": "soy khea bro: who are you? me encontre a tu primo: im you but stronger", "token_idx_1": 12, "text_start_1": 44, "text_end_1": 49, "date_1": "2019-11", "text_2": "It feels like I should be watching Washington Week and all the other primo Friday shows", "token_idx_2": 13, "text_start_2": 69, "text_end_2": 74, "date_2": "2020-11", "text_1_tokenized": ["soy", "khea", "bro", ":", "who", "are", "you", "?", "me", "encontre", "a", "tu", "primo", ":", "im", "you", "but", "stronger"], "text_2_tokenized": ["It", "feels", "like", "I", "should", "be", "watching", "Washington", "Week", "and", "all", "the", "other", "primo", "Friday", "shows"]} -{"id": "3851-primo", "word": "primo", "label_binary": 1, "text_1": "somebody PLEASE cuff my primo it's sad boi hours aLL day", "token_idx_1": 4, "text_start_1": 24, "text_end_1": 29, "date_1": "2019-11", "text_2": "whatever im gonna save my primo for xiao's banner instead\ud83d\ude05\ud83d\ude05\ud83d\udc4d\ud83c\udffb", "token_idx_2": 5, "text_start_2": 26, "text_end_2": 31, "date_2": "2020-11", "text_1_tokenized": ["somebody", "PLEASE", "cuff", "my", "primo", "it's", "sad", "boi", "hours", "aLL", "day"], "text_2_tokenized": ["whatever", "im", "gonna", "save", "my", "primo", "for", "xiao's", "banner", "instead", "\ud83d\ude05", "\ud83d\ude05", "\ud83d\udc4d\ud83c\udffb"]} -{"id": "3852-primo", "word": "primo", "label_binary": 1, "text_1": "Trump fans: Obama's supporters are primo tax return", "token_idx_1": 6, "text_start_1": 35, "text_end_1": 40, "date_1": "2019-11", "text_2": "T-day dinner = grilled cheese sandwich w/ bacon & tomato. Pink Floyd (Two Suns in the Sunset/The Final Cut) & primo weed. This may be the best Thanksgiving day ever! 
\ud83e\udd83\ud83e\udd19", "token_idx_2": 26, "text_start_2": 118, "text_end_2": 123, "date_2": "2020-11", "text_1_tokenized": ["Trump", "fans", ":", "Obama's", "supporters", "are", "primo", "tax", "return"], "text_2_tokenized": ["T-day", "dinner", "=", "grilled", "cheese", "sandwich", "w", "/", "bacon", "&", "tomato", ".", "Pink", "Floyd", "(", "Two", "Suns", "in", "the", "Sunset", "/", "The", "Final", "Cut", ")", "&", "primo", "weed", ".", "This", "may", "be", "the", "best", "Thanksgiving", "day", "ever", "!", "\ud83e\udd83", "\ud83e\udd19"]} -{"id": "3853-primo", "word": "primo", "label_binary": 1, "text_1": "What's the move this weekend primo? @MartinTime8", "token_idx_1": 5, "text_start_1": 29, "text_end_1": 34, "date_1": "2019-11", "text_2": "K'la is getting tricky.., does anyone remember primo tricks of how to pack for Kyibox", "token_idx_2": 9, "text_start_2": 47, "text_end_2": 52, "date_2": "2020-11", "text_1_tokenized": ["What's", "the", "move", "this", "weekend", "primo", "?", "@MartinTime8"], "text_2_tokenized": ["K'la", "is", "getting", "tricky", "..", ",", "does", "anyone", "remember", "primo", "tricks", "of", "how", "to", "pack", "for", "Kyibox"]} -{"id": "3854-primo", "word": "primo", "label_binary": 0, "text_1": "tomorrow cyph at your crib primo @disrespecfulhoe", "token_idx_1": 5, "text_start_1": 27, "text_end_1": 32, "date_1": "2019-11", "text_2": "genshin oomfs who actually know the game: are 10 intertwined fates, 23 acquaint fates, and about 4k primo gems good?", "token_idx_2": 20, "text_start_2": 100, "text_end_2": 105, "date_2": "2020-11", "text_1_tokenized": ["tomorrow", "cyph", "at", "your", "crib", "primo", "@disrespecfulhoe"], "text_2_tokenized": ["genshin", "oomfs", "who", "actually", "know", "the", "game", ":", "are", "10", "intertwined", "fates", ",", "23", "acquaint", "fates", ",", "and", "about", "4k", "primo", "gems", "good", "?"]} -{"id": "3855-primo", "word": "primo", "label_binary": 0, "text_1": "Why my primo being a dick to his wife smh hand her over bruh\ud83d\ude02", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-11", "text_2": "paimon giving out 200 primo for 4 days???? oke", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-11", "text_1_tokenized": ["Why", "my", "primo", "being", "a", "dick", "to", "his", "wife", "smh", "hand", "her", "over", "bruh", "\ud83d\ude02"], "text_2_tokenized": ["paimon", "giving", "out", "200", "primo", "for", "4", "days", "?", "?", "?", "oke"]} -{"id": "3856-primo", "word": "primo", "label_binary": 1, "text_1": "i bought some junji ito stickers to split con mi primo, it's a shared christmas gift", "token_idx_1": 10, "text_start_1": 49, "text_end_1": 54, "date_1": "2019-11", "text_2": "My primo swear Ik everything bout cars like boy i dibble and dabble pero no se que madres me andas diciendo", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["i", "bought", "some", "junji", "ito", "stickers", "to", "split", "con", "mi", "primo", ",", "it's", "a", "shared", "christmas", "gift"], "text_2_tokenized": ["My", "primo", "swear", "Ik", "everything", "bout", "cars", "like", "boy", "i", "dibble", "and", "dabble", "pero", "no", "se", "que", "madres", "me", "andas", "diciendo"]} -{"id": "3857-primo", "word": "primo", "label_binary": 0, "text_1": "Just gotta say, all of the #ChickFilA related tears are making me sooooo happy! 
This is some primo saline!", "token_idx_1": 19, "text_start_1": 93, "text_end_1": 98, "date_1": "2019-11", "text_2": "Ok, so Miho have decided to reduce the free stuff that player got when reached AR10 (20 wish and 2500 primo) From what i've read primo will ne longer be given for new player and only 5 wish for them.", "token_idx_2": 22, "text_start_2": 102, "text_end_2": 107, "date_2": "2020-11", "text_1_tokenized": ["Just", "gotta", "say", ",", "all", "of", "the", "#ChickFilA", "related", "tears", "are", "making", "me", "sooooo", "happy", "!", "This", "is", "some", "primo", "saline", "!"], "text_2_tokenized": ["Ok", ",", "so", "Miho", "have", "decided", "to", "reduce", "the", "free", "stuff", "that", "player", "got", "when", "reached", "AR10", "(", "20", "wish", "and", "2500", "primo", ")", "From", "what", "i've", "read", "primo", "will", "ne", "longer", "be", "given", "for", "new", "player", "and", "only", "5", "wish", "for", "them", "."]} -{"id": "3858-primo", "word": "primo", "label_binary": 0, "text_1": "Great couple primo and Mortis \u2764", "token_idx_1": 2, "text_start_1": 13, "text_end_1": 18, "date_1": "2019-11", "text_2": "Going to do some rolls in Genshin before I have to head out this morning, going to still save the bulk of my primo though cause Zhongli is still my most wanted man~", "token_idx_2": 24, "text_start_2": 109, "text_end_2": 114, "date_2": "2020-11", "text_1_tokenized": ["Great", "couple", "primo", "and", "Mortis", "\u2764"], "text_2_tokenized": ["Going", "to", "do", "some", "rolls", "in", "Genshin", "before", "I", "have", "to", "head", "out", "this", "morning", ",", "going", "to", "still", "save", "the", "bulk", "of", "my", "primo", "though", "cause", "Zhongli", "is", "still", "my", "most", "wanted", "man", "~"]} -{"id": "3859-primo", "word": "primo", "label_binary": 0, "text_1": "primo like da @DCostaFaCose , i'm so so happy!\ud83d\ude05\ud83e\udd70", "token_idx_1": 0, "text_start_1": 0, "text_end_1": 5, "date_1": "2019-11", "text_2": "I'm glad that us men have figured out shorter shorts are better shit is primo comfy and just looks better", "token_idx_2": 14, "text_start_2": 72, "text_end_2": 77, "date_2": "2020-11", "text_1_tokenized": ["primo", "like", "da", "@DCostaFaCose", ",", "i'm", "so", "so", "happy", "!", "\ud83d\ude05", "\ud83e\udd70"], "text_2_tokenized": ["I'm", "glad", "that", "us", "men", "have", "figured", "out", "shorter", "shorts", "are", "better", "shit", "is", "primo", "comfy", "and", "just", "looks", "better"]} -{"id": "3860-primo", "word": "primo", "label_binary": 0, "text_1": "If your first snap of the day was from Team Snapchat, let me know and I'll send you some primo shit", "token_idx_1": 20, "text_start_1": 89, "text_end_1": 94, "date_1": "2019-11", "text_2": "any rich people with 96k primo gems wanna hand some over", "token_idx_2": 5, "text_start_2": 25, "text_end_2": 30, "date_2": "2020-11", "text_1_tokenized": ["If", "your", "first", "snap", "of", "the", "day", "was", "from", "Team", "Snapchat", ",", "let", "me", "know", "and", "I'll", "send", "you", "some", "primo", "shit"], "text_2_tokenized": ["any", "rich", "people", "with", "96k", "primo", "gems", "wanna", "hand", "some", "over"]} -{"id": "3861-primo", "word": "primo", "label_binary": 0, "text_1": "Capitals getting a needed improvement from 'primo' PK unit", "token_idx_1": 7, "text_start_1": 44, "text_end_1": 49, "date_1": "2019-11", "text_2": "*Every gacha game that has leaks* Everyone: Oooooohhh \ud83e\udd29 I'm gonna save for theeeeem Me: *sees 160 primo* does a single pull 
and gets a 3* weapon", "token_idx_2": 22, "text_start_2": 98, "text_end_2": 103, "date_2": "2020-11", "text_1_tokenized": ["Capitals", "getting", "a", "needed", "improvement", "from", "'", "primo", "'", "PK", "unit"], "text_2_tokenized": ["*", "Every", "gacha", "game", "that", "has", "leaks", "*", "Everyone", ":", "Oooooohhh", "\ud83e\udd29", "I'm", "gonna", "save", "for", "theeeeem", "Me", ":", "*", "sees", "160", "primo", "*", "does", "a", "single", "pull", "and", "gets", "a", "3", "*", "weapon"]} -{"id": "3862-primo", "word": "primo", "label_binary": 1, "text_1": "i wonder if lucas is going to dress like someone's primo at the quincea\u00f1era for all the stops during his solo stage or if it's going to change", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 56, "date_1": "2019-11", "text_2": "samuel is actually my primo", "token_idx_2": 4, "text_start_2": 22, "text_end_2": 27, "date_2": "2020-11", "text_1_tokenized": ["i", "wonder", "if", "lucas", "is", "going", "to", "dress", "like", "someone's", "primo", "at", "the", "quincea\u00f1era", "for", "all", "the", "stops", "during", "his", "solo", "stage", "or", "if", "it's", "going", "to", "change"], "text_2_tokenized": ["samuel", "is", "actually", "my", "primo"]} -{"id": "3863-primo", "word": "primo", "label_binary": 1, "text_1": "My ass was truly 13-16 when it aired. That was my primo emo/indie puberty years", "token_idx_1": 12, "text_start_1": 50, "text_end_1": 55, "date_1": "2019-11", "text_2": "About to put a few down with my primo @nickg676", "token_idx_2": 8, "text_start_2": 32, "text_end_2": 37, "date_2": "2020-11", "text_1_tokenized": ["My", "ass", "was", "truly", "13-16", "when", "it", "aired", ".", "That", "was", "my", "primo", "emo", "/", "indie", "puberty", "years"], "text_2_tokenized": ["About", "to", "put", "a", "few", "down", "with", "my", "primo", "@nickg676"]} -{"id": "3864-primo", "word": "primo", "label_binary": 1, "text_1": "It is primo Vikings to come in flat at home against a bad team after a big win on the road the previous week.", "token_idx_1": 2, "text_start_1": 6, "text_end_1": 11, "date_1": "2019-11", "text_2": "Thor 3 is primo delicious grade a thor beef and thor x Bruce x val x loki romance potential", "token_idx_2": 3, "text_start_2": 10, "text_end_2": 15, "date_2": "2020-11", "text_1_tokenized": ["It", "is", "primo", "Vikings", "to", "come", "in", "flat", "at", "home", "against", "a", "bad", "team", "after", "a", "big", "win", "on", "the", "road", "the", "previous", "week", "."], "text_2_tokenized": ["Thor", "3", "is", "primo", "delicious", "grade", "a", "thor", "beef", "and", "thor", "x", "Bruce", "x", "val", "x", "loki", "romance", "potential"]} -{"id": "3865-primo", "word": "primo", "label_binary": 1, "text_1": "What am I excited about today? I found a banging donut shop by my new office. Kolaches are amazing. Donuts are primo.", "token_idx_1": 24, "text_start_1": 111, "text_end_1": 116, "date_1": "2019-11", "text_2": "they were beyond ooobenblief they were primo efficient to a man. 
they were stuyvestant smoking they were the reformation spring and everybody in the world turned reformation blue", "token_idx_2": 6, "text_start_2": 39, "text_end_2": 44, "date_2": "2020-11", "text_1_tokenized": ["What", "am", "I", "excited", "about", "today", "?", "I", "found", "a", "banging", "donut", "shop", "by", "my", "new", "office", ".", "Kolaches", "are", "amazing", ".", "Donuts", "are", "primo", "."], "text_2_tokenized": ["they", "were", "beyond", "ooobenblief", "they", "were", "primo", "efficient", "to", "a", "man", ".", "they", "were", "stuyvestant", "smoking", "they", "were", "the", "reformation", "spring", "and", "everybody", "in", "the", "world", "turned", "reformation", "blue"]} -{"id": "3866-primo", "word": "primo", "label_binary": 0, "text_1": "This primo bubbler thing was not worth the trouble if everything was good I wouldn't be feeling the way I am now of course I lost all my coupons for this crap @primowater @Walmart I understand why people do the things they do..I get it. #DONE #chainreactions", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 10, "date_1": "2019-11", "text_2": "Lmao I can hear nephi talking to his primo in the other room about having kids??. \u201cYeah bro when I have a son I know he's gonna play badass in soccer\u201d I didn't get the memo but I guess we're having a son guys?? Lol.", "token_idx_2": 8, "text_start_2": 37, "text_end_2": 42, "date_2": "2020-11", "text_1_tokenized": ["This", "primo", "bubbler", "thing", "was", "not", "worth", "the", "trouble", "if", "everything", "was", "good", "I", "wouldn't", "be", "feeling", "the", "way", "I", "am", "now", "of", "course", "I", "lost", "all", "my", "coupons", "for", "this", "crap", "@primowater", "@Walmart", "I", "understand", "why", "people", "do", "the", "things", "they", "do", "..", "I", "get", "it", ".", "#DONE", "#chainreactions"], "text_2_tokenized": ["Lmao", "I", "can", "hear", "nephi", "talking", "to", "his", "primo", "in", "the", "other", "room", "about", "having", "kids", "?", "?", ".", "\u201c", "Yeah", "bro", "when", "I", "have", "a", "son", "I", "know", "he's", "gonna", "play", "badass", "in", "soccer", "\u201d", "I", "didn't", "get", "the", "memo", "but", "I", "guess", "we're", "having", "a", "son", "guys", "?", "?", "Lol", "."]} -{"id": "3867-primo", "word": "primo", "label_binary": 0, "text_1": "I can't wait to go hunt with my primo!!!", "token_idx_1": 8, "text_start_1": 32, "text_end_1": 37, "date_1": "2019-11", "text_2": "im so tired doing the quests but I need primo gems and level up...", "token_idx_2": 9, "text_start_2": 40, "text_end_2": 45, "date_2": "2020-11", "text_1_tokenized": ["I", "can't", "wait", "to", "go", "hunt", "with", "my", "primo", "!", "!", "!"], "text_2_tokenized": ["im", "so", "tired", "doing", "the", "quests", "but", "I", "need", "primo", "gems", "and", "level", "up", "..."]} -{"id": "3868-primo", "word": "primo", "label_binary": 1, "text_1": "Ou shit primo he cogio feelings", "token_idx_1": 2, "text_start_1": 8, "text_end_1": 13, "date_1": "2019-11", "text_2": "Remembering when me and the welding homies would roll coal everywhere all douchey till we all blew our engines\ud83d\ude2d\ud83d\ude02fak primo..lecciones de la vida\ud83e\udd37\u200d\u2642\ufe0f", "token_idx_2": 22, "text_start_2": 116, "text_end_2": 121, "date_2": "2020-11", "text_1_tokenized": ["Ou", "shit", "primo", "he", "cogio", "feelings"], "text_2_tokenized": ["Remembering", "when", "me", "and", "the", "welding", "homies", "would", "roll", "coal", "everywhere", "all", "douchey", "till", "we", 
"all", "blew", "our", "engines", "\ud83d\ude2d", "\ud83d\ude02", "fak", "primo", "..", "lecciones", "de", "la", "vida", "\ud83e\udd37\u200d\u2642", "\ufe0f"]} -{"id": "3869-primo", "word": "primo", "label_binary": 0, "text_1": "I've started using my walks to and from tram stops/train stations at night as primo time to practice my spitting and I am so worried that one day someone is gonna see me accidentally dribble down my chin under suburban moonlight", "token_idx_1": 16, "text_start_1": 78, "text_end_1": 83, "date_1": "2019-11", "text_2": "It's either migo or primo \ud83d\ude02 that's all I know", "token_idx_2": 4, "text_start_2": 20, "text_end_2": 25, "date_2": "2020-11", "text_1_tokenized": ["I've", "started", "using", "my", "walks", "to", "and", "from", "tram", "stops", "/", "train", "stations", "at", "night", "as", "primo", "time", "to", "practice", "my", "spitting", "and", "I", "am", "so", "worried", "that", "one", "day", "someone", "is", "gonna", "see", "me", "accidentally", "dribble", "down", "my", "chin", "under", "suburban", "moonlight"], "text_2_tokenized": ["It's", "either", "migo", "or", "primo", "\ud83d\ude02", "that's", "all", "I", "know"]} -{"id": "3870-primo", "word": "primo", "label_binary": 0, "text_1": "In the tweet befor my mentor in the pictur on the left the one on right ther are my dad Rosario Picco primo ballerino solista della Scala di Milano and my mum Frances Norma Strubel dancer night night love you\ud83d\ude0d\ud83e\udd17\ud83d\ude4f\ud83c\udf0c\ud83c\udf0c\ud83c\udf0c", "token_idx_1": 22, "text_start_1": 102, "text_end_1": 107, "date_1": "2019-11", "text_2": "k i gained 1k primo in 3 hours gonna farm in abyss tmr \ud83d\ude24", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-11", "text_1_tokenized": ["In", "the", "tweet", "befor", "my", "mentor", "in", "the", "pictur", "on", "the", "left", "the", "one", "on", "right", "ther", "are", "my", "dad", "Rosario", "Picco", "primo", "ballerino", "solista", "della", "Scala", "di", "Milano", "and", "my", "mum", "Frances", "Norma", "Strubel", "dancer", "night", "night", "love", "you", "\ud83d\ude0d", "\ud83e\udd17", "\ud83d\ude4f", "\ud83c\udf0c", "\ud83c\udf0c", "\ud83c\udf0c"], "text_2_tokenized": ["k", "i", "gained", "1k", "primo", "in", "3", "hours", "gonna", "farm", "in", "abyss", "tmr", "\ud83d\ude24"]} -{"id": "3871-primo", "word": "primo", "label_binary": 0, "text_1": "Niggas think I'm coming uptown tonight hace frio primo", "token_idx_1": 8, "text_start_1": 49, "text_end_1": 54, "date_1": "2019-11", "text_2": "I remember I seen a nigga roll up a blunt in a black and mild... I feel like if you doing this you doing primo too", "token_idx_2": 25, "text_start_2": 105, "text_end_2": 110, "date_2": "2020-11", "text_1_tokenized": ["Niggas", "think", "I'm", "coming", "uptown", "tonight", "hace", "frio", "primo"], "text_2_tokenized": ["I", "remember", "I", "seen", "a", "nigga", "roll", "up", "a", "blunt", "in", "a", "black", "and", "mild", "...", "I", "feel", "like", "if", "you", "doing", "this", "you", "doing", "primo", "too"]} -{"id": "3872-primo", "word": "primo", "label_binary": 0, "text_1": "i could watch leon & primo all day!! 
\ud83d\ude2d", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-11", "text_2": "My cousins texts start with \u201cMiss you primo\u201d and ends with \u201cdame una buena apuesta cerote\u201d \ud83e\udd26\ud83c\udffb\u200d\u2642\ufe0f", "token_idx_2": 8, "text_start_2": 38, "text_end_2": 43, "date_2": "2020-11", "text_1_tokenized": ["i", "could", "watch", "leon", "&", "primo", "all", "day", "!", "!", "\ud83d\ude2d"], "text_2_tokenized": ["My", "cousins", "texts", "start", "with", "\u201c", "Miss", "you", "primo", "\u201d", "and", "ends", "with", "\u201c", "dame", "una", "buena", "apuesta", "cerote", "\u201d", "\ud83e\udd26\ud83c\udffb\u200d\u2642", "\ufe0f"]} -{"id": "3873-primo", "word": "primo", "label_binary": 0, "text_1": "Fine primo tempo. Cagliari \ud83c\udd9a Fiorentina 3\u20e3-0\u20e3 #ForzaViola \ud83d\udc9c #CagliariFiorentina", "token_idx_1": 1, "text_start_1": 5, "text_end_1": 10, "date_1": "2019-11", "text_2": "And I didn't meet an ock until I was like 19. All the shop runners in my neighborhoods growing up was Spanish. So they was papi or primo.", "token_idx_2": 29, "text_start_2": 131, "text_end_2": 136, "date_2": "2020-11", "text_1_tokenized": ["Fine", "primo", "tempo", ".", "Cagliari", "\ud83c\udd9a", "Fiorentina", "3\u20e3", "-", "0\u20e3", "#ForzaViola", "\ud83d\udc9c", "#CagliariFiorentina"], "text_2_tokenized": ["And", "I", "didn't", "meet", "an", "ock", "until", "I", "was", "like", "19", ".", "All", "the", "shop", "runners", "in", "my", "neighborhoods", "growing", "up", "was", "Spanish", ".", "So", "they", "was", "papi", "or", "primo", "."]} -{"id": "3874-primo", "word": "primo", "label_binary": 0, "text_1": "HBD to the A1 day 1! @JChavira21 have a good one primo! Plan a trip to midland with your pops sometime! Stay up! \ud83d\udc4a\ud83c\udffd", "token_idx_1": 12, "text_start_1": 49, "text_end_1": 54, "date_1": "2019-11", "text_2": "Landing primo is so scary", "token_idx_2": 1, "text_start_2": 8, "text_end_2": 13, "date_2": "2020-11", "text_1_tokenized": ["HBD", "to", "the", "A1", "day", "1", "!", "@JChavira21", "have", "a", "good", "one", "primo", "!", "Plan", "a", "trip", "to", "midland", "with", "your", "pops", "sometime", "!", "Stay", "up", "!", "\ud83d\udc4a\ud83c\udffd"], "text_2_tokenized": ["Landing", "primo", "is", "so", "scary"]} -{"id": "3875-primo", "word": "primo", "label_binary": 0, "text_1": "19.5kg down and 12.5kg to go to pre-baby weight! But the number on the scales isn't my concern, I just wanna be strong and happy. (Although below pre-baby weight would be primo). I'm loving being back at the gym \ud83e\udd70", "token_idx_1": 37, "text_start_1": 171, "text_end_1": 176, "date_1": "2019-11", "text_2": "Brace yourself, primo positivo in azienda is coming", "token_idx_2": 3, "text_start_2": 16, "text_end_2": 21, "date_2": "2020-11", "text_1_tokenized": ["19.5", "kg", "down", "and", "12.5", "kg", "to", "go", "to", "pre-baby", "weight", "!", "But", "the", "number", "on", "the", "scales", "isn't", "my", "concern", ",", "I", "just", "wanna", "be", "strong", "and", "happy", ".", "(", "Although", "below", "pre-baby", "weight", "would", "be", "primo", ")", ".", "I'm", "loving", "being", "back", "at", "the", "gym", "\ud83e\udd70"], "text_2_tokenized": ["Brace", "yourself", ",", "primo", "positivo", "in", "azienda", "is", "coming"]} -{"id": "3876-primo", "word": "primo", "label_binary": 0, "text_1": "my holiday fits looking extra spicy. 
best believe ima be the flyest primo chilling in the sala", "token_idx_1": 13, "text_start_1": 68, "text_end_1": 73, "date_1": "2019-11", "text_2": "Happy birthday primo \u203c\ufe0f\ud83c\udf7b @bozyomar", "token_idx_2": 2, "text_start_2": 15, "text_end_2": 20, "date_2": "2020-11", "text_1_tokenized": ["my", "holiday", "fits", "looking", "extra", "spicy", ".", "best", "believe", "ima", "be", "the", "flyest", "primo", "chilling", "in", "the", "sala"], "text_2_tokenized": ["Happy", "birthday", "primo", "\u203c", "\ufe0f", "\ud83c\udf7b", "@bozyomar"]} -{"id": "3877-primo", "word": "primo", "label_binary": 0, "text_1": "I'm waiting for primo jordan's bday greetings tho \ud83d\ude4f\ud83c\udffb\ud83d\ude4f\ud83c\udffb\ud83d\ude4f\ud83c\udffb\u2728", "token_idx_1": 3, "text_start_1": 16, "text_end_1": 21, "date_1": "2019-11", "text_2": "im in spanish class rn nd im confused lool what's the difference between sobrina sobrina nd primo prima google is useless i dont understandd\ud83d\udd74\ud83c\udffe", "token_idx_2": 16, "text_start_2": 92, "text_end_2": 97, "date_2": "2020-11", "text_1_tokenized": ["I'm", "waiting", "for", "primo", "jordan's", "bday", "greetings", "tho", "\ud83d\ude4f\ud83c\udffb", "\ud83d\ude4f\ud83c\udffb", "\ud83d\ude4f\ud83c\udffb", "\u2728"], "text_2_tokenized": ["im", "in", "spanish", "class", "rn", "nd", "im", "confused", "lool", "what's", "the", "difference", "between", "sobrina", "sobrina", "nd", "primo", "prima", "google", "is", "useless", "i", "dont", "understandd", "\ud83d\udd74\ud83c\udffe"]} -{"id": "3878-primo", "word": "primo", "label_binary": 0, "text_1": "Midwest kids still thinking 20-30 a g is fair because their dealer says it's 'primo' or 'fire'. Stop it. Get some help.", "token_idx_1": 15, "text_start_1": 78, "text_end_1": 83, "date_1": "2019-11", "text_2": "Me has experienced Epic 7, FGO, and other other gacha like games and Genshim Impact is just... ;-; oof.. like getting primo gems is effing hard, plus the achievement rewards doesn't' feel like an achievement at all since you will be given wot.. 3 or 5 primo? hecc..", "token_idx_2": 27, "text_start_2": 118, "text_end_2": 123, "date_2": "2020-11", "text_1_tokenized": ["Midwest", "kids", "still", "thinking", "20-30", "a", "g", "is", "fair", "because", "their", "dealer", "says", "it's", "'", "primo", "'", "or", "'", "fire", "'", ".", "Stop", "it", ".", "Get", "some", "help", "."], "text_2_tokenized": ["Me", "has", "experienced", "Epic", "7", ",", "FGO", ",", "and", "other", "other", "gacha", "like", "games", "and", "Genshim", "Impact", "is", "just", "...", ";", "-", ";", "oof", "..", "like", "getting", "primo", "gems", "is", "effing", "hard", ",", "plus", "the", "achievement", "rewards", "doesn't", "'", "feel", "like", "an", "achievement", "at", "all", "since", "you", "will", "be", "given", "wot", "..", "3", "or", "5", "primo", "?", "hecc", ".."]} -{"id": "3879-primo", "word": "primo", "label_binary": 0, "text_1": "Long Live droopy! Next week marks 2 years primo", "token_idx_1": 9, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-11", "text_2": "I'm pretty sure the current Fischl event in #GenshinImpact is designed to force players to spend saved up fragile resin or burn primo for resin refill. Accounting for BP events and stuff i'm not entirely sure it's doable without spending hard currency? 
maybe i'll dig into later.", "token_idx_2": 22, "text_start_2": 128, "text_end_2": 133, "date_2": "2020-11", "text_1_tokenized": ["Long", "Live", "droopy", "!", "Next", "week", "marks", "2", "years", "primo"], "text_2_tokenized": ["I'm", "pretty", "sure", "the", "current", "Fischl", "event", "in", "#GenshinImpact", "is", "designed", "to", "force", "players", "to", "spend", "saved", "up", "fragile", "resin", "or", "burn", "primo", "for", "resin", "refill", ".", "Accounting", "for", "BP", "events", "and", "stuff", "i'm", "not", "entirely", "sure", "it's", "doable", "without", "spending", "hard", "currency", "?", "maybe", "i'll", "dig", "into", "later", "."]} -{"id": "3880-primo", "word": "primo", "label_binary": 0, "text_1": "Idk how I have so many cousins but thanks Mom and Dad for making me call your best friends kids primo/a.", "token_idx_1": 20, "text_start_1": 96, "text_end_1": 101, "date_1": "2019-11", "text_2": "C2 Xingqiu, wth am I gonna do with this? Give me the Keqing Or make my greedy ass stop wasting primo", "token_idx_2": 22, "text_start_2": 95, "text_end_2": 100, "date_2": "2020-11", "text_1_tokenized": ["Idk", "how", "I", "have", "so", "many", "cousins", "but", "thanks", "Mom", "and", "Dad", "for", "making", "me", "call", "your", "best", "friends", "kids", "primo", "/", "a", "."], "text_2_tokenized": ["C2", "Xingqiu", ",", "wth", "am", "I", "gonna", "do", "with", "this", "?", "Give", "me", "the", "Keqing", "Or", "make", "my", "greedy", "ass", "stop", "wasting", "primo"]} -{"id": "3881-primo", "word": "primo", "label_binary": 0, "text_1": "And let's be honest, @us_navyseals you saw Bonnie losing her mind sexually just being across that table from me. You're a bunch of fucking cock-blocks. If I was out with y'all I'd get you some primo pussy, dudes. 
I'm a great fucking wingman.", "token_idx_1": 38, "text_start_1": 193, "text_end_1": 198, "date_1": "2019-11", "text_2": "- @ me saving these primo gems from spiral abyss -", "token_idx_2": 5, "text_start_2": 20, "text_end_2": 25, "date_2": "2020-11", "text_1_tokenized": ["And", "let's", "be", "honest", ",", "@us_navyseals", "you", "saw", "Bonnie", "losing", "her", "mind", "sexually", "just", "being", "across", "that", "table", "from", "me", ".", "You're", "a", "bunch", "of", "fucking", "cock-blocks", ".", "If", "I", "was", "out", "with", "y'all", "I'd", "get", "you", "some", "primo", "pussy", ",", "dudes", ".", "I'm", "a", "great", "fucking", "wingman", "."], "text_2_tokenized": ["-", "@", "me", "saving", "these", "primo", "gems", "from", "spiral", "abyss", "-"]} -{"id": "3882-primo", "word": "primo", "label_binary": 0, "text_1": "le hicieron un corrido a mi primo , should i be concerned or proud?", "token_idx_1": 6, "text_start_1": 28, "text_end_1": 33, "date_1": "2019-11", "text_2": "No primo walk this year \ud83e\udd72", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["le", "hicieron", "un", "corrido", "a", "mi", "primo", ",", "should", "i", "be", "concerned", "or", "proud", "?"], "text_2_tokenized": ["No", "primo", "walk", "this", "year", "\ud83e\udd72"]} -{"id": "3883-primo", "word": "primo", "label_binary": 0, "text_1": "The cousins that keep telling me to get on WhatsApp are the first ones that would hit me on there talking bout \u201cprimo, la cosa esta floja ahora mismo, te veo moviendo pa ya, mandame cien dollaritos por fa.\u201d Meanwhile they're on El Malecon everyday on the IG living their best life", "token_idx_1": 23, "text_start_1": 112, "text_end_1": 117, "date_1": "2019-11", "text_2": "n e wayz primo farming again for december hoping i can make it", "token_idx_2": 3, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-11", "text_1_tokenized": ["The", "cousins", "that", "keep", "telling", "me", "to", "get", "on", "WhatsApp", "are", "the", "first", "ones", "that", "would", "hit", "me", "on", "there", "talking", "bout", "\u201c", "primo", ",", "la", "cosa", "esta", "floja", "ahora", "mismo", ",", "te", "veo", "moviendo", "pa", "ya", ",", "mandame", "cien", "dollaritos", "por", "fa", ".", "\u201d", "Meanwhile", "they're", "on", "El", "Malecon", "everyday", "on", "the", "IG", "living", "their", "best", "life"], "text_2_tokenized": ["n", "e", "wayz", "primo", "farming", "again", "for", "december", "hoping", "i", "can", "make", "it"]} -{"id": "3884-primo", "word": "primo", "label_binary": 0, "text_1": "Local production fellas. I have 2x primo duel 12 delta mains that I'm tired of looking at. Bangin pa gear. 250 bucks today, you own em. DM me quick.", "token_idx_1": 7, "text_start_1": 35, "text_end_1": 40, "date_1": "2019-11", "text_2": "I made a new GI account just to collect primo for Zhongli \ud83d\udc49\ud83d\udc48", "token_idx_2": 9, "text_start_2": 40, "text_end_2": 45, "date_2": "2020-11", "text_1_tokenized": ["Local", "production", "fellas", ".", "I", "have", "2x", "primo", "duel", "12", "delta", "mains", "that", "I'm", "tired", "of", "looking", "at", ".", "Bangin", "pa", "gear", ".", "250", "bucks", "today", ",", "you", "own", "em", ".", "DM", "me", "quick", "."], "text_2_tokenized": ["I", "made", "a", "new", "GI", "account", "just", "to", "collect", "primo", "for", "Zhongli", "\ud83d\udc49", "\ud83d\udc48"]} -{"id": "3885-primo", "word": "primo", "label_binary": 1, "text_1": "Me: Why do you only fuck Miami whores? 
My primo: Cause I love whores, I can fuck them all I want and never get attached lmao South FL Lifehacks \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_1": 11, "text_start_1": 42, "text_end_1": 47, "date_1": "2019-11", "text_2": "Love on your loved ones hard. Try and reach out to those that cross your mind even if it's for one second. We all know our time here on earth is limited but damn this hurts. Our family won't be the same. Hope your dancing up there with mama laura primo. \ud83d\ude14\ud83d\udc94\ud83d\udc7c\ud83c\udffc\ud83d\ude4f\ud83c\udffb", "token_idx_2": 54, "text_start_2": 247, "text_end_2": 252, "date_2": "2020-11", "text_1_tokenized": ["Me", ":", "Why", "do", "you", "only", "fuck", "Miami", "whores", "?", "My", "primo", ":", "Cause", "I", "love", "whores", ",", "I", "can", "fuck", "them", "all", "I", "want", "and", "never", "get", "attached", "lmao", "South", "FL", "Lifehacks", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["Love", "on", "your", "loved", "ones", "hard", ".", "Try", "and", "reach", "out", "to", "those", "that", "cross", "your", "mind", "even", "if", "it's", "for", "one", "second", ".", "We", "all", "know", "our", "time", "here", "on", "earth", "is", "limited", "but", "damn", "this", "hurts", ".", "Our", "family", "won't", "be", "the", "same", ".", "Hope", "your", "dancing", "up", "there", "with", "mama", "laura", "primo", ".", "\ud83d\ude14", "\ud83d\udc94", "\ud83d\udc7c\ud83c\udffc", "\ud83d\ude4f\ud83c\udffb"]} -{"id": "3886-primo", "word": "primo", "label_binary": 1, "text_1": "I still find it so weird that @crecergermanOf se mi primo .. from my moms side for those who don't believe me", "token_idx_1": 10, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-11", "text_2": "S/O to my primo for the drinks!", "token_idx_2": 5, "text_start_2": 10, "text_end_2": 15, "date_2": "2020-11", "text_1_tokenized": ["I", "still", "find", "it", "so", "weird", "that", "@crecergermanOf", "se", "mi", "primo", "..", "from", "my", "moms", "side", "for", "those", "who", "don't", "believe", "me"], "text_2_tokenized": ["S", "/", "O", "to", "my", "primo", "for", "the", "drinks", "!"]} -{"id": "3887-primo", "word": "primo", "label_binary": 0, "text_1": "Bet my primo a round of golf instead of money, he says that's not fair way", "token_idx_1": 2, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-11", "text_2": "ughhghgh I just need 20 more primo for 49 pulls", "token_idx_2": 6, "text_start_2": 29, "text_end_2": 34, "date_2": "2020-11", "text_1_tokenized": ["Bet", "my", "primo", "a", "round", "of", "golf", "instead", "of", "money", ",", "he", "says", "that's", "not", "fair", "way"], "text_2_tokenized": ["ughhghgh", "I", "just", "need", "20", "more", "primo", "for", "49", "pulls"]} -{"id": "3888-primo", "word": "primo", "label_binary": 1, "text_1": "This war of Mine, il gameplay del primo raid, Operazione Cuor.", "token_idx_1": 8, "text_start_1": 34, "text_end_1": 39, "date_1": "2019-11", "text_2": "my primo always tells me he loves me more \ud83d\ude2d\ud83d\ude2d and that's why i am forever grateful for my family", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["This", "war", "of", "Mine", ",", "il", "gameplay", "del", "primo", "raid", ",", "Operazione", "Cuor", "."], "text_2_tokenized": ["my", "primo", "always", "tells", "me", "he", "loves", "me", "more", "\ud83d\ude2d", "\ud83d\ude2d", "and", "that's", "why", "i", "am", "forever", "grateful", "for", "my", "family"]} -{"id": "3889-primo", "word": 
"primo", "label_binary": 0, "text_1": "Happy birthday primo @Elninofred ! Have a good one man \ud83d\udc99", "token_idx_1": 2, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-11", "text_2": "So have you all been subscribing to substacks all this time and secretly reading primo content you never talk about", "token_idx_2": 14, "text_start_2": 81, "text_end_2": 86, "date_2": "2020-11", "text_1_tokenized": ["Happy", "birthday", "primo", "@Elninofred", "!", "Have", "a", "good", "one", "man", "\ud83d\udc99"], "text_2_tokenized": ["So", "have", "you", "all", "been", "subscribing", "to", "substacks", "all", "this", "time", "and", "secretly", "reading", "primo", "content", "you", "never", "talk", "about"]} -{"id": "3890-primo", "word": "primo", "label_binary": 0, "text_1": "I got like 35 cousins but I'm the only one called primo. Pretty obvious who the favorite is", "token_idx_1": 11, "text_start_1": 50, "text_end_1": 55, "date_1": "2019-11", "text_2": "Jean is the first and only 5* I've gotten with earned primo so that's nice but also I WANTED ZHONGLI FUCK YOU JEAN", "token_idx_2": 12, "text_start_2": 54, "text_end_2": 59, "date_2": "2020-11", "text_1_tokenized": ["I", "got", "like", "35", "cousins", "but", "I'm", "the", "only", "one", "called", "primo", ".", "Pretty", "obvious", "who", "the", "favorite", "is"], "text_2_tokenized": ["Jean", "is", "the", "first", "and", "only", "5", "*", "I've", "gotten", "with", "earned", "primo", "so", "that's", "nice", "but", "also", "I", "WANTED", "ZHONGLI", "FUCK", "YOU", "JEAN"]} -{"id": "3891-primo", "word": "primo", "label_binary": 0, "text_1": "I'm enjoying life and my primo from Mexico msges me \u201chola prima te est\u00e1s perdiendo de la fiesta,\u201d like damn you trying to get me all depri lol", "token_idx_1": 5, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-11", "text_2": "Pulled zhongli with the primo gems i saved up\ud83d\ude05", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-11", "text_1_tokenized": ["I'm", "enjoying", "life", "and", "my", "primo", "from", "Mexico", "msges", "me", "\u201c", "hola", "prima", "te", "est\u00e1s", "perdiendo", "de", "la", "fiesta", ",", "\u201d", "like", "damn", "you", "trying", "to", "get", "me", "all", "depri", "lol"], "text_2_tokenized": ["Pulled", "zhongli", "with", "the", "primo", "gems", "i", "saved", "up", "\ud83d\ude05"]} -{"id": "3892-primo", "word": "primo", "label_binary": 1, "text_1": "Rest easy primo we still Flexin on em!", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-11", "text_2": "It's a Saenz thing \ud83d\udc81\ud83c\udffc\u200d\u2640\ufe0f ok primo", "token_idx_2": 7, "text_start_2": 28, "text_end_2": 33, "date_2": "2020-11", "text_1_tokenized": ["Rest", "easy", "primo", "we", "still", "Flexin", "on", "em", "!"], "text_2_tokenized": ["It's", "a", "Saenz", "thing", "\ud83d\udc81\ud83c\udffc\u200d\u2640", "\ufe0f", "ok", "primo"]} -{"id": "3893-primo", "word": "primo", "label_binary": 0, "text_1": "Ngl im the handsome primo, I just pretend to be Ugly to give everyone else a chance.", "token_idx_1": 4, "text_start_1": 20, "text_end_1": 25, "date_1": "2019-11", "text_2": "I was saving primo gems so I could do 20 pulls when zhonglis banner comes out and get the pity pull", "token_idx_2": 3, "text_start_2": 13, "text_end_2": 18, "date_2": "2020-11", "text_1_tokenized": ["Ngl", "im", "the", "handsome", "primo", ",", "I", "just", "pretend", "to", "be", "Ugly", "to", "give", "everyone", "else", "a", "chance", "."], "text_2_tokenized": ["I", 
"was", "saving", "primo", "gems", "so", "I", "could", "do", "20", "pulls", "when", "zhonglis", "banner", "comes", "out", "and", "get", "the", "pity", "pull"]} -{"id": "3894-primo", "word": "primo", "label_binary": 0, "text_1": "can primo win a giveaway pwease? \ud83e\udd7a", "token_idx_1": 1, "text_start_1": 4, "text_end_1": 9, "date_1": "2019-11", "text_2": "i just got my pocket money from my bro yesterday, and i'm thinking to buy that 980+980 primo \ud83e\udd14", "token_idx_2": 20, "text_start_2": 87, "text_end_2": 92, "date_2": "2020-11", "text_1_tokenized": ["can", "primo", "win", "a", "giveaway", "pwease", "?", "\ud83e\udd7a"], "text_2_tokenized": ["i", "just", "got", "my", "pocket", "money", "from", "my", "bro", "yesterday", ",", "and", "i'm", "thinking", "to", "buy", "that", "980", "+", "980", "primo", "\ud83e\udd14"]} -{"id": "3895-primo", "word": "primo", "label_binary": 0, "text_1": "happy birthday to my primo, have a great day love you! \ud83e\udd73\ud83d\udc96 @JoeyAguirre13", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-11", "text_2": "I might be lvl 50 but I have zero heros wit, zero mora, zero weapon material and zero primo so what's the point", "token_idx_2": 20, "text_start_2": 86, "text_end_2": 91, "date_2": "2020-11", "text_1_tokenized": ["happy", "birthday", "to", "my", "primo", ",", "have", "a", "great", "day", "love", "you", "!", "\ud83e\udd73", "\ud83d\udc96", "@JoeyAguirre13"], "text_2_tokenized": ["I", "might", "be", "lvl", "50", "but", "I", "have", "zero", "heros", "wit", ",", "zero", "mora", ",", "zero", "weapon", "material", "and", "zero", "primo", "so", "what's", "the", "point"]} -{"id": "3896-primo", "word": "primo", "label_binary": 0, "text_1": "How to get the new skin of primo @PitBullFera ?", "token_idx_1": 7, "text_start_1": 27, "text_end_1": 32, "date_1": "2019-11", "text_2": "more than 120 primo???? SWEET", "token_idx_2": 3, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-11", "text_1_tokenized": ["How", "to", "get", "the", "new", "skin", "of", "primo", "@PitBullFera", "?"], "text_2_tokenized": ["more", "than", "120", "primo", "?", "?", "?", "SWEET"]} -{"id": "3897-primo", "word": "primo", "label_binary": 0, "text_1": "Did too much of that primo \u26f7", "token_idx_1": 5, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-11", "text_2": "*cuts to me immediately going to my calculator app to see how many pulls 800 primo gems is*", "token_idx_2": 16, "text_start_2": 77, "text_end_2": 82, "date_2": "2020-11", "text_1_tokenized": ["Did", "too", "much", "of", "that", "primo", "\u26f7"], "text_2_tokenized": ["*", "cuts", "to", "me", "immediately", "going", "to", "my", "calculator", "app", "to", "see", "how", "many", "pulls", "800", "primo", "gems", "is", "*"]} -{"id": "3898-primo", "word": "primo", "label_binary": 0, "text_1": "Yay! I will be taking a road trip to Waco to take a lot of toys, and hopefully clothes, that our sons have outgrown to their primo. I hope it's not too much for our cu\u00f1ada! She might not want to see me coming with the trailer. 
Hehe", "token_idx_1": 29, "text_start_1": 125, "text_end_1": 130, "date_1": "2019-11", "text_2": "Who's dick do I have to suck to get some primo gems?", "token_idx_2": 10, "text_start_2": 41, "text_end_2": 46, "date_2": "2020-11", "text_1_tokenized": ["Yay", "!", "I", "will", "be", "taking", "a", "road", "trip", "to", "Waco", "to", "take", "a", "lot", "of", "toys", ",", "and", "hopefully", "clothes", ",", "that", "our", "sons", "have", "outgrown", "to", "their", "primo", ".", "I", "hope", "it's", "not", "too", "much", "for", "our", "cu\u00f1ada", "!", "She", "might", "not", "want", "to", "see", "me", "coming", "with", "the", "trailer", ".", "Hehe"], "text_2_tokenized": ["Who's", "dick", "do", "I", "have", "to", "suck", "to", "get", "some", "primo", "gems", "?"]} -{"id": "3899-primo", "word": "primo", "label_binary": 0, "text_1": "1 \ud83c\uddf5\ud83c\uddf1 Superhero 2 \ud83c\uddfa\ud83c\udde6 Nebo 3 \ud83c\uddf7\ud83c\uddfa Wings 4 \ud83c\udde6\ud83c\uddf2 Mama 5\ud83c\uddf2\ud83c\uddf9Not My Soul 6\ud83c\uddf5\ud83c\uddf1Anyone I Want To Be 7\ud83c\uddec\ud83c\uddea Mzeo 8\ud83c\uddee\ud83c\uddf9 Tu primo grande amore 9\ud83c\uddec\ud83c\uddea Candy Music 10\ud83c\uddf2\ud83c\uddf9 The Start", "token_idx_1": 38, "text_start_1": 100, "text_end_1": 105, "date_1": "2019-11", "text_2": "today just for fun I smooshed some shaving foam around, and it did have a very satisfying texture to it, but now my hands smell like man face. I keep eating apple slices and tasting it \u2639\ufe0fnot primo", "token_idx_2": 41, "text_start_2": 191, "text_end_2": 196, "date_2": "2020-11", "text_1_tokenized": ["1", "\ud83c\uddf5", "\ud83c\uddf1", "Superhero", "2", "\ud83c\uddfa", "\ud83c\udde6", "Nebo", "3", "\ud83c\uddf7", "\ud83c\uddfa", "Wings", "4", "\ud83c\udde6", "\ud83c\uddf2", "Mama", "5", "\ud83c\uddf2", "\ud83c\uddf9", "Not", "My", "Soul", "6", "\ud83c\uddf5", "\ud83c\uddf1", "Anyone", "I", "Want", "To", "Be", "7", "\ud83c\uddec", "\ud83c\uddea", "Mzeo", "8", "\ud83c\uddee", "\ud83c\uddf9", "Tu", "primo", "grande", "amore", "9", "\ud83c\uddec", "\ud83c\uddea", "Candy", "Music", "10", "\ud83c\uddf2", "\ud83c\uddf9", "The", "Start"], "text_2_tokenized": ["today", "just", "for", "fun", "I", "smooshed", "some", "shaving", "foam", "around", ",", "and", "it", "did", "have", "a", "very", "satisfying", "texture", "to", "it", ",", "but", "now", "my", "hands", "smell", "like", "man", "face", ".", "I", "keep", "eating", "apple", "slices", "and", "tasting", "it", "\u2639", "\ufe0fnot", "primo"]} -{"id": "3900-primo", "word": "primo", "label_binary": 0, "text_1": "The Cowboys were down 31-3 to the Packers in the second half and 31-14 to the Bears in the fourth quarter. Some primo garbagio time stat padding in each game.", "token_idx_1": 23, "text_start_1": 112, "text_end_1": 117, "date_1": "2019-11", "text_2": "Seeing primo \ud83d\udc49 auto good mood", "token_idx_2": 1, "text_start_2": 7, "text_end_2": 12, "date_2": "2020-11", "text_1_tokenized": ["The", "Cowboys", "were", "down", "31-3", "to", "the", "Packers", "in", "the", "second", "half", "and", "31-14", "to", "the", "Bears", "in", "the", "fourth", "quarter", ".", "Some", "primo", "garbagio", "time", "stat", "padding", "in", "each", "game", "."], "text_2_tokenized": ["Seeing", "primo", "\ud83d\udc49", "auto", "good", "mood"]} -{"id": "3901-primo", "word": "primo", "label_binary": 0, "text_1": "disney+? 
nah, this is a netflix and youtube primo household", "token_idx_1": 11, "text_start_1": 44, "text_end_1": 49, "date_1": "2019-11", "text_2": "tienes todo por delante primo the world is yours", "token_idx_2": 4, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-11", "text_1_tokenized": ["disney", "+", "?", "nah", ",", "this", "is", "a", "netflix", "and", "youtube", "primo", "household"], "text_2_tokenized": ["tienes", "todo", "por", "delante", "primo", "the", "world", "is", "yours"]} -{"id": "3902-primo", "word": "primo", "label_binary": 0, "text_1": "104 chamas com a Bruna. E 101 com o meu primo. Proud of myself.", "token_idx_1": 11, "text_start_1": 40, "text_end_1": 45, "date_1": "2019-11", "text_2": "lrt > genshin can u give us some primo for best games of 2020 on apply", "token_idx_2": 8, "text_start_2": 36, "text_end_2": 41, "date_2": "2020-11", "text_1_tokenized": ["104", "chamas", "com", "a", "Bruna", ".", "E", "101", "com", "o", "meu", "primo", ".", "Proud", "of", "myself", "."], "text_2_tokenized": ["lrt", ">", "genshin", "can", "u", "give", "us", "some", "primo", "for", "best", "games", "of", "2020", "on", "apply"]} -{"id": "3903-primo", "word": "primo", "label_binary": 0, "text_1": "\"what's your preference?\" \"Your tia, primo.\" Ohhhhh touch\u00e9", "token_idx_1": 10, "text_start_1": 37, "text_end_1": 42, "date_1": "2019-11", "text_2": "when a genshin impact player had a kid \"what's your kid's name?\" \"primo\" \"is that short for primitivo?\u201d \"no it's short for primogems\"", "token_idx_2": 16, "text_start_2": 66, "text_end_2": 71, "date_2": "2020-11", "text_1_tokenized": ["\"", "what's", "your", "preference", "?", "\"", "\"", "Your", "tia", ",", "primo", ".", "\"", "Ohhhhh", "touch\u00e9"], "text_2_tokenized": ["when", "a", "genshin", "impact", "player", "had", "a", "kid", "\"", "what's", "your", "kid's", "name", "?", "\"", "\"", "primo", "\"", "\"", "is", "that", "short", "for", "primitivo", "?", "\u201d", "\"", "no", "it's", "short", "for", "primogems", "\""]} -{"id": "3904-primo", "word": "primo", "label_binary": 0, "text_1": "Thank you @IamSiddhartha for spending 73 hours with me this year on @Spotify. You are my #1. 
#spotifywrapped gracias primo un abrazo", "token_idx_1": 22, "text_start_1": 117, "text_end_1": 122, "date_1": "2019-11", "text_2": "Actually i dont have enrgy to play genshin but i want those primo for scaramouche", "token_idx_2": 12, "text_start_2": 60, "text_end_2": 65, "date_2": "2020-11", "text_1_tokenized": ["Thank", "you", "@IamSiddhartha", "for", "spending", "73", "hours", "with", "me", "this", "year", "on", "@Spotify", ".", "You", "are", "my", "#", "1", ".", "#spotifywrapped", "gracias", "primo", "un", "abrazo"], "text_2_tokenized": ["Actually", "i", "dont", "have", "enrgy", "to", "play", "genshin", "but", "i", "want", "those", "primo", "for", "scaramouche"]} -{"id": "3905-primo", "word": "primo", "label_binary": 1, "text_1": "my cousin just said my tattoo wasn't \u201creal art\u201d FOH primo u annoying \ud83d\ude44", "token_idx_1": 12, "text_start_1": 52, "text_end_1": 57, "date_1": "2019-11", "text_2": "Once I had a girl come by jaimes Mexican hacienda and we went out in the nicest summer nights in the deck and their were oak trees and while we were smoking a huge owl \ud83e\udd89 fly's right at us and primo said it was a sign from the great spirit \u201cla chusa\u201d the guardian she ended up bad", "token_idx_2": 41, "text_start_2": 192, "text_end_2": 197, "date_2": "2020-11", "text_1_tokenized": ["my", "cousin", "just", "said", "my", "tattoo", "wasn't", "\u201c", "real", "art", "\u201d", "FOH", "primo", "u", "annoying", "\ud83d\ude44"], "text_2_tokenized": ["Once", "I", "had", "a", "girl", "come", "by", "jaimes", "Mexican", "hacienda", "and", "we", "went", "out", "in", "the", "nicest", "summer", "nights", "in", "the", "deck", "and", "their", "were", "oak", "trees", "and", "while", "we", "were", "smoking", "a", "huge", "owl", "\ud83e\udd89", "fly's", "right", "at", "us", "and", "primo", "said", "it", "was", "a", "sign", "from", "the", "great", "spirit", "\u201c", "la", "chusa", "\u201d", "the", "guardian", "she", "ended", "up", "bad"]} -{"id": "3906-primo", "word": "primo", "label_binary": 0, "text_1": "The time, can't erase... r.i.p primo \ud83d\ude4f\ud83c\udffd", "token_idx_1": 11, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-11", "text_2": "Would love to know @BonettiESPN's opinion on Sassuolo. So much talented youth, solid depth, bit of dodgy defending on crosses/set pieces, but enough quality to create consistent wins. Curious to see where they land in the final table but for now: primo posto. #ForzaSasol", "token_idx_2": 51, "text_start_2": 247, "text_end_2": 252, "date_2": "2020-11", "text_1_tokenized": ["The", "time", ",", "can't", "erase", "...", "r", ".", "i", ".", "p", "primo", "\ud83d\ude4f\ud83c\udffd"], "text_2_tokenized": ["Would", "love", "to", "know", "@BonettiESPN", "'", "s", "opinion", "on", "Sassuolo", ".", "So", "much", "talented", "youth", ",", "solid", "depth", ",", "bit", "of", "dodgy", "defending", "on", "crosses", "/", "set", "pieces", ",", "but", "enough", "quality", "to", "create", "consistent", "wins", ".", "Curious", "to", "see", "where", "they", "land", "in", "the", "final", "table", "but", "for", "now", ":", "primo", "posto", ".", "#ForzaSasol"]} -{"id": "3907-primo", "word": "primo", "label_binary": 0, "text_1": "Kickback with the primo playing would be dope for my bday , too bad I work \ud83d\ude11", "token_idx_1": 3, "text_start_1": 18, "text_end_1": 23, "date_1": "2019-11", "text_2": "Who's buying that $mph dip? That's a primo buy, with just under a 5 Mil Market Cap! 
$sfi $dai $ethy #defi", "token_idx_2": 9, "text_start_2": 37, "text_end_2": 42, "date_2": "2020-11", "text_1_tokenized": ["Kickback", "with", "the", "primo", "playing", "would", "be", "dope", "for", "my", "bday", ",", "too", "bad", "I", "work", "\ud83d\ude11"], "text_2_tokenized": ["Who's", "buying", "that", "$", "mph", "dip", "?", "That's", "a", "primo", "buy", ",", "with", "just", "under", "a", "5", "Mil", "Market", "Cap", "!", "$", "sfi", "$", "dai", "$", "ethy", "#defi"]} -{"id": "3908-primo", "word": "primo", "label_binary": 1, "text_1": "Rest easy primo. My heart aches for you.", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-11", "text_2": "At my parents,my little primo (he's 20 lol) saw me scrolling on my tl L: BRO!! YOU'RE ON TWITTER AND YOU DON'T EVEN FOLLOW ME!!! ME \ud83e\udd2b as he walks away \ud83d\ude02", "token_idx_2": 6, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-11", "text_1_tokenized": ["Rest", "easy", "primo", ".", "My", "heart", "aches", "for", "you", "."], "text_2_tokenized": ["At", "my", "parents", ",", "my", "little", "primo", "(", "he's", "20", "lol", ")", "saw", "me", "scrolling", "on", "my", "tl", "L", ":", "BRO", "!", "!", "YOU'RE", "ON", "TWITTER", "AND", "YOU", "DON'T", "EVEN", "FOLLOW", "ME", "!", "!", "!", "ME", "\ud83e\udd2b", "as", "he", "walks", "away", "\ud83d\ude02"]} -{"id": "3909-primo", "word": "primo", "label_binary": 0, "text_1": "Eah this dairy needs to be cancelled how'd you not have banana primo lmao", "token_idx_1": 12, "text_start_1": 63, "text_end_1": 68, "date_1": "2019-11", "text_2": "uhm should i roll or save my primo for the next banner?? HELPPP", "token_idx_2": 7, "text_start_2": 29, "text_end_2": 34, "date_2": "2020-11", "text_1_tokenized": ["Eah", "this", "dairy", "needs", "to", "be", "cancelled", "how'd", "you", "not", "have", "banana", "primo", "lmao"], "text_2_tokenized": ["uhm", "should", "i", "roll", "or", "save", "my", "primo", "for", "the", "next", "banner", "?", "?", "HELPPP"]} -{"id": "3910-primo", "word": "primo", "label_binary": 0, "text_1": "If you want a big dose of nostalgia tonight, turn on the LED light on your phone and put your finger over it. That fleshy red glow is primo memory fodder.", "token_idx_1": 30, "text_start_1": 134, "text_end_1": 139, "date_1": "2019-11", "text_2": "goodbye. 6.5k primo i hoarded . 
tomorrow", "token_idx_2": 4, "text_start_2": 14, "text_end_2": 19, "date_2": "2020-11", "text_1_tokenized": ["If", "you", "want", "a", "big", "dose", "of", "nostalgia", "tonight", ",", "turn", "on", "the", "LED", "light", "on", "your", "phone", "and", "put", "your", "finger", "over", "it", ".", "That", "fleshy", "red", "glow", "is", "primo", "memory", "fodder", "."], "text_2_tokenized": ["goodbye", ".", "6.5", "k", "primo", "i", "hoarded", ".", "tomorrow"]} -{"id": "3911-primo", "word": "primo", "label_binary": 0, "text_1": "I really thought I was gonna take forever w my HW \ud83e\udd75 papitaaaas, me fuera quedado mas tiempo en casa de mi primo con mi fam :(", "token_idx_1": 23, "text_start_1": 106, "text_end_1": 111, "date_1": "2019-11", "text_2": "tempted to change my username again but ultrasylveon is still primo", "token_idx_2": 10, "text_start_2": 62, "text_end_2": 67, "date_2": "2020-11", "text_1_tokenized": ["I", "really", "thought", "I", "was", "gonna", "take", "forever", "w", "my", "HW", "\ud83e\udd75", "papitaaaas", ",", "me", "fuera", "quedado", "mas", "tiempo", "en", "casa", "de", "mi", "primo", "con", "mi", "fam", ":("], "text_2_tokenized": ["tempted", "to", "change", "my", "username", "again", "but", "ultrasylveon", "is", "still", "primo"]} -{"id": "3912-primo", "word": "primo", "label_binary": 0, "text_1": "The Lighthouse is some primo lunacy, what a feverish fucking nightmare. Sublime work from Pattinson and Dafoe, like goddamn.", "token_idx_1": 4, "text_start_1": 23, "text_end_1": 28, "date_1": "2019-11", "text_2": "Now I know where I should burn my primo", "token_idx_2": 8, "text_start_2": 34, "text_end_2": 39, "date_2": "2020-11", "text_1_tokenized": ["The", "Lighthouse", "is", "some", "primo", "lunacy", ",", "what", "a", "feverish", "fucking", "nightmare", ".", "Sublime", "work", "from", "Pattinson", "and", "Dafoe", ",", "like", "goddamn", "."], "text_2_tokenized": ["Now", "I", "know", "where", "I", "should", "burn", "my", "primo"]} -{"id": "3913-primo", "word": "primo", "label_binary": 0, "text_1": "The canine talent at this holiday market has been *primo*", "token_idx_1": 10, "text_start_1": 51, "text_end_1": 56, "date_1": "2019-11", "text_2": "ban zhao tvb fighting for women's rights to education dal primo secolo a.C.", "token_idx_2": 10, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-11", "text_1_tokenized": ["The", "canine", "talent", "at", "this", "holiday", "market", "has", "been", "*", "primo", "*"], "text_2_tokenized": ["ban", "zhao", "tvb", "fighting", "for", "women's", "rights", "to", "education", "dal", "primo", "secolo", "a", ".", "C", "."]} -{"id": "3914-primo", "word": "primo", "label_binary": 0, "text_1": "The more I listen, more I'm digging #Marcielago. Know early but Roc's choices for sample sources were primo this time around. Thought @rocmarci was more focused lyrically on this LP too. 
Not to say was slacking on albums last year, just even more detail on this LP.", "token_idx_1": 19, "text_start_1": 102, "text_end_1": 107, "date_1": "2019-11", "text_2": "\u2022 thread tabungan primo \u0ca5_\u0ca5 goal - 28.800", "token_idx_2": 3, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-11", "text_1_tokenized": ["The", "more", "I", "listen", ",", "more", "I'm", "digging", "#Marcielago", ".", "Know", "early", "but", "Roc's", "choices", "for", "sample", "sources", "were", "primo", "this", "time", "around", ".", "Thought", "@rocmarci", "was", "more", "focused", "lyrically", "on", "this", "LP", "too", ".", "Not", "to", "say", "was", "slacking", "on", "albums", "last", "year", ",", "just", "even", "more", "detail", "on", "this", "LP", "."], "text_2_tokenized": ["\u2022", "thread", "tabungan", "primo", "\u0ca5_\u0ca5", "goal", "-", "28.800"]} -{"id": "3915-primo", "word": "primo", "label_binary": 0, "text_1": "Shoutout to my primo @MeLlamoJesse who just tied the knot!!!", "token_idx_1": 3, "text_start_1": 15, "text_end_1": 20, "date_1": "2019-11", "text_2": "Sorry, big dong Zhong, but I'm saving all my primo for Scaramouche.", "token_idx_2": 11, "text_start_2": 45, "text_end_2": 50, "date_2": "2020-11", "text_1_tokenized": ["Shoutout", "to", "my", "primo", "@MeLlamoJesse", "who", "just", "tied", "the", "knot", "!", "!", "!"], "text_2_tokenized": ["Sorry", ",", "big", "dong", "Zhong", ",", "but", "I'm", "saving", "all", "my", "primo", "for", "Scaramouche", "."]} -{"id": "3916-primo", "word": "primo", "label_binary": 0, "text_1": ".@MBakerTBTimes I am baffled at how James Franklin even comes across minds of Floridians. He is from Pennsylvania, he is in the primo coaching job in PA he dreamed of years ago, he is doing well, and totally funded. Why would he pick up and leave?", "token_idx_1": 25, "text_start_1": 128, "text_end_1": 133, "date_1": "2019-11", "text_2": "Yoooo finally trying primo pizzakaya for the first time, got da miso Mac, Braddah pi, and maga haga yurrrr", "token_idx_2": 3, "text_start_2": 21, "text_end_2": 26, "date_2": "2020-11", "text_1_tokenized": [".", "@MBakerTBTimes", "I", "am", "baffled", "at", "how", "James", "Franklin", "even", "comes", "across", "minds", "of", "Floridians", ".", "He", "is", "from", "Pennsylvania", ",", "he", "is", "in", "the", "primo", "coaching", "job", "in", "PA", "he", "dreamed", "of", "years", "ago", ",", "he", "is", "doing", "well", ",", "and", "totally", "funded", ".", "Why", "would", "he", "pick", "up", "and", "leave", "?"], "text_2_tokenized": ["Yoooo", "finally", "trying", "primo", "pizzakaya", "for", "the", "first", "time", ",", "got", "da", "miso", "Mac", ",", "Braddah", "pi", ",", "and", "maga", "haga", "yurrrr"]} -{"id": "3917-primo", "word": "primo", "label_binary": 0, "text_1": "Yeosang that one primo showing off he can dance", "token_idx_1": 3, "text_start_1": 17, "text_end_1": 22, "date_1": "2019-11", "text_2": "idk what best of next is but yay cravity yay allencito mi primo\ud83d\ude0b", "token_idx_2": 12, "text_start_2": 58, "text_end_2": 63, "date_2": "2020-11", "text_1_tokenized": ["Yeosang", "that", "one", "primo", "showing", "off", "he", "can", "dance"], "text_2_tokenized": ["idk", "what", "best", "of", "next", "is", "but", "yay", "cravity", "yay", "allencito", "mi", "primo", "\ud83d\ude0b"]} -{"id": "3918-primo", "word": "primo", "label_binary": 0, "text_1": "also watched iria: zeiram the animation while i was working on stuff, solid show. goku midnight eye was primo 80s cyberpunk goofiness. 
watching 12 kingdoms now, i really liked it in HS though the rental place only had the first volume so getting to watch all of it will be nice.", "token_idx_1": 21, "text_start_1": 104, "text_end_1": 109, "date_1": "2019-11", "text_2": "jeonghan primo as he should", "token_idx_2": 1, "text_start_2": 9, "text_end_2": 14, "date_2": "2020-11", "text_1_tokenized": ["also", "watched", "iria", ":", "zeiram", "the", "animation", "while", "i", "was", "working", "on", "stuff", ",", "solid", "show", ".", "goku", "midnight", "eye", "was", "primo", "80s", "cyberpunk", "goofiness", ".", "watching", "12", "kingdoms", "now", ",", "i", "really", "liked", "it", "in", "HS", "though", "the", "rental", "place", "only", "had", "the", "first", "volume", "so", "getting", "to", "watch", "all", "of", "it", "will", "be", "nice", "."], "text_2_tokenized": ["jeonghan", "primo", "as", "he", "should"]} -{"id": "3919-primo", "word": "primo", "label_binary": 0, "text_1": "okay,, primo knows the difference between kiss and bite. he so cute asf\ud83d\ude2d\u2764", "token_idx_1": 3, "text_start_1": 7, "text_end_1": 12, "date_1": "2019-11", "text_2": "Im gonna save up primo and try pulling zhongli at the last min \ud83d\ude2d\ud83d\ude2d\ud83d\ude2d", "token_idx_2": 4, "text_start_2": 17, "text_end_2": 22, "date_2": "2020-11", "text_1_tokenized": ["okay", ",", ",", "primo", "knows", "the", "difference", "between", "kiss", "and", "bite", ".", "he", "so", "cute", "asf", "\ud83d\ude2d", "\u2764"], "text_2_tokenized": ["Im", "gonna", "save", "up", "primo", "and", "try", "pulling", "zhongli", "at", "the", "last", "min", "\ud83d\ude2d", "\ud83d\ude2d", "\ud83d\ude2d"]} -{"id": "3920-primo", "word": "primo", "label_binary": 0, "text_1": "The fact that human trafficking is so common now with both men and women is so scary and heartbreaking. It happened to my primo 10 years ago and it's getting worse stay safe everyone and share ur locations. \ud83d\ude25\u2764\ufe0f", "token_idx_1": 24, "text_start_1": 122, "text_end_1": 127, "date_1": "2019-11", "text_2": "ughhhh i'm super busy this week\ud83d\ude2d i won't have time to play genshin and grind some primo gems\ud83d\ude29\ud83d\ude2d", "token_idx_2": 17, "text_start_2": 82, "text_end_2": 87, "date_2": "2020-11", "text_1_tokenized": ["The", "fact", "that", "human", "trafficking", "is", "so", "common", "now", "with", "both", "men", "and", "women", "is", "so", "scary", "and", "heartbreaking", ".", "It", "happened", "to", "my", "primo", "10", "years", "ago", "and", "it's", "getting", "worse", "stay", "safe", "everyone", "and", "share", "ur", "locations", ".", "\ud83d\ude25", "\u2764", "\ufe0f"], "text_2_tokenized": ["ughhhh", "i'm", "super", "busy", "this", "week", "\ud83d\ude2d", "i", "won't", "have", "time", "to", "play", "genshin", "and", "grind", "some", "primo", "gems", "\ud83d\ude29", "\ud83d\ude2d"]} -{"id": "3921-primo", "word": "primo", "label_binary": 0, "text_1": "I cannot get over Jim Ross and Tony Schiavone at the same announcer's table. It's fascinating, it's intriguing, it's...awkwardly engaging. I love it. 
AEW in general has been pretty primo.", "token_idx_1": 36, "text_start_1": 181, "text_end_1": 186, "date_1": "2019-11", "text_2": "anyways let me go mass vote on that poll to make sure my primo samuel wins", "token_idx_2": 13, "text_start_2": 57, "text_end_2": 62, "date_2": "2020-11", "text_1_tokenized": ["I", "cannot", "get", "over", "Jim", "Ross", "and", "Tony", "Schiavone", "at", "the", "same", "announcer's", "table", ".", "It's", "fascinating", ",", "it's", "intriguing", ",", "it's", "...", "awkwardly", "engaging", ".", "I", "love", "it", ".", "AEW", "in", "general", "has", "been", "pretty", "primo", "."], "text_2_tokenized": ["anyways", "let", "me", "go", "mass", "vote", "on", "that", "poll", "to", "make", "sure", "my", "primo", "samuel", "wins"]} -{"id": "3922-primo", "word": "primo", "label_binary": 0, "text_1": "Lemme borrow that Disney+ account primo @Tylerxjoness \ud83d\udc40\ud83d\udc40\ud83d\udc40", "token_idx_1": 6, "text_start_1": 34, "text_end_1": 39, "date_1": "2019-11", "text_2": "primo black friday da stipendiata means \ud83d\udcb8\ud83d\udcb8\ud83d\udcb8", "token_idx_2": 0, "text_start_2": 0, "text_end_2": 5, "date_2": "2020-11", "text_1_tokenized": ["Lemme", "borrow", "that", "Disney", "+", "account", "primo", "@Tylerxjoness", "\ud83d\udc40", "\ud83d\udc40", "\ud83d\udc40"], "text_2_tokenized": ["primo", "black", "friday", "da", "stipendiata", "means", "\ud83d\udcb8", "\ud83d\udcb8", "\ud83d\udcb8"]} -{"id": "3923-primo", "word": "primo", "label_binary": 0, "text_1": "I checked the weather for Saturday and it should be in primo conditions for the beach. WE ARE GOING! \ud83d\ude2c@james_st_jamess", "token_idx_1": 11, "text_start_1": 55, "text_end_1": 60, "date_1": "2019-11", "text_2": "gonna go buy some primo to feel something idk", "token_idx_2": 4, "text_start_2": 18, "text_end_2": 23, "date_2": "2020-11", "text_1_tokenized": ["I", "checked", "the", "weather", "for", "Saturday", "and", "it", "should", "be", "in", "primo", "conditions", "for", "the", "beach", ".", "WE", "ARE", "GOING", "!", "\ud83d\ude2c", "@james_st_jamess"], "text_2_tokenized": ["gonna", "go", "buy", "some", "primo", "to", "feel", "something", "idk"]} -{"id": "3924-primo", "word": "primo", "label_binary": 1, "text_1": "I need me a good bailada con mi primo Marty \ud83e\udd7a\ud83d\udc83\ud83c\udffb\ud83d\udd7a\ud83c\udffb", "token_idx_1": 8, "text_start_1": 32, "text_end_1": 37, "date_1": "2019-11", "text_2": "It's been a week, the house is not the same without you primo \ud83e\udd7a", "token_idx_2": 13, "text_start_2": 56, "text_end_2": 61, "date_2": "2020-11", "text_1_tokenized": ["I", "need", "me", "a", "good", "bailada", "con", "mi", "primo", "Marty", "\ud83e\udd7a", "\ud83d\udc83\ud83c\udffb", "\ud83d\udd7a\ud83c\udffb"], "text_2_tokenized": ["It's", "been", "a", "week", ",", "the", "house", "is", "not", "the", "same", "without", "you", "primo", "\ud83e\udd7a"]} -{"id": "3925-primo", "word": "primo", "label_binary": 1, "text_1": "i just started properly editing my 1k celebration video and oh my god it is primo drunk person content", "token_idx_1": 15, "text_start_1": 76, "text_end_1": 81, "date_1": "2019-11", "text_2": "I love fics that have dudes begging for their gf to fuck them.....that's so hot. begging all whiny and needy about their gf riding them or them being able to put their dick in whatever... 
primo content", "token_idx_2": 39, "text_start_2": 188, "text_end_2": 193, "date_2": "2020-11", "text_1_tokenized": ["i", "just", "started", "properly", "editing", "my", "1k", "celebration", "video", "and", "oh", "my", "god", "it", "is", "primo", "drunk", "person", "content"], "text_2_tokenized": ["I", "love", "fics", "that", "have", "dudes", "begging", "for", "their", "gf", "to", "fuck", "them", "...", "that's", "so", "hot", ".", "begging", "all", "whiny", "and", "needy", "about", "their", "gf", "riding", "them", "or", "them", "being", "able", "to", "put", "their", "dick", "in", "whatever", "...", "primo", "content"]} -{"id": "3926-primo", "word": "primo", "label_binary": 0, "text_1": "what if i actually go READ la corda doro primo passo for the loml kazuki hihara,, it's been what 11 years???", "token_idx_1": 9, "text_start_1": 41, "text_end_1": 46, "date_1": "2019-11", "text_2": "just found out people introduce their sides to their mains as \u201cprima\u201d and \u201cprimo\u201d I'm \ud83e\udd2e\ud83e\udd2e\ud83e\udd22\ud83e\udd22\ud83e\udd22", "token_idx_2": 16, "text_start_2": 75, "text_end_2": 80, "date_2": "2020-11", "text_1_tokenized": ["what", "if", "i", "actually", "go", "READ", "la", "corda", "doro", "primo", "passo", "for", "the", "loml", "kazuki", "hihara", ",", ",", "it's", "been", "what", "11", "years", "?", "?", "?"], "text_2_tokenized": ["just", "found", "out", "people", "introduce", "their", "sides", "to", "their", "mains", "as", "\u201c", "prima", "\u201d", "and", "\u201c", "primo", "\u201d", "I'm", "\ud83e\udd2e", "\ud83e\udd2e", "\ud83e\udd22", "\ud83e\udd22", "\ud83e\udd22"]} -{"id": "3927-primo", "word": "primo", "label_binary": 0, "text_1": "and it's hardwood so I can just sweep and wipe up any pee that ends up on the floor and aaahhhhhh god this is the best setup. they get primo floor time in here", "token_idx_1": 30, "text_start_1": 135, "text_end_1": 140, "date_1": "2019-11", "text_2": "late night deep convo w primo nanaman \ud83e\udd7a\u2764\ufe0f", "token_idx_2": 5, "text_start_2": 24, "text_end_2": 29, "date_2": "2020-11", "text_1_tokenized": ["and", "it's", "hardwood", "so", "I", "can", "just", "sweep", "and", "wipe", "up", "any", "pee", "that", "ends", "up", "on", "the", "floor", "and", "aaahhhhhh", "god", "this", "is", "the", "best", "setup", ".", "they", "get", "primo", "floor", "time", "in", "here"], "text_2_tokenized": ["late", "night", "deep", "convo", "w", "primo", "nanaman", "\ud83e\udd7a", "\u2764", "\ufe0f"]} -{"id": "3928-primo", "word": "primo", "label_binary": 0, "text_1": "My primo @Chrismalooo saving my ass right now with this kijiji info.. 
rims+winter tires for 600$ dont fkn mind if I do!", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-11", "text_2": "4 mins before Zhongli banner but I don't have any primo left \ud83e\udd27\ud83d\ude30", "token_idx_2": 10, "text_start_2": 50, "text_end_2": 55, "date_2": "2020-11", "text_1_tokenized": ["My", "primo", "@Chrismalooo", "saving", "my", "ass", "right", "now", "with", "this", "kijiji", "info", "..", "rims", "+", "winter", "tires", "for", "600", "$", "dont", "fkn", "mind", "if", "I", "do", "!"], "text_2_tokenized": ["4", "mins", "before", "Zhongli", "banner", "but", "I", "don't", "have", "any", "primo", "left", "\ud83e\udd27", "\ud83d\ude30"]} -{"id": "3929-primo", "word": "primo", "label_binary": 0, "text_1": "Missing my primo cuz he was the only one que supo como bailarme", "token_idx_1": 2, "text_start_1": 11, "text_end_1": 16, "date_1": "2019-11", "text_2": "5 summons is technically getting 6% closer to the pity 90. Why can't the majority of the Genshin fandom be grateful for it? Here I was thinking I'd find people who were interested in the plot, and getting complaints on resin and primo...", "token_idx_2": 46, "text_start_2": 229, "text_end_2": 234, "date_2": "2020-11", "text_1_tokenized": ["Missing", "my", "primo", "cuz", "he", "was", "the", "only", "one", "que", "supo", "como", "bailarme"], "text_2_tokenized": ["5", "summons", "is", "technically", "getting", "6", "%", "closer", "to", "the", "pity", "90", ".", "Why", "can't", "the", "majority", "of", "the", "Genshin", "fandom", "be", "grateful", "for", "it", "?", "Here", "I", "was", "thinking", "I'd", "find", "people", "who", "were", "interested", "in", "the", "plot", ",", "and", "getting", "complaints", "on", "resin", "and", "primo", "..."]} -{"id": "3930-primo", "word": "primo", "label_binary": 1, "text_1": "Rip Juanito you will be missed primo, Tonight I raise my glass to you \ud83e\udd43\ud83d\ude4c\ud83c\udffc", "token_idx_1": 6, "text_start_1": 31, "text_end_1": 36, "date_1": "2019-11", "text_2": "This is the first thanksgiving I'm spending with my primo/as and I'm so excited", "token_idx_2": 9, "text_start_2": 52, "text_end_2": 57, "date_2": "2020-11", "text_1_tokenized": ["Rip", "Juanito", "you", "will", "be", "missed", "primo", ",", "Tonight", "I", "raise", "my", "glass", "to", "you", "\ud83e\udd43", "\ud83d\ude4c\ud83c\udffc"], "text_2_tokenized": ["This", "is", "the", "first", "thanksgiving", "I'm", "spending", "with", "my", "primo", "/", "as", "and", "I'm", "so", "excited"]} -{"id": "3931-primo", "word": "primo", "label_binary": 0, "text_1": "Only girls that ain't blood & kould kall me bro is @_larkiee @Nisha0619 so when I'm up ion need no bitches tryna kall me brudda, bro, brother, primo , kousin nunna that\ud83d\ude02\ud83d\udc4b\ud83c\udffd", "token_idx_1": 30, "text_start_1": 147, "text_end_1": 152, "date_1": "2019-11", "text_2": "Westbrook a Washington (+ una prima scelta protetta al primo round) e Wall a Houston. HARDEN, WALL E forse COUSINS... 
\ud83d\ude0b #SkySport #NBATrades #NbaTipo", "token_idx_2": 10, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-11", "text_1_tokenized": ["Only", "girls", "that", "ain't", "blood", "&", "kould", "kall", "me", "bro", "is", "@_larkiee", "@Nisha0619", "so", "when", "I'm", "up", "ion", "need", "no", "bitches", "tryna", "kall", "me", "brudda", ",", "bro", ",", "brother", ",", "primo", ",", "kousin", "nunna", "that", "\ud83d\ude02", "\ud83d\udc4b\ud83c\udffd"], "text_2_tokenized": ["Westbrook", "a", "Washington", "(", "+", "una", "prima", "scelta", "protetta", "al", "primo", "round", ")", "e", "Wall", "a", "Houston", ".", "HARDEN", ",", "WALL", "E", "forse", "COUSINS", "...", "\ud83d\ude0b", "#SkySport", "#NBATrades", "#NbaTipo"]} -{"id": "3932-primo", "word": "primo", "label_binary": 0, "text_1": "Lmk why el primo is legit straight chillin w/ Natanael Cano rn\ud83e\udd24\ud83e\udd24\ud83d\ude02", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 16, "date_1": "2019-11", "text_2": "I think he heard me \ud83d\ude05\ud83e\udd0dpero no me asustes asi primo \ud83d\ude16", "token_idx_2": 12, "text_start_2": 45, "text_end_2": 50, "date_2": "2020-11", "text_1_tokenized": ["Lmk", "why", "el", "primo", "is", "legit", "straight", "chillin", "w", "/", "Natanael", "Cano", "rn", "\ud83e\udd24", "\ud83e\udd24", "\ud83d\ude02"], "text_2_tokenized": ["I", "think", "he", "heard", "me", "\ud83d\ude05", "\ud83e\udd0d", "pero", "no", "me", "asustes", "asi", "primo", "\ud83d\ude16"]} -{"id": "3933-primo", "word": "primo", "label_binary": 0, "text_1": "dp & primo on one today chile! lol", "token_idx_1": 2, "text_start_1": 9, "text_end_1": 14, "date_1": "2019-11", "text_2": "RIP primo James, no more suffering....we love you #cancersucks", "token_idx_2": 1, "text_start_2": 4, "text_end_2": 9, "date_2": "2020-11", "text_1_tokenized": ["dp", "&", "primo", "on", "one", "today", "chile", "!", "lol"], "text_2_tokenized": ["RIP", "primo", "James", ",", "no", "more", "suffering", "...", "we", "love", "you", "#cancersucks"]} -{"id": "3934-primo", "word": "primo", "label_binary": 0, "text_1": "My primo about slide through with this Loud \ud83d\udd0a I'm sure by the end of night a n\u2022gga about to be stuck .", "token_idx_1": 1, "text_start_1": 3, "text_end_1": 8, "date_1": "2019-11", "text_2": "7k primo and nothing worth to spend it on \ud83d\ude2c", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["My", "primo", "about", "slide", "through", "with", "this", "Loud", "\ud83d\udd0a", "I'm", "sure", "by", "the", "end", "of", "night", "a", "n", "\u2022", "gga", "about", "to", "be", "stuck", "."], "text_2_tokenized": ["7k", "primo", "and", "nothing", "worth", "to", "spend", "it", "on", "\ud83d\ude2c"]} -{"id": "3935-primo", "word": "primo", "label_binary": 1, "text_1": "everytime krimi sends me primo pics i cry a little", "token_idx_1": 4, "text_start_1": 25, "text_end_1": 30, "date_1": "2019-11", "text_2": "Free will explored without ostentatious naming of philosophers with clarifying examples: multiple scenarios @WestworldHBO esp S3E5 Also primo sound track (incl subtle cover of Space Oddity)", "token_idx_2": 18, "text_start_2": 136, "text_end_2": 141, "date_2": "2020-11", "text_1_tokenized": ["everytime", "krimi", "sends", "me", "primo", "pics", "i", "cry", "a", "little"], "text_2_tokenized": ["Free", "will", "explored", "without", "ostentatious", "naming", "of", "philosophers", "with", "clarifying", "examples", ":", "multiple", "scenarios", "@WestworldHBO", "esp", "S3E5", 
"Also", "primo", "sound", "track", "(", "incl", "subtle", "cover", "of", "Space", "Oddity", ")"]} -{"id": "3936-primo", "word": "primo", "label_binary": 0, "text_1": "#WinBrawlSkins to get new skins for carl leon and primo", "token_idx_1": 9, "text_start_1": 50, "text_end_1": 55, "date_1": "2019-11", "text_2": "#TheMandalorian is primo #StarWars for me.", "token_idx_2": 2, "text_start_2": 19, "text_end_2": 24, "date_2": "2020-11", "text_1_tokenized": ["#WinBrawlSkins", "to", "get", "new", "skins", "for", "carl", "leon", "and", "primo"], "text_2_tokenized": ["#TheMandalorian", "is", "primo", "#StarWars", "for", "me", "."]} -{"id": "3937-primo", "word": "primo", "label_binary": 0, "text_1": "lakini nikiwa primo nkichapwa pekee yangu ndo nilikua nalia, but the moment i see my friends crying ata ka nimechapwa aje, i used to laugh hard & make them laugh too while they are crying at same time . Ths is same story of Man united & Arsenal fans\ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_1": 2, "text_start_1": 14, "text_end_1": 19, "date_1": "2019-11", "text_2": "sorry but trump goin gonzo on twitter about michigan refusing to certify votes four minutes after they voted to certify them is primo content", "token_idx_2": 22, "text_start_2": 128, "text_end_2": 133, "date_2": "2020-11", "text_1_tokenized": ["lakini", "nikiwa", "primo", "nkichapwa", "pekee", "yangu", "ndo", "nilikua", "nalia", ",", "but", "the", "moment", "i", "see", "my", "friends", "crying", "ata", "ka", "nimechapwa", "aje", ",", "i", "used", "to", "laugh", "hard", "&", "make", "them", "laugh", "too", "while", "they", "are", "crying", "at", "same", "time", ".", "Ths", "is", "same", "story", "of", "Man", "united", "&", "Arsenal", "fans", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["sorry", "but", "trump", "goin", "gonzo", "on", "twitter", "about", "michigan", "refusing", "to", "certify", "votes", "four", "minutes", "after", "they", "voted", "to", "certify", "them", "is", "primo", "content"]} -{"id": "3938-primo", "word": "primo", "label_binary": 1, "text_1": "People pretending like Black male \"creatives\" don't get their shit called trash in service of trying to make people feel guilty about not rocking with Queen and Slim is primo Twitter silliness.", "token_idx_1": 31, "text_start_1": 169, "text_end_1": 174, "date_1": "2019-11", "text_2": "We need another 3 & Out...a turnover would be primo right now too. Flip the field...", "token_idx_2": 11, "text_start_2": 50, "text_end_2": 55, "date_2": "2020-11", "text_1_tokenized": ["People", "pretending", "like", "Black", "male", "\"", "creatives", "\"", "don't", "get", "their", "shit", "called", "trash", "in", "service", "of", "trying", "to", "make", "people", "feel", "guilty", "about", "not", "rocking", "with", "Queen", "and", "Slim", "is", "primo", "Twitter", "silliness", "."], "text_2_tokenized": ["We", "need", "another", "3", "&", "Out", "...", "a", "turnover", "would", "be", "primo", "right", "now", "too", ".", "Flip", "the", "field", "..."]} -{"id": "3939-primo", "word": "primo", "label_binary": 0, "text_1": "<< La natura non si getta tra le braccia del primo venuto, pretende infinita passione, prima di svelarsi e concederglisi. >> H. 
Hesse #PassionPulseProject", "token_idx_1": 11, "text_start_1": 51, "text_end_1": 56, "date_1": "2019-11", "text_2": "My primo really gots the tea", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["<", "<", "La", "natura", "non", "si", "getta", "tra", "le", "braccia", "del", "primo", "venuto", ",", "pretende", "infinita", "passione", ",", "prima", "di", "svelarsi", "e", "concederglisi", ".", ">", ">", "H", ".", "Hesse", "#PassionPulseProject"], "text_2_tokenized": ["My", "primo", "really", "gots", "the", "tea"]} -{"id": "3940-primo", "word": "primo", "label_binary": 1, "text_1": "That message from my primo made my whole day \ud83d\ude2d\ud83d\udc99", "token_idx_1": 4, "text_start_1": 21, "text_end_1": 26, "date_1": "2019-11", "text_2": "RT if your primo texts you once a month to ask for your Netflix password, fav if you're the primo sending those texts", "token_idx_2": 3, "text_start_2": 11, "text_end_2": 16, "date_2": "2020-11", "text_1_tokenized": ["That", "message", "from", "my", "primo", "made", "my", "whole", "day", "\ud83d\ude2d", "\ud83d\udc99"], "text_2_tokenized": ["RT", "if", "your", "primo", "texts", "you", "once", "a", "month", "to", "ask", "for", "your", "Netflix", "password", ",", "fav", "if", "you're", "the", "primo", "sending", "those", "texts"]} -{"id": "3941-primo", "word": "primo", "label_binary": 0, "text_1": "COLTS/TEXANS TRIVIA for 4 primo seats to the showdown this Thursday night + $5k to help toward travel: Name the Colts player who recovered his own onside kick vs. Houston AND the final score!! (Note, the $ will be received at the game). Alyssa's hat pick!", "token_idx_1": 6, "text_start_1": 26, "text_end_1": 31, "date_1": "2019-11", "text_2": "Somali guys used to be very good tukicheza futa either primo ama zile matches za estate. 
I wonder why \u00e0 good number of them never end up in professional football clubs or the national team.", "token_idx_2": 10, "text_start_2": 55, "text_end_2": 60, "date_2": "2020-11", "text_1_tokenized": ["COLTS", "/", "TEXANS", "TRIVIA", "for", "4", "primo", "seats", "to", "the", "showdown", "this", "Thursday", "night", "+", "$", "5k", "to", "help", "toward", "travel", ":", "Name", "the", "Colts", "player", "who", "recovered", "his", "own", "onside", "kick", "vs", ".", "Houston", "AND", "the", "final", "score", "!", "!", "(", "Note", ",", "the", "$", "will", "be", "received", "at", "the", "game", ")", ".", "Alyssa's", "hat", "pick", "!"], "text_2_tokenized": ["Somali", "guys", "used", "to", "be", "very", "good", "tukicheza", "futa", "either", "primo", "ama", "zile", "matches", "za", "estate", ".", "I", "wonder", "why", "\u00e0", "good", "number", "of", "them", "never", "end", "up", "in", "professional", "football", "clubs", "or", "the", "national", "team", "."]} -{"id": "3942-primo", "word": "primo", "label_binary": 1, "text_1": "Usually we go to 510 bistro every year the day before thanksgiving but because I'm being the best cousin ever and driving my primo to sf airport I'm missing that tonight \ud83d\ude2d\ud83d\ude2d", "token_idx_1": 23, "text_start_1": 125, "text_end_1": 130, "date_1": "2019-11", "text_2": "My primo on his game and says \u201cDUDE SHUT UP OR IN KICKING YOU\u201d and then later says \u201chey can I have that jet pack?\u201d \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_2": 1, "text_start_2": 3, "text_end_2": 8, "date_2": "2020-11", "text_1_tokenized": ["Usually", "we", "go", "to", "510", "bistro", "every", "year", "the", "day", "before", "thanksgiving", "but", "because", "I'm", "being", "the", "best", "cousin", "ever", "and", "driving", "my", "primo", "to", "sf", "airport", "I'm", "missing", "that", "tonight", "\ud83d\ude2d", "\ud83d\ude2d"], "text_2_tokenized": ["My", "primo", "on", "his", "game", "and", "says", "\u201c", "DUDE", "SHUT", "UP", "OR", "IN", "KICKING", "YOU", "\u201d", "and", "then", "later", "says", "\u201c", "hey", "can", "I", "have", "that", "jet", "pack", "?", "\u201d", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"]} -{"id": "3943-primo", "word": "primo", "label_binary": 1, "text_1": "I think my primo just drunk called me \ud83d\ude02\ud83d\ude02", "token_idx_1": 3, "text_start_1": 11, "text_end_1": 16, "date_1": "2019-11", "text_2": "I'm over here sending demands to my primo in Mexico to bring me back a bottle of tequila like this a hostage situation \ud83d\ude02\ud83d\ude02\ud83d\ude02", "token_idx_2": 7, "text_start_2": 36, "text_end_2": 41, "date_2": "2020-11", "text_1_tokenized": ["I", "think", "my", "primo", "just", "drunk", "called", "me", "\ud83d\ude02", "\ud83d\ude02"], "text_2_tokenized": ["I'm", "over", "here", "sending", "demands", "to", "my", "primo", "in", "Mexico", "to", "bring", "me", "back", "a", "bottle", "of", "tequila", "like", "this", "a", "hostage", "situation", "\ud83d\ude02", "\ud83d\ude02", "\ud83d\ude02"]} -{"id": "3944-primo", "word": "primo", "label_binary": 0, "text_1": "Presto il primo post su Amsterdam sul blog. Soon first Amsterdam post online on my blog. 
LINK IN BIO #blogger #newpost #StayTuned", "token_idx_1": 2, "text_start_1": 10, "text_end_1": 15, "date_1": "2019-11", "text_2": "I hope primo from the dyckman games who sold pastelitos is doing well .", "token_idx_2": 2, "text_start_2": 7, "text_end_2": 12, "date_2": "2020-11", "text_1_tokenized": ["Presto", "il", "primo", "post", "su", "Amsterdam", "sul", "blog", ".", "Soon", "first", "Amsterdam", "post", "online", "on", "my", "blog", ".", "LINK", "IN", "BIO", "#blogger", "#newpost", "#StayTuned"], "text_2_tokenized": ["I", "hope", "primo", "from", "the", "dyckman", "games", "who", "sold", "pastelitos", "is", "doing", "well", "."]}
\ No newline at end of file
+version https://git-lfs.github.com/spec/v1
+oid sha256:42052a317896a2722fabf71b797673cd91e5d98b5330623abdf414c3c60d6f1a
+size 383998