Dataset Card for "MobIE"
Dataset Summary
This dataset script loads the MobIE dataset from https://github.com/dfki-nlp/mobie.
MobIE is a German-language dataset which is human-annotated with 20 coarse- and fine-grained entity types and entity linking information for geographically linkable entities. The dataset consists of 3,232 social media texts and traffic reports with 91K tokens, and contains 20.5K annotated entities, 13.1K of which are linked to a knowledge base. A subset of the dataset is human-annotated with seven mobility-related, n-ary relation types, while the remaining documents are annotated using a weakly-supervised labeling approach implemented with the Snorkel framework. The dataset combines annotations for NER, EL and RE, and thus can be used for joint and multi-task learning of these fundamental information extraction tasks.
This version of the dataset loader provides NER tags only. NER tags use the BIO tagging scheme.
For more details see https://github.com/dfki-nlp/mobie and https://aclanthology.org/2021.konvens-1.22/.
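The dataset can be loaded with the Hugging Face datasets library. The following is a minimal sketch, assuming the loader is published on the Hub under the id dfki-nlp/mobie (derived from the GitHub repository name above):

from datasets import load_dataset

# Hypothetical Hub id; adjust if the dataset is hosted under a different name.
mobie = load_dataset("dfki-nlp/mobie")

example = mobie["train"][0]
print(example["tokens"])    # list of token strings
print(example["ner_tags"])  # list of integer NER tag ids (BIO scheme)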
Supported Tasks and Leaderboards
- Tasks: Named Entity Recognition
- Leaderboards:
Languages
German
Dataset Structure
Data Instances
- Size of downloaded dataset files: 7.8 MB
- Size of the generated dataset: 1.9 MB
- Total amount of disk used: 9.7 MB
An example of 'train' looks as follows.
{
'id': 'http://www.ndr.de/nachrichten/verkehr/index.html#2@2016-05-04T21:02:14.000+02:00',
'tokens': ['Vorsicht', 'bitte', 'auf', 'der', 'A28', 'Leer', 'Richtung', 'Oldenburg', 'zwischen', 'Zwischenahner', 'Meer', 'und', 'Neuenkruge', 'liegen', 'Gegenstände', '!'],
'ner_tags': [0, 0, 0, 0, 19, 13, 0, 13, 0, 11, 12, 0, 11, 0, 0, 0]
}
Data Fields
The data fields are the same among all splits.
- id: a string feature.
- tokens: a list of string features.
- ner_tags: a list of classification labels, with possible values including O (0), B-date (1), I-date (2), B-disaster-type (3), I-disaster-type (4), ...; the sketch below shows how to map these ids back to their label strings.
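To turn the integer tag ids back into BIO label strings, the ClassLabel feature attached to ner_tags can be used. A minimal sketch, assuming the dataset has been loaded as in the example above:

from datasets import load_dataset

mobie = load_dataset("dfki-nlp/mobie")  # hypothetical Hub id, see above
label_names = mobie["train"].features["ner_tags"].feature.names

example = mobie["train"][0]
bio_tags = [label_names[i] for i in example["ner_tags"]]
print(list(zip(example["tokens"], bio_tags)))  # pairs of (token, BIO label string)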
Data Splits
| Train | Dev | Test |
---|---|---|---|
MobIE | 4785 | 1082 | 1210 |
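As a quick sanity check, the split sizes in the table above can be reproduced from the loaded dataset dictionary; a sketch under the same hypothetical Hub id:

from datasets import load_dataset

mobie = load_dataset("dfki-nlp/mobie")  # hypothetical Hub id
for split_name, split in mobie.items():
    print(split_name, len(split))
# The card reports 4785 / 1082 / 1210 examples for train / dev / test;
# the exact split keys depend on the loader configuration.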
Dataset Creation
Curation Rationale
Source Data
Initial Data Collection and Normalization
Who are the source language producers?
Annotations
Annotation process
Who are the annotators?
Personal and Sensitive Information
Considerations for Using the Data
Social Impact of Dataset
Discussion of Biases
Other Known Limitations
Additional Information
Dataset Curators
Licensing Information
Citation Information
@inproceedings{hennig-etal-2021-mobie,
title = "{M}ob{IE}: A {G}erman Dataset for Named Entity Recognition, Entity Linking and Relation Extraction in the Mobility Domain",
author = "Hennig, Leonhard and
Truong, Phuc Tran and
Gabryszak, Aleksandra",
booktitle = "Proceedings of the 17th Conference on Natural Language Processing (KONVENS 2021)",
month = "6--9 " # sep,
year = "2021",
address = {D{\"u}sseldorf, Germany},
publisher = "KONVENS 2021 Organizers",
url = "https://aclanthology.org/2021.konvens-1.22",
pages = "223--227",
}
Contributions