{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import numpy as np\n", "\n", "# fall, winter & spring are aligned to summer, so read summer.csv\n", "df=pd.read_csv('summer.csv', sep=',', header=0)\n", "df.head()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "tids = df.values[:, 0] / 100 # format: hhmmss\n", "lats = df.values[:, 1]\n", "lons = df.values[:, 2]\n", "speeds = df.values[:, 3]\n", "courses = df.values[:, 4]\n", "alts = df.values[:, 5]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# go from their time-based representation to number of seconds\n", "def val_to_sec(val):\n", " if not isinstance(val, np.ndarray):\n", " val = np.array([val])\n", " hours = (val / 10000).astype(np.int)\n", " minutes = ((val % 10000) / 100).astype(np.int)\n", " secs = (val % 100).astype(np.int)\n", " \n", " absolute = hours * 3600 + minutes * 60 + secs\n", " if len(absolute) == 1:\n", " return int(absolute[0])\n", " else:\n", " return hours * 3600 + minutes * 60 + secs" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "tids_abs_seconds = val_to_sec(tids)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "keep_indices = tids_abs_seconds <= val_to_sec(152840) # all data past 15h 28m 40s is not contained in the video, so remove it\n", "# check that we have increasing timestamps\n", "going_back_in_time = np.diff(tids_abs_seconds) <= 0\n", "assert not np.any(going_back_in_time)\n", "\n", "speeds = speeds[keep_indices]\n", "tids_abs_seconds = tids_abs_seconds[keep_indices]\n", "start_val = val_to_sec(53806) # this is when the train starts moving" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "tids_abs_seconds_off = tids_abs_seconds - start_val" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "tids_abs_seconds_off" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.argwhere(np.diff(tids_abs_seconds_off) > 25).flatten() # just for debugging" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.diff(tids_abs_seconds_off[tids_abs_seconds_off > 0]).sum() # train moves for ~35439 frames so we're not far off" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "desired_times = np.arange(-168, 35768-168) # train starts moving at frame 168 so make everything relative to that" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "match_indices = []\n", "for desired_time in desired_times:\n", " diffs = np.abs(tids_abs_seconds_off - desired_time)\n", " best_idx = diffs.argmin()\n", " match_indices.append(best_idx)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "new_img_ids, new_speeds, new_ref_times, new_lats, new_lons, new_courses, new_alts = [], [], [], [], [], [], []\n", "for row_idx in range(35768):\n", " new_img_ids.append(row_idx + 1)\n", " new_match_idx = match_indices[row_idx]\n", " new_speeds.append(speeds[new_match_idx])\n", " new_ref_times.append(tids[new_match_idx])\n", " new_lats.append(lats[new_match_idx] / 100000)\n", " new_lons.append(lons[new_match_idx] / 100000)\n", " new_courses.append(courses[new_match_idx])\n", " new_alts.append(alts[new_match_idx])\n", "new_img_ids = np.array(new_img_ids)\n", "new_speeds = np.array(new_speeds)\n", "new_ref_times = np.array(new_ref_times)\n", "new_lats = np.array(new_lats)\n", "new_lons = np.array(new_lons)\n", "new_courses = np.array(new_courses)\n", "new_alts = np.array(new_alts)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.savez('nordland_aligned.npz',\n", " img_id=new_img_ids,\n", " speed=new_speeds,\n", " ref_time=new_ref_times,\n", " lat=new_lats,\n", " lon=new_lons,\n", " course=new_courses,\n", " alt=new_alts)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Sanity check from manually found matches" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rows_frames = np.array([ # manually found matches (when does train start/stop moving) -> frame number in video\n", " 168, 1290, 1792, 2211, 2295, 2501, 2655, 3405, 3668, 5072, 5460, 7080, 7277, 7772, 7870, 10050, 10200, 11670, 11880, 13360, 14835, 19740,\n", " 20040, 24120, 24390, 26410, 26535, 28975, 29090, 31090, 31185, 32400, 33040, 35130, 35177, 35608,\n", "])\n", "points_gps = [ # manually found matches -> time stamp in GPS data\n", " 5380600, 5564200, 6050800, 6120100, 6133500, 6165300, 6193200, 6315700, 6363000, 6593500, 7061900, 7331200, 7363400, 7444600, 7462800,\n", " 8225000, 8250600, 8494500, 8531900, 9175300, 9421800, 11040900, 11092800, 12171300, 12215500, 12552800, 12573600, 13381000,\n", " 13400700, 14132000, 14150200, 14350900, 14455900, 15204500, 15213600, 15284000,\n", "]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rows_gps = []\n", "for point_gps in points_gps:\n", " diffs = np.abs(tids - point_gps / 100)\n", " best_idx = diffs.argmin()\n", " rows_gps.append(best_idx)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.array(rows_gps) # as we can see we're close enough" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.array(match_indices)[np.array(rows_frames)]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "abs_diff = np.abs(np.array(rows_gps)-np.array(match_indices)[np.array(rows_frames)])\n", "np.mean(abs_diff), np.max(abs_diff)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Build dbStruct matlab file" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "%load_ext autoreload\n", "%autoreload 2\n", "\n", "import sys\n", "sys.path.append('../pytorch-NetVlad-Nanne')\n", "\n", "from datasets import parse_db_struct, save_db_struct, dbStruct" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "tunnels = [(1870, 2029), (2313, 2333), (2341, 2355), (4093, 4097), (6501, 6506), (6756, 6773), (8479, 8484), (8489, 8494), (9967, 9979), (10239, 10268), (10408, 10416), (10944, 10947),\n", " (10985, 10991), (10997, 11003), (11008, 11019), (11022, 11028), (11030, 11032),\n", " (11037, 11048), (11057, 11065), (11101, 11107), (11129, 11146), (11225, 11228), (11280, 11286), (11915, 12036), (12057, 12062), (12074, 12082), (12165, 12168), (12204, 12208), (12319, 12365),\n", " (12409, 12417), (12472, 12481), (13620, 13628), (14320, 14348), (14390, 14400), (16203, 16206), (16472, 16484), (16690, 16695), (16933, 16936), (17054, 17068), (17177, 17183), (17734, 17756),\n", " (17868, 17902), (17974, 17986), (17991, 17996), (18161, 18170), (18330, 18443), (18540, 18550), (18580, 18588), (18661, 18683), (18955, 18966), (18977, 18986), (19019, 19026), (19092, 19100),\n", " (19170, 19185), (20310, 20354), (20540, 20547), (20594, 20599), (20737, 20760), (21058, 21063), (21478, 21499), (21832, 21872), (21947, 21961), (21986, 22003), (22014, 22030), (22037, 22048),\n", " (22149, 22152), (22174, 22197), (22212, 22241), (22249, 22251), (22263, 22269), (22279, 22344), (22358, 22361), (22397, 22430), (22442, 22450), (22483, 22502), (22571, 22578), (22593, 22596),\n", " (22944, 22950), (22999, 23004), (23026, 23029), (23045, 23049), (23141, 23148), (23166, 23171), (23197, 23214), (23402, 23407), (23486, 23493), (23496, 23503), (23519, 23534), (23571, 23577),\n", " (23593, 23598), (23666, 23675), (23691, 23703), (23707, 23711), (23842, 23855), (23950, 23955), (24988, 24997), (25004, 25030), (25037, 25044), (25256, 25320), (25373, 25380), (25398, 25406),\n", " (25507, 25521), (25825, 25846), (26086, 26091), (26120, 26135), (26890, 26897), (26997, 27012), (27408, 27423), (27432, 27435), (27926, 27943), (28687, 28693), (29321, 29331), (29384, 29421),\n", " (29525, 29532), (29693, 29707), (29974, 29981), (29994, 30010), (30073, 30091), (30103, 30106), (30137, 30142), (30174, 30179), (30204, 30211), (31301, 31325), (31332, 31340), (31396, 31410),\n", " (31433, 31437), (31448, 31482), (31492, 31551), (31611, 31628), (31666, 31712), (31748, 31796), (31823, 31828), (31831, 31836), (31848, 31865), (31903, 31965), (31998, 32062), (32102, 32128),\n", " (32143, 32165), (32214, 32242), (32344, 32348), (33317, 33328), (33341, 33346), (33370, 33386), (33430, 33513), (33717, 33721), (33754, 33781), (33917, 33923), (34234, 34242), (34631, 34655),\n", " (34742, 34757), (34775, 34811), (34849, 34857), (34978, 34992), (35362, 35366), (35386, 35390), (35395, 35400), (35430, 35440), (35464, 35466)]\n", "filter_tunnels = np.array(np.ones(len(new_img_ids)), dtype=np.bool)\n", "last = 0\n", "for tunnel in tunnels:\n", " # print(tunnel[1]-tunnel[0])\n", " # print(tunnel[1])\n", " assert tunnel[0] > last\n", " last = tunnel[1]\n", " filter_tunnels[tunnel[0]-1:tunnel[1]-1] = False" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "filter_speed = new_speeds > 1500\n", "all_filters = np.logical_and(filter_speed, filter_tunnels)\n", "max_im_num = 10000000000000 # 10000000000 for all\n", "\n", "whichSet = 'test'\n", "dataset = 'nordland'\n", "dbImage = ['images-%05d.png' % img_id for img_id in new_img_ids[all_filters][:max_im_num]]\n", "qImage = dbImage" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "numDb = len(dbImage)\n", "numQ = len(qImage)\n", "\n", "posDistThr = 2\n", "posDistSqThr = posDistThr**2\n", "nonTrivPosDistSqThr = 100\n", "\n", "gpsDb = np.vstack((new_lats[all_filters][:max_im_num], new_lons[all_filters][:max_im_num])).T\n", "gpsQ = gpsDb\n", "\n", "utmDb = np.vstack((range(numDb), range(numDb))).T\n", "utmQ = utmDb\n", "# utmQ = None; utmDb = None; \n", "\n", "dbTimeStamp = None; qTimeStamp = None\n", "\n", "db = dbStruct(whichSet, dataset, dbImage, utmDb, qImage, utmQ, numDb, numQ, posDistThr,\n", " posDistSqThr, nonTrivPosDistSqThr, dbTimeStamp, qTimeStamp, gpsDb, gpsQ)\n", "\n", "save_db_struct('nordland.mat', db)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from sklearn.neighbors import NearestNeighbors\n", "knn = NearestNeighbors(n_jobs=-1)\n", "knn.fit(db.utmDb)\n", "distances, positives = knn.radius_neighbors(db.utmQ, radius=db.posDistThr)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "positives" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Other stuff" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "scrolled": true }, "outputs": [], "source": [ "import os\n", "source_dir = '/media/storage_hdd/Datasets/nordland/640x320-color/'\n", "dest_dir = '/media/storage_hdd/Datasets/nordland/640x320-color-filtered/'\n", "for season in ['summer', 'spring', 'fall', 'winter']:\n", " os.makedirs(os.path.join(dest_dir, season))\n", " for idx, im in enumerate(dbImage):\n", " os.symlink(os.path.join(source_dir, season, im), os.path.join(dest_dir, season, 'filtered-%05d.png' % idx))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with open('nordland_matches.txt', 'w') as outfile:\n", " for im_name1 in dbImage:\n", " for im_name2 in dbImage:\n", " outfile.write('summer/' + im_name1 + ' ' + 'winter/' + im_name2 + '\\n')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "__End__" ] } ], "metadata": { "kernelspec": { "display_name": "Python [conda env:netvlad20]", "language": "python", "name": "conda-env-netvlad20-py" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.7" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": {}, "version_major": 2, "version_minor": 0 } } }, "nbformat": 4, "nbformat_minor": 4 }