{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Web scrape valid surf rides from the Smartfin website: "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Done.\n"
     ]
    }
   ],
   "source": [
    "#Imports \n",
    "import requests\n",
    "import lxml.html as lh\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import time\n",
    "from bs4 import BeautifulSoup\n",
    "import re\n",
    "import time\n",
    "\n",
    "print(\"Done.\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [],
   "source": [
    "from selenium import webdriver\n",
    "from selenium.webdriver.chrome.service import Service\n",
    "from selenium.webdriver.chrome.options import Options\n",
    "\n",
    "chrome_options = Options()\n",
    "chrome_options.add_argument(\"--headless\")\n",
    "\n",
    "service = Service('/Users/jasminesimmons/Smartfin/Smartfin/Mini_Projects/ML_Wave_Stats/chromedriver')\n",
    "service.start()\n",
    "#driver = webdriver.Remote(service.service_url)\n",
    "#driver.get('http://www.google.com/');\n",
    "#time.sleep(5) # Let the user actually see something!\n",
    "#driver.quit()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Scrape verified surf sessions into a dataframe (can add more later):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Start by just looking at my surf sessions in SD (near CDIP buoy): \n",
    "\n",
    "# 15218 - First VIRB filmed session - Oct. 24, 2018\n",
    "# 15669 - Second VIRB filmed session - Nov. 7, 2018\n",
    "# 15692 - Third VIRB filmed session - Nov. 9, 2018\n",
    "# 15686 - Fourth VIRB filmed session - Nov. 11, 2018\n",
    "\n",
    "ride_ids = [15218, 15669, 15692, 15686]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Get urls for surf sessions in Scripps beach area that most likely correspond to surfing time intervals: \n",
    "url_45_60 = 'https://surf.smartfin.org/advanced_search/?northEastLat=32.6&northEastLon=-117.24&southWestLat=32.9&southWestLon=-117.33&dateTimeBegin=&dateTimeEnd=&timeZone=PDT&durationMin=45&durationMax=60&sensorTypeWave=true#searchResultsContainer'\n",
    "url_61_80 = 'https://surf.smartfin.org/advanced_search/?northEastLat=32.6&northEastLon=-117.24&southWestLat=32.9&southWestLon=-117.33&dateTimeBegin=&dateTimeEnd=&timeZone=PDT&durationMin=61&durationMax=80&sensorTypeWave=true#searchResultsContainer'\n",
    "url_81_100 = 'https://surf.smartfin.org/advanced_search/?northEastLat=32.6&northEastLon=-117.24&southWestLat=32.9&southWestLon=-117.33&dateTimeBegin=&dateTimeEnd=&timeZone=PDT&durationMin=81&durationMax=100&sensorTypeWave=true#searchResultsContainer'\n",
    "url_101_130 = 'https://surf.smartfin.org/advanced_search/?northEastLat=32.6&northEastLon=-117.24&southWestLat=32.9&southWestLon=-117.33&dateTimeBegin=&dateTimeEnd=&timeZone=PDT&durationMin=101&durationMax=130&sensorTypeWave=true#searchResultsContainer'\n",
    "\n",
    "urls = [url_45_60]\n",
    "#urls = [url_45_60, url_61_80, url_81_100, url_101_130]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define a function that will parse rows from data tables on CDIP's website:\n",
    "def parse_rows(row_number):\n",
    "    name_string = \"\"\n",
    "    name_list = []\n",
    "    for t in tr_elements[row_number]:\n",
    "        name = t.text_content()\n",
    "        for i in name: \n",
    "            name_string += i\n",
    "    #print(name_string)\n",
    "\n",
    "    # Create a string from the values\n",
    "    name_string = name_string.split(\" \")\n",
    "    \n",
    "    # Remove all spaces from the list\n",
    "    for i in name_string: \n",
    "        if len(i) > 0:\n",
    "            name_list.append(i)\n",
    "      \n",
    "    # Ensure that time stays with 'Date (UTC)'' header\n",
    "    name_list[0] = name_list[0] + \" \" +  name_list[1]\n",
    "    name_list.pop(1)\n",
    "    \n",
    "    return name_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "194\n",
      "[' 16345', ' 16318', ' 16300', ' 16289', ' 16278', ' 16211', ' 16197', ' 16194', ' 16191', ' 16169', ' 16167', ' 16137', ' 16135', ' 16134', ' 16124', ' 16115', ' 16099', ' 16098', ' 16088', ' 16076']\n",
      "[' 10324', ' 10247', ' 10223', ' 10214', ' 10212', ' 10208', ' 10190', ' 10074', ' 10069', ' 10066']\n"
     ]
    }
   ],
   "source": [
    "# Iterate over each url and get all ride_ids located at that url: \n",
    "ride_ids = []\n",
    "for u in urls: \n",
    "    \n",
    "    # Create a handle, page, to handle the contents of the website\n",
    "    page = requests.get(u)\n",
    "\n",
    "    # Store the contents of the website under doc\n",
    "    doc = lh.fromstring(page.content)\n",
    "\n",
    "    # Parse data stored between <tr>..</tr> of HTML\n",
    "    tr_elements = doc.xpath('//tr')\n",
    "\n",
    "    # Retrieve all of the ride ids: \n",
    "    for j in range(1, len(tr_elements)):\n",
    "        data = parse_rows(j)\n",
    "        data = str(data[0]).strip(\"\\n\")\n",
    "        if int(data) > 10000: \n",
    "            ride_ids.append(data)\n",
    "\n",
    "print(len(ride_ids))\n",
    "print(ride_ids[:20])\n",
    "print(ride_ids[-10:])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fin ID Scraper: \n",
    "\n",
    "ride_url_base = 'http://surf.smartfin.org/ride/'\n",
    "str_id_csv = 'img id=\"temperatureChart\" class=\"chart\" src=\"' # Look for this text in the HTML contents in fcn below\n",
    "\n",
    "def read_csv_from_ride(ride): \n",
    "    ride_url = ride_url_base + str(ride)\n",
    "    html_contents = requests.get(ride_url).text # Get contents of ride_url\n",
    "    loc_csv_id = html_contents.find(str_id_csv) # Find CSV identifier \n",
    "    \n",
    "    # Different based on whether user logged in with FB or Google\n",
    "    offset_googleOAuth = [46, 114]\n",
    "    offset_facebkOAuth = [46, 112]\n",
    "    if html_contents[loc_csv_id+59] == 'f': # Facebook login\n",
    "        off0 = offset_facebkOAuth[0]\n",
    "        off1 = offset_facebkOAuth[1]\n",
    "    else: # Google login\n",
    "        off0 = offset_googleOAuth[0]\n",
    "        off1 = offset_googleOAuth[1]\n",
    "        \n",
    "    csv_id_longstr = html_contents[loc_csv_id+off0:loc_csv_id+off1]\n",
    "        \n",
    "    # Stitch together full URL for CSV\n",
    "        \n",
    "    motion_csv_url = 'https://surf.smartfin.org/'+csv_id_longstr+'Motion.CSV'\n",
    "\n",
    "    #print(motion_csv_url)\n",
    "\n",
    "    motion_df_small = pd.read_csv(motion_csv_url)\n",
    "    #print(len(motion_df_small))\n",
    "        \n",
    "        \n",
    "    return motion_df_small"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {},
   "outputs": [],
   "source": [
    "#ride_ids = ride_ids[:10]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [],
   "source": [
    "#%% Fin ID scraper\n",
    "# Input fin ID, get all ride IDs\n",
    "# base URL to which we'll append given fin IDs\n",
    "fin_url_base = 'http://surf.smartfin.org/fin/'\n",
    "\n",
    "# Look for the following text in the HTML contents in fcn below\n",
    "str_id_ride = 'rideId = \\'' # backslash allows us to look for single quote\n",
    "str_id_date = 'var date = \\'' # backslash allows us to look for single quote\n",
    "\n",
    "#%% Ride ID scraper\n",
    "# Input ride ID, get ocean and motion CSVs\n",
    "# Base URL to which we'll append given ride IDs\n",
    "ride_url_base = 'https://surf.smartfin.org/ride/'\n",
    "\n",
    "# Look for the following text in the HTML contents in fcn below\n",
    "str_id_csv = 'img id=\"temperatureChart\" class=\"chart\" src=\"' \n",
    "\n",
    "def get_csv_from_ride_id(rid):\n",
    "    # Build URL for each individual ride\n",
    "    ride_url = ride_url_base+str(rid)\n",
    "    #print(ride_url)\n",
    "    \n",
    "    # Get contents of ride_url\n",
    "    html_contents = requests.get(ride_url).text\n",
    "    \n",
    "    # Find CSV identifier \n",
    "    loc_csv_id = html_contents.find(str_id_csv)\n",
    "    \n",
    "    # Different based on whether user logged in with FB or Google\n",
    "    offset_googleOAuth = [46, 114]\n",
    "    offset_facebkOAuth = [46, 112]\n",
    "    if html_contents[loc_csv_id+59] == 'f': # Facebook login\n",
    "        off0 = offset_facebkOAuth[0]\n",
    "        off1 = offset_facebkOAuth[1]\n",
    "    else: # Google login\n",
    "        off0 = offset_googleOAuth[0]\n",
    "        off1 = offset_googleOAuth[1]\n",
    "        \n",
    "    csv_id_longstr = html_contents[loc_csv_id+off0:loc_csv_id+off1]\n",
    "        \n",
    "    # Stitch together full URL for CSV\n",
    "    if (\"media\" in csv_id_longstr) & (\"Calibration\" not in html_contents): # other junk URLs can exist and break everything\n",
    "        \n",
    "        ocean_csv_url = 'https://surf.smartfin.org/'+csv_id_longstr+'Ocean.CSV'\n",
    "        motion_csv_url = 'https://surf.smartfin.org/'+csv_id_longstr+'Motion.CSV'\n",
    "        \n",
    "        print(motion_csv_url)\n",
    "        \n",
    "        # Go to ocean_csv_url and grab contents (theoretically, a CSV)\n",
    "        ocean_df_small = pd.read_csv(ocean_csv_url, parse_dates = [0])\n",
    "        elapsed_timedelta = (ocean_df_small['UTC']-ocean_df_small['UTC'][0])\n",
    "        ocean_df_small['elapsed'] = elapsed_timedelta/np.timedelta64(1, 's')\n",
    "        \n",
    "        motion_df_small = pd.read_csv(motion_csv_url, parse_dates = [0])\n",
    "        print(len(motion_df_small))\n",
    "        \n",
    "        # Reindex on timestamp if there are at least a few rows\n",
    "        if len(ocean_df_small) > 1:\n",
    "            ocean_df_small.set_index('UTC', drop = True, append = False, inplace = True)\n",
    "            motion_df_small.set_index('UTC', drop = True, append = False, inplace = True)            \n",
    "            \n",
    "            #May need to change this sampling interval:\n",
    "            sample_interval = '33ms'\n",
    "            \n",
    "            ocean_df_small_resample = ocean_df_small.resample(sample_interval).mean()\n",
    "            motion_df_small_resample = motion_df_small.resample(sample_interval).mean()\n",
    "            \n",
    "            # No need to save many extra rows with no fix\n",
    "            motion_df_small = motion_df_small[~np.isnan(motion_df_small.Latitude)]\n",
    "\n",
    "            \n",
    "    else:\n",
    "        ocean_df_small_resample = pd.DataFrame() # empty DF just so something is returned\n",
    "        motion_df_small_resample = pd.DataFrame() \n",
    "        print(len(motion_df_small_resample))\n",
    "        return ocean_df_small_resample, motion_df_small_resample"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fin ID Scraper: \n",
    "\n",
    "ride_url_base = 'http://surf.smartfin.org/ride/'\n",
    "str_id_csv = 'img id=\"temperatureChart\" class=\"chart\" src=\"' # Look for this text in the HTML contents in fcn below\n",
    "\n",
    "def read_csv_from_ride2(ride): \n",
    "    smartfin_url_base = 'http://surf.smartfin.org'\n",
    "    ride_url = ride_url_base + str(ride)\n",
    "    #html_page = requests.get(ride_url)\n",
    "    driver = webdriver.Remote(service.service_url, options=chrome_options)\n",
    "    driver.get(ride_url);\n",
    "    soup = BeautifulSoup(driver.page_source, 'html.parser')\n",
    "    #print(soup)\n",
    "    links = []\n",
    "    for link in soup.findAll('a'):\n",
    "        links.append(link)\n",
    "    if len(links) > 8: \n",
    "        motion_csv_link = str(links[8])\n",
    "        #print(links[8])\n",
    "        motion_csv_link = motion_csv_link.split(\" \")\n",
    "        motion_csv_link = motion_csv_link[1]\n",
    "        motion_csv_link = motion_csv_link.split(\"\\\"\")\n",
    "        motion_csv_link = motion_csv_link[1]\n",
    "\n",
    "        #print(motion_csv_link)\n",
    "\n",
    "        motion_csv_link = smartfin_url_base + motion_csv_link\n",
    "        motion_df_small = pd.read_csv(motion_csv_link, parse_dates = [0])\n",
    "        \n",
    "\n",
    "        return 1, motion_df_small\n",
    "    \n",
    "    else: \n",
    "        \n",
    "        return 0, 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1 13087\n",
      "1\n",
      "2 15297\n",
      "2\n",
      "3 13971\n",
      "3\n",
      "4 11732\n",
      "4\n",
      "5 12705\n",
      "5\n",
      "6 14766\n",
      "6\n"
     ]
    },
    {
     "ename": "ParserError",
     "evalue": "Error tokenizing data. C error: Expected 1 fields in line 3, saw 2\n",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mParserError\u001b[0m                               Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-67-a45c00890f6c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      6\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mride\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mride_ids\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      7\u001b[0m     \u001b[0;31m#ride = '16167'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m     \u001b[0mb\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmotion_df\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mread_csv_from_ride2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mride\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     10\u001b[0m     \u001b[0;31m# Check to make sure that a .CSV link was found for motion:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-66-50b36166d826>\u001b[0m in \u001b[0;36mread_csv_from_ride2\u001b[0;34m(ride)\u001b[0m\n\u001b[1;32m     26\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     27\u001b[0m         \u001b[0mmotion_csv_link\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msmartfin_url_base\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mmotion_csv_link\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 28\u001b[0;31m         \u001b[0mmotion_df_small\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmotion_csv_link\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparse_dates\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     29\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     30\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mparser_f\u001b[0;34m(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, skipfooter, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, doublequote, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, delim_whitespace, low_memory, memory_map, float_precision)\u001b[0m\n\u001b[1;32m    700\u001b[0m                     skip_blank_lines=skip_blank_lines)\n\u001b[1;32m    701\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 702\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m_read\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    703\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    704\u001b[0m     \u001b[0mparser_f\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__name__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_read\u001b[0;34m(filepath_or_buffer, kwds)\u001b[0m\n\u001b[1;32m    433\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    434\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 435\u001b[0;31m         \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mparser\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    436\u001b[0m     \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    437\u001b[0m         \u001b[0mparser\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, nrows)\u001b[0m\n\u001b[1;32m   1137\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnrows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1138\u001b[0m         \u001b[0mnrows\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_validate_integer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'nrows'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1139\u001b[0;31m         \u001b[0mret\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_engine\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1140\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1141\u001b[0m         \u001b[0;31m# May alter columns / col_dict\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, nrows)\u001b[0m\n\u001b[1;32m   1993\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnrows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1994\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1995\u001b[0;31m             \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1996\u001b[0m         \u001b[0;32mexcept\u001b[0m \u001b[0mStopIteration\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1997\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_first_chunk\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader.read\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._read_low_memory\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._read_rows\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._tokenize_rows\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.raise_parser_error\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;31mParserError\u001b[0m: Error tokenizing data. C error: Expected 1 fields in line 3, saw 2\n"
     ]
    }
   ],
   "source": [
    "appended_motion_list = []\n",
    "appended_multiIndex = [] # fin_id & ride_id used to identify each DataFrame\n",
    "\n",
    "counter = 0\n",
    "index = 1\n",
    "for ride in ride_ids: \n",
    "    #ride = '16167'\n",
    "    b, motion_df = read_csv_from_ride2(ride)\n",
    "    \n",
    "    # Check to make sure that a .CSV link was found for motion: \n",
    "    if b == 1: \n",
    "        counter += 1\n",
    "        print(counter, len(motion_df))\n",
    "        motion_df.set_index('UTC', drop = True, append = False, inplace = True)\n",
    "        appended_multiIndex.append(str(ride)) # build list to be multiIndex of future DataFrame\n",
    "        appended_motion_list.append(motion_df)\n",
    "        \n",
    "            \n",
    "    print(index)\n",
    "    index += 1\n",
    "\n",
    "print(\"Total number of rides downloaded: \", counter)\n",
    "df_keys = tuple(appended_multiIndex)\n",
    "motion_df = pd.concat(appended_motion_list, keys = df_keys, names = ['ride_id'])\n",
    "\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "motion_df[:10] # 18 / 37 with 5 repeated requests, 18 / 37 with 10 repeated requests.  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "'''\n",
    "appended_ocean_list = [] # list of DataFrames from original CSVs\n",
    "appended_motion_list = []\n",
    "appended_multiIndex = [] # fin_id & ride_id used to identify each DataFrame\n",
    "\n",
    "print(\"Once the counter gets to \", len(ride_ids), \" it will be done printing.\")\n",
    "\n",
    "## Nested loops (for each fin ID, find all ride IDs, then build a DataFrame from all ride CSVs)\n",
    "## (Here, ride IDS are either ocean or motion dataframes)\n",
    "count_good_fins = 0\n",
    "    \n",
    "# Loop over ride_ids and find CSVs\n",
    "counter = 0\n",
    "for rid in ride_ids:\n",
    "    counter += 1\n",
    "    if counter % 10 == 0:\n",
    "        print(counter)\n",
    "    try:\n",
    "        new_motion_df = read_csv_from_ride(rid) # get given ride's CSV from its ride ID using function above\n",
    "        new_motion_df.set_index('UTC', drop = True, append = False, inplace = True)            \n",
    "\n",
    "        if not new_motion_df.empty: # Calibration rides, for example\n",
    "            # Append only if DF isn't empty. There may be a better way to control empty DFs which are created above\n",
    "            appended_multiIndex.append(str(rid)) # build list to be multiIndex of future DataFrame\n",
    "            #appended_ocean_list.append(new_ocean_df)\n",
    "            appended_motion_list.append(new_motion_df)\n",
    "            #print(\"Ride data has been uploaded.\")\n",
    "            #print(\"Ride: \", rid, \"data has been uploaded.\")\n",
    "            count_good_fins += 1\n",
    "            #print(\"Ride \", rid, \"worked!\")    \n",
    "     \n",
    "    except: \n",
    "        print(\"Ride \", rid, \"threw an exception!\")    \n",
    "\n",
    "#%% Build the \"Master\" DataFrame\n",
    "\n",
    "# appended_ocean_df.summary()\n",
    "\n",
    "df_keys = tuple(appended_multiIndex) # keys gotta be a tuple, a list which data in it cannot be changed\n",
    "#ocean_df = pd.concat(appended_ocean_list, keys = df_keys, names=['ride_id'])\n",
    "motion_df = pd.concat(appended_motion_list, keys = df_keys, names = ['ride_id'])\n",
    "\n",
    "\n",
    "##Here, maybe just use info from the motion_df and don't worry about ocean_df data for now.\n",
    "##If you do want ocean_df data, look at how Phil was getting it from \"July 10th and 11th Calibration\" jupyter notebook file.\n",
    "#print(motion_df)\n",
    "#print(appended_motion_list)\n",
    "print(\"Done.\")\n",
    "'''"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# View data in a python dataframe: \n",
    "print(len(motion_df))\n",
    "motion_df[0:10]\n",
    "\n",
    "# I think that we're resampling each surf session at too small of a rate? \n",
    "# Why do we have so little data? \n",
    "\n",
    "#Sampling interval of 33ms: 3,290,682 initial data points, 438,099 final data points (adding .033 to UTC time)\n",
    "#Sampling interval of 20ms: 5,429,605 initial data points, 438,099 final data points (adding .020 to UTC time)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(255382, 12)"
      ]
     },
     "execution_count": 51,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "motion_df.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 121,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "621377\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>Time</th>\n",
       "      <th>IMU A1</th>\n",
       "      <th>IMU A2</th>\n",
       "      <th>IMU A3</th>\n",
       "      <th>IMU G1</th>\n",
       "      <th>IMU G2</th>\n",
       "      <th>IMU G3</th>\n",
       "      <th>IMU M1</th>\n",
       "      <th>IMU M2</th>\n",
       "      <th>IMU M3</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>ride_id</th>\n",
       "      <th>UTC</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"20\" valign=\"top\">16345</th>\n",
       "      <th>2019-11-19 15:02:50.090000+00:00</th>\n",
       "      <td>3888630399</td>\n",
       "      <td>502.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-468.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.340000+00:00</th>\n",
       "      <td>3888630649</td>\n",
       "      <td>501.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-461.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.590000+00:00</th>\n",
       "      <td>3888630899</td>\n",
       "      <td>503.0</td>\n",
       "      <td>36.0</td>\n",
       "      <td>59.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-462.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.841000+00:00</th>\n",
       "      <td>3888631150</td>\n",
       "      <td>500.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.092000+00:00</th>\n",
       "      <td>3888631401</td>\n",
       "      <td>501.0</td>\n",
       "      <td>33.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-116.0</td>\n",
       "      <td>-460.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.342000+00:00</th>\n",
       "      <td>3888631651</td>\n",
       "      <td>500.0</td>\n",
       "      <td>34.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>30.0</td>\n",
       "      <td>-108.0</td>\n",
       "      <td>-460.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.593000+00:00</th>\n",
       "      <td>3888631902</td>\n",
       "      <td>501.0</td>\n",
       "      <td>34.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-453.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.843000+00:00</th>\n",
       "      <td>3888632152</td>\n",
       "      <td>502.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-11.0</td>\n",
       "      <td>11.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>-113.0</td>\n",
       "      <td>-465.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.094000+00:00</th>\n",
       "      <td>3888632403</td>\n",
       "      <td>502.0</td>\n",
       "      <td>23.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-12.0</td>\n",
       "      <td>16.0</td>\n",
       "      <td>42.0</td>\n",
       "      <td>-118.0</td>\n",
       "      <td>-454.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.344000+00:00</th>\n",
       "      <td>3888632653</td>\n",
       "      <td>503.0</td>\n",
       "      <td>21.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-20.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>43.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.594000+00:00</th>\n",
       "      <td>3888632903</td>\n",
       "      <td>503.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>-3.0</td>\n",
       "      <td>43.0</td>\n",
       "      <td>-103.0</td>\n",
       "      <td>-459.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.846000+00:00</th>\n",
       "      <td>3888633155</td>\n",
       "      <td>501.0</td>\n",
       "      <td>23.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>34.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-462.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.096000+00:00</th>\n",
       "      <td>3888633405</td>\n",
       "      <td>503.0</td>\n",
       "      <td>18.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>8.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-102.0</td>\n",
       "      <td>-460.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.348000+00:00</th>\n",
       "      <td>3888633657</td>\n",
       "      <td>503.0</td>\n",
       "      <td>15.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>29.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-463.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.598000+00:00</th>\n",
       "      <td>3888633907</td>\n",
       "      <td>502.0</td>\n",
       "      <td>16.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>40.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-458.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.848000+00:00</th>\n",
       "      <td>3888634157</td>\n",
       "      <td>504.0</td>\n",
       "      <td>15.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>8.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-458.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.099000+00:00</th>\n",
       "      <td>3888634408</td>\n",
       "      <td>503.0</td>\n",
       "      <td>10.0</td>\n",
       "      <td>59.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>11.0</td>\n",
       "      <td>29.0</td>\n",
       "      <td>-99.0</td>\n",
       "      <td>-459.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.340000+00:00</th>\n",
       "      <td>3888634649</td>\n",
       "      <td>503.0</td>\n",
       "      <td>6.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>31.0</td>\n",
       "      <td>-97.0</td>\n",
       "      <td>-465.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.592000+00:00</th>\n",
       "      <td>3888634901</td>\n",
       "      <td>502.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>58.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>-7.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>-99.0</td>\n",
       "      <td>-457.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.842000+00:00</th>\n",
       "      <td>3888635151</td>\n",
       "      <td>503.0</td>\n",
       "      <td>10.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>-8.0</td>\n",
       "      <td>25.0</td>\n",
       "      <td>-103.0</td>\n",
       "      <td>-459.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                                Time  IMU A1  IMU A2  IMU A3  \\\n",
       "ride_id UTC                                                                    \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  3888630399   502.0    41.0    60.0   \n",
       "        2019-11-19 15:02:50.340000+00:00  3888630649   501.0    37.0    63.0   \n",
       "        2019-11-19 15:02:50.590000+00:00  3888630899   503.0    36.0    59.0   \n",
       "        2019-11-19 15:02:50.841000+00:00  3888631150   500.0    35.0    62.0   \n",
       "        2019-11-19 15:02:51.092000+00:00  3888631401   501.0    33.0    61.0   \n",
       "        2019-11-19 15:02:51.342000+00:00  3888631651   500.0    34.0    63.0   \n",
       "        2019-11-19 15:02:51.593000+00:00  3888631902   501.0    34.0    61.0   \n",
       "        2019-11-19 15:02:51.843000+00:00  3888632152   502.0    35.0    62.0   \n",
       "        2019-11-19 15:02:52.094000+00:00  3888632403   502.0    23.0    61.0   \n",
       "        2019-11-19 15:02:52.344000+00:00  3888632653   503.0    21.0    61.0   \n",
       "        2019-11-19 15:02:52.594000+00:00  3888632903   503.0    24.0    60.0   \n",
       "        2019-11-19 15:02:52.846000+00:00  3888633155   501.0    23.0    61.0   \n",
       "        2019-11-19 15:02:53.096000+00:00  3888633405   503.0    18.0    60.0   \n",
       "        2019-11-19 15:02:53.348000+00:00  3888633657   503.0    15.0    60.0   \n",
       "        2019-11-19 15:02:53.598000+00:00  3888633907   502.0    16.0    60.0   \n",
       "        2019-11-19 15:02:53.848000+00:00  3888634157   504.0    15.0    60.0   \n",
       "        2019-11-19 15:02:54.099000+00:00  3888634408   503.0    10.0    59.0   \n",
       "        2019-11-19 15:02:54.340000+00:00  3888634649   503.0     6.0    61.0   \n",
       "        2019-11-19 15:02:54.592000+00:00  3888634901   502.0     4.0    58.0   \n",
       "        2019-11-19 15:02:54.842000+00:00  3888635151   503.0    10.0    60.0   \n",
       "\n",
       "                                          IMU G1  IMU G2  IMU G3  IMU M1  \\\n",
       "ride_id UTC                                                                \n",
       " 16345  2019-11-19 15:02:50.090000+00:00   -18.0   -10.0    -1.0    32.0   \n",
       "        2019-11-19 15:02:50.340000+00:00   -18.0   -10.0     0.0    39.0   \n",
       "        2019-11-19 15:02:50.590000+00:00   -18.0    -9.0     0.0    38.0   \n",
       "        2019-11-19 15:02:50.841000+00:00   -19.0    -9.0     3.0    37.0   \n",
       "        2019-11-19 15:02:51.092000+00:00   -18.0   -10.0     0.0    38.0   \n",
       "        2019-11-19 15:02:51.342000+00:00   -18.0   -10.0     0.0    30.0   \n",
       "        2019-11-19 15:02:51.593000+00:00   -19.0    -9.0     3.0    39.0   \n",
       "        2019-11-19 15:02:51.843000+00:00   -19.0   -11.0    11.0    41.0   \n",
       "        2019-11-19 15:02:52.094000+00:00   -19.0   -12.0    16.0    42.0   \n",
       "        2019-11-19 15:02:52.344000+00:00   -20.0   -10.0     1.0    43.0   \n",
       "        2019-11-19 15:02:52.594000+00:00   -19.0    -9.0    -3.0    43.0   \n",
       "        2019-11-19 15:02:52.846000+00:00   -19.0    -9.0     2.0    34.0   \n",
       "        2019-11-19 15:02:53.096000+00:00   -19.0   -10.0     8.0    32.0   \n",
       "        2019-11-19 15:02:53.348000+00:00   -19.0   -10.0     4.0    29.0   \n",
       "        2019-11-19 15:02:53.598000+00:00   -19.0    -9.0     4.0    40.0   \n",
       "        2019-11-19 15:02:53.848000+00:00   -19.0   -10.0     8.0    32.0   \n",
       "        2019-11-19 15:02:54.099000+00:00   -19.0    -9.0    11.0    29.0   \n",
       "        2019-11-19 15:02:54.340000+00:00   -19.0    -9.0     4.0    31.0   \n",
       "        2019-11-19 15:02:54.592000+00:00   -19.0    -9.0    -7.0    41.0   \n",
       "        2019-11-19 15:02:54.842000+00:00   -19.0    -9.0    -8.0    25.0   \n",
       "\n",
       "                                          IMU M2  IMU M3  \n",
       "ride_id UTC                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  -110.0  -468.0  \n",
       "        2019-11-19 15:02:50.340000+00:00  -115.0  -461.0  \n",
       "        2019-11-19 15:02:50.590000+00:00  -110.0  -462.0  \n",
       "        2019-11-19 15:02:50.841000+00:00  -111.0  -457.0  \n",
       "        2019-11-19 15:02:51.092000+00:00  -116.0  -460.0  \n",
       "        2019-11-19 15:02:51.342000+00:00  -108.0  -460.0  \n",
       "        2019-11-19 15:02:51.593000+00:00  -115.0  -453.0  \n",
       "        2019-11-19 15:02:51.843000+00:00  -113.0  -465.0  \n",
       "        2019-11-19 15:02:52.094000+00:00  -118.0  -454.0  \n",
       "        2019-11-19 15:02:52.344000+00:00  -111.0  -457.0  \n",
       "        2019-11-19 15:02:52.594000+00:00  -103.0  -459.0  \n",
       "        2019-11-19 15:02:52.846000+00:00  -106.0  -462.0  \n",
       "        2019-11-19 15:02:53.096000+00:00  -102.0  -460.0  \n",
       "        2019-11-19 15:02:53.348000+00:00  -111.0  -463.0  \n",
       "        2019-11-19 15:02:53.598000+00:00  -106.0  -458.0  \n",
       "        2019-11-19 15:02:53.848000+00:00  -106.0  -458.0  \n",
       "        2019-11-19 15:02:54.099000+00:00   -99.0  -459.0  \n",
       "        2019-11-19 15:02:54.340000+00:00   -97.0  -465.0  \n",
       "        2019-11-19 15:02:54.592000+00:00   -99.0  -457.0  \n",
       "        2019-11-19 15:02:54.842000+00:00  -103.0  -459.0  "
      ]
     },
     "execution_count": 121,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Drop latitude/longitude columns and remove NaN rows: \n",
    "motion_df = motion_df.drop([\"Latitude\", \"Longitude\"], axis=1)\n",
    "motion_df = motion_df.dropna(axis=0, how='any')\n",
    "print(len(motion_df))\n",
    "motion_df[0:20]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Scrape data from same-day CDIP and add it as an additional column to the dataframe:\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 122,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Format should be appending year, month\n",
    "# Example: Sept. 2019 -> 201909\n",
    "def create_url_string(year, month):\n",
    "    url='https://cdip.ucsd.edu/themes/cdip?tz=UTC&numcolorbands=10&palette=cdip_classic&zoom=auto&ll_fmt=dm&high=6.096&r=999&un=1&pb=1&d2=p70&u2=s:201:st:1:v:parameter:dt:'\n",
    "    if 2014 <= int(year) <= 2019:\n",
    "        year = year\n",
    "    if 1 <= int(month) <= 12 and len(month) == 2:\n",
    "        month = month\n",
    "    url += year\n",
    "    url += month\n",
    "    return url "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 123,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19', '2019-11-19'] ['15:02', '15:02', '15:02', '15:02', '15:02', '15:02', '15:02', '15:02', '15:02', '15:02']\n"
     ]
    }
   ],
   "source": [
    "# For each row in the dataframe, parse the index ('UTC' column) to get the year, month information. \n",
    "motion_df.columns\n",
    "\n",
    "dates = []\n",
    "times = []\n",
    "for row in motion_df.index:\n",
    "    time = str(row[1])\n",
    "    date = time.split(\" \")\n",
    "    time = date[1]\n",
    "    date = date[0]\n",
    "    time = time.split(\".\")\n",
    "    time = time[0]\n",
    "    time = time.split(\":\")\n",
    "    time = time[0] + \":\" + time[1]\n",
    "    times.append(time)\n",
    "    dates.append(date)\n",
    "print(dates[0:10], times[0:10])\n",
    "\n",
    "motion_df[\"Date\"] = dates\n",
    "motion_df[\"Time\"] = times"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 124,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>Time</th>\n",
       "      <th>IMU A1</th>\n",
       "      <th>IMU A2</th>\n",
       "      <th>IMU A3</th>\n",
       "      <th>IMU G1</th>\n",
       "      <th>IMU G2</th>\n",
       "      <th>IMU G3</th>\n",
       "      <th>IMU M1</th>\n",
       "      <th>IMU M2</th>\n",
       "      <th>IMU M3</th>\n",
       "      <th>Date</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>ride_id</th>\n",
       "      <th>UTC</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"10\" valign=\"top\">16345</th>\n",
       "      <th>2019-11-19 15:02:50.090000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>502.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-468.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.340000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-461.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.590000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>503.0</td>\n",
       "      <td>36.0</td>\n",
       "      <td>59.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-462.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.841000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>500.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.092000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>33.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-116.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.342000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>500.0</td>\n",
       "      <td>34.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>30.0</td>\n",
       "      <td>-108.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.593000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>34.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-453.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.843000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>502.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-11.0</td>\n",
       "      <td>11.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>-113.0</td>\n",
       "      <td>-465.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.094000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>502.0</td>\n",
       "      <td>23.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-12.0</td>\n",
       "      <td>16.0</td>\n",
       "      <td>42.0</td>\n",
       "      <td>-118.0</td>\n",
       "      <td>-454.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.344000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>503.0</td>\n",
       "      <td>21.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-20.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>43.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                           Time  IMU A1  IMU A2  IMU A3  \\\n",
       "ride_id UTC                                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  15:02   502.0    41.0    60.0   \n",
       "        2019-11-19 15:02:50.340000+00:00  15:02   501.0    37.0    63.0   \n",
       "        2019-11-19 15:02:50.590000+00:00  15:02   503.0    36.0    59.0   \n",
       "        2019-11-19 15:02:50.841000+00:00  15:02   500.0    35.0    62.0   \n",
       "        2019-11-19 15:02:51.092000+00:00  15:02   501.0    33.0    61.0   \n",
       "        2019-11-19 15:02:51.342000+00:00  15:02   500.0    34.0    63.0   \n",
       "        2019-11-19 15:02:51.593000+00:00  15:02   501.0    34.0    61.0   \n",
       "        2019-11-19 15:02:51.843000+00:00  15:02   502.0    35.0    62.0   \n",
       "        2019-11-19 15:02:52.094000+00:00  15:02   502.0    23.0    61.0   \n",
       "        2019-11-19 15:02:52.344000+00:00  15:02   503.0    21.0    61.0   \n",
       "\n",
       "                                          IMU G1  IMU G2  IMU G3  IMU M1  \\\n",
       "ride_id UTC                                                                \n",
       " 16345  2019-11-19 15:02:50.090000+00:00   -18.0   -10.0    -1.0    32.0   \n",
       "        2019-11-19 15:02:50.340000+00:00   -18.0   -10.0     0.0    39.0   \n",
       "        2019-11-19 15:02:50.590000+00:00   -18.0    -9.0     0.0    38.0   \n",
       "        2019-11-19 15:02:50.841000+00:00   -19.0    -9.0     3.0    37.0   \n",
       "        2019-11-19 15:02:51.092000+00:00   -18.0   -10.0     0.0    38.0   \n",
       "        2019-11-19 15:02:51.342000+00:00   -18.0   -10.0     0.0    30.0   \n",
       "        2019-11-19 15:02:51.593000+00:00   -19.0    -9.0     3.0    39.0   \n",
       "        2019-11-19 15:02:51.843000+00:00   -19.0   -11.0    11.0    41.0   \n",
       "        2019-11-19 15:02:52.094000+00:00   -19.0   -12.0    16.0    42.0   \n",
       "        2019-11-19 15:02:52.344000+00:00   -20.0   -10.0     1.0    43.0   \n",
       "\n",
       "                                          IMU M2  IMU M3        Date  \n",
       "ride_id UTC                                                           \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  -110.0  -468.0  2019-11-19  \n",
       "        2019-11-19 15:02:50.340000+00:00  -115.0  -461.0  2019-11-19  \n",
       "        2019-11-19 15:02:50.590000+00:00  -110.0  -462.0  2019-11-19  \n",
       "        2019-11-19 15:02:50.841000+00:00  -111.0  -457.0  2019-11-19  \n",
       "        2019-11-19 15:02:51.092000+00:00  -116.0  -460.0  2019-11-19  \n",
       "        2019-11-19 15:02:51.342000+00:00  -108.0  -460.0  2019-11-19  \n",
       "        2019-11-19 15:02:51.593000+00:00  -115.0  -453.0  2019-11-19  \n",
       "        2019-11-19 15:02:51.843000+00:00  -113.0  -465.0  2019-11-19  \n",
       "        2019-11-19 15:02:52.094000+00:00  -118.0  -454.0  2019-11-19  \n",
       "        2019-11-19 15:02:52.344000+00:00  -111.0  -457.0  2019-11-19  "
      ]
     },
     "execution_count": 124,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "motion_df[0:10]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 125,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define a function that will find the closest time on the CDIP table to the surf ride's time: \n",
    "def find_closest_single_time(date, time_utc):\n",
    "    time_list = []\n",
    "    \n",
    "    # Iterate over each date\n",
    "    for x in df_data['Date(UTC)']:\n",
    "        a = x.split(\" \")\n",
    "        if date == a[0]:\n",
    "            a = str(a[1]).split(':')\n",
    "            y = str(time_utc).split(\":\")\n",
    "            #print(a)\n",
    "            #print(y)\n",
    "            \n",
    "            # Find the closest timestamp (less than 15 minutes away)\n",
    "            time1 = int(a[0])*60 + int(a[1])\n",
    "            time2 = int(y[0])*60 + int(y[1])\n",
    "            \n",
    "            if abs(time1 - time2) <= 15:\n",
    "                time_string = str(a[0]) + \":\" + str(a[1])\n",
    "                time_list.append(time_string)\n",
    "           \n",
    "    return time_list"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Create the output labels from CDIP data for each Smartfin timestamp: \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 126,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "date, time  2019-11-19 15:02\n",
      "time list  ['15:02']\n",
      "height  1.74\n",
      "date, time  2019-10-24 17:37\n",
      "time list  ['17:32']\n",
      "height  2.00\n",
      "date, time  2019-08-08 16:15\n",
      "time list  ['16:02']\n",
      "height  1.87\n",
      "date, time  2019-07-24 16:22\n",
      "time list  ['16:32']\n",
      "height  1.94\n",
      "date, time  2019-02-26 21:06\n",
      "time list  ['21:02']\n",
      "height  0.92\n",
      "date, time  2018-11-11 20:34\n",
      "time list  ['20:30']\n",
      "height  1.02\n",
      "date, time  2017-10-21 20:46\n",
      "time list  ['20:51']\n",
      "height  4.86\n",
      "date, time  2019-10-26 19:04\n",
      "time list  ['19:02']\n",
      "height  1.25\n",
      "date, time  2019-09-24 15:28\n",
      "time list  ['15:32']\n",
      "height  1.80\n",
      "date, time  2019-02-25 01:31\n",
      "time list  ['01:32']\n",
      "height  1.71\n",
      "date, time  2018-01-18 14:17\n",
      "time list  []\n",
      "date, time  2017-05-03 18:40\n",
      "time list  ['18:51']\n",
      "height  1.80\n",
      "date, time  2019-09-20 16:15\n",
      "time list  ['16:02']\n",
      "height  3.15\n",
      "date, time  2018-11-09 19:16\n",
      "time list  ['19:30']\n",
      "height  1.44\n",
      "date, time  2019-09-18 16:04\n",
      "time list  ['16:02']\n",
      "height  2.49\n",
      "date, time  2019-02-08 17:17\n",
      "time list  ['17:32', '17:02']\n",
      "height  1.38\n",
      "height  1.51\n"
     ]
    }
   ],
   "source": [
    "# Create the output labels from CDIP data for each Smartfin timestamp: \n",
    "old_url = 'old_url'\n",
    "old_date = 'old_date'\n",
    "old_time = 'xx:xx'\n",
    "\n",
    "wave_heights = []\n",
    "wave_periods = []\n",
    "wave_directions = []\n",
    "for d, time in zip(dates,times):\n",
    "    di = d.split(\"-\")\n",
    "    year = di[0]\n",
    "    month = di[1]\n",
    "    url = create_url_string(year, month)\n",
    "    \n",
    "    #We need to load a new table since we have a new surf ride session:\n",
    "    if (url != old_url): \n",
    "        old_url = url\n",
    "        # Create a handle, page, to handle the contents of the website\n",
    "        page = requests.get(url)\n",
    "\n",
    "        # Store the contents of the website under doc\n",
    "        doc = lh.fromstring(page.content)\n",
    "\n",
    "        # Parse data stored between <tr>..</tr> of HTML\n",
    "        tr_elements = doc.xpath('//tr')\n",
    "\n",
    "        # Need to drop the first 3 rows since they aren't in the table\n",
    "        tr_elements = tr_elements[3:]\n",
    "        \n",
    "        # Parse the first row as the header\n",
    "        tr_elements = doc.xpath('//tr')\n",
    "\n",
    "        # Create empty list\n",
    "        headers = []\n",
    "        i = 0\n",
    "\n",
    "        # For each row, store each first element (header) and an empty list\n",
    "        for t in tr_elements[3]:\n",
    "            i+=1\n",
    "            name=t.text_content()\n",
    "            #print(name)\n",
    "            headers.append(name)\n",
    "\n",
    "        #print(headers)\n",
    "\n",
    "        # Create a Pandas dataframe: \n",
    "        data_list = []\n",
    "\n",
    "        #Since out first row is the header, data is stored on the second row onwards\n",
    "        for j in range(4, len(tr_elements)):\n",
    "            data = parse_rows(j)\n",
    "            data_list.append(data)\n",
    "\n",
    "        df_data = pd.DataFrame(data_list, columns=headers)\n",
    "        #print(df_data[:10])\n",
    "        \n",
    "        #Find the closest time in the CDIP data table that corresponds to the Smartfin data: \n",
    "        date = d #YYYY-MM-DD format\n",
    "        print(\"date, time \", date, time)\n",
    "        time_list = find_closest_single_time(date, time)\n",
    "        print(\"time list \", time_list)\n",
    "        \n",
    "        # Now compute the average significant wave height for that time period:\n",
    "        length = df_data['Date(UTC)'].size\n",
    "        wave_height_list = []\n",
    "        wave_period_list = []\n",
    "        wave_direction_list = []\n",
    "        for time in time_list: \n",
    "            date_data = date + \" \" + time\n",
    "            for i in range(0, length):\n",
    "                if df_data['Date(UTC)'][i] == date_data:\n",
    "                    print(\"height \", df_data['Hs(ft)'][i])\n",
    "                    wave_height_list.append(float(df_data['Hs(ft)'][i]))\n",
    "                    wave_period_list.append(float(df_data['Tp(s)'][i]))\n",
    "                    wave_direction_list.append(float(df_data['Dp(deg)'][i]))\n",
    "                    \n",
    "        # Throws error when date doesn't exist on CDIP data (ex: 2018-01-18)\n",
    "        if len(wave_height_list) == 0 or len(wave_period_list) == 0 or len(wave_direction_list) == 0: \n",
    "            avg_wave_height = np.nan\n",
    "            avg_wave_period = np.nan\n",
    "            avg_wave_direction = np.nan\n",
    "            \n",
    "        else: \n",
    "            #print(wave_height_list)\n",
    "            avg_wave_height = sum(wave_height_list)/len(wave_height_list)\n",
    "            avg_wave_period = sum(wave_period_list)/len(wave_period_list)\n",
    "            avg_wave_direction = sum(wave_direction_list)/len(wave_direction_list)\n",
    "            #print(avg_wave_height)\n",
    "            #print(avg_wave_period)\n",
    "            #print(avg_wave_direction) \n",
    "            \n",
    "    # Same date/month but maybe a different day or time:\n",
    "    elif old_date != date or old_time[3] != time[3]:\n",
    "        date = d #YYYY-MM-DD format\n",
    "        #print(date, time)\n",
    "        time_list = find_closest_single_time(date, time)\n",
    "        #print(time_list)\n",
    "        \n",
    "        old_date = date\n",
    "        old_time = time\n",
    "        \n",
    "        # Now compute the average significant wave height for that time period:\n",
    "        length = df_data['Date(UTC)'].size\n",
    "        wave_height_list = []\n",
    "        wave_period_list = []\n",
    "        wave_direction_list = []\n",
    "        for time in time_list: \n",
    "            date_data = date + \" \" + time\n",
    "            for i in range(0, length):\n",
    "                if df_data['Date(UTC)'][i] == date_data:\n",
    "                    #print(df_data['Hs(ft)'][i])\n",
    "                    wave_height_list.append(float(df_data['Hs(ft)'][i]))\n",
    "                    wave_period_list.append(float(df_data['Tp(s)'][i]))\n",
    "                    wave_direction_list.append(float(df_data['Dp(deg)'][i]))\n",
    "                    \n",
    "      \n",
    "        # Throws error when date doesn't exist on CDIP data (ex: 2018-01-18)\n",
    "        if len(wave_height_list) == 0 or len(wave_period_list) == 0 or len(wave_direction_list) == 0: \n",
    "            avg_wave_height = np.nan\n",
    "            avg_wave_period = np.nan\n",
    "            avg_wave_direction = np.nan\n",
    "            \n",
    "        else: \n",
    "            #print(wave_height_list)\n",
    "            avg_wave_height = sum(wave_height_list)/len(wave_height_list)\n",
    "            avg_wave_period = sum(wave_period_list)/len(wave_period_list)\n",
    "            avg_wave_direction = sum(wave_direction_list)/len(wave_direction_list)\n",
    "            #print(avg_wave_height)\n",
    "            #print(avg_wave_period)\n",
    "            #print(avg_wave_direction)    \n",
    "\n",
    "    # If they have the same url, date, and time then they will have the same value computed. \n",
    "    wave_heights.append(avg_wave_height)\n",
    "    wave_periods.append(avg_wave_period)\n",
    "    wave_directions.append(avg_wave_direction)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 127,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "621377\n",
      "621377\n",
      "621377\n",
      "621377\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:02:50.090000+00:00    1.74\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:02:50.090000+00:00    10.53\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:02:50.090000+00:00    285.0\n",
      "Name: Dp(deg), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:43:24.942000+00:00    1.77\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:43:24.942000+00:00    11.76\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16345   2019-11-19 15:43:24.942000+00:00    282.0\n",
      "Name: Dp(deg), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16318   2019-11-09 21:07:19.804000+00:00    1.97\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16318   2019-11-09 21:07:19.804000+00:00    5.26\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16318   2019-11-09 21:07:19.804000+00:00    294.0\n",
      "Name: Dp(deg), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 17:49:34.380000+00:00    2.0\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 17:49:34.380000+00:00    7.69\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 17:49:34.380000+00:00    282.0\n",
      "Name: Dp(deg), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 18:28:32.229000+00:00    2.1\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 18:28:32.229000+00:00    9.09\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16300   2019-10-24 18:28:32.229000+00:00    287.0\n",
      "Name: Dp(deg), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16289   2019-10-05 23:35:30.221000+00:00    3.18\n",
      "Name: Hs(ft), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16289   2019-10-05 23:35:30.221000+00:00    8.33\n",
      "Name: Tp(s), dtype: float64\n",
      "ride_id  UTC                             \n",
      " 16289   2019-10-05 23:35:30.221000+00:00    287.0\n",
      "Name: Dp(deg), dtype: float64\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>Time</th>\n",
       "      <th>IMU A1</th>\n",
       "      <th>IMU A2</th>\n",
       "      <th>IMU A3</th>\n",
       "      <th>IMU G1</th>\n",
       "      <th>IMU G2</th>\n",
       "      <th>IMU G3</th>\n",
       "      <th>IMU M1</th>\n",
       "      <th>IMU M2</th>\n",
       "      <th>IMU M3</th>\n",
       "      <th>Date</th>\n",
       "      <th>Hs(ft)</th>\n",
       "      <th>Tp(s)</th>\n",
       "      <th>Dp(deg)</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>ride_id</th>\n",
       "      <th>UTC</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"5\" valign=\"top\">16345</th>\n",
       "      <th>2019-11-19 15:02:50.090000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>502.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-468.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.340000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-461.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.590000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>503.0</td>\n",
       "      <td>36.0</td>\n",
       "      <td>59.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-462.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.841000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>500.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.092000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>33.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-116.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                           Time  IMU A1  IMU A2  IMU A3  \\\n",
       "ride_id UTC                                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  15:02   502.0    41.0    60.0   \n",
       "        2019-11-19 15:02:50.340000+00:00  15:02   501.0    37.0    63.0   \n",
       "        2019-11-19 15:02:50.590000+00:00  15:02   503.0    36.0    59.0   \n",
       "        2019-11-19 15:02:50.841000+00:00  15:02   500.0    35.0    62.0   \n",
       "        2019-11-19 15:02:51.092000+00:00  15:02   501.0    33.0    61.0   \n",
       "\n",
       "                                          IMU G1  IMU G2  IMU G3  IMU M1  \\\n",
       "ride_id UTC                                                                \n",
       " 16345  2019-11-19 15:02:50.090000+00:00   -18.0   -10.0    -1.0    32.0   \n",
       "        2019-11-19 15:02:50.340000+00:00   -18.0   -10.0     0.0    39.0   \n",
       "        2019-11-19 15:02:50.590000+00:00   -18.0    -9.0     0.0    38.0   \n",
       "        2019-11-19 15:02:50.841000+00:00   -19.0    -9.0     3.0    37.0   \n",
       "        2019-11-19 15:02:51.092000+00:00   -18.0   -10.0     0.0    38.0   \n",
       "\n",
       "                                          IMU M2  IMU M3        Date  Hs(ft)  \\\n",
       "ride_id UTC                                                                    \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  -110.0  -468.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.340000+00:00  -115.0  -461.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.590000+00:00  -110.0  -462.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.841000+00:00  -111.0  -457.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:51.092000+00:00  -116.0  -460.0  2019-11-19    1.74   \n",
       "\n",
       "                                          Tp(s)  Dp(deg)  \n",
       "ride_id UTC                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.340000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.590000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.841000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:51.092000+00:00  10.53    285.0  "
      ]
     },
     "execution_count": 127,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check: each list must have exactly one entry per row of motion_df,\n",
    "# otherwise the column assignments below would raise a length mismatch.\n",
    "print(len(dates))\n",
    "print(len(wave_heights))\n",
    "print(len(wave_periods))\n",
    "print(len(wave_directions))\n",
    "\n",
    "\n",
    "# Check to make sure different wave heights were appended throughout the dataframe: \n",
    "motion_df[\"Hs(ft)\"] = wave_heights\n",
    "motion_df[\"Tp(s)\"] = wave_periods\n",
    "motion_df[\"Dp(deg)\"] = wave_directions\n",
    "\n",
    "# Spot-check rows 10,000 apart so values from different sessions/timestamps\n",
    "# appear — constant values everywhere would indicate the append loop failed.\n",
    "for x in range(0, 60000, 10000):\n",
    "    print(motion_df.iloc[[x]]['Hs(ft)'])\n",
    "    print(motion_df.iloc[[x]]['Tp(s)'])\n",
    "    print(motion_df.iloc[[x]]['Dp(deg)'])\n",
    "\n",
    "# Rich display of the first five rows to confirm the three new columns.\n",
    "motion_df[0:5]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 128,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "508034\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>Time</th>\n",
       "      <th>IMU A1</th>\n",
       "      <th>IMU A2</th>\n",
       "      <th>IMU A3</th>\n",
       "      <th>IMU G1</th>\n",
       "      <th>IMU G2</th>\n",
       "      <th>IMU G3</th>\n",
       "      <th>IMU M1</th>\n",
       "      <th>IMU M2</th>\n",
       "      <th>IMU M3</th>\n",
       "      <th>Date</th>\n",
       "      <th>Hs(ft)</th>\n",
       "      <th>Tp(s)</th>\n",
       "      <th>Dp(deg)</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>ride_id</th>\n",
       "      <th>UTC</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"5\" valign=\"top\">16345</th>\n",
       "      <th>2019-11-19 15:02:50.090000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>502.0</td>\n",
       "      <td>41.0</td>\n",
       "      <td>60.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-468.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.340000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-461.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.590000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>503.0</td>\n",
       "      <td>36.0</td>\n",
       "      <td>59.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-462.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.841000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>500.0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>62.0</td>\n",
       "      <td>-19.0</td>\n",
       "      <td>-9.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.092000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>501.0</td>\n",
       "      <td>33.0</td>\n",
       "      <td>61.0</td>\n",
       "      <td>-18.0</td>\n",
       "      <td>-10.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-116.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                           Time  IMU A1  IMU A2  IMU A3  \\\n",
       "ride_id UTC                                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  15:02   502.0    41.0    60.0   \n",
       "        2019-11-19 15:02:50.340000+00:00  15:02   501.0    37.0    63.0   \n",
       "        2019-11-19 15:02:50.590000+00:00  15:02   503.0    36.0    59.0   \n",
       "        2019-11-19 15:02:50.841000+00:00  15:02   500.0    35.0    62.0   \n",
       "        2019-11-19 15:02:51.092000+00:00  15:02   501.0    33.0    61.0   \n",
       "\n",
       "                                          IMU G1  IMU G2  IMU G3  IMU M1  \\\n",
       "ride_id UTC                                                                \n",
       " 16345  2019-11-19 15:02:50.090000+00:00   -18.0   -10.0    -1.0    32.0   \n",
       "        2019-11-19 15:02:50.340000+00:00   -18.0   -10.0     0.0    39.0   \n",
       "        2019-11-19 15:02:50.590000+00:00   -18.0    -9.0     0.0    38.0   \n",
       "        2019-11-19 15:02:50.841000+00:00   -19.0    -9.0     3.0    37.0   \n",
       "        2019-11-19 15:02:51.092000+00:00   -18.0   -10.0     0.0    38.0   \n",
       "\n",
       "                                          IMU M2  IMU M3        Date  Hs(ft)  \\\n",
       "ride_id UTC                                                                    \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  -110.0  -468.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.340000+00:00  -115.0  -461.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.590000+00:00  -110.0  -462.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:50.841000+00:00  -111.0  -457.0  2019-11-19    1.74   \n",
       "        2019-11-19 15:02:51.092000+00:00  -116.0  -460.0  2019-11-19    1.74   \n",
       "\n",
       "                                          Tp(s)  Dp(deg)  \n",
       "ride_id UTC                                               \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.340000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.590000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:50.841000+00:00  10.53    285.0  \n",
       "        2019-11-19 15:02:51.092000+00:00  10.53    285.0  "
      ]
     },
     "execution_count": 128,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Now drop all of the nan values that I introduced earlier: \n",
    "# (NaNs were written for timestamps whose date had no matching CDIP record;\n",
    "# the row count printed below shows how many samples survive.)\n",
    "motion_df = motion_df.dropna(axis=0, how='any')\n",
    "print(len(motion_df))\n",
    "motion_df[0:5]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 129,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Notes on sampling density vs. label granularity:\n",
    "# We're collecting IMU data at 3-4 Hz here. \n",
    "# We're using 4 surf sessions and we have 60,000 data points. \n",
    "# If we only calculated Hs once per minute then we would have 330 data points. \n",
    "\n",
    "# Instead of looking at significant wave height, we could look at wave direction \n",
    "# or we could look at Hs, dir, and period and see which one we predict best. \n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 130,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert raw IMU accelerometer counts to physical units (m/s^2): \n",
    "g_const = 512 #Raw acceleration constant 512 = 1g (accelerometer's measured force due to gravity)\n",
    "gravity = 9.80665 #Approximate measurement for gravity\n",
    "\n",
    "# Correct the IMU Acceleration columns into units of meters\n",
    "# Dividing by 512 is equivalent to multiplying by 4 to correct the bit shifting by 2 places and dividing by 2048 to convert bits to G's\n",
    "# Multiplying by the 9.81 afterwards is simply to convert G's into m/s^2\n",
    "# Vectorized column arithmetic replaces the per-element .apply(lambda x: ...):\n",
    "# identical result, far faster on ~500k rows.\n",
    "for accel_col in ('IMU A1', 'IMU A2', 'IMU A3'):\n",
    "    motion_df[accel_col] = motion_df[accel_col] / g_const * gravity"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 131,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Gyroscopic Rotation converts to deg/s\n",
    "gyro_const = 8.2 # Raw gyroscope constant 8.2 bits per degree\n",
    "# Vectorized Series division replaces the element-wise .apply(lambda x: x / gyro_const)\n",
    "# — same values, much faster over the full dataframe.\n",
    "for gyro_col in ('IMU G1', 'IMU G2', 'IMU G3'):\n",
    "    motion_df[gyro_col] = motion_df[gyro_col] / gyro_const"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 132,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "\"\\nM1_offset_var = 219.786\\nM2_offset_var = 180\\nM3_offset_var = 280\\n\\nmotion_df['IMU M1'] = motion_df['IMU M1'].apply(lambda x: x - M1_offset_var)\\nmotion_df['IMU M2'] = motion_df['IMU M2'].apply(lambda x: x - M2_offset_var)\\nmotion_df['IMU M3'] = motion_df['IMU M3'].apply(lambda x: x - M3_offset_var)\\n\""
      ]
     },
     "execution_count": 132,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Magnetometer values\n",
    "# Offset variables help in recentering the magnetic data in order to define direction and use trig functions\n",
    "# NOTE(review): this calibration step is deliberately disabled — the code is kept\n",
    "# inside a string literal so the cell only displays it without executing it.\n",
    "# The offset values look hand-tuned per fin/session; confirm them before enabling.\n",
    "'''\n",
    "M1_offset_var = 219.786\n",
    "M2_offset_var = 180\n",
    "M3_offset_var = 280\n",
    "\n",
    "motion_df['IMU M1'] = motion_df['IMU M1'].apply(lambda x: x - M1_offset_var)\n",
    "motion_df['IMU M2'] = motion_df['IMU M2'].apply(lambda x: x - M2_offset_var)\n",
    "motion_df['IMU M3'] = motion_df['IMU M3'].apply(lambda x: x - M3_offset_var)\n",
    "'''"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 133,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "508034\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>Time</th>\n",
       "      <th>IMU A1</th>\n",
       "      <th>IMU A2</th>\n",
       "      <th>IMU A3</th>\n",
       "      <th>IMU G1</th>\n",
       "      <th>IMU G2</th>\n",
       "      <th>IMU G3</th>\n",
       "      <th>IMU M1</th>\n",
       "      <th>IMU M2</th>\n",
       "      <th>IMU M3</th>\n",
       "      <th>Date</th>\n",
       "      <th>Hs(ft)</th>\n",
       "      <th>Tp(s)</th>\n",
       "      <th>Dp(deg)</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>ride_id</th>\n",
       "      <th>UTC</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"20\" valign=\"top\">16345</th>\n",
       "      <th>2019-11-19 15:02:50.090000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.615114</td>\n",
       "      <td>0.785298</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.195122</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>-0.121951</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-468.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.340000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.595960</td>\n",
       "      <td>0.708684</td>\n",
       "      <td>1.206678</td>\n",
       "      <td>-2.195122</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-461.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.590000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.689530</td>\n",
       "      <td>1.130063</td>\n",
       "      <td>-2.195122</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-110.0</td>\n",
       "      <td>-462.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:50.841000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.576807</td>\n",
       "      <td>0.670376</td>\n",
       "      <td>1.187524</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.365854</td>\n",
       "      <td>37.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.092000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.595960</td>\n",
       "      <td>0.632069</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.195122</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>38.0</td>\n",
       "      <td>-116.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.342000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.576807</td>\n",
       "      <td>0.651223</td>\n",
       "      <td>1.206678</td>\n",
       "      <td>-2.195122</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>30.0</td>\n",
       "      <td>-108.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.593000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.595960</td>\n",
       "      <td>0.651223</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.365854</td>\n",
       "      <td>39.0</td>\n",
       "      <td>-115.0</td>\n",
       "      <td>-453.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:51.843000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.615114</td>\n",
       "      <td>0.670376</td>\n",
       "      <td>1.187524</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.341463</td>\n",
       "      <td>1.341463</td>\n",
       "      <td>41.0</td>\n",
       "      <td>-113.0</td>\n",
       "      <td>-465.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.094000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.615114</td>\n",
       "      <td>0.440533</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.463415</td>\n",
       "      <td>1.951220</td>\n",
       "      <td>42.0</td>\n",
       "      <td>-118.0</td>\n",
       "      <td>-454.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.344000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.402226</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.439024</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.121951</td>\n",
       "      <td>43.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.594000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.459687</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>-0.365854</td>\n",
       "      <td>43.0</td>\n",
       "      <td>-103.0</td>\n",
       "      <td>-459.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:52.846000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.595960</td>\n",
       "      <td>0.440533</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.243902</td>\n",
       "      <td>34.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-462.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.096000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.344765</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.975610</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-102.0</td>\n",
       "      <td>-460.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.348000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.287304</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.487805</td>\n",
       "      <td>29.0</td>\n",
       "      <td>-111.0</td>\n",
       "      <td>-463.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.598000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.615114</td>\n",
       "      <td>0.306458</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.487805</td>\n",
       "      <td>40.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-458.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:53.848000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.653421</td>\n",
       "      <td>0.287304</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.219512</td>\n",
       "      <td>0.975610</td>\n",
       "      <td>32.0</td>\n",
       "      <td>-106.0</td>\n",
       "      <td>-458.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.099000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.191536</td>\n",
       "      <td>1.130063</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>1.341463</td>\n",
       "      <td>29.0</td>\n",
       "      <td>-99.0</td>\n",
       "      <td>-459.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.340000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.114922</td>\n",
       "      <td>1.168370</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>0.487805</td>\n",
       "      <td>31.0</td>\n",
       "      <td>-97.0</td>\n",
       "      <td>-465.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.592000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.615114</td>\n",
       "      <td>0.076614</td>\n",
       "      <td>1.110910</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>-0.853659</td>\n",
       "      <td>41.0</td>\n",
       "      <td>-99.0</td>\n",
       "      <td>-457.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2019-11-19 15:02:54.842000+00:00</th>\n",
       "      <td>15:02</td>\n",
       "      <td>9.634267</td>\n",
       "      <td>0.191536</td>\n",
       "      <td>1.149217</td>\n",
       "      <td>-2.317073</td>\n",
       "      <td>-1.097561</td>\n",
       "      <td>-0.975610</td>\n",
       "      <td>25.0</td>\n",
       "      <td>-103.0</td>\n",
       "      <td>-459.0</td>\n",
       "      <td>2019-11-19</td>\n",
       "      <td>1.74</td>\n",
       "      <td>10.53</td>\n",
       "      <td>285.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                           Time    IMU A1    IMU A2    IMU A3  \\\n",
       "ride_id UTC                                                                     \n",
       " 16345  2019-11-19 15:02:50.090000+00:00  15:02  9.615114  0.785298  1.149217   \n",
       "        2019-11-19 15:02:50.340000+00:00  15:02  9.595960  0.708684  1.206678   \n",
       "        2019-11-19 15:02:50.590000+00:00  15:02  9.634267  0.689530  1.130063   \n",
       "        2019-11-19 15:02:50.841000+00:00  15:02  9.576807  0.670376  1.187524   \n",
       "        2019-11-19 15:02:51.092000+00:00  15:02  9.595960  0.632069  1.168370   \n",
       "        2019-11-19 15:02:51.342000+00:00  15:02  9.576807  0.651223  1.206678   \n",
       "        2019-11-19 15:02:51.593000+00:00  15:02  9.595960  0.651223  1.168370   \n",
       "        2019-11-19 15:02:51.843000+00:00  15:02  9.615114  0.670376  1.187524   \n",
       "        2019-11-19 15:02:52.094000+00:00  15:02  9.615114  0.440533  1.168370   \n",
       "        2019-11-19 15:02:52.344000+00:00  15:02  9.634267  0.402226  1.168370   \n",
       "        2019-11-19 15:02:52.594000+00:00  15:02  9.634267  0.459687  1.149217   \n",
       "        2019-11-19 15:02:52.846000+00:00  15:02  9.595960  0.440533  1.168370   \n",
       "        2019-11-19 15:02:53.096000+00:00  15:02  9.634267  0.344765  1.149217   \n",
       "        2019-11-19 15:02:53.348000+00:00  15:02  9.634267  0.287304  1.149217   \n",
       "        2019-11-19 15:02:53.598000+00:00  15:02  9.615114  0.306458  1.149217   \n",
       "        2019-11-19 15:02:53.848000+00:00  15:02  9.653421  0.287304  1.149217   \n",
       "        2019-11-19 15:02:54.099000+00:00  15:02  9.634267  0.191536  1.130063   \n",
       "        2019-11-19 15:02:54.340000+00:00  15:02  9.634267  0.114922  1.168370   \n",
       "        2019-11-19 15:02:54.592000+00:00  15:02  9.615114  0.076614  1.110910   \n",
       "        2019-11-19 15:02:54.842000+00:00  15:02  9.634267  0.191536  1.149217   \n",
       "\n",
       "                                            IMU G1    IMU G2    IMU G3  \\\n",
       "ride_id UTC                                                              \n",
       " 16345  2019-11-19 15:02:50.090000+00:00 -2.195122 -1.219512 -0.121951   \n",
       "        2019-11-19 15:02:50.340000+00:00 -2.195122 -1.219512  0.000000   \n",
       "        2019-11-19 15:02:50.590000+00:00 -2.195122 -1.097561  0.000000   \n",
       "        2019-11-19 15:02:50.841000+00:00 -2.317073 -1.097561  0.365854   \n",
       "        2019-11-19 15:02:51.092000+00:00 -2.195122 -1.219512  0.000000   \n",
       "        2019-11-19 15:02:51.342000+00:00 -2.195122 -1.219512  0.000000   \n",
       "        2019-11-19 15:02:51.593000+00:00 -2.317073 -1.097561  0.365854   \n",
       "        2019-11-19 15:02:51.843000+00:00 -2.317073 -1.341463  1.341463   \n",
       "        2019-11-19 15:02:52.094000+00:00 -2.317073 -1.463415  1.951220   \n",
       "        2019-11-19 15:02:52.344000+00:00 -2.439024 -1.219512  0.121951   \n",
       "        2019-11-19 15:02:52.594000+00:00 -2.317073 -1.097561 -0.365854   \n",
       "        2019-11-19 15:02:52.846000+00:00 -2.317073 -1.097561  0.243902   \n",
       "        2019-11-19 15:02:53.096000+00:00 -2.317073 -1.219512  0.975610   \n",
       "        2019-11-19 15:02:53.348000+00:00 -2.317073 -1.219512  0.487805   \n",
       "        2019-11-19 15:02:53.598000+00:00 -2.317073 -1.097561  0.487805   \n",
       "        2019-11-19 15:02:53.848000+00:00 -2.317073 -1.219512  0.975610   \n",
       "        2019-11-19 15:02:54.099000+00:00 -2.317073 -1.097561  1.341463   \n",
       "        2019-11-19 15:02:54.340000+00:00 -2.317073 -1.097561  0.487805   \n",
       "        2019-11-19 15:02:54.592000+00:00 -2.317073 -1.097561 -0.853659   \n",
       "        2019-11-19 15:02:54.842000+00:00 -2.317073 -1.097561 -0.975610   \n",
       "\n",
       "                                          IMU M1  IMU M2  IMU M3        Date  \\\n",
       "ride_id UTC                                                                    \n",
       " 16345  2019-11-19 15:02:50.090000+00:00    32.0  -110.0  -468.0  2019-11-19   \n",
       "        2019-11-19 15:02:50.340000+00:00    39.0  -115.0  -461.0  2019-11-19   \n",
       "        2019-11-19 15:02:50.590000+00:00    38.0  -110.0  -462.0  2019-11-19   \n",
       "        2019-11-19 15:02:50.841000+00:00    37.0  -111.0  -457.0  2019-11-19   \n",
       "        2019-11-19 15:02:51.092000+00:00    38.0  -116.0  -460.0  2019-11-19   \n",
       "        2019-11-19 15:02:51.342000+00:00    30.0  -108.0  -460.0  2019-11-19   \n",
       "        2019-11-19 15:02:51.593000+00:00    39.0  -115.0  -453.0  2019-11-19   \n",
       "        2019-11-19 15:02:51.843000+00:00    41.0  -113.0  -465.0  2019-11-19   \n",
       "        2019-11-19 15:02:52.094000+00:00    42.0  -118.0  -454.0  2019-11-19   \n",
       "        2019-11-19 15:02:52.344000+00:00    43.0  -111.0  -457.0  2019-11-19   \n",
       "        2019-11-19 15:02:52.594000+00:00    43.0  -103.0  -459.0  2019-11-19   \n",
       "        2019-11-19 15:02:52.846000+00:00    34.0  -106.0  -462.0  2019-11-19   \n",
       "        2019-11-19 15:02:53.096000+00:00    32.0  -102.0  -460.0  2019-11-19   \n",
       "        2019-11-19 15:02:53.348000+00:00    29.0  -111.0  -463.0  2019-11-19   \n",
       "        2019-11-19 15:02:53.598000+00:00    40.0  -106.0  -458.0  2019-11-19   \n",
       "        2019-11-19 15:02:53.848000+00:00    32.0  -106.0  -458.0  2019-11-19   \n",
       "        2019-11-19 15:02:54.099000+00:00    29.0   -99.0  -459.0  2019-11-19   \n",
       "        2019-11-19 15:02:54.340000+00:00    31.0   -97.0  -465.0  2019-11-19   \n",
       "        2019-11-19 15:02:54.592000+00:00    41.0   -99.0  -457.0  2019-11-19   \n",
       "        2019-11-19 15:02:54.842000+00:00    25.0  -103.0  -459.0  2019-11-19   \n",
       "\n",
       "                                          Hs(ft)  Tp(s)  Dp(deg)  \n",
       "ride_id UTC                                                       \n",
       " 16345  2019-11-19 15:02:50.090000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:50.340000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:50.590000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:50.841000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:51.092000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:51.342000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:51.593000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:51.843000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:52.094000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:52.344000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:52.594000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:52.846000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:53.096000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:53.348000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:53.598000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:53.848000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:54.099000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:54.340000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:54.592000+00:00    1.74  10.53    285.0  \n",
       "        2019-11-19 15:02:54.842000+00:00    1.74  10.53    285.0  "
      ]
     },
     "execution_count": 133,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "print(len(motion_df))\n",
    "motion_df[:20]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 134,
   "metadata": {},
   "outputs": [],
   "source": [
    "# After converting all to real values, try to export data to .CSV so everyone else doesn't have to webscrape it:\n",
    "#motion_df.to_csv('CSE258_A2_Data.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 135,
   "metadata": {},
   "outputs": [],
   "source": [
    "#motion_df.to_csv('CSE258_A2_Data2.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
