{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Analysis of Hyatt Dataset on GPU\n",
    "\n",
    "The dataset consists of 1.5 million records: feedback from customers staying at Hyatt Hotels across the world. Each feedback record is mapped to customer demographics, dates and days of visit, purpose of stay, ratings of each facility provided, etc. The target variable used for prediction is 'Likelihood_to_recommend', which describes, on a scale of 1-10, how likely the customer is to recommend a Hyatt Hotel to someone else."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "#Loading Packages\n",
     "#!/usr/bin/env\n",
     "import sys\n",
     "import os.path\n",
     "from pprint import pprint\n",
     "from time import time  #NOTE: shadows the 'time' module name; only the time() function is in scope\n",
     "import warnings\n",
     "warnings.filterwarnings(\"ignore\")  #silence library warnings for a cleaner demo\n",
     "\n",
     "start_time = time()  #wall-clock start for end-to-end timing\n",
     "\n",
     "PWD = !pwd  #capture the current working directory via IPython shell capture"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'/raidStorage/wamsi/mapd-ml/notebooks/../thirdparty'"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mapd_path = os.path.join(PWD[0],'..','thirdparty')\n",
    "sys.path.append(mapd_path)\n",
    "mapd_path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'/raidStorage/wamsi/mapd-ml/notebooks/..'"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pygdf_path = os.path.join(PWD[0],'..')\n",
    "sys.path.append(pygdf_path)\n",
    "pygdf_path"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Loading PYGDF library"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pygdf"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Importing Packages for MapD connection"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from mapd import MapD\n",
    "from mapd import ttypes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#Importing thrift libraries\n",
    "from thrift.protocol import TBinaryProtocol\n",
    "from thrift.protocol import TJSONProtocol\n",
    "from thrift.transport import TSocket\n",
    "from thrift.transport import THttpClient\n",
    "from thrift.transport import TTransport"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Setting up MapD connection"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "def get_client(host_or_uri, port, http):\n",
     "  \"\"\"Create and return a connected MapD Thrift client.\n",
     "\n",
     "  host_or_uri: hostname (binary mode) or full URI (HTTP mode).\n",
     "  port: TCP port for the binary protocol; ignored when http is True.\n",
     "  http: True -> HTTP transport + JSON protocol; False -> buffered socket + binary protocol.\n",
     "  \"\"\"\n",
     "  if http:\n",
     "    transport = THttpClient.THttpClient(host_or_uri)\n",
     "    protocol = TJSONProtocol.TJSONProtocol(transport)\n",
     "  else:\n",
     "    socket = TSocket.TSocket(host_or_uri, port)\n",
     "    transport = TTransport.TBufferedTransport(socket)\n",
     "    protocol = TBinaryProtocol.TBinaryProtocol(transport)\n",
     "\n",
     "  client = MapD.Client(protocol)\n",
     "  transport.open()  #connection stays open; caller owns the transport lifetime\n",
     "  return client"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Connection Established\n"
     ]
    }
   ],
   "source": [
    "db_name = 'mapd'\n",
    "user_name = 'mapd'\n",
    "passwd = 'HyperInteractive'\n",
    "hostname = 'bewdy.mapd.com'\n",
    "portno = '9998'\n",
    "\n",
    "client = get_client(hostname, portno, False)\n",
    "session = client.connect(user_name, passwd, db_name)\n",
    "print('Connection Established')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Define Function to convert MapD Query Result into Dictionary"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def getDict(results):\n",
    "    \n",
    "    try:\n",
    "        assert results.row_set.is_columnar == True\n",
    "    except AssertionError:\n",
    "        print('Please use columns not rows in arguments')\n",
    "    \n",
    "    results_dict = {}  #Initialize Dictionary\n",
    "    \n",
    "    nrows = len(results.row_set.columns[0].nulls)\n",
    "    ncols = len(results.row_set.row_desc)\n",
    "    \n",
    "    for i in list(range(0,ncols)):\n",
    "        row_vals = []\n",
    "        dt = False\n",
    "        col_typ = ttypes.TDatumType._VALUES_TO_NAMES[results.row_set.row_desc[i].col_type.type]\n",
    "        col_name = results.row_set.row_desc[i].col_name\n",
    "        col_arr = results.row_set.row_desc[i].col_type.is_array\n",
    "        \n",
    "        if not col_arr:\n",
    "            if col_typ in ['SMALLINT', 'INT', 'BIGINT', 'TIME', 'TIMESTAMP', 'DATE', 'BOOL']:\n",
    "                hold = results.row_set.columns[i].data.int_col\n",
    "            elif col_typ in ['FLOAT', 'DECIMAL', 'DOUBLE']:\n",
    "                hold = results.row_set.columns[i].data.real_col\n",
    "            elif col_typ in ['STR']:\n",
    "                hold = results.row_set.columns[i].data.str_col\n",
    "            else:\n",
    "                if col_typ in ['TIME','TIMESTAMP','DATE']:\n",
    "                  dt = True\n",
    "                  \n",
    "                hold = results.row_set.columns[i].data.arr_col\n",
    "                  \n",
    "        for j in list(range(0,nrows)):\n",
    "            if not dt:\n",
    "                row_vals.append(hold[j])\n",
    "                #print(hold[j])\n",
    "            else:\n",
    "                row_vals.append(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(hold[j])))\n",
    "        \n",
    "        results_dict[col_name] = row_vals\n",
    "    \n",
    "    return results_dict"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Extracting data using Mapdql"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Total Number of columns: 237\n",
      "Columns used for predict: 76\n"
     ]
    }
   ],
   "source": [
    "#columns used in query\n",
    "columns_all = '''Likelihood_Recommend_H,CHECKOUT_HEADER_ID_C,MARKET_CODE_C,MARKET_DESC_C,MARKET_GROUP_C,MAJOR_MARKET_CODE_C,CHANNEL_CODE_C,RATE_PLAN_C,SPLIT_RATE_PLAN_C,CONS_GUEST_ID_C,ROOM_NUM_C,ROOM_TYPE_CODE_C,ROOM_TYPE_DESCRIPTION_C,WALK_IN_FLG_C,RESERVATION_CONFIRMATION_NUM_C,RESERVATION_CONFIRMATION_PAGE_C,RESERVATION_STATUS_C,CHECK_IN_DATE_C,CHECK_OUT_DATE_C,LENGTH_OF_STAY_C,NUMBER_OF_ROOMS_C,ADULT_NUM_C,CHILDREN_NUM_C,POV_CODE_C,QUOTED_RATE_C,PMS_ROOM_REV_C,PMS_ROOM_REV_USD_C,PMS_TOTAL_REV_C,PMS_TOTAL_REV_USD_C,PMS_FOOD_BEVERAGE_REV_C,PMS_FOOD_BEVERAGE_REV_USD_C,PMS_OTHER_REV_C,PMS_OTHER_REV_USD_C,WALK_FLAG_C,NO_SHOW_FLAG_C,DUP_INDEX_C,CONFIRMATION_NUM_R,CONFIRMATION_PAGE_R,RESERVATION_DATE_R,ARRIVAL_FLG_R,ARRIVAL_DATE_R,DEPARTURE_DATE_R,DIRECT_NIGHTS_NUM_R,RESERVATION_STATUS_R,ENTRY_TIME_R,ENTRY_HOTEL_CODE_R,LAST_CHANGE_DATE_R,ROOM_TYPE_CODE_R,ROOM_TYPE_DESCRIPTION_R,MAJOR_MARKET_CODE_R,PMS_RATE_CATEGORY_CODE_R,NUM_ROOMS_R,ADULT_NUM_R,CHILDREN_NUM_R,NT_RATE_R,STATE_R,COUNTRY_CODE_R,ETA_R,FLIGHTT_INFO_R,LENGTH_OF_STAY_R,LENGTH_OF_STAY_CATEGORY_R,CALCULATED_NIGHTS_NUM_R,ROOM_NIGHTS_R,STATUS_CALCULATION_R,GROUPS_VS_FIT_R,GUEST_COUNTRY_R,GOLDPASSPORT_FLG_R,MEMBER_STATUS_R,PACE_CATEGORY_R,PACE_R,OFFER_FLG_R,PAST_VS_FUTURE_R,REVENUE_R,REVENUE_USD_R,CHANNEL_CODE_R,e_delivereddate_I,e_delivereddate_adj_I,e_status_I,e_hy_gss_tier_I,e_hy_gss_conf_num_I,e_hy_gss_conf_page_I,e_hy_feedback_type_I,e_hy_gss_rate_plan_code_I,e_country_I,e_hy_gss_check_in_by_I,e_hy_gss_check_out_by_I,e_hy_gss_marketing_emails_ok_yn_I,e_hy_gss_promo_emails_ok_yn_I,e_hy_gss_club_access_yn_I,e_hy_gss_gender_I,e_hy_gss_title_text_I,e_hy_gss_language_I,e_hy_gss_room_floor_I,e_hy_gss_check_in_time_text_I,e_hy_gss_check_out_time_text_I,e_hy_gss_check_in_time_I,e_hy_gss_check_out_time_I,e_checkin_I,e_checkout_I,a_last_seen_page_name_I,a_last_submitted_page_name_I,Survey_ID_H,Response_Date_H,Guest_Checkin_Date_H,Guest_Checkout_Date_H,Length_Stay_H,Guest_State_H,Guest_Country_H,Gender_H,Age_Rang
e_H,POV_H,Language_H,DOE_H,Booking_Location_H,Num_Adults_H,Num_Kids_H,Num_Rooms_H,Conf_Num_Orig_H,Conf_Num_H,Conf_Page_H,Rate_Plan_H,Gross_Rev_H,Net_Rev_H,Room_Rev_H,CC_Type_H,Currency_H,Clublounge_Used_H,Spa_Used_H,GDS_Source_H,Checkin_Length_H,Room_Num_H,Room_Type_H,GP_Tier_H,Status_H,Feedback_Type_H,Mobile_First_H,Mobile_H,Overall_Sat_H,Guest_Room_H,Tranquility_H,Condition_Hotel_H,Customer_SVC_H,Staff_Cared_H,Internet_Sat_H,Check_In_H,FB_FREQ_H,FB_Overall_Experience_H,Internet_Dissat_Lobby_H,Internet_Dissat_Slow_H,Internet_Dissat_Expensive_H,Internet_Dissat_Connectivity_H,Internet_Dissat_Billing_H,Internet_Dissat_Wired_H,Internet_Dissat_Other_H,TV_Internet_General_H,Room_Dissat_Internet_H,eff_date_CC,avail_room_cnt_CC,occ_room_cnt_CC,average_daily_rate_CC,Spirit_PL,Property_ID_PL,Hotel_Name_Long_PL,Hotel_Name_Short_PL,Award_Category_PL,Status_PL,City_PL,State_PL,US_Region_PL,Postal_Code_PL,Country_PL,Ops_Region_PL,Sub_Continent_PL,Property_DMA_PL,Property_Latitude_PL,Property_Longitude_PL,Currency_PL,Dom_Intl_PL,Guest_NPS_Goal_PL,STR_Number_PL,STR_Market_PL,Brand_PL,Brand_Initial_PL,Club_Type_PL,Hotel_Inventory_PL,Floors_PL,Total_Meeting_Space_PL,Union_PL,GRegion_PL,DRegion_PL,Region_PL,Category_PL,Scope_of_Service_PL,Type_PL,Class_PL,Location_PL,Bucket_PL,Relationship_PL,All_Suites_PL,Bell_Staff_PL,Boutique_PL,Business_Center_PL,Casino_PL,Conference_PL,Convention_PL,Dry_Cleaning_PL,Elevators_PL,Fitness_Center_PL,Fitness_Trainer_PL,Golf_PL,Indoor_Corridors_PL,Laundry_PL,Limo_Service_PL,Mini_Bar_PL,Pool_Indoor_PL,Pool_Outdoor_PL,Regency_Grand_Club_PL,Resort_PL,Restaurant_PL,Self_Parking_PL,Shuttle_Service_PL,Ski_PL,Spa_PL,Spa_fitness_center_PL,Spa_online_booking_PL,Spa_offering_PL,Valet_Parking_PL,GP_Tier,Sub_Channel_Category,Channel_Category,Booking_Channel,NPS_Type,Stay_Sequence,Stay_Sequence_Brand,Completed_Survey_Sequence,Days_Since_Last_Stay,Days_Until_Next_Stay'''\n",
    "print('Total Number of columns: %d' %(len(columns_all.split(','))))\n",
    "\n",
    "columns_pred = '''Likelihood_Recommend_H,ROOM_NUM_C,LENGTH_OF_STAY_C,NUMBER_OF_ROOMS_C,ADULT_NUM_C,CHILDREN_NUM_C,QUOTED_RATE_C,PMS_ROOM_REV_C,PMS_ROOM_REV_USD_C,PMS_TOTAL_REV_C,PMS_TOTAL_REV_USD_C,PMS_FOOD_BEVERAGE_REV_C,PMS_FOOD_BEVERAGE_REV_USD_C,PMS_OTHER_REV_C,PMS_OTHER_REV_USD_C,DUP_INDEX_C,CONFIRMATION_NUM_R,CONFIRMATION_PAGE_R,ARRIVAL_FLG_R,DIRECT_NIGHTS_NUM_R,NUM_ROOMS_R,ADULT_NUM_R,CHILDREN_NUM_R,NT_RATE_R,LENGTH_OF_STAY_R,CALCULATED_NIGHTS_NUM_R,ROOM_NIGHTS_R,PACE_R,REVENUE_R,REVENUE_USD_R,e_hy_gss_conf_num_I,e_hy_gss_conf_page_I,Length_Stay_H,Num_Adults_H,Num_Kids_H,Num_Rooms_H,Conf_Num_Orig_H,Conf_Num_H,Conf_Page_H,Gross_Rev_H,Net_Rev_H,Room_Rev_H,Checkin_Length_H,Room_Num_H,Overall_Sat_H,Guest_Room_H,Tranquility_H,Condition_Hotel_H,Customer_SVC_H,Staff_Cared_H,Internet_Sat_H,Check_In_H,FB_FREQ_H,FB_Overall_Experience_H,Internet_Dissat_Lobby_H,Internet_Dissat_Slow_H,Internet_Dissat_Expensive_H,Internet_Dissat_Connectivity_H,Internet_Dissat_Billing_H,Internet_Dissat_Wired_H,Internet_Dissat_Other_H,TV_Internet_General_H,Room_Dissat_Internet_H,avail_room_cnt_CC,occ_room_cnt_CC,average_daily_rate_CC,Guest_NPS_Goal_PL,STR_Number_PL,Hotel_Inventory_PL,Floors_PL,Total_Meeting_Space_PL,Stay_Sequence,Stay_Sequence_Brand,Completed_Survey_Sequence,Days_Since_Last_Stay,Days_Until_Next_Stay'''\n",
    "print('Columns used for predict: %d' %(len(columns_pred.split(','))))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "query_all = '''SELECT {} FROM hyatt_all where nps_type IN ('Promoter','Detractor','Passive') AND us_region_pl IN ('West') order by checkout_header_id_c'''.format(columns_all)\n",
    "\n",
    "query_pred = '''SELECT {} FROM hyatt_all where nps_type IN ('Promoter','Detractor','Passive') AND us_region_pl IN ('West') order by checkout_header_id_c'''.format(columns_pred)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Extract data on GPU"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Query_pred is:  SELECT Likelihood_Recommend_H,ROOM_NUM_C,LENGTH_OF_STAY_C,NUMBER_OF_ROOMS_C,ADULT_NUM_C,CHILDREN_NUM_C,QUOTED_RATE_C,PMS_ROOM_REV_C,PMS_ROOM_REV_USD_C,PMS_TOTAL_REV_C,PMS_TOTAL_REV_USD_C,PMS_FOOD_BEVERAGE_REV_C,PMS_FOOD_BEVERAGE_REV_USD_C,PMS_OTHER_REV_C,PMS_OTHER_REV_USD_C,DUP_INDEX_C,CONFIRMATION_NUM_R,CONFIRMATION_PAGE_R,ARRIVAL_FLG_R,DIRECT_NIGHTS_NUM_R,NUM_ROOMS_R,ADULT_NUM_R,CHILDREN_NUM_R,NT_RATE_R,LENGTH_OF_STAY_R,CALCULATED_NIGHTS_NUM_R,ROOM_NIGHTS_R,PACE_R,REVENUE_R,REVENUE_USD_R,e_hy_gss_conf_num_I,e_hy_gss_conf_page_I,Length_Stay_H,Num_Adults_H,Num_Kids_H,Num_Rooms_H,Conf_Num_Orig_H,Conf_Num_H,Conf_Page_H,Gross_Rev_H,Net_Rev_H,Room_Rev_H,Checkin_Length_H,Room_Num_H,Overall_Sat_H,Guest_Room_H,Tranquility_H,Condition_Hotel_H,Customer_SVC_H,Staff_Cared_H,Internet_Sat_H,Check_In_H,FB_FREQ_H,FB_Overall_Experience_H,Internet_Dissat_Lobby_H,Internet_Dissat_Slow_H,Internet_Dissat_Expensive_H,Internet_Dissat_Connectivity_H,Internet_Dissat_Billing_H,Internet_Dissat_Wired_H,Internet_Dissat_Other_H,TV_Internet_General_H,Room_Dissat_Internet_H,avail_room_cnt_CC,occ_room_cnt_CC,average_daily_rate_CC,Guest_NPS_Goal_PL,STR_Number_PL,Hotel_Inventory_PL,Floors_PL,Total_Meeting_Space_PL,Stay_Sequence,Stay_Sequence_Brand,Completed_Survey_Sequence,Days_Since_Last_Stay,Days_Until_Next_Stay FROM hyatt_all where nps_type IN ('Promoter','Detractor','Passive') AND us_region_pl IN ('West') order by checkout_header_id_c\n"
     ]
    }
   ],
   "source": [
    "print('Query_pred is: ',query_pred)\n",
    "\n",
    "results_pred = client.sql_execute_gpudf(session,query_pred,device_id=0,first_n=-1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Extract data on CPU"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def getDataAll(query):\n",
    "    print('Query_all is: ',query_all)\n",
    "    \n",
    "    print('after no')\n",
    "    results_all = client.sql_execute(session,query,True,None,-1)\n",
    "    #results.row_set.columns[0].nulls\n",
    "    #results.row_set.row_desc\n",
    "    \n",
    "    return results_all"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Query_all is:  SELECT Likelihood_Recommend_H,CHECKOUT_HEADER_ID_C,MARKET_CODE_C,MARKET_DESC_C,MARKET_GROUP_C,MAJOR_MARKET_CODE_C,CHANNEL_CODE_C,RATE_PLAN_C,SPLIT_RATE_PLAN_C,CONS_GUEST_ID_C,ROOM_NUM_C,ROOM_TYPE_CODE_C,ROOM_TYPE_DESCRIPTION_C,WALK_IN_FLG_C,RESERVATION_CONFIRMATION_NUM_C,RESERVATION_CONFIRMATION_PAGE_C,RESERVATION_STATUS_C,CHECK_IN_DATE_C,CHECK_OUT_DATE_C,LENGTH_OF_STAY_C,NUMBER_OF_ROOMS_C,ADULT_NUM_C,CHILDREN_NUM_C,POV_CODE_C,QUOTED_RATE_C,PMS_ROOM_REV_C,PMS_ROOM_REV_USD_C,PMS_TOTAL_REV_C,PMS_TOTAL_REV_USD_C,PMS_FOOD_BEVERAGE_REV_C,PMS_FOOD_BEVERAGE_REV_USD_C,PMS_OTHER_REV_C,PMS_OTHER_REV_USD_C,WALK_FLAG_C,NO_SHOW_FLAG_C,DUP_INDEX_C,CONFIRMATION_NUM_R,CONFIRMATION_PAGE_R,RESERVATION_DATE_R,ARRIVAL_FLG_R,ARRIVAL_DATE_R,DEPARTURE_DATE_R,DIRECT_NIGHTS_NUM_R,RESERVATION_STATUS_R,ENTRY_TIME_R,ENTRY_HOTEL_CODE_R,LAST_CHANGE_DATE_R,ROOM_TYPE_CODE_R,ROOM_TYPE_DESCRIPTION_R,MAJOR_MARKET_CODE_R,PMS_RATE_CATEGORY_CODE_R,NUM_ROOMS_R,ADULT_NUM_R,CHILDREN_NUM_R,NT_RATE_R,STATE_R,COUNTRY_CODE_R,ETA_R,FLIGHTT_INFO_R,LENGTH_OF_STAY_R,LENGTH_OF_STAY_CATEGORY_R,CALCULATED_NIGHTS_NUM_R,ROOM_NIGHTS_R,STATUS_CALCULATION_R,GROUPS_VS_FIT_R,GUEST_COUNTRY_R,GOLDPASSPORT_FLG_R,MEMBER_STATUS_R,PACE_CATEGORY_R,PACE_R,OFFER_FLG_R,PAST_VS_FUTURE_R,REVENUE_R,REVENUE_USD_R,CHANNEL_CODE_R,e_delivereddate_I,e_delivereddate_adj_I,e_status_I,e_hy_gss_tier_I,e_hy_gss_conf_num_I,e_hy_gss_conf_page_I,e_hy_feedback_type_I,e_hy_gss_rate_plan_code_I,e_country_I,e_hy_gss_check_in_by_I,e_hy_gss_check_out_by_I,e_hy_gss_marketing_emails_ok_yn_I,e_hy_gss_promo_emails_ok_yn_I,e_hy_gss_club_access_yn_I,e_hy_gss_gender_I,e_hy_gss_title_text_I,e_hy_gss_language_I,e_hy_gss_room_floor_I,e_hy_gss_check_in_time_text_I,e_hy_gss_check_out_time_text_I,e_hy_gss_check_in_time_I,e_hy_gss_check_out_time_I,e_checkin_I,e_checkout_I,a_last_seen_page_name_I,a_last_submitted_page_name_I,Survey_ID_H,Response_Date_H,Guest_Checkin_Date_H,Guest_Checkout_Date_H,Length_Stay_H,Guest_State_H,Guest_Country_H,Gender_H,A
ge_Range_H,POV_H,Language_H,DOE_H,Booking_Location_H,Num_Adults_H,Num_Kids_H,Num_Rooms_H,Conf_Num_Orig_H,Conf_Num_H,Conf_Page_H,Rate_Plan_H,Gross_Rev_H,Net_Rev_H,Room_Rev_H,CC_Type_H,Currency_H,Clublounge_Used_H,Spa_Used_H,GDS_Source_H,Checkin_Length_H,Room_Num_H,Room_Type_H,GP_Tier_H,Status_H,Feedback_Type_H,Mobile_First_H,Mobile_H,Overall_Sat_H,Guest_Room_H,Tranquility_H,Condition_Hotel_H,Customer_SVC_H,Staff_Cared_H,Internet_Sat_H,Check_In_H,FB_FREQ_H,FB_Overall_Experience_H,Internet_Dissat_Lobby_H,Internet_Dissat_Slow_H,Internet_Dissat_Expensive_H,Internet_Dissat_Connectivity_H,Internet_Dissat_Billing_H,Internet_Dissat_Wired_H,Internet_Dissat_Other_H,TV_Internet_General_H,Room_Dissat_Internet_H,eff_date_CC,avail_room_cnt_CC,occ_room_cnt_CC,average_daily_rate_CC,Spirit_PL,Property_ID_PL,Hotel_Name_Long_PL,Hotel_Name_Short_PL,Award_Category_PL,Status_PL,City_PL,State_PL,US_Region_PL,Postal_Code_PL,Country_PL,Ops_Region_PL,Sub_Continent_PL,Property_DMA_PL,Property_Latitude_PL,Property_Longitude_PL,Currency_PL,Dom_Intl_PL,Guest_NPS_Goal_PL,STR_Number_PL,STR_Market_PL,Brand_PL,Brand_Initial_PL,Club_Type_PL,Hotel_Inventory_PL,Floors_PL,Total_Meeting_Space_PL,Union_PL,GRegion_PL,DRegion_PL,Region_PL,Category_PL,Scope_of_Service_PL,Type_PL,Class_PL,Location_PL,Bucket_PL,Relationship_PL,All_Suites_PL,Bell_Staff_PL,Boutique_PL,Business_Center_PL,Casino_PL,Conference_PL,Convention_PL,Dry_Cleaning_PL,Elevators_PL,Fitness_Center_PL,Fitness_Trainer_PL,Golf_PL,Indoor_Corridors_PL,Laundry_PL,Limo_Service_PL,Mini_Bar_PL,Pool_Indoor_PL,Pool_Outdoor_PL,Regency_Grand_Club_PL,Resort_PL,Restaurant_PL,Self_Parking_PL,Shuttle_Service_PL,Ski_PL,Spa_PL,Spa_fitness_center_PL,Spa_online_booking_PL,Spa_offering_PL,Valet_Parking_PL,GP_Tier,Sub_Channel_Category,Channel_Category,Booking_Channel,NPS_Type,Stay_Sequence,Stay_Sequence_Brand,Completed_Survey_Sequence,Days_Since_Last_Stay,Days_Until_Next_Stay FROM hyatt_all where nps_type IN ('Promoter','Detractor','Passive') AND us_region_pl IN 
('West') order by checkout_header_id_c\n",
      "after no\n"
     ]
    }
   ],
   "source": [
    "from multiprocessing.pool import ThreadPool\n",
    "\n",
    "pool = ThreadPool(processes=4)\n",
    "data_thread = pool.apply_async(getDataAll, args = (query_all,))\n",
    "pool.close()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Create handle for GPU pointer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from numba import cuda\n",
    "from numba.cuda.cudadrv import drvapi\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<numba.cuda.cudadrv.driver.OwnedPointer object at 0x7f279cf689b0>\n",
      "_MemoryInfo(free=22380150784, total=25630801920)\n"
     ]
    }
   ],
   "source": [
    "gpu_handle = drvapi.cu_ipc_mem_handle(*results_pred.df_handle)\n",
    "ipc_handle = cuda.driver.IpcHandle(None,gpu_handle,results_pred.df_size)\n",
    "contxt = cuda.current_context()\n",
    "\n",
    "ipc = ipc_handle.open(contxt)\n",
    "pprint(ipc)\n",
    "pprint(cuda.current_context().get_memory_info())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "ipc is space in the GPU memory containing data"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Converting ipc into NDArray"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "type = np.dtype(np.byte)\n",
    "data_arr = cuda.devicearray.DeviceNDArray(shape = ipc.size, strides = type.itemsize,\\\n",
    "                                         dtype = type, gpu_data = ipc) "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Reading data using GPU Arrow"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from pygdf.gpuarrow import GpuArrowReader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "reader = GpuArrowReader(data_arr)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Wrapping result in PYGDF"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from pygdf.dataframe import DataFrame"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "df = DataFrame(reader.to_dict().items())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Overview of dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "  Likelihood_Recommend_H ROOM_NUM_C LENGTH_OF_STAY_C NUMBER_OF_ROOMS_C ADULT_NUM_C CHILDREN_NUM_C QUOTED_RATE_C ... PMS_ROOM_REV_C\n",
       "0                      6      339.0              3.0               1.0         2.0            2.0         134.0 ...          402.0\n",
       "1                     10     1432.0              2.0               1.0         1.0                        209.0 ...          418.0\n",
       "2                      9     3360.0              1.0               1.0         2.0                        306.0 ...          306.0\n",
       "3                      7      233.0              4.0               1.0         2.0            2.0         42.74 ...         170.96\n",
       "4                      9     2254.0              7.0               1.0         2.0            2.0         310.0 ...         2170.0\n",
       "[186998 more rows]\n",
       "[68 more columns]"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Data Preprocessing"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "predict_column = set(['Likelihood_Recommend_H'])\n",
    "feature_columns = set(df.columns) - predict_column\n",
    "var_num = set()\n",
    "var_cat = set()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "separate cat and num columns"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Number of numerical columns: 24\n",
      "Number of categorical columns: 39\n"
     ]
    }
   ],
   "source": [
    "unique_val = {}\n",
    "for col in feature_columns:\n",
    "    try:\n",
    "        val = df[col].unique_k(999)\n",
    "        unique_val[col] = val\n",
    "    except ValueError:\n",
    "        #more than 999 unique values\n",
    "        var_num.add(col)\n",
    "    else:\n",
    "        #value less than 999\n",
    "        value = len(val)\n",
    "        if value <= 1:\n",
    "            del df[col]\n",
    "        elif 1<value<999:\n",
    "            var_cat.add(col)\n",
    "        else:\n",
    "            var_num.add(col)\n",
    "            \n",
    "print('Number of numerical columns: {}'.format(len(var_num)))\n",
    "print('Number of categorical columns: {}'.format(len(var_cat)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Replace Null values with mean and scale for num columns"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#change dtype to float 64\n",
    "for col in df.columns:\n",
    "    df[col] = df[col].astype(np.float64)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "for col in var_num:\n",
    "    df[col] = df[col].fillna(df[col].mean())\n",
    "    assert df[col].null_count == 0\n",
    "    std = df[col].std()\n",
    "    # drop near constant columns\n",
    "    if not np.isfinite(std) or std < 1e-4:\n",
    "        del df[col]\n",
    "        print('drop near constant', col)\n",
    "    else:\n",
    "        df[col] = df[col].scale()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "One-hot encode cat columns"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "for col in var_cat:\n",
    "    cats = unique_val[col][1:]  # drop first\n",
    "    df = df.one_hot_encoding(col, prefix=col, cats=cats)\n",
    "    del df[col]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "nrows = len(df) #total no of rows"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#Ensure dtypes are in float 64\n",
    "{df[k].dtype for k in df.columns}\n",
    "df['intercept'] = np.ones(nrows, dtype=np.float64)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Divide training and testing datasets into 70:30"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Total number of rows: 187003\n",
      "Training datsets has: 130902\n",
      "Test datsets has: 56101\n"
     ]
    }
   ],
   "source": [
    "cp = int(.70 * nrows)\n",
    "\n",
    "df_train, df_test = df.loc[:cp-1], df.loc[cp:]\n",
    "\n",
    "print('Total number of rows: %d' %(nrows))\n",
    "print('Training datsets has: %d' %(len(df_train)))\n",
    "print('Test datsets has: %d' %(len(df_test)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Convert data frames into matrices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "train_data_mat = df_train.as_gpu_matrix(columns=df.columns[1:])\n",
    "train_result_mat = df_train.as_gpu_matrix(columns=[df.columns[0]])\n",
    "test_data_mat = df_test.as_gpu_matrix(columns=df.columns[1:])\n",
    "test_result_mat = df_test.as_gpu_matrix(columns=[df.columns[0]])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Close IPC handle as matrix copies are created"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "ipc_handle.close()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "ctype pointers to GPU matrices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "train_data_mat_ptr address 0x10723200000\n",
      "train_result_mat_ptr address 0x107d1a5fc00\n",
      "test_data_mat_ptr address 0x107d1c00000\n",
      "test_result_mat_ptr address 0x1081c8bc200\n"
     ]
    }
   ],
   "source": [
    "train_data_mat_ptr = train_data_mat.device_ctypes_pointer\n",
    "train_result_mat_ptr = train_result_mat.device_ctypes_pointer\n",
    "print('train_data_mat_ptr address', hex(train_data_mat_ptr.value))\n",
    "print('train_result_mat_ptr address', hex(train_result_mat_ptr.value))\n",
    "\n",
    "test_data_mat_ptr = test_data_mat.device_ctypes_pointer\n",
    "test_result_mat_ptr = test_result_mat.device_ctypes_pointer\n",
    "print('test_data_mat_ptr address', hex(test_data_mat_ptr.value))\n",
    "print('test_result_mat_ptr address', hex(test_result_mat_ptr.value))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(130902, 2796)\n",
      "(130902, 1)\n",
      "(56101, 2796)\n",
      "(56101, 1)\n"
     ]
    }
   ],
   "source": [
    "print(train_data_mat.shape)\n",
    "print(train_result_mat.shape)\n",
    "print(test_data_mat.shape)\n",
    "print(test_result_mat.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Convert data frames into matrices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "mtime = time()\n",
    "#train_data_mat = df_train.as_matrix(columns=df.columns[1:])\n",
    "#train_result_mat = df_train.as_matrix(columns=[df.columns[0]])\n",
    "#test_data_mat = df_test.as_matrix(columns=df.columns[1:])\n",
    "#test_result_mat = df_test.as_matrix(columns=[df.columns[0]])\n",
    "metime = time()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "matrices to numpy arrays"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#train_data_np = np.array(train_data_mat,dtype = np.float64)\n",
    "#train_result_np = np.array(train_result_mat,dtype = np.float64)\n",
    "#test_data_np = np.array(test_data_mat,dtype = np.float64)\n",
    "#test_result_np = np.array(test_result_mat,dtype = np.float64)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#print(train_data_np.shape)\n",
     "#print(train_result_np.shape)\n",
    "#print(test_data_np.shape)\n",
    "#print(test_result_np.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Linear regression using H2oaiglm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "#Load h2oaiglm\n",
    "import h2oaiglm\n",
    "#from h2oaiglm.solvers.kmeans_gpu import KMeansGPU2\n",
    "from ctypes import *\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "xtrain=c_void_p(train_data_mat_ptr.value)\n",
    "ytrain=c_void_p(train_result_mat_ptr.value)\n",
    "xtest=c_void_p(test_data_mat_ptr.value)\n",
    "ytest=c_void_p(test_result_mat_ptr.value)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Select Data size and algorithm to process"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "No. of Features=2796 mTrain=130902 mValid=56101\n",
      "fortran=1\n"
     ]
    }
   ],
   "source": [
    "n=train_data_mat.shape[1]\n",
    "mTrain=train_data_mat.shape[0]\n",
    "mValid=test_data_mat.shape[0]\n",
    "\n",
    "print(\"No. of Features=%d mTrain=%d mValid=%d\" % (n,mTrain,mValid))\n",
    "# Order of data\n",
    "fortran = 1\n",
    "print(\"fortran=%d\" % (fortran))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Define function to use PYGDF data pointers in GPU"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def RunH2Oaiglm(arg):\n",
    "    \"\"\"Fit an H2O elastic-net model on GPU (CPU fallback) and predict.\n",
    "\n",
    "    arg: tuple of (intercept, standardize, lambda_min_ratio, nFolds,\n",
    "         nAlphas, nLambdas, nGPUs).\n",
    "    Returns (alphas, fitted model, predictions on xtest).\n",
    "    Relies on notebook globals: fortran, xtrain, ytrain, xtest.\n",
    "    \"\"\"\n",
    "    intercept, standardize, lambda_min_ratio, nFolds, nAlphas, nLambdas, nGPUs = arg\n",
    "\n",
    "    # Fall back to the CPU solver when the GPU build is unavailable\n",
    "    if (nGPUs > 0) and (h2oaiglm.ElasticNetSolverGPU is None):\n",
    "        print(\"\\nGPU solver unavailable, using CPU solver\\n\")\n",
    "        nGPUs = 0\n",
    "\n",
    "    sharedA = 0\n",
    "    nThreads = 1 if (nGPUs == 0) else nGPUs  # one worker thread per GPU\n",
    "\n",
    "    print(\"Setting up Solver\")\n",
    "    Solver = h2oaiglm.ElasticNetSolverGPU if (nGPUs > 0) else h2oaiglm.ElasticNetSolverCPU\n",
    "    print(Solver)\n",
    "    assert Solver is not None, \"Couldn't instantiate ElasticNetSolver\"\n",
    "\n",
    "    enet = Solver(sharedA, nThreads, nGPUs, 'c' if fortran else 'r', intercept, standardize,\n",
    "                  lambda_min_ratio, nLambdas, nFolds, nAlphas)\n",
    "\n",
    "    print(\"Solving\")\n",
    "    mod = enet.fit(xtrain, ytrain)\n",
    "    print(\"Done Solving\")\n",
    "\n",
    "    # Display the most important fit metrics\n",
    "    al = enet.getalphas()\n",
    "    print('Alphas: ', len(al))\n",
    "    print('Lambdas: ', (enet.getlambdas()))\n",
    "    print('Tols: ', (enet.gettols()))\n",
    "    print('RMSE: ', (enet.getrmse()))\n",
    "\n",
    "    print('Predicting')\n",
    "    pred = enet.predict(xtest)\n",
    "    print('Prediction complete')\n",
    "\n",
    "    return al, mod, pred"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Setting up parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Maximum Number of GPUS: 4\n"
     ]
    }
   ],
   "source": [
    "intercept = 1 #\n",
    "standardize = 0\n",
    "lambda_min_ratio=1E-9\n",
    "nFolds=5\n",
    "nAlphas=4\n",
    "nLambdas=100\n",
    "\n",
    "import subprocess\n",
    "maxNGPUS = int(subprocess.check_output(\"nvidia-smi -L | wc -l\", shell=True))\n",
    "print(\"Maximum Number of GPUS:\", maxNGPUS)\n",
    "nGPUs=maxNGPUS # choose all GPUs"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Run the model to predict"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Setting up Solver\n",
      "<class 'h2oaiglm.solvers.elastic_net_gpu.ElasticNetSolverGPU'>\n",
      "Solving\n"
     ]
    },
    {
     "ename": "IndexError",
     "evalue": "tuple index out of range",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mIndexError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[0;32m~/anaconda3/envs/pyg/lib/python3.5/site-packages/h2oaiglm/solvers/elastic_net_base.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, trainX, trainY, validX, validY, weight, givefullpath, dopredict)\u001b[0m\n\u001b[1;32m    430\u001b[0m                     \u001b[0mshapeX\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrainX\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 431\u001b[0;31m                     \u001b[0mmTrain\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshapeX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    432\u001b[0m                     \u001b[0mn1\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshapeX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mIndexError\u001b[0m: tuple index out of range",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mIndexError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-49-40f9cbd9e569>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0marg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mintercept\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mstandardize\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlambda_min_ratio\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnFolds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnAlphas\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnLambdas\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnGPUs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      2\u001b[0m \u001b[0mptime\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mal\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmod\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mpred_val\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mRunH2Oaiglm\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      4\u001b[0m \u001b[0mpetime\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-47-4f2bebd4e977>\u001b[0m in \u001b[0;36mRunH2Oaiglm\u001b[0;34m(arg)\u001b[0m\n\u001b[1;32m     29\u001b[0m     \u001b[0;31m#print(\"vars: %d %d %d %d %d %d %d\" % (sourceDev, mTrain, n, mValid, intercept, standardize, precision))\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     30\u001b[0m     \u001b[0;31m#enet.fit(sourceDev, mTrain, n, mValid, intercept, standardize, precision, a, b, c, d, e)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 31\u001b[0;31m     \u001b[0mmod\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0menet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mxtrain\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mytrain\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     32\u001b[0m     \u001b[0;31m#t1 = time.time()\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     33\u001b[0m     \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Done Solving\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/envs/pyg/lib/python3.5/site-packages/h2oaiglm/solvers/elastic_net_gpu.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, trainX, trainY, validX, validY, weight, givefullpath, dopredict)\u001b[0m\n\u001b[1;32m     20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     21\u001b[0m         \u001b[0;32mdef\u001b[0m \u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrainX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrainY\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidX\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mc_void_p\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidY\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mc_void_p\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mc_void_p\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgivefullpath\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdopredict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 22\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msolver\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrainX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrainY\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidY\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgivefullpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdopredict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     23\u001b[0m         \u001b[0;32mdef\u001b[0m 
\u001b[0mgetrmse\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     24\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msolver\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetrmse\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/envs/pyg/lib/python3.5/site-packages/h2oaiglm/solvers/elastic_net_base.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, trainX, trainY, validX, validY, weight, givefullpath, dopredict)\u001b[0m\n\u001b[1;32m    437\u001b[0m                 \u001b[0;31m# get shapes\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    438\u001b[0m                 \u001b[0mshapeX\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrainX\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 439\u001b[0;31m                 \u001b[0mmTrain\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshapeX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    440\u001b[0m                 \u001b[0mn1\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshapeX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    441\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mIndexError\u001b[0m: tuple index out of range"
     ]
    }
   ],
   "source": [
    "arg = intercept,standardize, lambda_min_ratio, nFolds, nAlphas, nLambdas, nGPUs \n",
    "ptime = time()\n",
    "al,mod,pred_val = RunH2Oaiglm(arg)\n",
    "petime = time() "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "print(mod.shape)\n",
    "print(pred_val.shape)\n",
    "#np.ones((pred_val.shape[0],1),dtype=pred_val.dtype)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "al[0][2]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Join prediction to the original data frame"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "df_pred = pd.DataFrame(pred_val[np.newaxis][0].T)\n",
    "#df_pred.columns = ['pred_recommend_rating']\n",
    "#df_pred"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "print('Waiting for CPU result')\n",
    "pool.join()\n",
    "print('Done waiting')\n",
    "df_org = pd.DataFrame(getDict(data_thread.get()))\n",
    "df_org_test = df_org[cp:]\n",
    "df_org_test = df_org_test.reset_index(drop = True)\n",
    "df_org_test['eff_date_CC'].replace(-9223372036854775808,1,inplace=True)\n",
    "\n",
    "df_org_test = df_org_test[columns_all.split(',')]  #arrange dataframe according to mapd table layout\n",
    "#len(df_org_test)\n",
    "\n",
    "df_org_test['pred_recommend_rating'] = df_pred    #join pred result\n",
    "#df_org_test"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Exporting the dataframe to csv file"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import csv"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "csv_time = time()\n",
    "\n",
    "os.system(\"rm -f hyatt_predict1.csv;\")\n",
    "export_path = os.path.join(PWD[0],'hyatt_predict1.csv')\n",
    "export_path\n",
    "\n",
    "df_org_test.to_csv(export_path,sep = ',',index = False,quoting=csv.QUOTE_NONNUMERIC)\n",
    "#h2o.export_file(hf_org_test, path = export_path)\n",
    "\n",
    "csve_time = time()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Create a temp table in MapD and load prediction data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "drop_query = '''DROP TABLE IF EXISTS hyatt_all_pred_3;'''\n",
    "\n",
    "temp_query = '''CREATE TABLE hyatt_all_pred_3 (Likelihood_Recommend_H integer,\n",
    "  CHECKOUT_HEADER_ID_C float,\n",
    "  MARKET_CODE_C text encoding dict,\n",
    "  MARKET_DESC_C text encoding dict,\n",
    "  MARKET_GROUP_C text encoding dict,\n",
    "  MAJOR_MARKET_CODE_C text encoding dict,\n",
    "  CHANNEL_CODE_C text encoding dict,\n",
    "  RATE_PLAN_C text encoding dict,\n",
    "  SPLIT_RATE_PLAN_C text encoding dict,\n",
    "  CONS_GUEST_ID_C float,\n",
    "  ROOM_NUM_C float,\n",
    "  ROOM_TYPE_CODE_C text encoding dict,\n",
    "  ROOM_TYPE_DESCRIPTION_C text encoding dict,\n",
    "  WALK_IN_FLG_C text encoding dict,\n",
    "  RESERVATION_CONFIRMATION_NUM_C float,\n",
    "  RESERVATION_CONFIRMATION_PAGE_C float,\n",
    "  RESERVATION_STATUS_C text encoding dict,\n",
    "  CHECK_IN_DATE_C date,\n",
    "  CHECK_OUT_DATE_C date,\n",
    "  LENGTH_OF_STAY_C float,\n",
    "  NUMBER_OF_ROOMS_C float,\n",
    "  ADULT_NUM_C float,\n",
    "  CHILDREN_NUM_C float,\n",
    "  POV_CODE_C text encoding dict,\n",
    "  QUOTED_RATE_C float,\n",
    "  PMS_ROOM_REV_C float,\n",
    "  PMS_ROOM_REV_USD_C float,\n",
    "  PMS_TOTAL_REV_C float,\n",
    "  PMS_TOTAL_REV_USD_C float,\n",
    "  PMS_FOOD_BEVERAGE_REV_C float,\n",
    "  PMS_FOOD_BEVERAGE_REV_USD_C float,\n",
    "  PMS_OTHER_REV_C float,\n",
    "  PMS_OTHER_REV_USD_C float,\n",
    "  WALK_FLAG_C text encoding dict,\n",
    "  NO_SHOW_FLAG_C text encoding dict,\n",
    "  DUP_INDEX_C float,\n",
    "  CONFIRMATION_NUM_R float,\n",
    "  CONFIRMATION_PAGE_R float,\n",
    "  RESERVATION_DATE_R date,\n",
    "  ARRIVAL_FLG_R float,\n",
    "  ARRIVAL_DATE_R date,\n",
    "  DEPARTURE_DATE_R date,\n",
    "  DIRECT_NIGHTS_NUM_R float,\n",
    "  RESERVATION_STATUS_R text encoding dict,\n",
    "  ENTRY_TIME_R text encoding dict,\n",
    "  ENTRY_HOTEL_CODE_R text encoding dict,\n",
    "  LAST_CHANGE_DATE_R timestamp,\n",
    "  ROOM_TYPE_CODE_R text encoding dict,\n",
    "  ROOM_TYPE_DESCRIPTION_R text encoding dict,\n",
    "  MAJOR_MARKET_CODE_R text encoding dict,\n",
    "  PMS_RATE_CATEGORY_CODE_R text encoding dict,\n",
    "  NUM_ROOMS_R float,\n",
    "  ADULT_NUM_R float,\n",
    "  CHILDREN_NUM_R float,\n",
    "  NT_RATE_R float,\n",
    "  STATE_R text encoding dict,\n",
    "  COUNTRY_CODE_R text encoding dict,\n",
    "  ETA_R text encoding dict,\n",
    "  FLIGHTT_INFO_R text encoding dict,\n",
    "  LENGTH_OF_STAY_R float,\n",
    "  LENGTH_OF_STAY_CATEGORY_R text encoding dict,\n",
    "  CALCULATED_NIGHTS_NUM_R float,\n",
    "  ROOM_NIGHTS_R float,\n",
    "  STATUS_CALCULATION_R text encoding dict,\n",
    "  GROUPS_VS_FIT_R text encoding dict,\n",
    "  GUEST_COUNTRY_R text encoding dict,\n",
    "  GOLDPASSPORT_FLG_R text encoding dict,\n",
    "  MEMBER_STATUS_R text encoding dict,\n",
    "  PACE_CATEGORY_R text encoding dict,\n",
    "  PACE_R float,\n",
    "  OFFER_FLG_R text encoding dict,\n",
    "  PAST_VS_FUTURE_R text encoding dict,\n",
    "  REVENUE_R float,\n",
    "  REVENUE_USD_R float,\n",
    "  CHANNEL_CODE_R text encoding dict,\n",
    "  e_delivereddate_I timestamp,\n",
    "  e_delivereddate_adj_I timestamp,\n",
    "  e_status_I text encoding dict,\n",
    "  e_hy_gss_tier_I text encoding dict,\n",
    "  e_hy_gss_conf_num_I float,\n",
    "  e_hy_gss_conf_page_I float,\n",
    "  e_hy_feedback_type_I text encoding dict,\n",
    "  e_hy_gss_rate_plan_code_I text encoding dict,\n",
    "  e_country_I text encoding dict,\n",
    "  e_hy_gss_check_in_by_I text encoding dict,\n",
    "  e_hy_gss_check_out_by_I text encoding dict,\n",
    "  e_hy_gss_marketing_emails_ok_yn_I text encoding dict,\n",
    "  e_hy_gss_promo_emails_ok_yn_I text encoding dict,\n",
    "  e_hy_gss_club_access_yn_I text encoding dict,\n",
    "  e_hy_gss_gender_I text encoding dict,\n",
    "  e_hy_gss_title_text_I text encoding dict,\n",
    "  e_hy_gss_language_I text encoding dict,\n",
    "  e_hy_gss_room_floor_I text encoding dict,\n",
    "  e_hy_gss_check_in_time_text_I text encoding dict,\n",
    "  e_hy_gss_check_out_time_text_I text encoding dict,\n",
    "  e_hy_gss_check_in_time_I text encoding dict,\n",
    "  e_hy_gss_check_out_time_I text encoding dict,\n",
    "  e_checkin_I date,\n",
    "  e_checkout_I date,\n",
    "  a_last_seen_page_name_I text encoding dict,\n",
    "  a_last_submitted_page_name_I text encoding dict,\n",
    "  Survey_ID_H float,\n",
    "  Response_Date_H timestamp,\n",
    "  Guest_Checkin_Date_H date,\n",
    "  Guest_Checkout_Date_H date,\n",
    "  Length_Stay_H float,\n",
    "  Guest_State_H text encoding dict,\n",
    "  Guest_Country_H text encoding dict,\n",
    "  Gender_H text encoding dict,\n",
    "  Age_Range_H text encoding dict,\n",
    "  POV_H text encoding dict,\n",
    "  Language_H text encoding dict,\n",
    "  DOE_H date,\n",
    "  Booking_Location_H text encoding dict,\n",
    "  Num_Adults_H float,\n",
    "  Num_Kids_H float,\n",
    "  Num_Rooms_H float,\n",
    "  Conf_Num_Orig_H float,\n",
    "  Conf_Num_H float,\n",
    "  Conf_Page_H float,\n",
    "  Rate_Plan_H text encoding dict,\n",
    "  Gross_Rev_H float,\n",
    "  Net_Rev_H float,\n",
    "  Room_Rev_H float,\n",
    "  CC_Type_H text encoding dict,\n",
    "  Currency_H text encoding dict,\n",
    "  Clublounge_Used_H text encoding dict,\n",
    "  Spa_Used_H text encoding dict,\n",
    "  GDS_Source_H text encoding dict,\n",
    "  Checkin_Length_H float,\n",
    "  Room_Num_H float,\n",
    "  Room_Type_H text encoding dict,\n",
    "  GP_Tier_H text encoding dict,\n",
    "  Status_H text encoding dict,\n",
    "  Feedback_Type_H text encoding dict,\n",
    "  Mobile_First_H text encoding dict,\n",
    "  Mobile_H text encoding dict,\n",
    "  Overall_Sat_H float,\n",
    "  Guest_Room_H float,\n",
    "  Tranquility_H float,\n",
    "  Condition_Hotel_H float,\n",
    "  Customer_SVC_H float,\n",
    "  Staff_Cared_H float,\n",
    "  Internet_Sat_H float,\n",
    "  Check_In_H float,\n",
    "  FB_FREQ_H float,\n",
    "  FB_Overall_Experience_H float,\n",
    "  Internet_Dissat_Lobby_H float,\n",
    "  Internet_Dissat_Slow_H float,\n",
    "  Internet_Dissat_Expensive_H float,\n",
    "  Internet_Dissat_Connectivity_H float,\n",
    "  Internet_Dissat_Billing_H float,\n",
    "  Internet_Dissat_Wired_H float,\n",
    "  Internet_Dissat_Other_H float,\n",
    "  TV_Internet_General_H float,\n",
    "  Room_Dissat_Internet_H float,\n",
    "  eff_date_CC date,\n",
    "  avail_room_cnt_CC float,\n",
    "  occ_room_cnt_CC float,\n",
    "  average_daily_rate_CC float,\n",
    "  Spirit_PL text encoding dict,\n",
    "  Property_ID_PL text encoding dict,\n",
    "  Hotel_Name_Long_PL text encoding dict,\n",
    "  Hotel_Name_Short_PL text encoding dict,\n",
    "  Award_Category_PL text encoding dict,\n",
    "  Status_PL text encoding dict,\n",
    "  City_PL text encoding dict,\n",
    "  State_PL text encoding dict,\n",
    "  US_Region_PL text encoding dict,\n",
    "  Postal_Code_PL text encoding dict,\n",
    "  Country_PL text encoding dict,\n",
    "  Ops_Region_PL text encoding dict,\n",
    "  Sub_Continent_PL text encoding dict,\n",
    "  Property_DMA_PL text encoding dict,\n",
    "  Property_Latitude_PL float,\n",
    "  Property_Longitude_PL float,\n",
    "  Currency_PL text encoding dict,\n",
    "  Dom_Intl_PL text encoding dict,\n",
    "  Guest_NPS_Goal_PL float,\n",
    "  STR_Number_PL float,\n",
    "  STR_Market_PL text encoding dict,\n",
    "  Brand_PL text encoding dict,\n",
    "  Brand_Initial_PL text encoding dict,\n",
    "  Club_Type_PL text encoding dict,\n",
    "  Hotel_Inventory_PL float,\n",
    "  Floors_PL float,\n",
    "  Total_Meeting_Space_PL float,\n",
    "  Union_PL text encoding dict,\n",
    "  GRegion_PL text encoding dict,\n",
    "  DRegion_PL text encoding dict,\n",
    "  Region_PL text encoding dict,\n",
    "  Category_PL text encoding dict,\n",
    "  Scope_of_Service_PL text encoding dict,\n",
    "  Type_PL text encoding dict,\n",
    "  Class_PL text encoding dict,\n",
    "  Location_PL text encoding dict,\n",
    "  Bucket_PL text encoding dict,\n",
    "  Relationship_PL text encoding dict,\n",
    "  All_Suites_PL text encoding dict,\n",
    "  Bell_Staff_PL text encoding dict,\n",
    "  Boutique_PL text encoding dict,\n",
    "  Business_Center_PL text encoding dict,\n",
    "  Casino_PL text encoding dict,\n",
    "  Conference_PL text encoding dict,\n",
    "  Convention_PL text encoding dict,\n",
    "  Dry_Cleaning_PL text encoding dict,\n",
    "  Elevators_PL text encoding dict,\n",
    "  Fitness_Center_PL text encoding dict,\n",
    "  Fitness_Trainer_PL text encoding dict,\n",
    "  Golf_PL text encoding dict,\n",
    "  Indoor_Corridors_PL text encoding dict,\n",
    "  Laundry_PL text encoding dict,\n",
    "  Limo_Service_PL text encoding dict,\n",
    "  Mini_Bar_PL text encoding dict,\n",
    "  Pool_Indoor_PL text encoding dict,\n",
    "  Pool_Outdoor_PL text encoding dict,\n",
    "  Regency_Grand_Club_PL text encoding dict,\n",
    "  Resort_PL text encoding dict,\n",
    "  Restaurant_PL text encoding dict,\n",
    "  Self_Parking_PL text encoding dict,\n",
    "  Shuttle_Service_PL text encoding dict,\n",
    "  Ski_PL text encoding dict,\n",
    "  Spa_PL text encoding dict,\n",
    "  Spa_fitness_center_PL text encoding dict,\n",
    "  Spa_online_booking_PL text encoding dict,\n",
    "  Spa_offering_PL text encoding dict,\n",
    "  Valet_Parking_PL text encoding dict,\n",
    "  GP_Tier text encoding dict,\n",
    "  Sub_Channel_Category text encoding dict,\n",
    "  Channel_Category text encoding dict,\n",
    "  Booking_Channel text encoding dict,\n",
    "  NPS_Type text encoding dict,\n",
    "  Stay_Sequence float,\n",
    "  Stay_Sequence_Brand float,\n",
    "  Completed_Survey_Sequence float,\n",
    "  Days_Since_Last_Stay float,\n",
    "  Days_Until_Next_Stay float,\n",
    "  pred_recommend_rating Integer);'''\n",
    "\n",
    "load_query = \"COPY hyatt_all_pred_3 FROM '/raidStorage/wamsi/mapd-ml/notebooks/hyatt_predict1.csv';\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#(Re)create the prediction table.\n",
    "#drop_query uses DROP TABLE IF EXISTS, so dropping first is always safe and\n",
    "#avoids the bare except that would hide real errors (e.g. connection failures).\n",
    "client.sql_execute(session,drop_query,True,None,first_n = -1)\n",
    "client.sql_execute(session,temp_query,True,None,first_n = -1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#load data\n",
    "load = client.sql_execute(session,load_query,True,None,first_n = -1)\n",
    "print('Records {}'.format(load.row_set.columns[0].data.str_col[0]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "print('Total time for execution: %d' %((time()-start_time)))\n",
    "print('Time for making matrices: %d' %(metime - mtime))\n",
    "print('Time for predicting: %d' %(petime - ptime))\n",
    "\n",
    "#print('Time taken for extracting data: %d'%((qse_time-qs_time)))\n",
    "#print('Time for creating dataframe %d'%(dfe_time-df_time))\n",
    "#print('Preprocessing time: %d'%((dpe_time-dp_time)))\n",
    "#print('Time to predict: %d'%((he_time-h_time)))\n",
    "#print('Time to write to csv: %d'%(csve_time-csv_time))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Start MapD immerse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "%%HTML\n",
    "<a href=\"http://localhost:9999/\" target=\"_blank\">MapD Immerse</a>"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
