#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Super Search Engine is powered by python webkit headless browser
which is used for scraping AJAX-powered webpages. 


url:  the page whose source you want to get
time: wait for seconds

"""


import sys, os, string, subprocess, re, glob,shutil,time,requests
import argparse

import bs4
from bs4 import BeautifulSoup

# -*- coding: utf-8 -*-

import sqlite3 as lite


myApiKey = r'pvezh6ggtf6rfyk2ha7gbnvh'
#download a category from a store
def download_product_list( storeDir='./BestbuyProducts',cat_id = None,category_name=None,apiKey="pvezh6ggtf6rfyk2ha7gbnvh"):
    """Download every result page of a Best Buy category listing as XML.

    storeDir:      root folder; pages land in <storeDir>/<category_name>/xml/
    cat_id:        Best Buy category path id, e.g. 'abcat0500000'
    category_name: folder-safe label used for the output directory
    apiKey:        Best Buy Remix API key
    """
    base_url = (r'http://api.remix.bestbuy.com/v1/products(categoryPath.id=' + cat_id + r')'
                + r'?show=sku,name,regularPrice,salePrice,image,largeFrontImage,largeImage,'
                + r'leftViewImage,rightViewImage,topViewImage,mobileUrl&pageSize=100&page=')
    key_suffix = r'&apiKey=' + apiKey

    first_url = base_url + '1' + key_suffix
    print("url: %s" % first_url)
    total_pages = get_page_number_from_url(first_url)
    print("total_pages: %s" % total_pages)

    # Create the output folder portably.  The previous shell "mkdir -p" was
    # non-portable and open to shell injection via category_name.
    cat_xml_dir = os.path.join(storeDir, category_name, "xml")
    if not os.path.isdir(cat_xml_dir):
        os.makedirs(cat_xml_dir)

    for page in range(1, total_pages + 1):
        url = base_url + str(page) + key_suffix
        print("get page: %s" % page)
        print(url)
        resp = requests.get(url)

        out_xml = os.path.join(cat_xml_dir, "page_" + str(page) + ".xml")
        try:  # open output xml file
            # binary mode + explicit UTF-8 encode: writing resp.text (unicode)
            # to a text-mode file raises on non-ASCII product names in py2
            out_file = open(out_xml, 'wb')
        except Exception as err:
            print("%s %s open %s failed!" % (Exception, err, out_xml))
            sys.exit(1)
        try:
            out_file.write(resp.text.encode('utf-8'))
        finally:
            out_file.close()
        time.sleep(0.3)  # stay under the API rate limit

#get page number from url
def get_page_number_from_url(url):
    """Fetch *url* and return the int 'totalPages' attribute of its
    <products> root element.

    A copy of the raw response is kept in ./page_0.xml for debugging.
    """
    resp = requests.get(url)

    out_xml = "./page_0.xml"
    try:  # open output xml file
        # binary mode: avoid unicode encode errors on non-ASCII content
        out_file = open(out_xml, 'wb')
    except Exception as err:
        print("%s %s open %s failed!" % (Exception, err, out_xml))
        sys.exit(1)
    try:
        out_file.write(resp.text.encode('utf-8'))
    finally:
        out_file.close()
    print("process-- %s" % out_xml)

    # parse the response already in memory instead of re-reading the file
    soup = BeautifulSoup(resp.text, "xml")
    total_pages = int(soup.products["totalPages"])
    print("total_pages: %s" % total_pages)
    return total_pages
    
#Cache all product images
def download_images(storeDir='./BestbuyProducts',category_name=None, pages=None):
    """Download every image referenced by a category's cached XML pages.

    Reads <storeDir>/<category_name>/xml/page_<n>.xml for n in 1..pages and
    fetches each non-empty image URL into <storeDir>/<category_name>/images
    using wget.
    """
    print("total pages: %s will be downloaded" % pages)
    # create the images folder portably (was shell "mkdir -p")
    cat_images_dir = os.path.join(storeDir, category_name, "images")
    if not os.path.isdir(cat_images_dir):
        os.makedirs(cat_images_dir)

    # every image variant exposed by the API's 'show' fields
    image_tags = ["image", "largeFrontImage", "largeImage",
                  "leftViewImage", "rightViewImage", "topViewImage"]

    for page in range(1, pages + 1):
        print("read page: %s" % page)
        xml_file = os.path.join(storeDir, category_name, "xml",
                                "page_" + str(page) + ".xml")
        print("process-- %s" % xml_file)

        soup = BeautifulSoup(open(xml_file), "xml")
        for product in soup.find_all("product"):
            print("sku = %s" % product.sku.string)
            # product.name clashes with Tag.name, so use find("name")
            print("name = %s" % product.find("name").string)
            print("regularPrice = %s" % product.regularPrice.string)
            print("salePrice = %s" % product.salePrice.string)

            image_urls = []
            for tag in image_tags:
                image_url = product.find(tag).string
                print("%s = %s" % (tag, image_url))
                image_urls.append(image_url)

            # download each existing image; a missing variant is None
            for image in image_urls:
                if image is None:
                    continue
                try:
                    # argument list (shell=False): immune to shell injection
                    # through a crafted image URL
                    retcode = subprocess.call(
                        ["wget", "--directory-prefix=" + cat_images_dir, image])
                    if retcode != 0:
                        print("download %s failed!" % image)
                except Exception as err:
                    print("%s %s download %s failed!" % (Exception, err, image))
        
# cache product id and image relationship to database
def populate_db(db_name=None,store_id=1,category_id=None,categoryName=None,storeDir=None,pages=1):
    """Insert one DB row per product found in a category's cached XML pages.

    db_name:      path of the sqlite database file
    store_id:     numeric id of the store the rows belong to
    category_id:  numeric id of the category the rows belong to
    categoryName: folder label; XML is read from <storeDir>/<categoryName>/xml
    pages:        number of page_<n>.xml files to process (1..pages)
    """
    cat_xml_dir = os.path.join(storeDir, categoryName, "xml")
    number = 1
    print("total pages: %s" % pages)

    # image columns, in the same order as the INSERT statement expects them
    image_tags = ["image", "largeFrontImage", "largeImage",
                  "leftViewImage", "rightViewImage", "topViewImage"]

    for page in range(1, pages + 1):
        xml_file = cat_xml_dir + "/page_" + str(page) + ".xml"
        print("=== process-- %s" % xml_file)

        soup = BeautifulSoup(open(xml_file), "xml")
        for product in soup.find_all("product"):
            print("+++number: %s" % number)
            product_number = product.sku.string
            print("sku = %s" % product_number)
            # product.name clashes with Tag.name, so use find("name")
            product_name = product.find("name").string
            print("name = %s" % product_name)
            regularPrice = product.regularPrice.string
            salePrice = product.salePrice.string

            # store bare file names, not URLs: lookups later match on the
            # locally cached file name
            image_names = []
            for tag in image_tags:
                file_name = extract_image_file_name(product.find(tag).string)
                print("%s_file_name = %s" % (tag, file_name))
                image_names.append(file_name)

            mobileUrl = product.mobileUrl.string
            if mobileUrl is None:
                # keep the original (misspelled) sentinel: the DB and any
                # consumers may already depend on this exact value
                mobileUrl = "not_exsit"
            print("mobileUrl = %s" % mobileUrl)
            number += 1

            # column order: store_id, category_id, product_number, name,
            # regularPrice, salePrice, 6 image file names, mobileUrl
            row_tuple = (store_id, category_id, product_number, product_name,
                         regularPrice, salePrice) + tuple(image_names) + (mobileUrl,)
            print("row: %s" % (row_tuple,))
            instert_record(db=db_name, tuple_value=row_tuple)

def extract_image_file_name (image_url):
    """Return the file-name component (text after the last '/') of *image_url*.

    Returns "not_exist" when image_url is not a string (e.g. None for a
    missing image tag), matching the sentinel stored in the database.
    """
    try:
        # EAFP: anything string-like has rsplit; a URL without '/' is
        # returned unchanged (rsplit yields the whole string)
        return image_url.rsplit("/", 1)[-1]
    except AttributeError:
        return "not_exist"
# insert row to db
def instert_record(db=None, tuple_value=None):
    """Insert one 13-value product row into store_management_product.

    tuple_value order: store_id, category_id, product_number, name,
    regularPrice, salePrice, image, largeFrontImage, largeImage,
    leftViewImage, rightViewImage, topViewImage, mobileUrl.

    Using the connection as a context manager commits on success and rolls
    back on error; the connection itself is closed explicitly ('with con'
    does NOT close it).
    """
    con = lite.connect(db)
    try:
        with con:
            cur = con.cursor()
            print("+++ Insert Row: %s" % (tuple_value,))
            cur.execute(
                "INSERT INTO store_management_product"
                "(store_id,category_id,product_number,name,regularPrice,salePrice,"
                "image,largeFrontImage,largeImage,leftViewImage,rightViewImage,"
                "topViewImage,mobileUrl) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?);",
                tuple_value)
    finally:
        con.close()
        
# get_product_id_from_image(image_file=None)
def get_product_id_from_image(db='StoresProducts.db',image_file='4910933_sc.jpg'):
    """Return the product_number of the product any of whose six image
    columns equals *image_file*, or "unknown" when no product matches.

    Fixes the original's tab-indented return statement (mixed tabs/spaces)
    and closes the connection when done.
    """
    con = lite.connect(db)
    try:
        with con:
            cur = con.cursor()
            params = (image_file,) * 6  # same value matched against all six columns
            cur.execute(
                "SELECT product_number FROM store_management_product"
                " WHERE image=? OR largeFrontImage=? OR largeImage=?"
                " OR leftViewImage=? OR rightViewImage=? OR topViewImage=?",
                params)
            row = cur.fetchone()
    finally:
        con.close()
    # the image may belong to no product
    return row[0] if row else "unknown"

# round-trip sanity check on the image -> product_number mapping
def retrive_test(db='StoresProducts.db'):
    """For every stored product, re-derive its product id from its image name."""
    connection = lite.connect(db)
    with connection:
        cursor = connection.cursor()
        cursor.execute("SELECT  product_number,image FROM  store_management_product ")
        for record in cursor.fetchall():
            print("Product_id: %s, image: %s" % (record[0], record[1]))
            looked_up = get_product_id_from_image(db, record[1])
            print("retrived p_id %s, when image= %s" % (looked_up, record[1]))
            
# retrieve real time product info from retailer's website
def retrive_product_info(p_id='9999198700050008', apiKey='pvezh6ggtf6rfyk2ha7gbnvh'):
    """Fetch live product info for *p_id* from the Best Buy API as JSON.

    apiKey: Remix API key — new optional parameter replacing the previously
            hard-coded key; the default preserves the old behavior.
    Returns the decoded JSON dict with an extra 'Source': 'local' marker.
    """
    url = (r'http://api.remix.bestbuy.com/v1/products/' + p_id +
           r'.json?show=sku,name,image,regularPrice,salePrice,mobileUrl&apiKey=' + apiKey)
    print(url)
    resp = requests.get(url)
    json_data = resp.json()
    json_data['Source'] = 'local'
    return json_data
    
# main function to test the module

def main(argv=None):
    """Run the full pipeline for three Best Buy categories:

    1. download each category's product listing XML,
    2. populate the sqlite database from the cached XML,
    3. download every referenced product image.
    """
    if argv is None:
        argv = sys.argv
    print(argv)

    #### 0 define parameters
    myApiKey = r'pvezh6ggtf6rfyk2ha7gbnvh'
    myStoreDir = r'./BestbuyProducts_01'
    myDB = r'./StoresProducts_01.db'

    # start from a clean store folder (portable; replaces shell rm/mkdir)
    if os.path.isdir(myStoreDir):
        shutil.rmtree(myStoreDir)
    os.mkdir(myStoreDir)
    # copy a fresh empty db file
    shutil.copyfile('./StoresProducts.db.fresh', myDB)

    # (cat_id, folder/category label, db category_id)
    categories = [
        ('abcat0500000', 'Computers_Tablets', 1),   # Computers & Tablets
        ('abcat0800000', 'Mobile_Phones', 2),       # Mobile Phones
        ('abcat0400000', 'Cameras_Camcorders', 3),  # Cameras & Camcorders
    ]
    # NOTE: the ',,' after 'name' is kept from the original URLs — the API
    # tolerates the empty field
    show = (r'?show=sku,name,,regularPrice,salePrice,image,largeFrontImage,largeImage,'
            r'leftViewImage,rightViewImage,topViewImage,mobileUrl&pageSize=100&page=1'
            r'&apiKey=' + myApiKey)

    ##### 1. download product lists ############################################
    page_counts = {}
    for cat_id, cat_name, _db_id in categories:
        url = r'http://api.remix.bestbuy.com/v1/products(categoryPath.id=' + cat_id + r')' + show
        pages = get_page_number_from_url(url)
        page_counts[cat_name] = pages
        print("page_number_%s: %s" % (cat_name, pages))
        time.sleep(1)  # respect the API rate limit between calls
        download_product_list(storeDir=myStoreDir, cat_id=cat_id,
                              category_name=cat_name, apiKey=myApiKey)
    print("Download product lists is finished")

    ##### 2. populate database #################################################
    for _cat_id, cat_name, db_id in categories:
        populate_db(db_name=myDB, store_id=1, category_id=db_id,
                    categoryName=cat_name, storeDir=myStoreDir,
                    pages=page_counts[cat_name])
    print("Populate database is finished")

    ###### 3. download images ##################################################
    for _cat_id, cat_name, _db_id in categories:
        download_images(storeDir=myStoreDir, category_name=cat_name,
                        pages=page_counts[cat_name])
    print("Download images is finished")


if __name__ == "__main__":
    sys.exit(main())  
