# -*- coding: utf-8 -*-

import urllib

from dolphin.common.spiderconst import SpiderConst
from dolphin.config.confighelper import confighelper
from dolphin.common.dolphinhttpclient import dolphinhttpclient

from dolphin.common.commonlogger import commonlogger

#from Scrapy.cmdline import execute
logger = commonlogger().getlogger()

class GoogleBookBiz:
    """Builds the scrape URLs (query strings) for the Google Books API.

    The API base URL is read from config key ('global', 'google_book_api_url');
    paging uses SpiderConst.GOOGLE_BOOK_DEFAULT_SCRAPY_SIZE items per page.
    """

    def __init__(self):
        return

    def get_scrapy_urls(self, query_key_word):
        """Return the single initial scrape URL: API base + raw keyword.

        NOTE(review): query_key_word is appended WITHOUT URL-encoding here,
        unlike get_all_scrapy_urls() which encodes it — confirm callers pass
        a pre-encoded keyword.
        """
        url_main = confighelper.getValue(self, 'global', 'google_book_api_url')
        initial_url = url_main + query_key_word
        return initial_url

    def get_all_scrapy_urls(self, query_key_word):
        """Return a list of paged query strings for query_key_word.

        Each entry has the form "?q=...&maxResults=<page>&startIndex=<n>".
        Returns an empty list when the API reports zero total items.
        """
        urls = []
        url_param = {
            "q": query_key_word,
            "maxResults": 40
        }
        url_main = confighelper.getValue(self, 'global', 'google_book_api_url')
        initial_url = url_main + "?" + urllib.parse.urlencode(url_param)
        # BUG FIX: the original passed `self` explicitly to this *bound*
        # method, so the instance arrived as initial_url and the call raised
        # TypeError ("takes 2 positional arguments but 3 were given").
        total_elements = self.get_total_elements_num_by_keyword(initial_url)
        logger.info("get total elements:" + str(total_elements))
        if total_elements == 0:
            return urls
        page_size = SpiderConst.GOOGLE_BOOK_DEFAULT_SCRAPY_SIZE
        start_index = 0
        #
        # Google may return an invalid/inflated totalItems value, and pages
        # beyond a certain index return no data, so paging is capped.
        # NOTE(review): the `- 40` offset makes the loop emit one page past
        # total_elements, and the cap (641) allows start indexes up to 680 —
        # the original comment says "top 600"; kept as-is since the overshoot
        # may be a deliberate workaround for the unreliable totalItems.
        #
        while (start_index - 40) < total_elements and (start_index - 40) < 641:
            page_query = urllib.parse.urlencode({"q": query_key_word})
            urls.append(
                "?" + page_query
                + "&maxResults=" + str(page_size)
                + "&startIndex=" + str(start_index))
            start_index += page_size
        return urls

    def get_total_elements_num_by_keyword(self, initial_url):
        """Fetch initial_url and return the response's "totalItems" count.

        Returns 0 when the HTTP client yields no response. The response is
        expected to be a parsed mapping containing "totalItems".
        """
        total_element = 0
        # NOTE(review): project convention passes the client class itself as
        # the first (self) argument — preserved as-is.
        response_text = dolphinhttpclient.get_response_data_google(
            dolphinhttpclient, initial_url)
        if response_text is not None:
            logger.info(response_text)
            total_element = response_text["totalItems"]
        return total_element