#!/usr/bin/python
# -*- coding:utf-8 -*-

import Queue
import threading
import time
import sys
import os
import urllib
import re
import codecs
import logging
import ConfigParser

from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys  # Keys: for simulating keyboard input
from cvsparser import CvsParser

reload(sys)
sys.setdefaultencoding("utf-8")  # force utf-8 as the default codec (Python 2 only)

# dcap = dict(DesiredCapabilities.PHANTOMJS)  # set a custom userAgent
# dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36")

# NOTE(review): hard-coded Windows path to the PhantomJS binary; the backslashes
# happen not to form escape sequences here, but a raw string would be safer.
browser = webdriver.PhantomJS(executable_path='C:\Python27\Scripts\phantomjs.exe')
# browser = webdriver.PhantomJS()
# browser = webdriver.Chrome()
browser.implicitly_wait(30)  # implicit wait for element lookups (seconds)
browser.set_page_load_timeout(30)  # page-load timeout (seconds)
browser.maximize_window()  # maximize the browser window

cvs_parser = CvsParser()  # CSV writer helper (see cvsparser module)
baidu_map_url = 'http://map.baidu.com/'
file_name = ''  # output CSV path; assigned in gogogo()

def logConf(is_debug, file_name):
    """Configure the root logger at INFO level.

    When is_debug is true, records go to the console and file_name is
    ignored; otherwise they are written to file_name (mode 'w').  Both
    paths share the same record format.
    """
    record_fmt = '%(asctime)s %(filename)s+%(lineno)d [%(levelname)s] %(message)s'
    if is_debug:
        logging.basicConfig(level=logging.INFO, format=record_fmt, filemode='w')
    else:
        logging.basicConfig(level=logging.INFO, format=record_fmt,
                            filename=file_name, filemode='w')

def getSysTime():
    """Return the current local time formatted as 'YYYYmmdd-HHMMSS'."""
    return time.strftime('%Y%m%d-%H%M%S', time.localtime())

# Timestamp captured once at import time; shared by the log and CSV file names.
curr_time = getSysTime()

def getConf(config_file, key):
    """Look up *key* in a simple ``key=value`` config file.

    The file is read as GBK text.  Blank lines are skipped; the first
    line whose (stripped) key matches returns its value with surrounding
    whitespace removed.  Returns None when the file cannot be read, a
    non-blank line is malformed, or the key is not present.
    """
    try:
        file_cfg = codecs.open(config_file, 'r', 'gbk')
        try:
            file_lines = file_cfg.readlines()
        finally:
            file_cfg.close()
    except Exception as err:
        logging.error(err)
        return None

    for line in file_lines:
        line = line.strip()
        if not line:
            # Tolerate blank lines (e.g. a trailing newline) instead of
            # aborting the whole lookup.
            continue

        # maxsplit=1 so values containing '=' are handled.
        word_array = line.split('=', 1)
        if len(word_array) != 2:
            logging.error('read config %s failed', config_file)
            return None

        if word_array[0].strip() == key:
            # Strip so the trailing newline never leaks into the value
            # (the old code returned 'value\n', corrupting file names).
            return word_array[1].strip()

    return None

def isEndPage():
	"""Return True when the pager's "next" link is disabled or unreadable."""
	try:
		link_class = browser.find_element_by_xpath("//a[@tid='toNextPage']").get_attribute('class')
	except Exception as error:
		logging.error("%s", error)
		return True
	if link_class == "next next-none":
		logging.info('reach end page')
		return True
	return False

def getCurrPage():
	"""Return the pager's current page number as text, or "-1" on any failure."""
	try:
		return browser.find_element_by_class_name("curPage").text
	except Exception:
		return "-1"

def goNextPage():
	"""Click the "next page" link and wait until the pager advances.

	Returns False when the last page has been reached or the click raised;
	True once the current-page indicator changes.
	"""
	if isEndPage():
		return False

	pre_page = getCurrPage()

	while True:
		try:
			logging.info('click next page, pre_page=%s', pre_page)
			browser.find_element_by_xpath("//a[@tid='toNextPage']").click()
			time.sleep(1)
			if isEndPage():
				return False
			
			# Poll (up to ~5s) for the page indicator to move past pre_page.
			count = 0
			is_ok = False
			while True:
				curr_page = getCurrPage()
				logging.info('get curr_page=%s', curr_page)
				if curr_page == "-1" or pre_page == curr_page:
					logging.info('waitting for go to next page')
					time.sleep(1)
					count = count + 1
					if count >= 5:
						# Indicator never changed; fall through and retry the click.
						break
					continue
				else:
					is_ok = True
					break

			if is_ok:
				break
		except Exception, error:
			logging.error("%s", error)
			return False

	return True

def search(query_content, beg_page, page_num):
	"""Query Baidu Maps for *query_content* and scrape result pages.

	Starts saving at page *beg_page* (1-based) and stops after *page_num*
	pages have been written or the pager runs out.  Each saved page's
	(index, name, address) rows go to the CSV via writeToFile().
	Returns the accumulated list of rows.
	"""
	name_list = []
	get_page_num = 0
	page_index = 1

	# Load the map page and submit the query; retry until it succeeds.
	while True:
		try:
			browser.get(baidu_map_url)
		except Exception as error:
			logging.error("please check your network env, try again")
			time.sleep(2)
			continue

		try:
			browser.find_element_by_id('sole-input').send_keys(query_content)
			browser.find_element_by_id('search-button').click()
			time.sleep(3)
			break
		except Exception as error:
			logging.error("%s", error)
			time.sleep(2)
			continue

	pre_page_list = []
	max_try_times = 3
	while True:
		name_elements = []
		address_elements = []
		try:
			get_ret = True
			try_times = 0
			while True:
				name_elements = browser.find_elements_by_class_name('n-blue')
				# BUG FIX: was assigned to a misspelled "addres_elements",
				# which left this initialized list permanently empty.
				address_elements = browser.find_elements_by_class_name('n-grey')
				if len(name_elements) > 0:
					break

				logging.info('get "n-blue" element size = 0, try again')
				try_times = try_times + 1
				if try_times == max_try_times:
					get_ret = False
					break

				time.sleep(1)

			if not get_ret:
				# Nothing on this page after retries; advance or finish.
				if not goNextPage():
					break
				continue

		except Exception as error:
			logging.error("get page list failed, try again, error: %s", error)
			time.sleep(2)
			continue

		count = 0
		page_list = []
		status = True

		for index in range(0, len(name_elements)):
			count = count + 1
			# BUG FIX: "address" was unbound (NameError) when the lookup
			# below failed on the first row.
			address = ""
			try:
				name = name_elements[index].text
			except Exception as error:
				status = False
				break

			try:
				address = address_elements[index].text
			except Exception as error:
				pass  # address is optional; keep going with an empty one

			if len(name) == 0:
				status = False
				break

			page_list.append((str(page_index) + "." + str(count), name, address))
			logging.info('%d.%d %s %s', page_index, count, name, address)

		logging.info("")

		if status and len(page_list) > 0:
			# Guard against Baidu serving the same page twice: compare the
			# first few rows against the previously scraped page.
			# BUG FIX: the old check indexed rows [1] and [2] unconditionally
			# (IndexError on pages with fewer than three rows).
			overlap = min(3, len(pre_page_list), len(page_list))
			if overlap > 0 and any(pre_page_list[i] == page_list[i] for i in range(overlap)):
				logging.info("found the same page = %d, try again", page_index)
				time.sleep(3)
				continue
			# BUG FIX: remember the latest page (the old code only ever kept
			# the first page, so later duplicates went undetected).
			pre_page_list = page_list
		else:
			logging.error('get page = %u error, try again', page_index)
			time.sleep(2)
			continue

		if page_index < beg_page:
			# Still before the requested start page; skip without saving.
			logging.info('jump up page=%d', page_index)
			if not goNextPage():
				break

			page_index = page_index + 1
			continue

		for row in page_list:
			name_list.append(row)
		writeToFile(page_list)

		get_page_num = get_page_num + 1
		if get_page_num == page_num:
			break

		if not goNextPage():
			break

		page_index = page_index + 1

	return name_list

def writeToFile(var_list):
	"""Append the given rows to the output CSV (module-global file_name).

	Best effort: failures are logged, never raised.
	"""
	try:
		cvs_parser.open(file_name)
		try:
			cvs_parser.writeRows(var_list)
		finally:
			# BUG FIX: the old code leaked the open CSV handle whenever
			# writeRows raised; always close it.
			cvs_parser.close()
	except Exception as error:
		logging.error("%s", error)

def gogogo():
	"""Entry point: read CLI args and key_word.ini, then run the scrape.

	argv[1] (optional) -- first page to save, default 1
	argv[2] (optional) -- number of pages to save, default 100
	"""
	beg_page = 1
	if len(sys.argv) >= 2:
		beg_page = int(sys.argv[1])

	page_num = 100
	if len(sys.argv) >= 3:
		page_num = int(sys.argv[2])

	query_key = getConf('key_word.ini', 'keys')
	# BUG FIX: a missing/unreadable config made getConf return None and the
	# string concatenation below crash with TypeError; fail with a clear log.
	if query_key is None:
		logging.error('no "keys" entry found in key_word.ini, abort')
		return

	# Defensive strip: getConf may hand back the raw line tail including
	# its newline, which would corrupt the CSV file name.
	query_key = query_key.strip()
	logging.info('Baidu map query=\"%s\", beg_page=%d, page_num=%d', query_key, beg_page, page_num)

	global file_name
	file_name = query_key + "-" + curr_time + ".csv"
	name_list = search(query_key, beg_page, page_num)

	logging.info('Recording count = %d, save to %s', len(name_list), file_name)

if __name__=="__main__":
	log_file = "baidusearch-" + curr_time + ".log"
	logConf(True, log_file)

	gogogo()

	browser.quit()

	logging.info('Exit baidu map query system')