#encoding: utf-8

import scrapy
from scrapy.http.request.form import FormRequest
import urllib
import urlparse
from mydmoz.items import NumberItem
import sqlite3
import MySQLdb

from scrapy.utils.project import get_project_settings
from scrapy_redis.spiders import RedisSpider
from collections import namedtuple
SETTINGS = get_project_settings()

# Normalisation table: maps the raw labels scraped from the Baidu / 360
# result pages (several variant spellings of the same thing) onto one
# canonical category name, e.g. all of "fraud", "fraud call", "suspected
# fraud" map to the single fraud category.  Keys and values are runtime
# data matched against page text -- do not translate or reformat them.
category = {
            u'诈骗': u'诈骗',
            u'诈骗电话': u'诈骗',
            u'疑似诈骗': u'诈骗',
            u'骚扰': u'骚扰',
            u'骚扰电话':u'骚扰',
            u'保险': u'保险理财',
            u'保险理财': u'保险理财',
            u'股票': u'保险理财',
            u'金融': u'保险理财',
            u'中介': u'房产中介',
            u'房产中介': u'房产中介',
            u'房产销售': u'房产中介',
            u'出租车电话': u'出租车',
            u'出租车': u'出租车',
            u'广告': u'广告推销',
            u'广告推销': u'广告推销',
            u'业务推销': u'广告推销',
            u'广告推销电话': u'广告推销',
            u'推销': u'广告推销',
            u'培训推销': u'广告推销',
            u'快递': u'快递送餐',
            u'猎头': u'招聘猎头',
            u'招聘猎头': u'招聘猎头',
            u'外卖': u'快递送餐',
            u'快递送餐': u'快递送餐',
            u'物流': u'快递送餐',
            u'客服电话': u'客服电话',
            u'响一声': u'响一声',
            u'网络来源电话': u'网络来源电话',
            u'网络推广': u'网络推广',
            u'响一声电话！请谨慎回拨！': u'响一声',
            u'酒店': u'酒店'

            }

def _monkey_patching_HTTPClientParser_statusReceived():
    """
    Monkey-patch twisted.web._newclient.HTTPClientParser.statusReceived so a
    status line with a missing reason phrase (e.g. "HTTP/1.1 200") is
    tolerated instead of raising ParseError('wrong number of parts').
    """
    from twisted.web._newclient import HTTPClientParser, ParseError
    old_sr = HTTPClientParser.statusReceived

    def statusReceived(self, status):
        try:
            return old_sr(self, status)
        except ParseError as e:  # 'as' form works on both py2.6+ and py3
            if e.args[0] == 'wrong number of parts':
                # Retry with a dummy reason phrase appended.
                return old_sr(self, status + ' OK')
            raise

    # BUGFIX: was written as '==' (a no-op comparison) and commented out.
    statusReceived.__doc__ = old_sr.__doc__
    HTTPClientParser.statusReceived = statusReceived


def to_int(amount):
    """Return the int formed by all digit characters of *amount*.

    Non-digit characters are discarded ("12ab34" -> 1234).  Returns 0 for
    non-string input and for strings containing no digits.

    BUGFIX: the original did int(filter(...)), which raised ValueError on a
    digit-free string (int('')) and would not work at all on Python 3,
    where filter() returns an iterator.
    """
    try:
        digits = ''.join(ch for ch in amount if ch.isdigit())
    except TypeError:
        # Not iterable / not a string: mirror the original's 0 fallback.
        return 0
    return int(digits) if digits else 0
    
    
def decode_query_url(url):
    """Parse *url*'s query string into a {name: value} dict.

    Repeated parameter names keep the last occurrence (dict() over the
    parse_qsl pairs).
    """
    parsed = urlparse.urlparse(url)
    return dict(urlparse.parse_qsl(parsed.query))


def find_express(message):
    """Map a free-text label *message* onto a canonical category.

    Keyword groups are tried in a fixed priority order (express/delivery
    first, then real estate, hotel, finance, customer service, recruiting);
    the first group with a keyword contained in *message* wins.  If nothing
    matches, *message* is returned unchanged.
    """
    keyword_groups = (
        ((u'快递', u'快运', u'物流', u'速运', u'速递', u'货运', u'中通',
          u'天地华宇', u'佳吉', u'百世汇通', u'顺丰', u'宅急送'), u'快递送餐'),
        ((u'地产', u'置业', u'房产'), u'房产中介'),
        ((u'酒店',), u'酒店'),
        ((u'保险', u'银行', u'信用卡'), u'保险理财'),
        ((u'客服',), u'客服电话'),
        ((u'招聘',), u'招聘猎头'),
    )
    for keywords, label in keyword_groups:
        if any(kw in message for kw in keywords):
            return label
    return message
        
    


class HaomaSpider(RedisSpider):
    """Crawls Baidu and 360 (so.com/haosou) phone-number lookup pages for
    numbers popped from two redis sorted sets, and merges both engines'
    labels into the SBJ_Crawlerresult MySQL table."""

    name = "haoma"
    allowed_domains = ["baidu.com", "haosou.com"]
    download_delay = 1.8
    redis_key = "haoma"
    redis_acl_1 = 'acl_1'  # primary number queue (sorted set)
    redis_acl_2 = 'acl_2'  # fallback number queue (sorted set)

    def __init__(self, sqlstr=None):
        # Connect to MySQL from project settings and force utf8 on the
        # connection so the Chinese category labels round-trip correctly.
        self.con = MySQLdb.connect(host=SETTINGS['DB_HOST'],
                                   port=SETTINGS['DB_PORT'],
                                   user=SETTINGS['DB_USER'],
                                   passwd=SETTINGS['DB_PASSWD'],
                                   db=SETTINGS['DB_DB'])
        self.cur = self.con.cursor()
        self.con.set_character_set('utf8')
        self.cur.execute('SET NAMES utf8;')
        self.cur.execute('SET CHARACTER SET utf8;')
        self.cur.execute('SET character_set_connection=utf8;')
        self.table_name = 'SBJ_Crawlerresult'
        self.start_urls = ''
        self.baidu_url = "http://www.baidu.com/s?wd="
        self.haosou_url = "https://www.so.com/index.php?a=index&q="

    def _pop_number(self):
        """Pop the highest-scored number from acl_1, falling back to acl_2.

        Returns None when both sets are empty.  BUGFIX: the original indexed
        zrevrange(...)[0] unconditionally, which raised IndexError on an
        empty set and made the acl_2 fallback unreachable.
        """
        for key in (self.redis_acl_1, self.redis_acl_2):
            members = self.server.zrevrange(key, 0, 0)
            if members:
                number = members[0]
                self.server.zrem(key, number)
                return number
        return None

    def next_request(self):
        """Return a list of requests to be scheduled, or None."""
        number = self._pop_number()
        if number:
            return [self.make_requests_from_url(self.baidu_url + number),
                    self.make_requests_from_url(self.haosou_url + number)]

    def schedule_next_request(self):
        """Schedule the next pair of requests if a number is pending."""
        reqs = self.next_request()
        if reqs:
            for req in reqs:
                self.crawler.engine.crawl(req, spider=self)

    def phone_clear(self, line):
        """Normalise a phone number string: strip whitespace and dashes."""
        return line.strip().replace('-', '').replace(' ', '')

    def parse_baidu(self, response):
        """Parse a Baidu result page for the number's fraud-mark widget and
        merge the label with any existing 360 result in the DB row."""
        try:
            div = response.xpath("//div[@tpl='liarphone2']")
        except Exception:
            div = None
        number = decode_query_url(response.url)['wd']
        if not div:
            message = "none"
            amount = 0
            cheat = False
        else:
            cheat = div.css('.c-tool-cheat').extract()
            message = div.css('strong::text').extract()[0].strip('"')
            if not cheat:
                # No explicit cheat mark: classify by keyword instead.
                message = find_express(message)
            is_cheat = category.get(message, "")
            message = category.get(message, message)
            if is_cheat:
                cheat = True
            amount = to_int(div.css('.op_liarphone2_word::text').extract()[0])
        # Parameterised queries throughout -- BUGFIX: the original built SQL
        # by %-interpolation, which breaks on quotes in scraped text and is
        # injection-prone.
        self.cur.execute(
            "select 360_result from SBJ_Crawlerresult where ANO=%s", (number,))
        res = self.cur.fetchone()
        if res:
            haosou = res[0]
            # BUGFIX: check None *before* encoding; the original encoded
            # first, making its own None branch unreachable.
            if message is None:
                merge_message = haosou
            else:
                message = message.encode('utf-8')
                # BUGFIX: original tested `haosou == "none" and haosou is
                # None` (always False); `or` matches parse_360's logic.
                if haosou == "none" or haosou is None:
                    merge_message = message
                elif message in haosou:
                    merge_message = haosou
                else:
                    merge_message = "&".join((haosou, message))
            self.cur.execute(
                "update SBJ_Crawlerresult set baidu_result=%s, baidu_count=%s,"
                " search_time=NOW(), Crawler_result=%s where ANO=%s",
                (message, amount, merge_message, number))
        else:
            merge_message = message if cheat else "none"
            self.cur.execute(
                "insert into SBJ_Crawlerresult(ANO, baidu_result, baidu_count,"
                "Crawler_result, search_time) values(%s, %s, %s, %s, NOW())",
                (number, message, amount, merge_message))
        self.con.commit()
        return {'number': number, 'message': message, 'amount': amount}

    def parse_360(self, response):
        """Parse a 360/haosou result page for the number's mark tip and merge
        the label with any existing Baidu result in the DB row."""
        div = response.css('div.mohe-tips')
        number = decode_query_url(response.url)['q'].decode('utf-8')
        if not div:
            message = 'none'
            amount = 0
        else:
            message = response.css('.mohe-tips .mohe-ph-mark::text').extract()[0]
            message = category.get(message, message)
            counts = response.css('.mohe-tips span > b::text').extract()
            amount = counts[0] if counts else 0
        self.cur.execute(
            "select baidu_result from SBJ_Crawlerresult where ANO=%s", (number,))
        res = self.cur.fetchone()
        if res:
            baidu = res[0]
            # BUGFIX: None check moved before .encode(); the original encoded
            # first and would raise AttributeError on a NULL column.
            if baidu is None:
                merge_message = message
            else:
                baidu = baidu.encode('utf-8')
                if baidu == "none":
                    merge_message = message
                elif message == 'none':
                    merge_message = baidu
                elif baidu in message:
                    merge_message = message
                else:
                    merge_message = "&".join((baidu, message))
            self.cur.execute(
                "update SBJ_Crawlerresult set 360_result=%s, 360_count=%s,"
                "Crawler_result=%s, search_time=NOW() where ANO=%s",
                (message, amount, merge_message, number))
        else:
            self.cur.execute(
                "insert into SBJ_Crawlerresult(ANO, 360_result, 360_count,"
                " search_time) values(%s, %s, %s, NOW())",
                (number, message, amount))
        self.con.commit()
        return {'number': number, 'message': message, 'amount': amount}

    def parse(self, response):
        """Dispatch the response to the engine-specific parser by host.

        BUGFIX: the original yielded an unbound local (`result`) for any
        other host; now unknown hosts are silently skipped.
        """
        netloc = urlparse.urlparse(response.url).netloc
        if netloc == 'www.baidu.com':
            yield self.parse_baidu(response)
        elif netloc == 'www.so.com':
            yield self.parse_360(response)