#!/usr/bin/env python
# -*- coding:utf-8 -*-
import copy
import datetime
import re
import sys

import pymongo
import requests
from bs4 import BeautifulSoup
from scpy.logger import get_logger

# from util import get_mongo_conf

logger = get_logger(__file__)
# Python 2 only: force the process-wide default encoding to UTF-8 so the
# Chinese text scraped from the site can be mixed with byte strings without
# UnicodeDecodeError. (reload() is needed because setdefaultencoding is
# removed from the sys module after interpreter start-up.)
reload(sys)
sys.setdefaultencoding("utf-8")
# MONGO_CONF = get_mongo_conf()
# Hard-coded MongoDB (host, port) pair; the commented line above shows the
# config-driven alternative.
MONGO_CONF = '10.132.23.104', 27017


class LandChinaCrawler(object):
    Mongo_Conn = pymongo.MongoClient(MONGO_CONF[0], MONGO_CONF[1])
    # Mongo_Conn = pymongo.MongoClient('127.0.0.1', 27017)
    URL = 'http://www.landchina.com/default.aspx?tabid=263'

    VIEWSTATE_PATTERN = re.compile(ur'<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="(.+?)" />')
    EVENTVALI_PATTERN = re.compile(ur'<input type="hidden" name="__EVENTVALIDATION" id="__EVENTVALIDATION" value="(.+?)" />')
    TIME_PATTERN = re.compile(ur'(\d{4}).(\d{2}).(\d{2})')

    DETAIL_KV_MAP = {
        u'行政区': 'administrativeRegion',
        u'电子监管号': 'electronicRegulation',
        u'项目名称': 'projectName',
        u'项目位置': 'location',
        u'面积(公顷)': 'area',
        u'土地用途': 'landUseage',
        u'供地方式': 'landSupplyMode',
        u'土地使用年限': 'useYears',
        u'行业分类': 'industry',
        u'土地级别': 'landGrade',
        u'成交价格(万元)': 'price',
        u'约定交地时间': 'appDeliveryTime',
        u'约定开工时间': 'appStarTime',
        u'约定竣工时间': 'appCompletedTime',
        u'实际开工时间': 'realStarTime',
        u'实际竣工时间': 'realCompletedTime',
        u'批准单位': 'approvalUnit',
        u'合同签订日期': 'contractSigningTime',
        u'土地来源': 'landSource',
    }

    DATA_FORMAT = {
        '_id': '',  #
        'sourceUrl': '',  #
        'title': '',
        'administrativeRegion': '',  # 行政区
        'electronicRegulation': '',  # 电子监管号
        'projectName': '',  # 项目名称
        'location': '',  # 项目位置
        'area': '',  # 面积(公顷)
        'landSource': '',  # 土地来源
        'landUseage': '',  # 土地用途
        'landSupplyMode': '',  # 供地方式
        'useYears': '',  # 土地使用年限
        'industry': '',  # 行业分类
        'landGrade': '',  # 土地级别
        'price': '',  # 成交价格(万元)
        'landUsers': [],  # 土地使用权人
        'appStarTime': '',  # 约定开工时间
        'realStarTime': '',  # 实际开工时间
        'approvalUnit': '',  # 批准单位
        'appDeliveryTime': '',  # 约定交地时间
        'appCompletedTime': '',  # 约定竣工时间
        'realCompletedTime': '',  # 实际竣工时间
        'contractSigningTime': '',  # 合同签订日期
        'appVolumeRatio': {    # 约定容积率
            'min': '',
            'max': '',
        },
        'installmentPayments': [],  # 分期支付约定
        #  {
        #     'stageId': '',   # 支付期号
        #     'appPayTime': '',      # 约定支付日期
        #     'appPayMoney': '',      # 约定支付金额(万元)
        #     'remark': '',      # 备注
        # }
    }

    def __init__(self, page=1):
        self.current_page = page
        self.request = requests.Session()
        self.request.headers['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36'
        self.request.headers['Accept-Language'] = 'en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4'
        self.request.headers['Connection'] = 'keep-alive'
        self.post_data = {
            '__VIEWSTATE': '',
            '__EVENTVALIDATION': '',
            'hidComName': 'default',
            'TAB_QuerySortItemList': '282:False',
            'TAB_QuerySubmitConditionData': '',
            'TAB_QuerySubmitOrderData': '',
            'TAB_RowButtonActionControl': '',
            'TAB_QuerySubmitPagerData': '',
            'TAB_QuerySubmitSortData': '',
        }
        self.build_post_data(self.get(self.URL))
        logger.info('init new instance done.')

    def get(self, url, deep=0):
        if deep == 3:
            return None
        try:
            return self.request.get(url).content
        except:
            return self.get(url, deep+1)

    def post(self, url, data, deep=0):
        if deep == 3:
            return None
        try:
            return self.request.post(url, data=data).content
        except:
            return self.post(url, data, deep+1)

    def build_post_data(self, html):
        try:
            self.post_data['__VIEWSTATE'] = self.VIEWSTATE_PATTERN.findall(html)[0]
        except: pass
        try:
            self.post_data['__EVENTVALIDATION'] = self.EVENTVALI_PATTERN.findall(html)[0]
        except: pass

    def parse_appvlo(self, td):
        try:
            min, max = re.findall(ur'下限:(.*?)上限:(.*?)$', td.getText().strip())[0]
        except:
            min, max = '', ''
        return {'min': min, 'max': max}

    def parse_instap(self, td):
        if not td:
            return []
        result = []
        for tr in td.find_all('tr')[3:]:
            tds = [x.getText().strip() for x in tr.find_all('td')]
            if len(tds) < 4:
                continue
            if not tds[0].isdigit():
                continue
            result.append({
                'stageId': tds[0],
                'appPayTime': self.deal_time(tds[1]),
                'appPayMoney': tds[2],
                'remark': tds[3],
            })
        return result

    def deal_time(self, string):
        try:
            return '%s 00:00:00' % '-'.join(self.TIME_PATTERN.findall(string)[0])
        except:
            return ''

    def parse_by_url(self, url):
        soup = BeautifulSoup(self.get(url))
        table = soup.find('table', attrs={'class': 'theme'}).find('tbody')
        if not table:
            return None
        result = copy.deepcopy(self.DATA_FORMAT)
        result['title'] = table.find('td', attrs={'class': 'gridHeader'}).getText().strip()
        result['sourceUrl'] = url
        kv_map = self.DETAIL_KV_MAP
        trs = [tr for tr in table.find_all('tr') if tr.parent == table][2:]
        for tr in trs:
            tds = [td for td in tr.find_all('td') if td.parent == tr]
            for k, v in zip(tds[::2], tds[1::2]):
                k = re.sub(ur':|：', '', k.getText()).strip()
                if k in kv_map:
                    if kv_map[k].endswith('Time'):
                        v = self.deal_time(v.getText().strip())
                    else:
                        v = v.getText().strip()
                    result[kv_map[k]] = v
                elif u'土地使用权人' == k:
                    result['landUsers'] = [x.strip() for x in re.split(ur'、|\n| ', v.getText().strip()) if x.strip()]
                elif u'约定容积率' == k:
                    result['appVolumeRatio'] = self.parse_appvlo(v)
                elif u'分期支付约定' == k:
                    result['installmentPayments'] = self.parse_instap(v)
                else:
                    logger.info('need parse [%s-%s]' % (k, v.getText().strip()))
        result['_id'] = result['electronicRegulation']
        s1, s2 = float(result['area']), float(result['landSource'])
        if s1 == s2:
            result['landSource'] = u'现有建设用地'
        elif 0 == s2:
            result['landSource'] = u'新增建设用地'
        else:
            result['landSource'] = u'新增建设用地(来自存量库)'
        return result

    def parse_target(self, html):
        soup = BeautifulSoup(html)
        trs = soup.find_all('tr', attrs={'class': 'gridItem'}) + \
              soup.find_all('tr', attrs={'class': 'gridAlternatingItem'})
        if not trs:
            raise EOFError('the end.')
        for tr in trs:
            try:
                url = 'http://www.landchina.com/' + tr.find('a')['href']
                logger.info('now url : %s' % url)
                result = self.parse_by_url(url)
                if result:
                    yield result
            except Exception, e:
                print 'parse target error :', e
                pass

    def save(self, item):
        try:
            item['updateTime'] = datetime.datetime.now()
            coll = self.Mongo_Conn['crawler_company_all']['landchinaResult']
            if coll.find_one({'_id': item['_id']}):
                coll.update_one({'_id': item['_id']}, {'$set': item}, True)
                return 1
            coll.insert_one(item)
            return 0
        except Exception, e:
            logger.info('exception at save data : [%s-%s]' % (item, e))
            return -1

    def run(self):
        error = 0
        saved = 0
        while True:
            if error >= 10 or saved > 20:
                break
            logger.info('now page : %s' % self.current_page)
            self.post_data['TAB_QuerySubmitPagerData'] = self.current_page
            content = self.post(self.URL, data=self.post_data)
            self.build_post_data(content)
            for item in self.parse_target(content):
                res = self.save(item)
                if res == 1:
                    saved += 1
                elif res == -1:
                    error += 1
                logger.info('save item [%s-%s]' % (res, item))
            self.current_page += 1


def main(start):
    """Build a crawler starting at listing page *start* and run it."""
    crawler = LandChinaCrawler(start)
    crawler.run()


if __name__ == '__main__':
    # Optional first CLI argument: the listing page to start from.
    start = int(sys.argv[1]) if len(sys.argv) > 1 else 1
    main(start)
