#!/usr/bin/env python
# -*- coding: utf-8 -*-


import logging.config
import colorlog
from basetask import BaseTask
from utils.converter import Converter
from bs4 import BeautifulSoup
import requests
from process_engine import Context
from utils.htmlHelper import HtmlHelper
from utils.converter import Converter
from db.dbRepository import DbRepository
import datetime
import re

class DailyTrendTask(BaseTask):
    """Download daily housing-trend data for a list of Beijing districts from lianjia.com.

    For each district the task scrapes the district's "fangjia" page for the
    average price and the on-sale / 90-day-deal counters, scrapes the deals
    page for the total deal count, and persists the collected rows through
    the injected DbRepository.
    """

    # Extracts the listing count from text like "在售房源123套".
    _onSaleNumRegex = re.compile(r"(?<=在售房源)(\d+)(?=套)")
    # Extracts the 90-day deal count from text like "最近90天内成交房源45套".
    _dealNumOf90DaysRegex = re.compile(r"(?<=最近90天内成交房源)(\d+)(?=套)")
    # Browser-like headers so the site serves the regular HTML page.
    _headers = {
        "Accept": "text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "zh-CN",
        "Cache-Control": "max-age=0",
        "Connection": "Keep-Alive",
        "Host": "bj.lianjia.com",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763",
    }

    def __init__(self, dbRepository: DbRepository):
        """
        :param dbRepository: repository used to persist the collected trends.
        """
        super().__init__('DailyTrendTask')
        self.__logger = logging.getLogger(__name__)
        self.__dbRepository: DbRepository = dbRepository
        # Sample url: https://bj.lianjia.com/fangjia/dongcheng/
        self.__urlFormat = "https://bj.lianjia.com/fangjia/{0}/"
        # Deal-history page carrying the total deal count ("total fl" div).
        # BUGFIX: the original referenced __urlFormat1 without ever defining it
        # (guaranteed AttributeError in __process_one_district).
        # NOTE(review): this URL is the lianjia deals page whose markup matches
        # the parsing code below -- confirm against production.
        self.__urlFormat1 = "https://bj.lianjia.com/chengjiao/{0}/"
        # BUGFIX: give both collections safe defaults so process()/verify()
        # never hit AttributeError when no custom params are supplied.
        self.__districts = []
        self.__districtTrends = []

    def process(self, context):
        """Download and persist the trend for every configured district.

        Districts are taken from context.args.parsedCustomParams["district"]
        when present; otherwise the previously configured list (if any) is used.
        """
        self.__logger.info("%s process", self.name)
        params = getattr(context.args, "parsedCustomParams", None)
        if params and "district" in params:
            self.__districts = params["district"]
        if self.__districts:
            self.__districtTrends = []
            for district in self.__districts:
                self.__districtTrends.append(self.__process_one_district(context, district))
            self.__dbRepository.insertOrUpdateDistrictTrend(self.__districtTrends)
            self.__logger.info("data updated successfully")
        else:
            # BUGFIX: logger.warn is deprecated; use warning().
            self.__logger.warning("No district to process")

    def verify(self, context):
        """Return True when every configured district produced a trend record."""
        return len(self.__districts) == len(self.__districtTrends)

    def __process_one_district(self, context, district):
        """Scrape and return the trend dict for a single district.

        :returns: dict with keys district, date, avgPrice, onsaleNum,
                  dealNumOf90Days, dealNum, showNum and totalDealNum.
        """
        self.__logger.info("start to download data from site '%s'", district)

        soup = self.__request_url(self.__urlFormat.format(district))
        dataDiv = soup.find("div", class_="m-tongji")
        trend = {}
        trend["district"] = district
        # The site publishes yesterday's figures, so stamp the row accordingly.
        trend["date"] = datetime.date.today() - datetime.timedelta(days=1)
        trendDiv1 = dataDiv.find("div", class_="qushi-2")
        trend["avgPrice"] = float(HtmlHelper.getHtmlString(trendDiv1.find("span", class_="num")))
        # BUGFIX: the original referenced a non-existent DistrictTrendTask
        # (NameError); the class-level regexes belong to DailyTrendTask.
        onSaleText = HtmlHelper.getHtmlString(
            trendDiv1.find('a', text=DailyTrendTask._onSaleNumRegex))
        trend["onsaleNum"] = int(DailyTrendTask._onSaleNumRegex.search(onSaleText).group(0))
        dealText = HtmlHelper.getHtmlString(
            trendDiv1.find('a', text=DailyTrendTask._dealNumOf90DaysRegex))
        trend["dealNumOf90Days"] = int(
            DailyTrendTask._dealNumOf90DaysRegex.search(dealText).group(0))
        numDivs = dataDiv.find_all("div", class_="num")
        trend["dealNum"] = int(HtmlHelper.getHtmlString(numDivs[0].find("span")))
        trend["showNum"] = int(HtmlHelper.getHtmlString(numDivs[1].find("span")))

        soup = self.__request_url(self.__urlFormat1.format(district))
        totalNumDiv = soup.find("div", class_="total fl")
        trend["totalDealNum"] = int(HtmlHelper.getHtmlString(totalNumDiv.find("span")))
        self.__logger.info("Got data for district '%s'. Data:\n%s", district, trend)
        return trend

    def __request_url(self, url):
        """GET the url with browser-like headers and return the parsed soup."""
        response = requests.get(url, headers=DailyTrendTask._headers)
        # Set encoding explicitly to fix mojibake for Chinese characters.
        response.encoding = response.apparent_encoding
        return BeautifulSoup(response.text, "html.parser")


if __name__ == '__main__':
    # BUGFIX: this guard was indented inside the class body, where it would
    # have executed during class creation when the file is run as a script;
    # it belongs at module level.
    from types import SimpleNamespace

    from custom_logging import setup_logging

    setup_logging()

    task = DailyTrendTask(DbRepository())
    context = Context()
    # BUGFIX: `task.__districts = [...]` from outside the class sets a literal
    # "__districts" attribute that the name-mangled methods never see, and a
    # plain dict for context.args breaks the attribute access in process().
    # Pass the districts the way process() actually reads them instead.
    context.args = SimpleNamespace(
        parsedCustomParams={"district": ['dongcheng', 'xicheng', 'chaoyang', 'haidian']})
    context.add_task(task)
    task.process(context)
    if not task.verify(context):
        logging.getLogger(__name__).error("verify failed")