import datetime
import gc
import threading
import time
from bs4 import BeautifulSoup
import requests
import threadpool
from openpyxl import Workbook

# Guards concurrent check-and-append on StationDataInfoList from worker threads.
lock = threading.Lock()
# Site root; station hrefs scraped from city pages are relative to this.
url = 'http://www.pm25.com'
# City pages live under /city<slug>; slug is appended by the index parser.
prefix = 'http://www.pm25.com/city'
# Shared result list, filled by GetStationFunc (under `lock`) and consumed by _WriteToXlsx.
StationDataInfoList = []


def IsNotInList(listname, item):
    """Return True when no entry of *listname* matches *item*.

    Two entries match when both their station name (stName) and city
    name (cityName) are equal; used to de-duplicate the shared result list.
    """
    return not any(
        existing.stName == item.stName and existing.cityName == item.cityName
        for existing in listname
    )



def GetStationFunc(pcdItem):
    """Thread-pool worker: fetch the station list for every city of one province.

    For each city URL in ``pcdItem.cityinfo`` the page is fetched with up to
    6 attempts (``getStationSiteDict`` returns None on failure).  Each station
    found becomes a StationDataInfo appended to the shared module-level
    ``StationDataInfoList`` under ``lock`` (skipping duplicates).
    """
    for cityName in pcdItem.cityinfo:
        cityURL = pcdItem.cityinfo[cityName]
        webdict = None
        # Retry up to 6 times; a short sleep before each attempt throttles the crawl.
        for _attempt in range(6):
            time.sleep(0.5)
            webdict = getStationSiteDict(cityURL)
            if webdict is not None:
                break
        if webdict is None:
            # Only report failure when every attempt failed (the original code
            # could print both failure and success when the 6th attempt succeeded).
            print('%s %s获取失败' % (cityName, cityURL))
            continue
        print('%s %s获取成功' % (cityName, cityURL))
        for stName in webdict:
            sd = StationDataInfo()
            sd.cityName = cityName
            sd.province = pcdItem.province
            if sd.province == '直辖市':
                # Municipalities: the city name doubles as the province name.
                sd.province = sd.cityName
            elif sd.province == '港澳台':
                # Hong Kong / Macao / Taiwan entries are skipped entirely.
                continue
            sd.stName = stName
            # Percent-encode the absolute station URL; [2:-1] strips the
            # leading "b'" and trailing "'" of the bytes-repr produced
            # by changeStrToUTF8.
            sd.StWebSite = changeStrToUTF8(url + webdict[stName])[2:-1]
            # Context manager guarantees the lock is released even if the
            # duplicate check raises (the bare acquire/release pair did not).
            with lock:
                if IsNotInList(StationDataInfoList, sd):
                    StationDataInfoList.append(sd)
        del webdict
    gc.collect()


def getStationSiteDict(cityWebSite):
    """Fetch one city page and return {station name: station href}.

    Returns None on any failure (network error, parse error); callers treat
    None as "retry".  The HTTP response is always closed via ``finally``.
    """
    siteDict = {}
    resp = None
    try:
        resp = requests.get(cityWebSite, timeout=10)
        lj = BeautifulSoup(resp.text, 'html.parser')
        station_tags = lj.find_all('li', attrs={'class': 'pj_area_data_item pj_area_data_item_darkbg'})
        # dict.fromkeys keeps order while dropping duplicate tag objects.
        for tag in dict.fromkeys(station_tags, True):
            # Each <li> wraps an <a> whose title/string is the station name
            # and whose href is the station page path.
            # Skip entries whose title mentions '西溪' unless the href does too
            # (site-specific filter carried over from the original logic).
            if '西溪' not in str(tag.a['title']) or '西溪' in str(tag.a['href']):
                siteDict[tag.a.string] = tag.a['href']
        return siteDict
    except Exception as e:
        # Log the cause instead of swallowing it silently; returning None
        # signals the caller to retry.
        print('get %s error' % cityWebSite, e)
        return None
    finally:
        if resp is not None:
            resp.close()
        gc.collect()

def changeStrToUTF8(strs):
    """Percent-encode *strs* by abusing the repr of its UTF-8 bytes.

    ``repr(b'\\xe5...')`` renders non-ASCII bytes as ``\\xNN`` escapes;
    replacing ``\\x`` with ``%`` yields ``%NN`` URL escapes.  The result
    keeps the surrounding ``b'...'`` wrapper, which callers strip with
    ``[2:-1]``.
    """
    encoded = strs.encode('utf-8')
    return repr(encoded).replace('\\x', '%')


class ProvCityData:
    """One province and its {city name: absolute city page URL} mapping."""

    def __init__(self):
        self.province = ''  # province display name, e.g. '浙江'
        self.cityinfo = {}  # city name -> city page URL

    def AddCityInfoItem(self, city, website):
        """Register (or overwrite) one city's page URL."""
        self.cityinfo[city] = website

    def printCityInfo(self):
        """Debugging helper: dump the city mapping to stdout."""
        print(self.cityinfo)


class StationDataInfo():
    """Plain record for one monitoring station; fields are assigned by the crawler."""

    def __init__(self):
        # stName: station name; StWebSite: percent-encoded station URL;
        # cityName: owning city; province: owning province (the city name
        # itself for municipalities).  All start empty.
        self.stName = self.StWebSite = self.cityName = self.province = ''


class StationDatatraper():
    """Scrapes www.pm25.com: province/city index -> per-city station lists -> xlsx file."""

    def __init__(self):
        # One ProvCityData per province, filled by _GetProvCityDataList().
        self.ProvCityDataLists = []

    def _getTotalCityList(self):
        """Download the front page and return the province <div> tags.

        The response is always closed via ``finally``; network errors
        propagate to the caller.
        """
        resp = None
        try:
            time.sleep(0.2)
            resp = requests.get(url, timeout=10)
            print(resp.status_code)
            lj = BeautifulSoup(resp.text, 'html.parser')
            return lj.find_all('div', attrs={'class': 'city_province_item'})
        finally:
            if resp is not None:
                resp.close()
            gc.collect()

    def _GetProvCityDataList(self, html_str):
        """Parse the province <div> tags into ProvCityData items."""
        for tag in html_str:
            pcd = ProvCityData()
            pcd.province = tag.dt.string
            # Skip single-letter abbreviation headers and 港澳台
            # (Hong Kong / Macao / Taiwan).
            if len(pcd.province) >= 2 and pcd.province != '港澳台':
                for c in tag.dd:
                    if c != '\n':
                        # Each child renders like <a href="/slug" ...>name</a>;
                        # splitting on '"' puts the href in s[1] and the piece
                        # holding the city name in s[2] (trimmed of '>' and '</a').
                        s = str(c).split('"')
                        key = (s[2])[1:-4]
                        pcd.AddCityInfoItem(key, prefix + s[1])
                self.ProvCityDataLists.append(pcd)

    def _GetStationDataInfoList(self):
        """Fan GetStationFunc out over a 10-thread pool, one task per province."""
        StationDataInfoList.clear()
        pool = threadpool.ThreadPool(10)
        # Named work_requests so the local does not shadow the module-level
        # `requests` import (the original code did, masking the library name).
        work_requests = threadpool.makeRequests(GetStationFunc, self.ProvCityDataLists)
        for req in work_requests:
            pool.putRequest(req)
        pool.wait()

    def _WriteToXlsx(self):
        """Dump the shared StationDataInfoList to '完整清单<today>.xlsx'."""
        wb = Workbook()
        dest_filename = '完整清单' + str(datetime.date.today()) + '.xlsx'
        ws1 = wb.active
        ws1.title = "sheet1"
        # Header row: station name, URL, city, province, read-flag.
        ws1['A1'] = '站点名称'
        ws1['B1'] = 'URL地址'
        ws1['C1'] = '城市'
        ws1['D1'] = '省份'
        ws1['E1'] = '读取标记'
        for s in StationDataInfoList:
            ws1.append([s.stName, s.StWebSite, s.cityName, s.province, 1])
        wb.save(filename=dest_filename)
        wb.close()
        gc.collect()

    def StartWork(self):
        """Run the full pipeline: fetch index -> parse provinces -> crawl stations -> write xlsx."""
        tclist = self._getTotalCityList()
        self._GetProvCityDataList(tclist)
        self._GetStationDataInfoList()
        self._WriteToXlsx()


if __name__ == '__main__':
    # Script entry point: run the full scrape-and-export pipeline once.
    scraper = StationDatatraper()
    scraper.StartWork()
