#!/usr/bin/env python3
#coding=utf-8

import urllib.request
import urllib.error
import hashlib
import json
import http.client
from abc import ABCMeta, abstractmethod,abstractproperty
import re
import time
import socket
import threading
from htmlGets import *
from ProxyData import *

PAGE_LIST = []

class htmlGet(object):
    """Base page fetcher.

    Downloads HTML (with optional proxy support) and declares the scraping
    interface that concrete page classes must implement.

    NOTE(review): the abstract decorators below are NOT enforced because the
    class does not use ABCMeta as its metaclass; they serve as documentation.
    """

    def __init__(self):
        # Spoof an old IE user agent; some target sites reject urllib's default.
        self.headers = {
            'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)'
        }

    class _time():
        """Year/month/day holder serialized as 'Y-M-D' (no zero padding)."""

        def __init__(self, year=1990, month=1, day=1):
            self.year = year
            self.month = month
            self.day = day

        def getTimeString(self):
            # Components are stringified as-is, so '2020-5-3', not '2020-05-03'.
            return str(self.year) + '-' + str(self.month) + '-' + str(self.day)

        def getTimeFromString(self, string):
            # Fields are stored as strings (mirrors getTimeString's output).
            parts = string.split('-')
            self.year = parts[0]
            self.month = parts[1]
            self.day = parts[2]

    def getTimeNow(self):
        """Return today's date as 'YYYY-MM-DD' in local time."""
        return time.strftime("%Y-%m-%d", time.localtime(time.time()))

    def getHtml(self, url, useProxy=False):
        """Fetch *url* and return its HTML decoded as gbk, falling back to
        utf-8.

        Raises Exception when the download failed or nothing decodable was
        received (callers catch Exception and skip the page).
        """
        request = urllib.request.Request(url, headers=self.headers)
        if useProxy and OPEN_PROXY:
            # PROXY_HOST / OPEN_PROXY come from ProxyData; plain http proxy only.
            request.set_proxy(host=PROXY_HOST, type='http')
        # BUG FIX: html must be pre-initialized — previously it was unbound
        # after URLError/timeout and only a bare except hid the UnboundLocalError.
        html = b''
        try:
            req = urllib.request.urlopen(request, timeout=30)
            try:
                html = req.read()
            finally:
                req.close()
        except urllib.error.URLError as ex:
            print(url, '\n', ex)
        except socket.timeout as ex:
            print(url, '\n', ex)
        except http.client.IncompleteRead as ex:
            print(url, '\n', ex)
        # Target sites are mostly gbk-encoded; utf-8 is the fallback.
        try:
            html = html.decode('gbk')
        except UnicodeDecodeError:
            try:
                html = html.decode('utf-8')
            except UnicodeDecodeError:
                html = b''
        if html:
            return html
        raise Exception('empty or undecodable response: ' + url)

    @abstractproperty
    def useProxy(self):
        """Whether this page class should be fetched through the proxy."""
        pass

    @abstractproperty
    def pageName(self):
        """Unique name used as this page's key in the config JSON."""
        pass

    @abstractmethod
    def getMainPageList(self):
        """Return the list of main page URLs to crawl."""
        pass

    # return [[name, url, time], ...]
    @abstractmethod
    def getPageList(self, url):
        pass

    # return [url,...]
    @abstractmethod
    def getSubPageList(self, subPageUrl):
        pass

    # return [url,...]
    @abstractmethod
    def getImgListInSubPage(self, subSubPageUrl):
        pass

def getKey(string):
    """Return the MD5 hex digest of the UTF-8 encoded text (stable dedup key)."""
    digest = hashlib.md5(string.encode("utf-8"))
    return digest.hexdigest()

def saveConfig():
    """Serialize CONFIG_JSON to conf.json under ConfigFileLock.

    While the module-level `live` flag is set, reschedules itself every
    10 seconds on a daemon Timer (stored in the global `saveTimer` so the
    main thread can join the final run).
    """
    global CONFIG_JSON
    global live
    ConfigFileLock.acquire()
    try:
        # BUG FIX: release the lock even if dumps/open raises —
        # previously an exception here deadlocked every other thread.
        t_conf_json = json.dumps(CONFIG_JSON)
        with open('conf.json', 'w') as f:
            f.write(t_conf_json)
    finally:
        ConfigFileLock.release()
    print('saved ')
    if live:
        global saveTimer
        saveTimer = threading.Timer(10, saveConfig)
        saveTimer.daemon = True  # setDaemon() is deprecated since Python 3.10
        saveTimer.start()

def getPageImg(pageClass):
    """Crawl all pages of *pageClass* and merge newly found sub-pages
    (with their image lists) into the shared CONFIG_JSON under the page's
    name.  Already-known sub-pages (matched by URL md5 key) are skipped.

    pageClass: an htmlGet subclass instance providing pageName(),
    useProxy(), getMainPageList(), getPageList(), getSubPageList() and
    getImgListInSubPage().  Runs in a worker thread; all CONFIG_JSON
    mutations happen under ConfigFileLock.
    """
    mainName = pageClass.pageName()
    useProxy = pageClass.useProxy()
    global ConfigFileLock
    with ConfigFileLock:
        if mainName not in CONFIG_JSON:
            # First crawl of this page: start from an empty list.
            sub_page_info_list = []
            CONFIG_JSON[mainName] = {'useProxy': useProxy, 'pageList': []}
        else:
            entry = CONFIG_JSON[mainName]
            if isinstance(entry, dict) and 'pageList' in entry:
                sub_page_info_list = entry['pageList']
                entry['useProxy'] = useProxy
            else:
                # Legacy config layout: the stored value was the bare page
                # list — migrate it to the {'useProxy', 'pageList'} shape.
                sub_page_info_list = entry
                CONFIG_JSON[mainName] = {'useProxy': useProxy,
                                         'pageList': sub_page_info_list}

    now_sub_index = 0
    for main_page_url in pageClass.getMainPageList():
        try:
            sub_page_list = pageClass.getPageList(main_page_url)
        except Exception as ex:
            # Listing failed: treat this page class's crawl as finished.
            print(ex)
            print(mainName + ' | Down Over')
            return
        for sub_page_info in sub_page_list:
            # sub_page_info is [name, url, time] (see getPageList contract).
            key = getKey(sub_page_info[1])
            has_key = False
            for n, known in enumerate(sub_page_info_list):
                if known['key'] == key:
                    has_key = True
                    now_sub_index = n
            if has_key:
                continue
            try:
                sub_sub_page_url_list = pageClass.getSubPageList(sub_page_info[1])
            except Exception as ex:
                print(ex)
                continue
            sub_sub_page_info_list = []
            for sub_sub_page_url in sub_sub_page_url_list:
                try:
                    t_imgList = pageClass.getImgListInSubPage(sub_sub_page_url)
                except Exception as ex:
                    print(ex)
                    continue
                if not t_imgList:
                    continue
                imgList = []
                for img_url in t_imgList:
                    img_key = getKey(img_url)
                    # NOTE(review): img_url[-3:] assumes a 3-char extension
                    # (jpg/png/gif) — confirm against the page classes.
                    imgList.append({'url': img_url,
                                    'key': img_key,
                                    'filename': img_key + '.' + img_url[-3:]})
                sub_sub_page_info_list.append({'url': sub_sub_page_url,
                                               'imgList': imgList})
            sub_page = {'subPageName': sub_page_info[0],
                        'delete': False,
                        'url': sub_page_info[1],
                        'time': sub_page_info[2],
                        'key': key,
                        'subPageList': sub_sub_page_info_list}
            # Insert next to the last matched entry so site ordering is kept.
            sub_page_info_list.insert(now_sub_index, sub_page)
            now_sub_index += 1
            with ConfigFileLock:
                CONFIG_JSON.update({mainName: {'useProxy': useProxy,
                                               'pageList': sub_page_info_list}})
            print('  downSubPage: ' + mainName + ' | ' + sub_page_info[0])

if __name__ == '__main__':
    # Load previous crawl state; create an empty conf.json on first run.
    try:
        with open('conf.json', 'r') as f:
            configJson = f.read()
    except OSError:
        with open('conf.json', 'w') as f:
            f.write('')
        configJson = ''
    try:
        CONFIG_JSON = json.loads(configJson)
    except ValueError:
        # Empty or corrupt file: start with a fresh config.
        CONFIG_JSON = {}

    ConfigFileLock = threading.Lock()
    # Instantiate the selected slice of page classes as the work queue.
    queue = []
    for classPage in CLASS_LIST[-22:-8]:
        queue.append(classPage())

    queueLock = threading.Lock()
    live = True
    # Periodic background save (reschedules itself while `live` is set).
    saveTimer = threading.Timer(10, saveConfig)
    saveTimer.daemon = True  # setDaemon() is deprecated since Python 3.10
    saveTimer.start()
    maxNum = 5
    taskList = []
    while True:
        time.sleep(0.2)
        # BUG FIX: Thread.isAlive() was removed in Python 3.9 — use is_alive().
        startNum = sum(1 for t in taskList if t and t.is_alive())
        if queue:
            if startNum <= maxNum:
                with queueLock:
                    task = queue.pop(0)
                taskTh = threading.Thread(target=getPageImg, args=(task, ))
                taskTh.daemon = True
                taskTh.start()
                taskList.append(taskTh)
        elif startNum == 0:
            # Queue drained and all workers finished.
            print('Page Down Over ^_^')
            break

    live = False
    # Wait for the last scheduled save so the final state reaches disk.
    saveTimer.join()