import numpy as np
import pylab as plt
import urllib as urllib
import urllib3
import time
import os
import logging
import logging.handlers
import re
from bs4 import BeautifulSoup
from selenium import webdriver

# my_logger = logging.getLogger('MyLogger')
# my_logger.setLevel(logging.DEBUG)
# handler = logging.handlers.SysLogHandler(address = '/log')
# my_logger.addHandler(handler)

class URLParser(object):
    """Scrape daily metro ridership figures for Chengdu, Nanjing and Wuhan
    from a forum thread and render them as charts.

    Workflow: ``parse_metro_data()`` loads the page with Selenium, extracts
    one ridership record per day and accumulates totals/min/max/average;
    ``generate_figures()`` then writes a daily line chart and an average bar
    chart as PNG files under ``./output``.
    """

    # Known source domains.
    WEIBO                   = r'weibo.com'
    CHENGTU                 = r'www.chengtu.com'

    # City names and punctuation tokens used to split the scraped text.
    str_cd                  = r'成都'
    str_nj                  = r'南京'
    str_wh                  = r'武汉'
    str_colon               = r':'
    str_comma               = r'，'
    str_period              = r'。'
    str_left_parenthesis    = u'（'   # full-width parenthesis
    str_left_parenthesis_e  = r'\('   # ASCII parenthesis, escaped for regex use
    str_right_parenthesis   = r'）'
    str_blank               = r' '
    # NOTE(review): renders the same as str_blank but may be a different
    # (non-ASCII) space character — confirm the byte value before merging.
    str_blank_special       = r' '

    def __init__(self, domain_name, url):
        """Initialize parser state.

        Args:
            domain_name: one of the class domain constants (e.g. CHENGTU).
            url: URL of the forum thread to scrape.
        """
        self.domain_name = domain_name
        self.url         = url
        # Per-city daily series; records are parsed newest-first and
        # inserted at index 0, so the lists end up oldest-first.
        self.data        = {URLParser.str_cd: [], URLParser.str_nj: [], URLParser.str_wh: []}
        self.dates       = []
        self.total       = {URLParser.str_cd: 0.0, URLParser.str_nj: 0.0, URLParser.str_wh: 0.0}
        self.average     = {URLParser.str_cd: 0.0, URLParser.str_nj: 0.0, URLParser.str_wh: 0.0}
        # [position, value] pairs; min starts at 3000.0, above any plausible
        # daily ridership (unit: 万人次 / 10k passengers).
        self.max         = {URLParser.str_cd: [0, 0.0], URLParser.str_nj: [0, 0.0], URLParser.str_wh: [0, 0.0]}
        self.min         = {URLParser.str_cd: [0, 3000.0], URLParser.str_nj: [0, 3000.0], URLParser.str_wh: [0, 3000.0]}
        # Chinese-capable font for chart titles/labels/ticks.
        self.my_font     = \
            plt.matplotlib.font_manager.FontProperties(fname='./FZYTK.ttf')

        # Patterns are compiled once here and reused inside the parsing loop.
        self.pattern_cd                 = re.compile(URLParser.str_cd)
        self.pattern_nj                 = re.compile(URLParser.str_nj)
        self.pattern_wh                 = re.compile(URLParser.str_wh)
        self.pattern_colon              = re.compile(URLParser.str_colon)
        self.pattern_comma              = re.compile(URLParser.str_comma)
        self.pattern_period             = re.compile(URLParser.str_period)
        self.pattern_left_parenthesis   = re.compile(URLParser.str_left_parenthesis)
        self.pattern_left_parenthesis_e = re.compile(URLParser.str_left_parenthesis_e)
        self.pattern_blank              = re.compile(URLParser.str_blank)
        self.pattern_blank_special      = re.compile(URLParser.str_blank_special)
        self.pattern_float              = re.compile(r'\d+\.?\d*')

        # X-axis tick positions and labels, filled by generate_arange_xticks().
        self.arange = []
        self.xticks = []

        self.headers     = {
            'Host': 'www.chengtu.com',
            'Connection':'keep-alive',
            'Upgrade-Insecure-Requests':'1',
            'User-Agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36',
            'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Accept-Encoding':'gzip, deflate',
            'Accept-Language':'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
        }
        self.host = {'Host':'www.chengtu.com'}

    def log_list(self, list):
        """Print the items of *list* on one line as 'index:item ' pairs.

        NOTE(review): the parameter shadows the built-in ``list``; the name is
        kept so keyword callers are not broken.
        """
        # Each item is followed by a trailing space, matching the original
        # hand-rolled concatenation.
        print(''.join('{0}:{1} '.format(i, item) for i, item in enumerate(list)))

    def parse_metro_data(self):
        """Fetch the thread with Selenium and parse one ridership record per day.

        A candidate line must mention all three cities and have a raw length
        in (30, 45).  Populates ``self.data``/``self.dates`` and the
        total/average/min/max statistics.  On fetch failure the method logs
        the error and leaves the data empty instead of crashing.
        """
        lines = []   # empty default so a failed fetch skips the loop (was None -> TypeError)
        index = 0

        try:
            browser = webdriver.Chrome(r'./chromedriver.exe')
            browser.get(self.url)
            time.sleep(10)   # allow the dynamic page content to load
            soup = BeautifulSoup(browser.page_source, 'lxml')
            lines = soup.text.split('\n')
        except Exception as err:
            print(err)

        for line in lines:
            # Length of the raw line is used for filtering, before whitespace
            # is stripped out of the line itself.
            length = len(line)
            line = ''.join(line.split())

            if re.search(self.pattern_cd, line) and \
                    re.search(self.pattern_nj, line) and \
                    re.search(self.pattern_wh, line) and \
                    30 < length < 45:
                # Expected shape: "<date>:成都x，南京y，武汉z。..."
                str_array1 = re.split(self.pattern_colon, line)
                str_array2 = re.split(self.pattern_period, str_array1[1])
                str_array3 = re.split(self.pattern_comma, str_array2[0])
                date       = str_array1[0]

                # Extract the numeric value from each city segment.
                float_match1 = re.search(self.pattern_float, str_array3[0])
                float_match2 = re.search(self.pattern_float, str_array3[1])
                float_match3 = re.search(self.pattern_float, str_array3[2])
                float1 = float(float_match1.group())
                float2 = float(float_match2.group())
                if float_match3:
                    float3 = float(float_match3.group())
                else:
                    # Third value missing: fall back to that city's most
                    # recently parsed value.  (The original left float3
                    # unbound here, raising UnboundLocalError later.)
                    try:
                        float3 = self.data[str_array3[2][0:2]][-1]
                    except (KeyError, IndexError) as err:
                        print(err)
                        continue

                # The first two characters of each segment are the city name
                # and key the per-city dictionaries.
                for segment, value in zip(str_array3, (float1, float2, float3)):
                    city = segment[0:2]
                    self.data[city].insert(0, value)
                    self.total[city] += value
                    if value > self.max[city][1]:
                        self.max[city][0] = index
                        self.max[city][1] = value
                    if value < self.min[city][1]:
                        self.min[city][0] = index
                        self.min[city][1] = value

                self.dates.insert(0, date)
                index += 1

        count = len(self.data[self.str_cd])

        if count > 0:
            for city in (self.str_cd, self.str_nj, self.str_wh):
                # Average per city, rounded to 2 decimals.
                self.average[city] = round(self.total[city]/float(count), 2)
                # Loop indices counted newest-first; convert them to
                # positions in the oldest-first lists.
                self.min[city][0] = count - self.min[city][0] - 1
                self.max[city][0] = count - self.max[city][0] - 1
                # Round extreme values for display.
                self.min[city][1] = round(self.min[city][1], 2)
                self.max[city][1] = round(self.max[city][1], 2)
        return

    def generate_arange_xticks(self, arange, xticks):
        """Fill *arange*/*xticks* in place with month-boundary tick data.

        The offsets are cumulative day counts at each month start of a
        non-leap year (0 = Jan 1, 31 = Feb 1, ...); a tick is emitted for
        every month boundary the parsed date range has reached.
        """
        month_starts = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334)
        length       = len(self.dates)

        for pos, day in enumerate(month_starts):
            if length > day:
                arange.insert(pos, day)
                xticks.insert(pos, self.dates[day])
        return

    def generate_figures(self):
        """Render the daily line chart and the average bar chart to ./output.

        Requires parse_metro_data() to have run first; returns early (with a
        message) when no data was parsed instead of raising IndexError.
        """
        # Create the output folder if it does not exist yet.
        os.makedirs('./output', exist_ok=True)

        if not self.dates:
            print('No metro data parsed; skipping figure generation')
            return

        # Generate tick positions and labels.
        self.generate_arange_xticks(self.arange, self.xticks)

        # Derive the date string for labels/filenames from the newest entry.
        # Dates may use either an ASCII '(' or a full-width '（' before the
        # weekday; pick the matching split pattern.  (The original used
        # re.match here, which anchors at the start and never matched.)
        if re.search(self.pattern_left_parenthesis_e, self.dates[-1]) is not None:
            paren_pattern = self.pattern_left_parenthesis_e
        else:
            paren_pattern = self.pattern_left_parenthesis
        tmp_date = re.split(paren_pattern, self.dates[-1])[0]
        tmp_date = re.split(self.pattern_blank, tmp_date)[-1]
        tmp_date = re.split(self.pattern_blank_special, tmp_date)[-1]
        str_date = u'2019年' + tmp_date

        #
        # Figure 1: daily passengers line chart
        #
        title      = u'2019年成都、南京、武汉地铁日客流量走势图'
        ylabel     = u'地铁日客流量(万人次)'
        xlabel     = str_date + '统计'
        file_name  = './output/' + str_date + 'DailyMetroData' + '.png'
        water_mark_daily   = r'Draw By Water          Draw By Water          Draw By Water          ' \
                             r'Draw By Water          Draw By Water          Draw By Water          ' \
                             r'Draw By Water          Draw By Water          Draw By Water          '
        water_mark_average = r'Draw By Water          Draw By Water          Draw By Water'
        x_center   = round(len(self.data[URLParser.str_cd])/2)
        y_center   = 300

        # Width scales with the number of days so ticks stay readable.
        plt.figure(figsize=(round(len(self.data[URLParser.str_cd])/8) + 1, 5), dpi=100)
        # One line per city.
        lines_cd = plt.plot(self.data[URLParser.str_cd], color='g')
        lines_nj = plt.plot(self.data[URLParser.str_nj], color='r', linestyle='dotted')
        lines_wh = plt.plot(self.data[URLParser.str_wh], color='b', linestyle='dotted')
        plt.title(title, fontproperties=self.my_font, fontsize=15)
        plt.ylabel(ylabel, fontproperties=self.my_font)
        plt.xlabel(xlabel, fontproperties=self.my_font)
        plt.xticks(self.arange, self.xticks, fontproperties=self.my_font, rotation=0)
        plt.legend((lines_cd[0], lines_nj[0], lines_wh[0]),
                  (self.str_cd, self.str_nj, self.str_wh),
                  loc='upper right',
                  prop=self.my_font
                 )

        # Annotate each city's maximum just above its line (per-city y
        # offsets avoid overlapping the lines).
        for city, offset, color in ((self.str_cd, 15, 'g'),
                                    (self.str_nj, 5, 'r'),
                                    (self.str_wh, 5, 'b')):
            plt.text(self.max[city][0],
                     self.max[city][1] + offset,
                     str(self.max[city][1]),
                     ha='center',
                     va='bottom',
                     fontproperties=self.my_font,
                     fontsize=7,
                     color=color)

        # Water mark, drawn twice at different heights.
        plt.text(x_center, y_center, water_mark_daily, ha='center', va='bottom', fontsize=10, color='0.8')
        plt.text(x_center, y_center-50, water_mark_daily, ha='center', va='bottom', fontsize=10, color='0.8')
        plt.ylim(0, 550)
        plt.savefig(file_name)

        #
        # Figure 2: average passengers bar chart
        #
        title     = u'2019年成都、南京、武汉地铁日均客流量图'
        ylabel    = u'地铁日均客流量(万人次)'
        file_name = './output/' + str_date + 'AverageMetroData' + '.png'
        x_center  = 1
        y_center  = 250
        x = np.arange(3)
        # Insertion order of self.average is cd, nj, wh — matching the ticks.
        y = list(self.average.values())

        plt.figure()
        plt.title(title, fontproperties=self.my_font, fontsize=15)
        plt.ylabel(ylabel, fontproperties=self.my_font)
        # Same xlabel (date range) as the daily figure.
        plt.xlabel(xlabel, fontproperties=self.my_font)
        plt.xticks(np.arange(3), (self.str_cd, self.str_nj, self.str_wh), fontproperties=self.my_font)
        plt.bar(x, y, width=0.35, color=['g', 'r', 'b'])

        # Print the value above each bar.
        for a, b in zip(x, y):
            plt.text(a, b + 0.3, '%.2f'%b, ha='center', va='bottom', fontsize=7)

        # Water mark, drawn three times at different heights.
        plt.text(x_center, y_center, water_mark_average, ha='center', va='bottom', fontsize=10, color='0.8')
        plt.text(x_center, y_center-100, water_mark_average, ha='center', va='bottom', fontsize=10, color='0.8')
        plt.text(x_center, y_center-200, water_mark_average, ha='center', va='bottom', fontsize=10, color='0.8')
        plt.ylim(0, 350)
        plt.savefig(file_name)
        return
        
if __name__ == "__main__":
    # Forum thread carrying the daily metro ridership posts.
    thread_url = 'http://www.chengtu.com/forum.php?mod=viewthread&tid=578452&extra=page%3D3'

    url_parser = URLParser(URLParser.CHENGTU, thread_url)

    # Scrape and parse the passenger data, then plot it.
    url_parser.parse_metro_data()
    url_parser.generate_figures()