import pandas as pd
from utils.read_url import get_cookie, get_url_info
from config import config


class ReadBank(object):
    """Scrape a bank-directory page into rows of
    [bank_name, bank_short_en_name, url, type_name, type_id] and persist
    them to CSV/Excel."""

    # Category div ids known to contain bank lists; anything else is skipped.
    # NOTE(review): values look page-specific — confirm against the live markup.
    _KNOWN_TYPE_IDS = ('3', '4', '5', '6', '8', '9', '10')

    @staticmethod
    def _extract_short_name(url):
        """Derive a short English name from a bank homepage URL,
        e.g. 'http://www.icbc.com.cn' -> 'icbc'.

        :param url: href string, possibly None.
        :return: short name with 'bank'/'china' stripped, or None when the
                 URL is missing or lacks the expected 'www'/'.com' markers.
        """
        if not url:
            return None
        try:
            # Bug fix vs. the original _bak variant: str.index raises
            # ValueError for URLs without 'www' or '.com'; return None
            # instead of crashing.
            short = url[url.index('www') + 4:url.index('.com')]
        except ValueError:
            return None
        return short.replace('bank', '').replace('china', '')

    def get_bank_type(self, html_soup):
        """Walk every category block ('div.shang') on the page and collect
        the banks listed under each.

        :param html_soup: BeautifulSoup of the directory page.
        :return: list of [name, short_name, url, type_name, type_id] rows.
        """
        arr = []
        for soup in html_soup.find_all('div', class_='shang'):
            type_name_data = soup.find('div', class_='zi')
            if not type_name_data:
                continue

            text = type_name_data.get_text()
            type_name = text.strip() if text else None

            # The category id lives on the right-floated sibling div; the
            # last two characters of its id attribute are the numeric id,
            # e.g. id='xx10' -> '10'.
            id_div = soup.find('div', style="float:right")
            type_id_data = id_div.get('id') if id_div else None
            type_id = type_id_data[-2:].strip() if type_id_data else None

            arr += self.get_bank_list(html_soup, type_id=type_id, type_name=type_name)
        return arr

    @staticmethod
    def get_bank_list_bak(html_soup, type_id):
        """Older variant that reads banks from <li> items (kept for
        reference / backward compatibility).

        :param html_soup: BeautifulSoup of the directory page.
        :param type_id: category div id as a string.
        :return: list of [name, short_name, url] rows.
        """
        arr = []
        if type_id not in ReadBank._KNOWN_TYPE_IDS:
            return arr
        container = html_soup.find('div', id=type_id)  # hoisted: was looked up twice
        if not container:
            return arr
        for info_soup in container.find_all('li'):
            info_data = info_soup.find('a')
            if not info_data:
                continue
            text = info_data.get_text()
            name = text.strip() if text else None
            url = info_data.get('href')
            arr.append([name, ReadBank._extract_short_name(url), url])
        return arr

    @staticmethod
    def get_bank_list(html_soup, type_id, type_name):
        """Read every bank anchor inside the category div for *type_id*.

        :param html_soup: BeautifulSoup of the directory page.
        :param type_id: category div id as a string (may be None).
        :param type_name: human-readable category name (may be None).
        :return: list of [name, short_name, url, type_name, type_id] rows.
        """
        arr = []
        if type_id not in ReadBank._KNOWN_TYPE_IDS:
            return arr
        container = html_soup.find('div', id=type_id)  # hoisted: was looked up twice
        if not container:
            return arr
        for info_soup in container.find_all('a'):
            text = info_soup.get_text()
            name = text.strip() if text else None
            url = info_soup.get('href')
            arr.append([name, ReadBank._extract_short_name(url), url, type_name, type_id])
        return arr

    def read_main(self):
        """Fetch the directory page, parse it, and persist the result.

        Writes CSV and Excel files only when at least one row was parsed.

        :return: pandas.DataFrame of the parsed rows (empty when none).
        """
        # Prime the session cookie before the real request.
        get_cookie(url=config.URL_MAIN)

        html_soup = get_url_info(url_main=config.URL_MAIN, file=config.URL_JRJG)

        df = pd.DataFrame(self.get_bank_type(html_soup))
        if not df.empty:
            df.columns = ['bank_name', 'bank_short_en_name', 'url', 'type_name', 'type_id']
            df.to_csv(config.FILE_CSV)
            df.to_excel(config.FILE_EXCEL)

        return df


# Module-level instance kept for backward compatibility with any importer.
read_bank = ReadBank()

if __name__ == "__main__":
    # Guarded so that importing this module no longer triggers the network
    # fetch and file writes as a side effect.
    df = read_bank.read_main()
    print(df.head())
