# coding:utf-8
import os

import requests
import re
import json

import sys


class Neihanduanzi(object):
    """Scraper for neihanshequ.com.

    Downloads the front page, extracts joke share-links and text with a
    regex, and saves the results as newline-delimited JSON.
    """

    def __init__(self):
        # Page to scrape.
        self.url = 'http://neihanshequ.com/'
        # Captures (share-link href, joke text); re.S lets .*? span newlines.
        self.pattern = re.compile(
            '<a target="_blank" class="image share_url" href="(.*?)".*?<p>(.*?)</p>',
            re.S)
        # BUG FIX: the original 'ext\E2_neihan.json' relied on '\E' being an
        # invalid (hence literal) escape sequence and used a backslash that is
        # not a path separator on POSIX; build the path portably instead.
        self.filename = os.path.join('ext', 'E2_neihan.json')

    def get_page(self, url):
        """Fetch *url* and return the response body as text.

        Uses the project-local ``Utils.C002_extract_info`` helper to pick a
        User-Agent header.  NOTE(review): ``verify=False`` disables SSL
        certificate checking — kept from the original, but insecure.
        """
        _path = os.path.join(os.path.abspath('.'), '../')
        # Avoid appending a duplicate entry on every call.
        if _path not in sys.path:
            sys.path.append(_path)
        _ei = __import__('Utils.C002_extract_info', fromlist=['C002_extract_info'])
        _ua_file = {'pc': '{0}Utils/{1}'.format(_path, _ei.ExtractInfo.ua_file['pc'])}
        _user_agent = _ei.ExtractInfo.get_useragent(file=_ua_file)
        # BUG FIX: the original ignored the ``url`` argument and always
        # fetched self.url; honor the parameter (run() passes self.url, so
        # existing behavior is unchanged).  A timeout prevents hanging forever.
        _response = requests.get(url, headers=_user_agent, verify=False,
                                 timeout=10)
        return _response.text

    def parse_data(self, str_data):
        """Extract matches from *str_data*.

        Returns a list of ``{'url': ..., 'content': ...}`` dicts, one per
        regex match; an empty list when nothing matches.
        """
        return [{'url': _url, 'content': _content}
                for _url, _content in self.pattern.findall(str_data)]

    def save_data(self, data_list):
        """Write *data_list* to ``self.filename`` as newline-delimited JSON.

        BUG FIX: the original appended ``',\\n'`` after every object, so the
        output file was neither valid JSON nor valid JSONL; we now emit one
        valid JSON object per line.  The target directory is created if it
        does not exist yet.
        """
        _dir = os.path.dirname(self.filename)
        if _dir:
            os.makedirs(_dir, exist_ok=True)
        with open(self.filename, mode='w', encoding='utf-8') as f:
            for _data in data_list:
                f.write(json.dumps(_data, ensure_ascii=False) + '\n')

    def run(self):
        """Fetch, parse, and persist one page of jokes."""
        _str_data = self.get_page(self.url)
        _data_list = self.parse_data(_str_data)
        self.save_data(_data_list)


def main():
    """Entry point: build the scraper and run one fetch/parse/save cycle."""
    Neihanduanzi().run()


# Run the scraper only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
