#!/usr/bin/env python
# coding: utf-8

import json
from xtls.basecrawler import BaseCrawler
from scpy.logger import get_logger


logger = get_logger(__file__)

# Raw HTTP request headers captured from a browser's search request,
# one "Key:Value" per line; parsed into a dict by str_to_dict() below.
headers = '''
    Accept:*/*
    Accept-Encoding:gzip, deflate
    Accept-Language:zh-CN,zh;q=0.8
    Connection:keep-alive
    Content-Length:86
    Content-Type:application/x-www-form-urlencoded; charset=UTF-8
    Host:www.soupilu.com
    Origin:http://www.soupilu.com
    Referer:http://www.soupilu.com/Home/SearchLaw
    User-Agent:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36
    X-Requested-With:XMLHttpRequest
'''

def str_to_dict(s):
    """Parse a newline-separated block of "Key:Value" lines into a dict.

    Blank lines are skipped and each remaining line is stripped, then split
    on the FIRST colon only, so values that themselves contain colons
    (e.g. "Origin:http://www.soupilu.com") are preserved intact — the old
    split(':')[1] truncated such values at the first colon.

    :param s: multi-line string, one "Key:Value" pair per line.
    :return: dict mapping each key to everything after its first colon
             (empty string for lines like "lt:" or lines with no colon).
    """
    result = {}
    for line in s.splitlines():
        line = line.strip()
        if not line:
            continue
        # partition never raises: a colon-less line yields value == ''.
        key, _, value = line.partition(':')
        result[key] = value
    return result


# Raw POST form fields for the search API, captured from the browser.
# ds/de bound the search date range; pn is the page number, which the
# crawler increments as it pages through results.
post_data = """
    lt:
    lc:
    def:
    inst:
    doc:
    ds:2000/1/1
    de:2016/9/21
    pn:1
    rf:0
    at:1
    rt:0
    et:false
"""

post_data = str_to_dict(post_data)
# NOTE(review): 'lc' is deliberately a single space rather than empty —
# presumably the server treats the two differently; confirm before changing.
post_data['lc'] = ' '

# Convert the raw header block above into the dict requests expect.
headers = str_to_dict(headers)

class LawCrawler(BaseCrawler):
    """Crawl law search results from soupilu.com page by page.

    Each page of results is appended to ./soupilu.json as pretty-printed
    UTF-8 JSON; crawling stops at the first empty result page.
    """

    def __init__(self):
        super(LawCrawler, self).__init__()
        # Search endpoint; the module-level post_data carries the query.
        self.url = 'http://www.soupilu.com/api/Law/SearchLaw'
        self.post_data = post_data

    def run(self):
        """Page through the search API until an empty page is returned."""
        total = 0
        page = 1
        while True:
            # Lazy %-args so formatting is deferred to the logging layer.
            logger.info('Now Page: %d', page)
            html = self.post(self.url, data=self.post_data, headers=headers)
            contents = json.loads(html)
            # Check BEFORE writing so the terminating empty page is not
            # appended to the output file.
            if not contents:
                break
            with open('./soupilu.json', 'ab') as f:
                # encoding='unicode' was not a valid codec name (and the
                # kwarg is gone in Py3); encode explicitly for the binary
                # handle instead.
                f.write(json.dumps(contents, indent=4,
                                   ensure_ascii=False).encode('utf-8'))
                f.write(b'\n')
            for single in contents:
                _id = single.get('Id')
                total += 1
                # Was logger.info('%d - %s ', (total, _id)): a single tuple
                # argument for two %-placeholders raises at log time.
                logger.info('%d - %s', total, _id)

            page += 1
            # Keep 'pn' a string, consistent with its initial parsed value.
            self.post_data['pn'] = str(page)


if __name__ == "__main__":
    # Top-level boundary: log any crash with its traceback rather than
    # letting the process die silently.
    try:
        LawCrawler().run()
    # 'except Exception, e' is Python-2-only syntax; 'as' is valid on
    # Python 2.6+ and all Python 3 versions.
    except Exception as e:
        logger.exception(e)



