#!/usr/bin/env python
# -*- coding:utf-8 -*-

"""
windows 端
"""

url = 'http://121.43.121.204:7375'
import sys
from crawler import sd,ln,js,yn,zj

CRAWLERS = {'sd':sd,'ln':ln,'js':js,'yn':yn, 'zj':zj}

import requests
import json
import time


def task(province):
    """Fetch the company list for *province* from the server, crawl each
    company with the province's crawler, and post every result back.

    Posts ``{'exception': 1}`` and returns early if any single crawl
    raises, mirroring the original error protocol.
    """
    company_list = []
    # The list endpoint may be flaky: retry up to 50 times with a short pause.
    for i in range(50):
        try:
            param = {'province': province}
            # timeout added so a hung connection cannot stall the retry loop
            company_data = requests.get(url, params=param, timeout=30)
            company_list = json.loads(company_data.content)
            break
        except Exception as e:
            print(e)
            time.sleep(2)
            continue

    crawler = CRAWLERS.get(province)
    for company in company_list:
        name = company.get('key')
        try:
            result = crawler.search2(name)
        except Exception as e:
            # Signal the failure to the server and stop this batch.
            data = {'exception': 1}
            print(e)
            post_result(data)
            return
        # BUG FIX: post inside the loop. Previously the post sat after the
        # loop, so only the last company's result was ever sent — and an
        # empty company_list raised NameError on the undefined `result`.
        data = {'data': result, 'companyName': name}
        post_result(data)


def post_result(data):
    """POST *data* (JSON-encoded) to the server, retrying up to 30 times.

    BUG FIX: the original loop had no ``break`` after a successful POST,
    so every payload was re-sent 30 times.
    """
    payload = json.dumps(data)  # hoisted: encode once, not per retry
    for i in range(30):
        try:
            # timeout added so a hung connection cannot stall forever
            requests.post(url, data=payload, timeout=30)
            break  # success — stop retrying
        except Exception as e:
            print(e)
            # brief backoff between retries, consistent with task()
            time.sleep(2)
            continue

if __name__ == '__main__':
    # Take the province code from the command line when supplied
    # (restores the commented-out argv handling); default to "zj".
    province = sys.argv[1] if len(sys.argv) > 1 else "zj"
    # Run forever: each task() round-trip fetches work from the server.
    while True:
        task(province)