# -*- coding:utf8 -*-
# !/usr/bin/env python

import re
from bs4 import BeautifulSoup
from utils import kill_captcha
import random
import requests
from scpy.logger import get_logger
import time
import urllib
import sys
import traceback
import threading
from multiprocessing import Process, Pool
import Queue
import bj_parse
import json
import traceback
import bj_2

# Module-level logger; get_logger is a project (scpy) helper keyed on this file.
logger = get_logger(__file__)
# Dispatch-service endpoint that hands out batches of company names to crawl.
url = 'http://121.43.121.204:7375/saic_web'


def client():
    """Fetch one batch of company names from the dispatch service and crawl
    each name via bj_2.search_run.

    The HTTP fetch is retried up to 50 times with a 2-second pause between
    attempts (the endpoint is evidently flaky); if every attempt fails the
    batch stays empty and the function returns without crawling anything.
    A failure while crawling one company is logged (with traceback) and the
    loop moves on to the next company, so one bad record no longer discards
    the rest of the batch.
    """
    company_list = []
    for _ in range(50):
        try:
            param = {'province': "bj"}
            # timeout keeps a dead/hung endpoint from blocking the worker forever
            company_data = requests.get(url, params=param, timeout=30)
            company_list = json.loads(company_data.content)
            break
        except Exception as e:
            print(e)
            time.sleep(2)
            continue

    for company in company_list:
        name = company.get('key')
        try:
            bj_2.search_run(name)
        except Exception:
            # logger.exception records the full traceback; `continue` (not
            # `return`) so a single failure doesn't drop the remaining names.
            logger.exception('search_run failed for %s', name)
            continue


if __name__ == "__main__":
    while True:
        client()
