import requests
from bs4 import BeautifulSoup
from pprint import pprint
import json
import pymongo
import time
import random

# Blacklisted <code id="..."> module ids embedded in the job page: UI,
# analytics, and page-context boilerplate that carries no job data.
BK = ['UILixModule','__pageContext__','helpCenterModule','googleAnalyticsModule','globalVariableModule','jserpBadgeCreateModule',]
# Modules that MUST be present for a fetched page to count as a successful
# scrape (checked in format_html).
AST = [
'decoratedJobPostingModule','topCardV2Module','jobDescriptionModule'
]
# Static fallback proxy; unused in the live path because get_page() asks
# get_proxy() for a fresh proxy on every request.
# NOTE(review): credentials are hard-coded here — move to config/env.
PROXY = {
    # 'http':'192.168.1.220:3128',
    # 'https':'192.168.1.220:3128',
    # 'https':'192.168.1.220:8087',
    'http':'http://ider:123456@218.109.205.223:12345',
}

def get_proxy():
    """Pick a random live proxy from the local proxy-pool service.

    Returns:
        dict: ``{'http': 'http://<host>:<port>'}`` — a requests-style
        ``proxies`` mapping built from one random (host, port) entry of
        the JSON list served by the pool.
    """
    # BUGFIX: the original issued a request to http://192.168.1.220:9001
    # and immediately overwrote the response with the next call, so that
    # first round-trip was pure dead weight — removed.
    ret = requests.get('http://localhost:8000')
    # Body is assumed to be a JSON list of (host, port) pairs — TODO confirm
    # against the pool service; choosing randomly spreads load across proxies.
    px = random.choice(ret.json())
    return {
        'http': 'http://%s:%s' % (px[0], px[1]),
    }


# Browser-like default headers, including a captured LinkedIn session cookie.
# NOTE(review): the cookie embeds live auth tokens — it will expire, and it
# should not be committed to source control.
headers= {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.109 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Connection': 'keep-alive',
        'Accept-Encoding': 'gzip, deflate',
        'cookie':'JSESSIONID=ajax:213744135274235278313; bcookie="v=2&bdc9c737-f933-434e-8be43-d8b2c7209ae8"; bscookie="v=1&201708041306213327c65a84-e837-4c42-8626-ac3237b6a4bc41AQERx-CKBk36ZZ27EJ05uNex9y1gwMVA"; lidc="b=OGST00:g=272:u=1:i=1501741293:t=1501827693:s=AQFYw649Jwwk2414lCbgztWuGS2mZi8vUA"; visit="v=1&G"; _gid=GA1.2.512362220.1501744851; 1leo_auth_token="GST:85z1l1TgeyfkyaGNcazpgOkxSHQ_414nv0vzRhQUKKIh9KQCPcPBjW2T:1501750034:5c6535239bfe6dca9683a8cf647553439c021a81"; lang="v=2&lang=en-us"; _ga=GA1.2.187601058.1501741295; _gat=1'
    } 

# One shared Session so every request in this module reuses the headers and
# cookie jar above (get_page() issues its requests through it).
session = requests.Session()
session.headers.update(headers)
def parse_dic(dic):
    """Recursively rename dict keys containing internal dots, in place.

    MongoDB rejects field names with '.' in them, so any key that still
    contains a dot after trimming leading/trailing dots is replaced by the
    text after its last dot. Nested dicts — including dicts found inside
    list values — are rewritten the same way.
    """
    # Collect offenders first; renaming while iterating would break the loop.
    bad_keys = [key for key in dic if '.' in key.strip('.')]
    for value in dic.values():
        if isinstance(value, dict):
            parse_dic(value)
        elif isinstance(value, list):
            # Inlined list handling: only dict elements need a recursive pass.
            for item in value:
                if isinstance(item, dict):
                    parse_dic(item)
    for key in bad_keys:
        dic[key.strip('.').split('.')[-1]] = dic[key]
        del dic[key]

def parse_lis(lis):
    """Run parse_dic over every dict element of *lis* (companion helper)."""
    for item in lis:
        if isinstance(item, dict):
            parse_dic(item)

def format_html(html):
    """Extract the JSON modules LinkedIn embeds in a job-view page.

    The page stores its data as JSON blobs inside ``<code id="...">`` tags;
    each tag id becomes a key of the result, skipping the boilerplate
    modules listed in BK.

    Args:
        html (str): raw HTML of a job-view page.

    Returns:
        dict: module id -> decoded JSON. A page containing only an
        'errorModule' is returned as-is so the caller can record the error.

    Raises:
        ValueError: if any module required by AST is missing — the page is
        likely a login wall / block page and the fetch should be retried.
    """
    soup = BeautifulSoup(html, 'lxml')
    ret_dic = {}
    for sp in soup.find_all('code'):
        _id = sp.attrs['id']
        if _id in BK:
            continue
        ret_dic[_id] = json.loads(sp.string)
    # Error-only page: hand it back instead of failing the validation below.
    if 'errorModule' in ret_dic and len(ret_dic) == 1:
        return ret_dic
    # BUGFIX: validation used `assert`, which is stripped under `python -O`
    # and would silently accept incomplete pages; raise a real exception.
    for ast in AST:
        if ast not in ret_dic:
            raise ValueError('missing module %s, %s' % (ast, list(ret_dic.keys())))
    return ret_dic

def get_page(_id):
    """Download and parse one LinkedIn job posting, retrying until it works.

    Args:
        _id: job id, interpolated into the job-view URL.

    Returns:
        dict: module dict from format_html, with '_id' set to *_id*.
    """
    url = 'https://www.linkedin.com/jobs/view/%s/'
    ts = 1
    while True:
        try:
            # Fresh random proxy each attempt; `session` carries the
            # browser headers and cookie.
            req = session.get(url % _id, timeout=10, proxies=get_proxy())
            ret_dic = format_html(req.text)
            ret_dic['_id'] = _id
            return ret_dic
        except Exception as e:
            print(e)
            # BUGFIX: `ts` was incremented but never used because the
            # backoff sleep was commented out, so failures retried in a
            # tight loop. Restore a capped linear backoff.
            time.sleep(min(ts, 30))
            ts += 1

def main():
    """Scrape every job id from `search_job` that is not yet in `job_page`."""
    db = pymongo.MongoClient('192.168.1.220', 29001).linkedin
    # Project only _id so the cursor stays light; the projected doc doubles
    # as the lookup filter for the dedup check.
    for doc in db.search_job.find({}, {'_id': 1}):
        if db.job_page.find_one(doc):
            continue  # already scraped
        job_id = doc.get('_id')
        print(job_id)
        page = get_page(job_id)
        parse_dic(page)  # strip dotted keys MongoDB would reject
        db.job_page.insert_one(page)



def test():
    """Smoke-test the proxy pool by fetching httpbin's IP echo through it."""
    proxies = get_proxy()
    resp = requests.get('https://httpbin.org/ip', proxies=proxies)
    print(proxies, resp)
    # print()
# Entry point: run the MongoDB-backed scrape loop. Swap the calls below to
# smoke-test the proxy pool instead.
if __name__ == '__main__':
    main()
    # test()

