import logging
from mitmproxy.script import concurrent
from pymongo import MongoClient
import pymongo
import re
import json
import arrow
from functools import partial
from itertools import chain
# Handle to the local MongoDB 'tianyancha' database; every collection
# written below (search_index, company_index, near_index, ...) lives here.
DB = MongoClient()['tianyancha']

def get_log(name):
    """Return a DEBUG-level logger named *name* that appends to '<name>.log'.

    Idempotent: `logging.getLogger` returns a shared singleton per name, so
    the original version attached one more FileHandler on every call,
    duplicating each log line.  Now an already-configured logger is returned
    untouched.  (The original also built a StreamHandler it never attached;
    that dead code is removed.)
    """
    log = logging.getLogger(name)
    if log.handlers:
        # Already configured by a previous call — do not stack handlers.
        return log

    fmt = logging.Formatter(
        '%(asctime)s - %(funcName)s - %(levelname)s - %(message)s',
        '%Y-%m-%d %H:%M:%S')

    fh = logging.FileHandler('%s.log' % name)
    fh.setFormatter(fmt)
    fh.setLevel(logging.DEBUG)
    log.addHandler(fh)

    log.setLevel(logging.DEBUG)
    return log

# Module-wide logger; writes to 'mainc.log'.
Loger = get_log('mainc')

# /expanse/<attr>.json endpoints whose rows live under data.result,
# keyed by the 'id' query parameter (see response()).
EXPENSE_ATTR = ['staff','holder','inverst','branch',]
# /expanse/find*.json endpoints (rows under data.page.rows); value is the
# row field combined with the 'name' query parameter to build the _id.
EXPENSE_FIND = {'findHistoryRongzi':'date','findTeamMember':'id','findProduct':'id','findJingpin':'jingpinProduct',
                'findTzanli':'tzdate'}
# /expanse/<attr>.json endpoints with rows under data.items; value is the
# row field combined with the 'id' query parameter to build the _id.
EXPENSE_ATTR_DIC = {'zhixing':'caseCode','bid':'uuid','taxcredit':'idNumber','patent':'uuid',
                    'copyReg':'pid','qualification':'licenceNum','appbkinfo':'filterName'}
# /stock/<attr>.json endpoints with rows under data.dataList.
STOCK_ATTR = ['seniorExecutive','holdingCompany',]
@concurrent  # handle flows in a worker thread so mitmproxy's loop isn't blocked
def request(flow):
    """Rewrite matching tianyancha API requests to ask for 1000 rows per page.

    Any request hitting one of the known /expanse/ or /stock/ endpoints gets
    its 'ps' (page size) query parameter forced to 1000 so a single response
    carries the whole result set.
    """
    # NOTE(review): leftover from the stock mitmproxy example script —
    # presumably harmless, but confirm whether anything relies on it.
    flow.request.headers["newheader"] = "foo"
    url = flow.request.url
    for attr in chain(EXPENSE_ATTR, EXPENSE_ATTR_DIC, EXPENSE_FIND):
        if re.match(r'^http://www\.tianyancha\.com/expanse/%s\.json' % attr, url):
            # Query values must be strings; the original assigned the int 1000.
            flow.request.query['ps'] = '1000'
            break
    for attr in STOCK_ATTR:
        if re.match(r'^http://www\.tianyancha\.com/stock/%s\.json' % attr, url):
            flow.request.query['ps'] = '1000'
            break

@concurrent
def response(flow):
    """Parse tianyancha JSON responses and persist them into MongoDB.

    Routes each recognized URL to its collection.  Duplicate-key and
    bulk-write errors are expected when a record has already been crawled
    and are ignored; any other failure is logged with the offending URL.
    """
    if flow.response.status_code != 200:
        return
    url = flow.request.url
    try:
        company_mt = re.match(
            r'^http://www\.tianyancha\.com/v2/company/(\d*)\.json', url)
        if re.match(r'^http://www\.tianyancha\.com/v2/search/(.*?)\.json', url):
            data = json.loads(flow.response.text).get('data')
            if data:
                DB.search_index.insert_many(
                    map(parse_search_index, data), ordered=False)
        elif company_mt:
            data = json.loads(flow.response.text).get('data')
            if data:
                # Company id comes from the URL path, not the payload.
                data['_id'] = int(company_mt.group(1))
                DB.company_index.insert_one(data)
        elif re.match(r'^http://www\.tianyancha\.com/v2/near/s\.json', url):
            data = json.loads(flow.response.text).get('data')
            if data:
                DB.near_index.insert_many(
                    map(parse_search_index, data.get('items')), ordered=False)

        for attr in EXPENSE_ATTR:
            if re.match(r'^http://www\.tianyancha\.com/expanse/%s\.json' % attr, url):
                data = json.loads(flow.response.text).get('data').get('result')
                if data:
                    org_id = flow.request.query['id']
                    DB[attr].insert_many(
                        map(partial(parse_relationship, org_id), data),
                        ordered=False)
                break

        for attr, key in EXPENSE_FIND.items():
            if re.match(r'^http://www\.tianyancha\.com/expanse/%s\.json' % attr, url):
                data = json.loads(flow.response.text).get('data').get('page').get('rows')
                if data:
                    name = flow.request.query['name']
                    DB[attr].insert_many(
                        map(partial(parse_relationship_2, name, key), data),
                        ordered=False)
                break

        # BUG FIX: the original ran this loop twice (once with the str id,
        # once with int(id)).  Both passes produce identical '_id' strings,
        # so the second insert_many always raised BulkWriteError, which
        # aborted the try-block and skipped the stock/companyInfo handling
        # below.  One pass is sufficient.
        for attr, key in EXPENSE_ATTR_DIC.items():
            if re.match(r'^http://www\.tianyancha\.com/expanse/%s\.json' % attr, url):
                data = json.loads(flow.response.text).get('data').get('items')
                if data:
                    org_id = flow.request.query['id']
                    DB[attr].insert_many(
                        map(partial(parse_relationship_2, org_id, key), data),
                        ordered=False)
                break

        for attr in STOCK_ATTR:
            if re.match(r'^http://www\.tianyancha\.com/stock/%s\.json' % attr, url):
                data = json.loads(flow.response.text).get('data').get('dataList')
                if data:
                    DB[attr].insert_many(
                        map(parse_stock_relationship, data), ordered=False)
                break

        if re.match(r'^http://www\.tianyancha\.com/stock/companyInfo\.json', url):
            data = json.loads(flow.response.text).get('data')
            _id = flow.request.query['graphId']
            if data:
                data['_id'] = _id
                DB['companyInfo'].insert_one(data)
    except (pymongo.errors.DuplicateKeyError, pymongo.errors.BulkWriteError):
        # Expected on re-crawls of already-stored records — not an error.
        pass
    except Exception as e:
        Loger.exception(e)
        Loger.error(url)

def parse_stock_relationship(dic):
    """Stamp a stock row with a 'graphId_id' composite _id, a daily 'update'
    timestamp, and an integer 'id'; mutates and returns *dic*."""
    raw_id = dic['id']
    dic['_id'] = '{}_{}'.format(dic['graphId'], raw_id)
    dic['update'] = arrow.now().floor('day').datetime
    dic['id'] = int(raw_id)
    return dic

def parse_relationship_2(_id, uuid, dic):
    """Tag *dic* with its owning org ('org_id'), a composite '_id' built from
    the org id and dic[uuid], and a daily 'update' timestamp; returns *dic*."""
    unique_part = dic[uuid]
    dic['org_id'] = _id
    dic['_id'] = '{}_{}'.format(_id, unique_part)
    dic['update'] = arrow.now().floor('day').datetime
    return dic

def parse_relationship(_id, dic):
    """Tag *dic* with an integer 'org_id', a composite '_id' (org id plus the
    row's 'id', falling back to 'name' then the literal string 'None'), and a
    daily 'update' timestamp; mutates and returns *dic*."""
    fallback = dic.get('name', 'None')
    suffix = dic.get('id', fallback)
    dic['org_id'] = int(_id)
    dic['_id'] = '{}_{}'.format(_id, suffix)
    dic['update'] = arrow.now().floor('day').datetime
    return dic

# Loger = get_log('main') 
# Loger.info("start  request:")
def parse_search_index(dic):
    """Normalize a search hit in place: coerce 'id' to int (mirrored into
    '_id'), strip the <em> highlight tags from 'name', and stamp a daily
    'update' timestamp; returns *dic*."""
    numeric_id = int(dic['id'])
    cleaned_name = dic['name'].replace('<em>', '').replace('</em>', '')
    dic['id'] = numeric_id
    dic['_id'] = numeric_id
    dic['name'] = cleaned_name
    dic['update'] = arrow.now().floor('day').datetime
    return dic
