#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os,sys,re
from wwpy.big.Local import BigData
from wwpy.util.Wrapper import looper
from wwpy.util.Time import Time
from wwpy.wuba.ETL.UrlInfo import *
from wwpy.db.DBI import DBI,dbc

def get_ip_dict():
    """Load the ip_filter table into a dict for O(1) membership tests.

    Returns a dict mapping ip (str) -> 1; callers use ``ip in ip_dict``
    to drop log lines coming from filtered IPs.
    """
    # Attribute access for consistency with get_local_dict / get_cate_dict
    # (the original mixed dbc['db_base_dict'] here with dbc.db_base_dict there).
    dbi=DBI(**dbc.db_base_dict)
    try:
        rows=dbi.fetch("select ip from ip_filter")
    finally:
        # Close the connection even if fetch raises (original leaked it).
        dbi.close()
    return dict.fromkeys((row[0] for row in rows),1)

def get_local_dict():
    """Fetch display_local rows and index the (city1, city2, city3) tuples
    both by local_id (as str) and by list_name.

    Returns ``{'id': {local_id: cities}, 'name': {list_name: cities}}``.
    """
    dbi=DBI(**dbc.db_base_dict)
    rows=dbi.fetch("select local_id,list_name,city1,city2,city3 from display_local ")
    dbi.close()
    by_id,by_name={},{}
    for row in rows:
        cities=row[2:]
        by_id[str(row[0])]=cities
        by_name[row[1]]=cities
    return dict(id=by_id,name=by_name)

def get_cate_dict():
    """Fetch display_cate rows (channel 1000004) and index the
    (cate1, cate2, cate3) tuples both by cate_id (as str) and by list_name.

    Returns ``{'id': {cate_id: cates}, 'name': {list_name: cates}}``.
    """
    dbi=DBI(**dbc.db_base_dict)
    rows=dbi.fetch("select cate_id,list_name,cate1,cate2,cate3 from display_cate where channel_id=1000004")
    dbi.close()
    by_id,by_name={},{}
    for row in rows:
        cate_values=row[2:]
        by_id[str(row[0])]=cate_values
        by_name[row[1]]=cate_values
    return dict(id=by_id,name=by_name)

def _handle_click(self):
    """Module-level twin of MingQi.handle_click so ProcessPool can run it
    in a child process (see MingQi.handle); *self* is a MingQi instance.
    """
    job=BigData()
    job.threads=2
    job.input=self.get_click_logs()
    job.output_sep(self.sep)
    job.get_fkey=self.get_fkey
    job.processor=self.click_processor
    job.read_to_files(self.click_key_files)

def _handle_show(self):
    """Module-level twin of MingQi.handle_show so ProcessPool can run it
    in a child process (see MingQi.handle); *self* is a MingQi instance.
    """
    job=BigData()
    job.threads=6
    job.input=self.get_track_logs()
    job.output_sep(self.sep)
    job.get_fkey=self.get_fkey
    job.processor=self.show_processor
    job.read_to_files(self.show_key_files)

class MingQi(object):
    """ETL pipeline for 58.com "MingQi" job-listing metrics for one day.

    Workflow:
      1. handle()  - parse raw click/track access logs into per-page-type
                     mid files (mid_file/ directory),
      2. combine() - map-reduce the mid files into aggregated stat files
                     (stat_file/ directory),
      3. load()    - bulk-load each stat file into its reporting table.

    NOTE(review): this module relies on Python 2 semantics -- e.g. map()
    returning a list (default_locals/default_cates are unpacked repeatedly)
    and statement-form exec on a string.
    """

    def __init__(self,date=Time.yesterday(),ip_dict=None,local_dict=None,cate_dict=None,path='.',sep=','):
        # date defaults are evaluated at import time (Time.yesterday() runs
        # once); callers normally pass an explicit date via main().
        self.date=Time.date_format(date)
        # Lookup tables are injectable for testing; otherwise loaded from DB.
        self.ip_dict=ip_dict or get_ip_dict()
        self.local_dict=local_dict or get_local_dict()
        self.cate_dict=cate_dict or get_cate_dict()
        self.path=path
        self.sep=sep
        # Working directories: intermediate per-page-type logs and final stats.
        self.mid_path='%s%smid_file' % (self.path,os.sep)
        self.stat_path='%s%sstat_file' % (self.path,os.sep)
        # Compact date form (presumably YYYYMMDD -- confirm Time.date_format).
        self.mini_date=Time.date_format(date,'')
        # Access-log line format: id58, timestamp, ip, "GET ...?<gif> HTTP",
        # quoted referer URL, then "| user_id" at end of line.
        self.line_regex='^(?P<id58>[^\s]+)\s(?P<timestamp>[^\s]+)\s(?P<ip>[^\s]+)\s.*GET .*\?(?P<gif>[^\s]+)\sHTTP.*"(?P<url>http(?:[^"]+))"\s.*\|\s(?P<user_id>[\d-]+)$'
        # Three empty strings used when a local/cate id cannot be resolved.
        # In Python 2 map() returns a (shared, mutable) list.
        self.default_locals=map(lambda idx:'',range(3))
        self.default_cates=map(lambda idx:'',range(3))
        # Mid-file destinations keyed by page_type (see get_fkey).
        self.show_key_files=dict(zp_mq_detail_page='%s%szp_mq_detail_show_%s.log' % (self.mid_path,os.sep,self.mini_date),
                                 zp_post_detail_page='%s%szp_post_detail_show_%s.log' % (self.mid_path,os.sep,self.mini_date))
        self.click_key_files=dict(zp_list_page='%s%szp_list_click_%s.log' % (self.mid_path,os.sep,self.mini_date),
                                  zp_mq_list_page='%s%szp_mq_list_click_%s.log' % (self.mid_path,os.sep,self.mini_date),
                                  zp_job_apply_page='%s%szp_job_apply_click_%s.log' % (self.mid_path,os.sep,self.mini_date))
        # Aggregated output files produced by the combine_* methods.
        self.stat_files=dict(
            zp_list_file='%s%szp_list_%s' % (self.stat_path,os.sep,self.mini_date),
            zp_mq_list_file='%s%szp_mq_list_%s' % (self.stat_path,os.sep,self.mini_date),
            zp_mq_detail_file='%s%szp_mq_detail_%s' % (self.stat_path,os.sep,self.mini_date),
            zp_post_detail_file='%s%szp_post_detail_%s' % (self.stat_path,os.sep,self.mini_date)
        )
        # Destination tables for the load_* methods.
        self.tables=dict(
            zp_list_table='zp_list_page_metrics',
            zp_mq_list_table='zp_mq_list_page_metrics',
            zp_mq_detail_table='zp_mq_detail_page_metrics',
            zp_post_detail_table='zp_post_detail_page_metrics',
        )
        # Column-name -> DDL fragment catalog; get_table_fields() selects from
        # it per table. (Column comments are intentionally left in Chinese --
        # they are SQL, i.e. runtime strings.)
        self.table_fields=dict(
            stat_date="date comment '统计日期'",
            source="varchar(100) comment '来源'",
            post_type="varchar(50) comment '帖子类型'",
            post_id="bigint(20) comment '帖子ID'",
            user_type="varchar(30) comment '用户类型'",
            user_id="bigint(20) comment '用户ID'",
            shop_id="bigint(20) comment '店铺ID'",
            city1="varchar(30) comment '一级城市'",
            city2="varchar(30) comment '二级城市'",
            city3="varchar(30) comment '三级城市'",
            cate1="varchar(30) comment '一级类别'",
            cate2="varchar(30) comment '二级类别'",
            cate3="varchar(30) comment '三级类别'",
            page_tab="varchar(100) comment '页面标签'",
            page_num="int(11) comment '页码'",
            page_pos="double comment '页面位置'",
            pv="int(11) comment '页面浏览量'",
            uv="int(11) comment '独立访客数'",
            click="int(11) comment '点击数'",
            new_post_click="int(11) comment '最新职位点击数'",
            mq_only_click="int(11) comment '名企专区点击数'",
            other_click="int(11) comment '其他点击数'",
        )
        # Ordered column lists per table; must match what combine_* emits.
        self.fields=dict(
            zp_list_fields='stat_date,post_type,post_id,user_type,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_tab,page_num,page_pos,click'.split(','),
            zp_mq_list_fields='stat_date,source,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_num,page_pos,click'.split(','),
            zp_mq_detail_fields='stat_date,page_tab,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,pv,uv'.split(','),
            zp_post_detail_fields='stat_date,user_id,post_id,city1,city2,city3,cate1,cate2,cate3,pv,uv,click,other_click'.split(','),
        )
        
    def get_click_logs(self):
        """Build the HDFS-streaming URLs of the day's pc-click logs (one per
        collector host)."""
        logs=[]
        url=lambda host:'http://10.5.12.167:50075/streamFile/dsap/rawdata/pcclick58/%s/%s-tracklog.58.com.access.log' % (self.mini_date,host)
        for host in ('10.4.17.65','10.4.17.66'):
            log=url(host)
            logs.append(log)
        return logs
    
    def get_track_logs(self):
        """Build the URLs of the day's track logs: 24 hours x 6 ten-minute
        slices, zero-padded (e.g. ...-09-00)."""
        logs=[]
        url=lambda hour,idx:'http://10.5.12.167:50075/streamFile/dsap/rawdata/track58/%s/10.4.17.65_tracklog.58.com.access.log.%s-%s-%s' % (self.mini_date,self.date,hour,idx)
        for hour in range(24):
            if hour<10: hour='0%s' % hour
            for idx in range(0,60,10):
                if idx<10: idx='0%s' % idx
                log=url(hour,idx)
                logs.append(log)
        return logs
    
    def handle_line(self,line):
        """Parse one raw access-log line against self.line_regex.

        Returns a dict with id58/ip/timestamp/user_id/url and the parsed gif
        query-string params ('gif_params'); empty dict when the line does not
        match.
        """
        # NOTE(review): the pattern is recompiled for every line; re's
        # internal cache softens this, but hoisting the compile would be
        # cheaper on these multi-GB logs.
        regex=re.compile(self.line_regex)
        m=regex.match(line)
        line_fields={}
        if m:
            line_fields['id58']=m.group('id58')
            line_fields['ip']=m.group('ip')
            line_fields['timestamp']=m.group('timestamp')
            line_fields['user_id']=m.group('user_id')
            gif=m.group('gif')
            line_fields['url']=m.group('url')
            # get_fields_from_params comes from wwpy.wuba.ETL.UrlInfo (star
            # import); presumably parses the gif query string into a dict.
            line_fields['gif_params']=get_fields_from_params(gif) or {}
            return line_fields
        return line_fields
    
    def handle_zp_list_click(self,line_fields):
        """Classify a job-list-page click; returns a tuple record or None.

        Drops filtered IPs and records with no resolvable cate1. page_tab is
        derived from the URL query: filter=free -> new_post,
        jobfrom=mingqi -> mq_only, else all_post.
        """
        params,url,ip=line_fields.get('gif_params',{}),line_fields.get('url',''),line_fields.get('ip','')
        if ip in self.ip_dict:
            return None
        url_fields=get_info_from_url(url,self.local_dict['name'],self.cate_dict['name']) or {}
        page_type='zp_list_page'
        p=url_fields.get('params','')
        if re.search('filter=free',p):
            page_tab='new_post'
        elif re.search('jobfrom=mingqi',p):
            page_tab='mq_only'
        else:
            page_tab='all_post'
        # "or 1" guards against empty-string/0 values coming from the params.
        page_pos=params.get('bi_val_pos',1) or 1
        post_id=params.get('post_id',-1)
        post_type=params.get('bi_enum_type','default')
        user_type=params.get('bi_enum_user','geren')
        user_id=params.get('bi_userid',-1)
        uri=url_fields.get('uri','/')
        # Page number is encoded in the URI path as /pnN/.
        m=re.search('/pn(\d+)/',uri)
        if m:
            page_num=m.group(1) or 1
        else:
            page_num=1
        city1,city2,city3,cate1,cate2,cate3=map(lambda key:url_fields.get(key,''),('city1','city2','city3','cate1','cate2','cate3'))
        if cate1=='':
            return None
        return (page_type,post_type,post_id,user_type,user_id,city1,city2,city3,cate1,cate2,cate3,page_tab,page_num,page_pos)
        
    def handle_mq_list_click(self,line_fields):
        """Classify a MingQi-list-page click (URL must carry jobfrom=mingqi);
        returns a tuple record or None."""
        params,url,ip=line_fields.get('gif_params',{}),line_fields.get('url',''),line_fields.get('ip','')
        if ip in self.ip_dict:
            return None
        page_type='zp_mq_list_page'
        url_fields=get_info_from_url(url,self.local_dict['name'],self.cate_dict['name']) or {}
        p=url_fields.get('params','')
        if not re.search('jobfrom=mingqi',p):
            return None
        source=params.get('from','')
        user_id=params.get('userid',-1)
        shop_id=params.get('shopid',-1) or -1
        post_id=params.get('infoid',-1) or -1
        page_pos=params.get('pagepos',1) or 1
        page_num=params.get('pagenum',1) or 1
        city1,city2,city3,cate1,cate2,cate3=map(lambda key:url_fields.get(key,''),('city1','city2','city3','cate1','cate2','cate3'))
        return (page_type,source,post_id,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_num,page_pos)
    
    def handle_job_apply_click(self,line_fields):
        """Classify a job-apply click on a post detail page (/<id>x.shtml with
        a zhaopin_* source); returns a tuple record or None."""
        params,url,ip=line_fields.get('gif_params',{}),line_fields.get('url',''),line_fields.get('ip','')
        if ip in self.ip_dict:
            return None
        page_type='zp_job_apply_page'
        url_fields=get_info_from_url(url,self.local_dict['name'],self.cate_dict['name']) or {}
        m=re.search('/(\d+)x\.shtml',url)
        if not m:
            return None
        source=params.get('from','')
        if not re.search('zhaopin_',source):
            return None
        user_id=params.get('userid',-1)
        # Fall back to the post id embedded in the URL when infoid is absent.
        post_id=params.get('infoid',m.group(1) or -1)
        city1,city2,city3,cate1,cate2,cate3=map(lambda key:url_fields.get(key,''),('city1','city2','city3','cate1','cate2','cate3'))
        return (page_type,source,user_id,post_id,city1,city2,city3,cate1,cate2,cate3)
    
    def _handle_local(self,local):
        """Resolve a comma-separated local chain (most-specific id last) to
        its (city1, city2, city3) tuple, or self.default_locals."""
        if local=='':
            return self.default_locals
        locals=local.split(',')
        locals.reverse()
        # After reverse() the deepest (most specific) id is first.
        local_id=locals[0]
        return self.local_dict['id'].get(local_id,self.default_locals)
    
    def _handle_cate(self,cate):
        """Resolve a comma-separated cate chain (most-specific id last) to
        its (cate1, cate2, cate3) tuple, or self.default_cates."""
        if cate=='':
            return self.default_cates
        cates=cate.split(',')
        cates.reverse()
        cate_id=cates[0]
        return self.cate_dict['id'].get(cate_id,self.default_cates)
        
    def handle_mq_detail_show(self,line_fields):
        """Classify a MingQi shop detail-page view (qy.58.com/mq/<shop>/);
        returns a tuple record (ending in id58 for UV counting) or None."""
        params,url,id58=line_fields.get('gif_params',{}),line_fields.get('url',''),line_fields.get('id58','')
        m=re.search('http://qy.58.com/mq/(\d+)/',url)
        if not m:
            return None
        page_type='zp_mq_detail_page'
        user_id=params.get('userid',-1)
        track_url=params.get('trackURL','')
        try:
            # SECURITY/HACK: trackURL comes straight from the access log and
            # is executed as Python -- arbitrary code execution if the logs
            # are ever attacker-controlled. Presumably it holds a dict
            # literal; TODO: replace with ast.literal_eval or a real parser.
            exec('track='+track_url)
        except:
            track={}
        shop_id=track.get('shopid',m.group(1) or -1)
        local=track.get('area','')
        cate=track.get('cate','')
        page_tab=track.get('page','qy_index')
        city1,city2,city3=self._handle_local(local)
        cate1,cate2,cate3=self._handle_cate(cate)
        return (page_type,page_tab,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,id58)
    
    def handle_post_detail_show(self,line_fields):
        """Classify a job-post detail-page view (/<id>x.shtml); returns a
        tuple record (ending in id58 for UV counting) or None."""
        params,url,id58=line_fields.get('gif_params',{}),line_fields.get('url',''),line_fields.get('id58','')
        m=re.search('/(\d+)x\.shtml',url)
        if not m:
            return None
        post_id=m.group(1) or -1
        page_type='zp_post_detail_page'
        user_id=params.get('userid',-1)
        track_url=params.get('trackURL','')
        try:
            # SECURITY/HACK: same exec-on-log-data risk as in
            # handle_mq_detail_show -- see the note there.
            exec('track='+track_url)
        except:
            track={}
        shop_id=track.get('shopid',-1)
        local=track.get('area','')
        cate=track.get('cate','')
        city1,city2,city3=self._handle_local(local)
        cate1,cate2,cate3=self._handle_cate(cate)
        if cate1=='':
            return None
        return (page_type,user_id,post_id,city1,city2,city3,cate1,cate2,cate3,id58)
    
    def click_processor(self,part,line):
        """BigData line processor for click logs: route each line to the
        matching handle_* method by its request path; None drops the line."""
        line_fields=self.handle_line(line)
        if re.search('/pc/click/listclicklog',line):
            return self.handle_zp_list_click(line_fields)
        elif re.search('/pc/click/empty.js.gif',line):
            # The same gif endpoint serves both mq-list and job-apply clicks;
            # try mq-list first, fall through to job-apply.
            mlc=self.handle_mq_list_click(line_fields)
            if mlc:
                return mlc
            return self.handle_job_apply_click(line_fields)
        return None
    
    def show_processor(self,part,line):
        """BigData line processor for track (show) logs: mq-detail first,
        then post-detail; None drops the line."""
        if not re.search('/pc/empty.js.gif',line):
            return None
        line_fields=self.handle_line(line)
        mds=self.handle_mq_detail_show(line_fields)
        if mds:
            return mds
        return self.handle_post_detail_show(line_fields)
    
    def get_fkey(self,line):
        """Mid-file routing key: the record's first element (page_type),
        matched against the keys of show_key_files/click_key_files."""
        return line[0] #page_type
    
    def handle_click(self):
        """Parse the click logs into click_key_files (2 reader threads)."""
        bd=BigData()
        bd.input=self.get_click_logs()
        bd.output_sep(self.sep)
        bd.threads=2
        bd.processor=self.click_processor
        bd.get_fkey=self.get_fkey
        bd.read_to_files(self.click_key_files)
    
    def handle_show(self):
        """Parse the track logs into show_key_files (6 reader threads)."""
        bd=BigData()
        bd.input=self.get_track_logs()
        bd.output_sep(self.sep)
        bd.threads=6
        bd.processor=self.show_processor
        bd.get_fkey=self.get_fkey
        bd.read_to_files(self.show_key_files)
        
    def handle(self,processes=2):
        """Run click and show parsing; in parallel child processes when
        *processes* is not None (via the module-level _handle_click /
        _handle_show wrappers -- presumably because bound methods do not
        pickle cleanly), otherwise sequentially in-process."""
        if processes is not None:
            from wwpy.util.Processes import ProcessPool
            pp=ProcessPool(processes)
            pp.put(_handle_click,self)
            pp.put(_handle_show,self)
            pp.wait()
        else:
            self.handle_click()
            self.handle_show()
        
    def _get_reducer_key(self,key):
        """Prefix a reduce key tuple with the stat date (mini_date)."""
        return tuple([self.mini_date]+list(key))
        
    def combine_zp_list(self):
        """Aggregate list-page clicks: merge zp_list and zp_mq_list mid files
        into zp_list_file, counting clicks per dimension key."""
        def mapper(key,value):
            fields=value.split(self.sep)
            page_type=fields[0]
            if page_type=='zp_list_page':
                # zp_list records carry no shop_id; inject the -1 placeholder
                # so both page types share one key layout.
                shop_id=-1
                new_key=tuple(fields[1:5]+[shop_id]+fields[5:])
                yield new_key,1
            elif page_type=='zp_mq_list_page':
                source=fields[1]
                if source=='mq_shop_jobclick':
                    post_type='zhaopinmingqi'
                    user_type='wanglintong'
                    page_tab='mq_only'
                    # Older records may lack post_id (only 12 fields).
                    if len(fields)<13:
                        post_id=-1
                        (user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_num,page_pos)=fields[2:]
                    else:
                        (post_id,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_num,page_pos)=fields[2:]
                    new_key=(post_type,post_id,user_type,user_id,shop_id,city1,city2,city3,cate1,cate2,cate3,page_tab,page_num,page_pos)
                    yield new_key,1
                
        def reducer(key,values):
            yield self._get_reducer_key(key),sum(values)
            
        bd=BigData()
        bd.input=[self.click_key_files.get('zp_list_page'),self.click_key_files.get('zp_mq_list_page')]
        bd.output=self.stat_files.get('zp_list_file')
        bd.output_sep(self.sep)
        bd.mapper=mapper
        bd.reducer=reducer
        bd.run()
        
    def combine_mq_list(self):
        """Aggregate MingQi list-page clicks into zp_mq_list_file (count per
        full dimension key, page_type stripped)."""
        bd=BigData()
        bd.input=self.click_key_files.get('zp_mq_list_page')
        bd.output=self.stat_files.get('zp_mq_list_file')
        bd.output_sep(self.sep)
        bd.mapper=lambda key,value:(tuple(value.split(self.sep)[1:]),1)
        bd.reducer=lambda key,values:(self._get_reducer_key(key),sum(values))
        bd.run()
        
    def combine_mq_detail(self):
        """Aggregate MingQi detail-page shows into zp_mq_detail_file with PV
        and UV (distinct id58 cookies) per dimension key."""
        def mapper(key,value):
            fields=value.split(self.sep)
            # Strip page_type (first) and id58 (last); id58 becomes the value
            # used for UV counting.
            new_key=tuple(fields[1:-1])
            id58=fields[-1]
            yield new_key,(1,id58)
        def reducer(key,values):
            pv,vs=0,{}
            for value in values:
                pv+=value[0]
                vs[value[1]]=1
            # Distinct id58 count (equivalent to len(vs)).
            uv=sum(1 for k in vs.keys())
            yield self._get_reducer_key(key),(pv,uv)
        bd=BigData()
        bd.input=self.show_key_files.get('zp_mq_detail_page')
        bd.output=self.stat_files.get('zp_mq_detail_file')
        bd.output_sep(self.sep)
        bd.mapper=mapper
        bd.reducer=reducer
        bd.run()
    
    def combine_post_detail(self):
        """Join post-detail shows with job-apply clicks into
        zp_post_detail_file: PV/UV from shows, click vs other_click from
        apply-click records, per shared dimension key."""
        def mapper(key,value):
            fields=value.split(self.sep)
            if fields[0]=='zp_post_detail_page':
                new_key=tuple(fields[1:-1])
                id58=fields[-1]
                yield new_key,('show',1,id58)
            else:
                # zp_job_apply_page record: fields[1] is the click source.
                new_key=tuple(fields[2:])
                if fields[1]=='zhaopin_job_microresume_jobseeker_applyposition':
                    yield new_key,('click',1,0)
                else:
                    yield new_key,('click',0,1)
        def reducer(key,values):
            pv,click,other_click,vs=0,0,0,{}
            for value in values:
                if value[0]=='show':
                    pv+=value[1]
                    vs[value[2]]=1
                elif value[0]=='click':
                    click+=value[1]
                    other_click+=value[2]
            # Distinct id58 count (equivalent to len(vs)).
            uv=sum(1 for k in vs.keys())
            yield self._get_reducer_key(key),(pv,uv,click,other_click)
        bd=BigData()
        bd.input=[self.show_key_files.get('zp_post_detail_page'),self.click_key_files.get('zp_job_apply_page')]
        bd.output=self.stat_files.get('zp_post_detail_file')
        bd.output_sep(self.sep)
        bd.mapper=mapper
        bd.reducer=reducer
        bd.run()
        
    def combine(self):
        """Run all four combine_* aggregations."""
        self.combine_zp_list()
        self.combine_mq_list()
        self.combine_mq_detail()
        self.combine_post_detail()
        
    def get_table_fields(self,fields):
        """Expand column names into "name DDL" strings via self.table_fields.

        Raises Exception on a name missing from the catalog, so a schema
        typo fails loudly before any load.
        """
        table_fields=[]
        for field in fields:
            field_value=self.table_fields.get(field)
            if field_value is None:
                raise Exception('No defined field %s' % field)
            table_field='%s %s' % (field,field_value)
            table_fields.append(table_field)
        return table_fields
    
    def load_zp_list(self):
        """Bulk-load zp_list_file into zp_list_page_metrics (Brighthouse
        engine), replacing rows for this stat_date."""
        dbi=DBI(**dbc.db_zp_ib)
        zp_list_file=self.stat_files.get('zp_list_file')
        table=self.tables.get('zp_list_table')
        table_fields=self.get_table_fields(self.fields.get('zp_list_fields'))
        dbi.save(zp_list_file,table,table_fields=table_fields,sep=self.sep,is_update=False,options={'ENGINE':'BRIGHTHOUSE'},where=dict(stat_date=self.mini_date))
        dbi.close()
        
    def load_mq_list(self):
        """Bulk-load zp_mq_list_file into zp_mq_list_page_metrics."""
        dbi=DBI(**dbc.db_zp_ib)
        mq_list_file=self.stat_files.get('zp_mq_list_file')
        table=self.tables.get('zp_mq_list_table')
        table_fields=self.get_table_fields(self.fields.get('zp_mq_list_fields'))
        dbi.save(mq_list_file,table,table_fields=table_fields,sep=self.sep,is_update=False,options={'ENGINE':'BRIGHTHOUSE'},where=dict(stat_date=self.mini_date))
        dbi.close()
        
    def load_mq_detail(self):
        """Bulk-load zp_mq_detail_file into zp_mq_detail_page_metrics."""
        dbi=DBI(**dbc.db_zp_ib)
        mq_detail_file=self.stat_files.get('zp_mq_detail_file')
        table=self.tables.get('zp_mq_detail_table')
        table_fields=self.get_table_fields(self.fields.get('zp_mq_detail_fields'))
        dbi.save(mq_detail_file,table,table_fields=table_fields,sep=self.sep,is_update=False,options={'ENGINE':'BRIGHTHOUSE'},where=dict(stat_date=self.mini_date))
        dbi.close()
        
    def load_post_detail(self):
        """Bulk-load zp_post_detail_file into zp_post_detail_page_metrics."""
        dbi=DBI(**dbc.db_zp_ib)
        post_detail_file=self.stat_files.get('zp_post_detail_file')
        table=self.tables.get('zp_post_detail_table')
        table_fields=self.get_table_fields(self.fields.get('zp_post_detail_fields'))
        dbi.save(post_detail_file,table,table_fields=table_fields,sep=self.sep,is_update=False,options={'ENGINE':'BRIGHTHOUSE'},where=dict(stat_date=self.mini_date))
        dbi.close()
        
    def load(self):
        """Run all four load_* bulk inserts."""
        self.load_zp_list()
        self.load_mq_list()
        self.load_mq_detail()
        self.load_post_detail()
    
@looper('DAY')
def main(date):
    """Daily driver: extract, aggregate and load the MingQi metrics for *date*."""
    pipeline=MingQi(date,path='/work2/opt/analytic/zhaopin/mingqi')
    pipeline.handle()
    pipeline.combine()
    pipeline.load()
    
if __name__ == '__main__':
    # Script entry point. main() is called without a date; the @looper('DAY')
    # decorator presumably supplies/iterates the date argument -- confirm in
    # wwpy.util.Wrapper.
    main()
    
