#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os,sys,re
from wwpy.big.Local import BigData
from wwpy.util.Wrapper import looper

@looper('DAY')
def main(date):
    """Daily job: count 58.com 'mingqi' click metrics for one day.

    Runs a local map/reduce (wwpy BigData) over the day's 24 hourly click
    logs and writes a single line of four counts to mingqi_click_<date>.txt.

    :param date: day to process, supplied by the @looper('DAY') wrapper
                 (format matches the log file naming, e.g. YYYYMMDD —
                 TODO confirm against looper's conventions).
    """
    def get_logs(date):
        # Build the 24 hourly log paths for the day; fall back to the
        # gzipped variant when the plain file is absent.
        path = '/work2/opt/analytic/log_sys/wuba/click/click_file'
        logs = []
        for hour in range(24):
            # %02d zero-pads the hour (00..23) without string surgery.
            log = '%s/click_%s_%02d' % (path, date, hour)
            if not os.path.exists(log):
                log = '%s.gz' % log
            logs.append(log)
        return logs

    def mapper(key, value):
        # One log line per call; fields are '<$>'-separated.
        fields = value.split('<$>')
        # Guard against malformed/truncated lines: the original code
        # raised IndexError here and killed the mapper worker.
        if len(fields) < 20:
            return
        host, uri, params = fields[13:16]
        page_num = fields[19]
        # The patterns are plain literals (no regex metacharacters), so
        # substring tests replace re.search in this hot loop.
        if host == 'bj.58.com' and '/yewu/' in uri:
            yield 'post_click', 1
            if 'filter=free' in params:
                yield 'new_post_click', 1
            elif 'jobfrom=mingqi' in params:
                yield 'mingqi_zhuanqu_click', 1
            if page_num == '1':
                yield 'pn1_post_click', 1

    def reducer(key, values):
        # Sum the per-mapper partial counts for each metric key.
        yield key, sum(values)

    def handler(results):
        # Emit the four counters in a fixed column order, defaulting any
        # metric that never appeared in the logs to 0.
        keys = 'post_click,new_post_click,mingqi_zhuanqu_click,pn1_post_click'.split(',')
        info = dict.fromkeys(keys, 0)
        for key, value in results:
            info[key] = value
        # Explicit list (not map()) so the output is a materialized list
        # under both Python 2 and Python 3, where map() is lazy.
        yield date, [info[key] for key in keys]

    bd = BigData()
    bd.input = get_logs(date)
    bd.output = 'mingqi_click_%s.txt' % date
    bd.mapper_num = 8
    bd.mapper = mapper
    bd.reducer = reducer
    bd.handler = handler
    bd.run()
    

# Script entry point. main() is called with no arguments here because the
# @looper('DAY') decorator supplies the date parameter on each invocation.
if __name__ == "__main__":
    main()

