#!/usr/bin/env python
#-*- coding:utf-8 -*-
#import os,sys,re
#from wwpy.util.Time import Time
#from wwpy.util.Url import Url
#from wwpy.util.Wrapper import looper
#from wwpy.util.Logger import logger
#from wwpy.io.Reader import reader
#from wwpy.wuba.data.DataDict import BelongLocalDict,BelongCateDict
#from wwpy.db.DBI import DBI,dbc

import os,sys,re
import urllib2, urllib
from py.util.Time import Time
from py.util.Url import Url
from py.util.Wrapper import looper
from py.util.Logger import logger
from py.io.Reader import reader
from py.wuba.data.DataDict import BelongLocalDict,BelongCateDict
from py.db.DBI import DBI,dbc

# Template URL of the dsap web agent used to browse an HDFS directory
# listing over HTTP; %s is filled with the HDFS directory path
# (see UserPost.getNameList).
reqUrl=('http://dsap2.web.58dns.org/dsap-agent/?targetserver=bjm6-15-146.58os.org'
        '&targetport=50075&targetpage=browseDirectory&targetprefix=jsp&namenodeInfoPort=50070'
        '&dir=%s&nnaddr=10.9.14.196:9000')
# Datanode streaming URL template: the two %s slots are the HDFS
# directory and the file name inside it (see UserPost.get_logs).
srcUrl='http://10.9.15.19:50075/streamFile%s/%s?nnaddr=10.9.14.196:9000';
# Extracts file names from the agent's HTML directory listing; the
# leading [^_] skips bookkeeping entries such as _SUCCESS.
pattern=r'''<a.*?:9000['"]>\s*([^_].*?)\s*</a>\s*</[bB]>\s*<br>\s*</td>'''
# Compiled once at import time; reused by UserPost.getNameList.
p = re.compile(pattern,re.DOTALL | re.IGNORECASE)
class UserPost(object):
    """Daily user-post statistics pipeline.

    For one day it: downloads the raw post-info logs from HDFS (through
    the dsap web agent / datanode streaming interface), aggregates each
    user's posting behaviour (dominant city, category pair, phone number,
    post / modify / delete counts), writes two '|'-separated output files
    and loads the summary file into MySQL.
    """

    def __init__(self, date=None, path='.'):
        """date: day to process. Defaults to yesterday, computed lazily at
        call time (the original evaluated Time.yesterday('') once at import
        time, which goes stale in a long-running process).
        path: working directory that will hold 'log' and 'file' subdirs."""
        self.date = Time.yesterday('') if date is None else date
        self.path = path
        self.sep = '|'   # field separator used in every intermediate file
        self.info = {}   # per-user aggregate, filled by handle_logs()

    @property
    def log_path(self):
        """Directory holding the downloaded raw logs; created on first access."""
        path = '%s%slog' % (self.path, os.sep)
        if not os.path.exists(path):
            os.mkdir(path)
        return path

    @property
    def output_path(self):
        """Directory holding the aggregated output files; created on first access."""
        path = '%s%sfile' % (self.path, os.sep)
        if not os.path.exists(path):
            os.mkdir(path)
        return path

    def _hms(self):
        """Yield (hour, minute) pairs covering the day in 10-minute steps.

        Hours below 10 are zero-padded strings and minute 0 becomes '00'
        (other values stay ints) to match the remote log naming scheme.
        """
        for hour in range(24):
            hour = '0%s' % hour if hour < 10 else hour
            for minute in range(0, 60, 10):
                minute = '00' if minute == 0 else minute
                yield hour, minute

    def log(self, hour, minute):
        """Local path of the raw log chunk for the given hour/minute."""
        return '%s%spost_info_%s_%s_%s.log' % (self.log_path, os.sep, self.date, hour, minute)

    def local_log_name(self, remoteName):
        """Local path used to store the remote HDFS file *remoteName*."""
        return '%s%spost_info_%s_%s.log' % (self.log_path, os.sep, self.date, remoteName)

    def getNameList(self, hdfsDir):
        """Scrape the dsap agent's HTML listing of *hdfsDir* and return the
        file names found (underscore-prefixed entries are excluded by the
        module-level regex `p`)."""
        urlpath = reqUrl % hdfsDir
        urlfile = urllib2.urlopen(urlpath)
        pageContent = urlfile.read()
        print('page length : %s' % len(pageContent))
        nameList = []
        for m in p.finditer(pageContent):
            name = m.group(1)
            print(name)
            nameList.append(name)
        return nameList

    def get_logs(self):
        """Download every log file of the day from HDFS, projecting each raw
        line onto the 9 fields we aggregate on; files already present on
        disk are skipped."""
        def line_func(line):
            # Best-effort: malformed lines are dropped by returning None.
            try:
                fields = line.strip().split(self.sep)
                (info_id, user_id), local_path, cate_path, add_time, operation, (state, ip, phone) = \
                    fields[1:3], fields[4], fields[6], fields[7], fields[10], fields[12:15]
                # local/cate paths are comma-separated ancestry; keep the leaf id.
                local_id = local_path.split(',')[-1]
                cate_id = cate_path.split(',')[-1]
                return self.sep.join(str(field) for field in
                                     [info_id, user_id, local_id, cate_id, add_time,
                                      operation, state, ip, phone]) + "\n"
            except Exception:
                # Was a bare `except:`; narrowed so KeyboardInterrupt and
                # SystemExit still propagate instead of being swallowed.
                return None
        hdfsPath = "/dsap/rawdata/infodetail"
        hdfsDir = os.path.join(hdfsPath, str(self.date))
        nameList = self.getNameList(hdfsDir)
        print('have %s files to download!' % len(nameList))
        for name in nameList:
            srcPath = srcUrl % (hdfsDir, name)
            url = Url(srcPath)
            log = self.local_log_name(name)
            if not os.path.exists(log):
                url.download(file=log, line_func=line_func)

    @property
    def post_file(self):
        """Summary output file: one row per active user per day."""
        return '%s%suser_post_%s' % (self.output_path, os.sep, self.date)

    @property
    def attr_file(self):
        """Detailed output file: full per-user attribute distributions."""
        return '%s%suser_attr_%s' % (self.output_path, os.sep, self.date)

    def local_dict(self):
        """Return {local_id (str): top-level city name} from the DB."""
        o = BelongLocalDict()
        rows = o.select(fields=('local_id', 'city1'))
        info = {}
        for row in rows:
            info[str(row[0])] = row[1]
        return info

    def cate_dict(self):
        """Return {cate_id (str): [cate1, cate2]} from the DB."""
        o = BelongCateDict()
        rows = o.select(fields=('cate_id', 'cate1', 'cate2'))
        info = {}
        for row in rows:
            info[str(row[0])] = [row[1], row[2]]
        return info

    def handle_logs(self):
        """Aggregate the day's downloaded logs into self.info.

        self.info maps user_id -> {
            'locals': {city: {'count', 'time'}},
            'cates':  {(cate1, cate2): {'count', 'time'}},
            'phones': {phone: {'count', 'time'}},
            'post_count', 'modify_post_count', 'delete_post_count'}.
        """
        ld = self.local_dict()
        cd = self.cate_dict()
        # NOTE(review): .split() here appears to configure the reader's field
        # separator so that r.lines yields field lists -- confirm against the
        # py.io.Reader API.
        r = reader.read(source=self.log_path + os.sep + 'post_info_%s_*' % self.date).split(self.sep)
        # Hoisted out of the per-line loop; raw string for the regex.
        phone_sep = re.compile(r'[^\d-]')
        info = {}
        for line in r.lines:
            info_id, user_id, local_id, cate_id, add_time, operation, state, ip, phone = line
            # re.split returns [phone] unchanged when nothing matches, so the
            # original re.search pre-check was redundant.
            phones = phone_sep.split(phone)
            user = info.setdefault(user_id, {})
            try:
                operation = int(operation)
            except ValueError:
                pass   # leave non-numeric operation as-is; it matches no branch
            if operation == 0:   # new post
                city = ld.get(local_id, '')
                entry = user.setdefault('locals', {}).setdefault(city, {})
                entry['count'] = entry.get('count', 0) + 1
                entry['time'] = add_time
                cate1, cate2 = cd.get(cate_id, ('', ''))
                entry = user.setdefault('cates', {}).setdefault((cate1, cate2), {})
                entry['count'] = entry.get('count', 0) + 1
                entry['time'] = add_time
                phone_info = user.setdefault('phones', {})
                for phone in phones:
                    if phone == '':
                        continue
                    entry = phone_info.setdefault(phone, {})
                    entry['count'] = entry.get('count', 0) + 1
                    entry['time'] = add_time
                user['post_count'] = user.get('post_count', 0) + 1
            elif operation == 1:   # post modified
                user['modify_post_count'] = user.get('modify_post_count', 0) + 1
            try:
                state = int(state)
            except ValueError:
                pass
            if state == 0:   # post deleted
                user['delete_post_count'] = user.get('delete_post_count', 0) + 1
        # Moved out of the loop: one assignment after aggregation is enough
        # (the original re-assigned self.info on every input line).
        self.info = info
        return info

    def output_post(self):
        """Write one summary row per active user to post_file:
        date|user_id|city|cate1|cate2|phone|post|modify|delete counts."""
        def _get_attr(user_id, field):
            # Most frequent value; ties broken by the most recent time.
            sorted_attrs = sorted(self.info[user_id].get(field, {}).items(),
                                  key=lambda v: (v[1]['count'], Time.str2time(v[1]['time'])),
                                  reverse=True)
            if len(sorted_attrs) > 0:
                attr, _ = sorted_attrs[0]
            else:
                attr = ''
            return attr

        def _get_cate_attr(user_id, field='cates'):
            # Same ranking as _get_attr, but the key is a (cate1, cate2)
            # pair which is flattened with the field separator.
            sorted_attrs = sorted(self.info[user_id].get(field, {}).items(),
                                  key=lambda v: (v[1]['count'], Time.str2time(v[1]['time'])),
                                  reverse=True)
            if len(sorted_attrs) > 0:
                attr, _ = sorted_attrs[0]
            else:
                attr = ('', '')
            return self.sep.join(attr)

        f = open(self.post_file, 'w')
        try:
            for user_id in self.info:
                metrics = [self.info[user_id].get(field, 0)
                           for field in ['post_count', 'modify_post_count', 'delete_post_count']]
                if any(metrics):
                    city = _get_attr(user_id, 'locals')
                    cates = _get_cate_attr(user_id)
                    phone = _get_attr(user_id, 'phones')
                    attrs = [city, cates, phone]
                    f.write(self.sep.join(str(field) for field in
                                          [self.date, user_id] + attrs + metrics) + '\n')
        finally:
            # Close even if a row fails to serialize (original leaked on error).
            f.close()

    def output_attrs(self):
        """Write every user's full attribute distributions to attr_file.

        Each field is serialized as '#'-joined `key:count,timestamp` items,
        ordered by (count, time) descending."""
        def _get_attrs(user_id, field):
            sorted_attrs = sorted(self.info[user_id].get(field, {}).items(),
                                  key=lambda v: (v[1]['count'], Time.str2time(v[1]['time'])),
                                  reverse=True)
            attrs = []
            for key, value in sorted_attrs:
                if isinstance(key, (tuple, list)):
                    k = '_'.join(str(k) for k in key)   # flatten (cate1, cate2)
                else:
                    k = key
                v = ','.join([str(value['count']), str(Time.str2time(value['time']))])
                attrs.append(':'.join([str(k), str(v)]))
            return '#'.join(attrs)

        f = open(self.attr_file, 'w')
        try:
            for user_id in self.info:
                post_count = self.info[user_id].get('post_count', 0)
                if post_count:
                    attrs = [_get_attrs(user_id, field)
                             for field in ['locals', 'cates', 'phones']]
                    f.write(self.sep.join(str(field) for field in
                                          [self.date, user_id] + attrs) + '\n')
        finally:
            f.close()

    @property
    def table(self):
        """Target MySQL table name."""
        return 'user_post'

    @property
    def table_fields(self):
        """Column DDL for the target table (used when it must be created)."""
        # NOTE(review): the comments on cate1/cate2 say '城市' (city) but the
        # columns hold categories -- looks like copy-paste from `city`; left
        # unchanged here to avoid silently altering the deployed DDL.
        table_fields = [
            "stat_date date comment '统计日期'",
            "user_id bigint(20) comment '用户ID'",
            "city varchar(30) comment '城市'",
            "cate1 varchar(30) comment '一级城市'",
            "cate2 varchar(30) comment '二级城市'",
            "phone varchar(30) comment '手机号'",
            "post_count int(10) comment '发帖数'",
            "modify_post_count int(10) comment '修改帖子数'",
            "delete_post_count int(10) comment '删除帖子数'",
        ]
        return table_fields

    def load(self):
        """Load post_file into MySQL, replacing any rows for this stat_date."""
        dbi = DBI(**dbc['db_58_ib'])
        dbi.save(self.post_file, self.table, self.table_fields, sep='|',
                 is_update=False, where=dict(stat_date=self.date),
                 options={'ENGINE': 'BRIGHTHOUSE'})
        dbi.close()

 
@looper('DAY')
def main(date):
    """Run the complete user-post pipeline for one day: fetch the raw
    logs from HDFS, aggregate them, emit both output files and load the
    summary into the database. The date argument is supplied by the
    looper decorator."""
    job = UserPost(date, '/data/logs/wuba/user/post')
    job.get_logs()
    job.handle_logs()
    job.output_attrs()
    job.output_post()
    job.load()

if __name__ == '__main__':
    main()
    
