#! /usr/bin/python
# -*- coding: UTF-8 -*-

# 画实际传播曲线 -- plot the actual propagation (spread) curve of an item over time

import pickle,urllib,urllib2
import logging


def q_query(q, host='aragorn', port=None):
    '''
    General q query
    '''
    if host == 'boromir':
        base = 'http://shire:hobbits@60.28.199.71:5001/q.csv?'
    elif host == 'gimli':
        base = 'http://shire:hobbits@60.28.199.69:5001/q.csv?'
    elif host == 'aragorn':
        if port:
            base = 'http://shire:hobbits@aragorn:%s/q.csv?'%port
        else:
            base = 'http://shire:hobbits@aragorn:5002/q.csv?'
    print 'fetching log ...'
    record = urllib.urlopen(base+urllib.quote(q)).read()
    # the first line is header, the last line is null
    print 'parsering log ...'
    rows = record.split('\n')[1:-1]
    rows = [row.split(',') for row in rows]
    print 'log len: ', len(rows)
    return rows

def q_reader(q_base, batch=500000, MAX_LEN = 20000000, host='aragorn', port=None):
    '''
    Read the q query data iteraterly
    '''
    if 'where' in q_base:
        q_base += ', i>=%s, i<%s'
    else:
        q_base += ' where i>=%s, i<%s'
    cur_seek = 0
    while 1:
        print 'current seek: ', cur_seek
        q = q_base%(cur_seek,cur_seek+batch)
        par_log = q_query(q, host, port)
        if par_log:
            yield par_log
        cur_seek += batch
        if cur_seek > MAX_LEN:
            break

def get_raw_sub(t, sid, c=True):
    '''
    Per-day hit counts for an interest subject, as a date-sorted list of
    (YYYY-MM-DD, count) pairs.  Results are cached in data/sub<sid>.dat.

    t   -- interest table name (e.g. 'book').
    sid -- subject id.
    c   -- when True, try the on-disk pickle cache first.
    '''
    if c:
        try:
            with open('data/sub%s.dat' % sid) as f:
                return pickle.load(f)
        except (IOError, EOFError, pickle.PickleError):
            pass  # no usable cache -- fall through and fetch

    rows = q_query('select from interest.%s where subject=%s' % (t, sid))
    # column 3 holds the timestamp; keep only the YYYY-MM-DD prefix.
    # NB: the loop variable no longer shadows the parameter `t`.
    counts = {}
    for day in (row[3][:10] for row in rows):
        counts[day] = counts.get(day, 0) + 1
    it = sorted(counts.items(), key=lambda x: x[0])

    with open('data/sub%s.dat' % sid, 'w') as f:
        pickle.dump(it, f)
    return it

def get_raw_evt(sid):
    '''
    Per-day join counts for an event, as a date-sorted list of
    (YYYY-MM-DD, count) pairs.  Results are cached in data/evt<sid>.dat.
    '''
    try:
        with open('data/evt%s.dat' % sid) as f:
            return pickle.load(f)
    except (IOError, EOFError, pickle.PickleError):
        # no cache or unreadable cache -- re-fetch (was a bare `except:`,
        # which also hid genuine programming errors)
        pass

    rows = q_query('select from event_users where event=%s' % sid)
    # column 4 holds the timestamp; keep only the YYYY-MM-DD prefix
    counts = {}
    for day in (row[4][:10] for row in rows):
        counts[day] = counts.get(day, 0) + 1
    it = sorted(counts.items(), key=lambda x: x[0])

    with open('data/evt%s.dat' % sid, 'w') as f:
        pickle.dump(it, f)
    return it

def get_raw_ol(sid):
    '''
    Per-day online-user counts for an online activity, as a date-sorted
    list of (YYYY-MM-DD, count) pairs.  Results are cached in
    data/ol<sid>.dat; on a cache miss the data is pulled from MySQL.
    '''
    try:
        with open('data/ol%s.dat' % sid) as f:
            return pickle.load(f)
    except (IOError, EOFError, pickle.PickleError):
        pass  # no usable cache -- query the database

    import MySQLdb
    connection = MySQLdb.connect(host='cirdan', port=3306, db='luz_farm',
                user='luzong', passwd='fulllink', init_command="set names utf8")
    times = []
    try:
        cursor = connection.cursor()
        # parameterized query -- the driver quotes sid itself instead of
        # string interpolation into the SQL text
        cursor.execute('select time,user_id from online_user where online_id=%s',
                       (sid,))
        for t, u in cursor.fetchall():
            # was logging.info(t, u): that treats `t` as the format string
            # and `u` as an unused %-argument, breaking log formatting
            logging.info('%s %s', t, u)
            times.append(t.isoformat()[:10])
    finally:
        connection.close()  # previously leaked on every call

    counts = {}
    for day in times:
        counts[day] = counts.get(day, 0) + 1
    it = sorted(counts.items(), key=lambda x: x[0])

    with open('data/ol%s.dat' % sid, 'w') as f:
        pickle.dump(it, f)
    return it
    
def get_raw_grp(sid):
    '''
    Per-day member-join counts for a group, as a date-sorted list of
    (YYYY-MM-DD, count) pairs.  Results are cached in data/grp<sid>.dat.
    '''
    try:
        with open('data/grp%s.dat' % sid) as f:
            return pickle.load(f)
    except (IOError, EOFError, pickle.PickleError):
        # no cache or unreadable cache -- re-fetch (was a bare `except:`)
        pass

    rows = q_query('select from group_member where gid=%s' % sid)
    # column 3 holds the join timestamp; keep only the YYYY-MM-DD prefix
    counts = {}
    for day in (row[3][:10] for row in rows):
        counts[day] = counts.get(day, 0) + 1
    it = sorted(counts.items(), key=lambda x: x[0])

    with open('data/grp%s.dat' % sid, 'w') as f:
        pickle.dump(it, f)
    return it

def draw_trend(t,sid,c=True,limit=0):
    if t=='event':
        data = get_raw_evt(sid)
    elif t=='group':
        data = get_raw_grp(sid)
    elif t=='online':
        data = urllib2.urlopen(
            'http://balin:30020/%s'%sid).read()
        data = pickle.loads(data)
    else:
        data = get_raw_sub(t,sid,c=c)
    print data
    dates = [d[0] for d in data]
    perday = [d[1] for d in data]
    #if t=='online':
    #    degs = [d[2]/10 for d in data]
    #    meandegs = [d[2]/d[1]*10 for d in data]
    sofar = [perday[0]]
    for i in xrange(1,len(perday)):
        sofar.append(sofar[i-1]+perday[i])

    import matplotlib.pyplot as plt
    import matplotlib.ticker as ticker
    import matplotlib.dates as mdates
    import matplotlib.mlab as mlab
    if limit>0:
        sofar = sofar[:limit]
        perday = perday[:limit]

    plt.title('%s'%sid)
    plt.xlabel('day')
    plt.ylabel('hits')
    plt.plot(sofar,'bo-',[i*3 for i in perday],'r-')
    plt.grid(True)
    plt.show()

if __name__ == '__main__':
    # Example invocations -- uncomment one at a time.  The leading words
    # on the commented lines (dashanxue, 9zhou) are item nicknames.
    #dashanxue draw_trend('group','33120')
    draw_trend('group','80105')
    #9zhou draw_trend('group','12544')
    #draw_trend('group','80654',limit=90)
    #draw_trend('event','10227593')
    #draw_trend('online','10143835',limit=60)
    #draw_trend('book','1473250',limit=60)
