#!/usr/local/bin/python
# coding: utf8
import httplib
import json
import sys
import os
import Queue
import threading
import datetime
import MySQLdb
import time

#the first must be rowkey id
#the follows must match the fields of each line in datafile

# MySQL source database that dump() reads from.
MYSQL_SERVER = "19.2.168.63"
MYSQL_DATABASE = "ADSP_STAT"
MYSQL_USER = "adstat_user"
MYSQL_PASSWD = "QWjk201Adsp9UHFd"
# NOTE(review): not referenced anywhere in this file -- dump() builds the
# same select statement inline; kept here for reference.
QUERY_STRING = "select id,targetcode,ip,area,type,backurl,insertime,remark,user_id,user_name,cookieid from "

# Upload description for the 'tianya_ad_access' table.  The field list must
# match the column order of each tab-separated line in the dumped data file;
# the first field is the generated rowkey id (see tianya_ad_access_add).
tianya_ad_access_fields = 'rkey_id,targetcode,ip,area,type,backurl,insertime,remark,user_id,user_name,cookieid'
tianya_ad_access_store = 'hbase'
tianya_ad_access_address = '127.0.0.1'
tianya_ad_access_desc = 'tianya_ad_access'

# HTTP data server that receives the JSON batches (used by tianya_ad_access_add)
webhost = '19.2.174.82'
#webhost = '127.0.0.1'
# Work queue shared by the producer loop in add() and the jobs() worker threads
q = Queue.Queue(maxsize = 1000)

def jobs(job, table, f_log, thread_id):
    """Worker thread loop: pull batches of lines off the shared queue q and
    feed them to *job* until the 'good game' shutdown sentinel arrives.

    job       -- callable(table, data, f_log) that uploads one batch
    table     -- logical table name, passed through to job
    f_log     -- open log file shared by all worker threads
    thread_id -- numeric id, used only in log messages
    """
    while True:
        data = q.get()
        try:
            if data == 'good game':
                # shutdown sentinel pushed by the producer in add()
                break
            if len(data) != 0:
                job(table, data, f_log)
            else:
                f_log.write("from queue, empty data\n")
                f_log.flush()
        finally:
            # BUG FIX: task_done() used to be called right after get(),
            # before the batch was processed, so q.join() in the producer
            # could return while uploads were still running.  Calling it in
            # a finally block also keeps q.join() from hanging forever if
            # job() raises.
            q.task_done()
    f_log.write('thread %d done\n' % (thread_id,))
    f_log.flush()

def _tianya_ad_access_build(table, data, f_log):
    """Convert one batch of tab-separated lines into a list of dicts ready
    for JSON encoding.  Malformed lines are logged and skipped.  Returns the
    (possibly empty) list."""
    fields = globals()['%s_fields' % (table, )].split(',')
    records = []
    lines = data.split('\n')
    line_num = len(lines)
    for j in range(0, line_num):
        line = lines[j]
        if len(line) == 0:
            # an empty string after the final newline is expected; an empty
            # line anywhere else is worth logging
            if j < line_num - 1:
                f_log.write("null line, ignore: %s\n" % (data,))
                f_log.flush()
            continue
        _fields = line.split('\t')
        if len(_fields) != len(fields):
            f_log.write('(%s) != (%s), ignore the line\n%s\n' % (_fields, fields, line))
            f_log.flush()
            continue
        jstr = {}
        try:
            # rowkey = '<date part of insertime>-<zero-padded numeric id>'
            jstr[fields[0]] = _fields[6].split()[0] + '-' + '%09d' % (int(_fields[0]))
        except (ValueError, IndexError):
            # BUG FIX: a non-numeric id or malformed insertime used to raise
            # out of this function and kill the worker thread
            f_log.write('bad id/insertime, ignore the line\n%s\n' % (line,))
            f_log.flush()
            continue
        for i in range(1, len(_fields)):
            jstr[fields[i]] = _fields[i]
        records.append(jstr)
    return records

def tianya_ad_access_add(table, data, f_log):
    """Upload one batch of rows to the HTTP data server.

    table -- table name; '<table>_fields' at module level describes the
             column order of each line in *data*
    data  -- one or more '\n'-terminated tab-separated lines
    f_log -- open log file; all errors are logged here, nothing is raised

    The batch is JSON-encoded as {"array": [...]} and POSTed to
    http://<webhost>/<table>/add.
    """
    jsonstr = _tianya_ad_access_build(table, data, f_log)
    if len(jsonstr) == 0:
        f_log.write("jsonstr empty: %s\n" % (data,))
        f_log.flush()
        return
    jsonhead = {"array": jsonstr}
    try:
        # Python 2 json.dumps accepts an 'encoding' argument for str inputs
        jsonstr = json.dumps(jsonhead, encoding='utf8', ensure_ascii=False).encode('utf8')
    except Exception:
        f_log.write('json error: %s\n' % (data,))
        f_log.flush()
        return

    url = '/%s/add' % (table,)
    try:
        conn = httplib.HTTPConnection(webhost, 80)
        conn.connect()
    except Exception:
        # narrowed from a bare 'except:' that also swallowed KeyboardInterrupt
        f_log.write('TABLE=>%s: CONNECT TO WEB SERVER ERROR\n%s\n' % (table, data))
        f_log.flush()
        return
    try:
        conn.putrequest("POST", url)
        conn.putheader("Content-Length", len(jsonstr))
        conn.putheader("Content-Type", "text/json")
        conn.endheaders()
        try:
            conn.send(jsonstr)
        except Exception:
            f_log.write('TABLE=>%s: SEND DATA TO WEB SERVER ERROR\n%s\n' % (table, data))
            f_log.flush()
            return
        try:
            res = conn.getresponse()
        except Exception:
            f_log.write('TABLE=>%s: GET RESPONSE FROM WEB SERVER ERROR\n%s\n' % (table, data))
            f_log.flush()
            return
        if res.status != 200:
            f_log.write('TABLE=>%s:\nDATASERVER RETURN CODE=>%d\nDATASERVER RETURN REASON=>%s\nDATASERVER RETURN CONTENT=>%s\n' % (table, res.status, res.reason, res.read()))
            f_log.write('LOCAL CONTENT=>%s\n' % (data,))
            f_log.flush()
    finally:
        # BUG FIX: the connection is now released on every path, including
        # the early error returns above
        conn.close()


def add(*args):
    if (len(args) < 2):
        print 'action "add" need more arguments'
        sys.exit(-1);
    #os.chdir('/www/userdata/tianya_data');
    table = args[0].strip()
    if (len(table) == 0):
        print 'action "add" must specify tablename.'
        sys.exit(-1)
    args = args[1:]
    for i in range(0, len(args)):
        datafile = args[i].strip()

        if (len(datafile) == 0):
            print 'action "add" must specify datafile.'
            continue

        if (datafile[0] != '/'):
            print 'action "add" <%s> must be absolute path.' % (datafile,)
            continue

        multithread = False

        #print table
        #print datafile

        if (os.path.isfile(datafile) == False):
            print 'action "add" datafile must exist' % (datafile,)
            continue
            #sys.exit(-1)

        if (os.path.getsize(datafile) > 10*1024*1024):
            multithread = True

        count = 0
        f_log = open('logs/%s_add.log.%s' % (table, os.path.basename(datafile)), "wt")
        try:
            job = globals()['%s_add' % (table, )]
        except KeyError:
            f_log.write("%s invalid\n" % (table, ))
            continue

        f = open(datafile, "rt")
        startime = datetime.datetime.now()
        if (multithread):
            threads = []
            for i in range(0, 5):
                threads.append(threading.Thread(target=jobs, args=(job, table, f_log, i)));
                threads[i].start()
            while (True):
                data = ''
                for i in range(0, 100):
                    line = f.readline()
                    if not line:
                        break
                    count += 1
                    data += line
                if (len(data) == 0):
                    q.put("good game");
                    q.put("good game");
                    q.put("good game");
                    q.put("good game");
                    q.put("good game");
                    break;
                q.put(data)

            q.join()
            for t in threads:
                t.join()
        else:
            while (True):
                data = ''
                for i in range(0, 1000):
                    line = f.readline()
                    if not line:
                        break
                    count += 1
                    data += line
                if (len(data) == 0):
                    break;
                job(table, data, f_log)

        f.close()
        print 'upload: %s' % (datafile,)
        endtime = datetime.datetime.now()
        totaltime = (endtime-startime).seconds
        hour = totaltime / 3600
        mins = (totaltime % 3600) / 60
        secds = (totaltime % 3600) % 60
        f_log.write('time used: %dH %dM %dS\n' % (hour, mins, secds))
        if (totaltime != 0):
            f_log.write('qps: %d\n' % (count/totaltime, ))
            f_log.flush()
        else:
            f_log.write('count: %d\n' % (count,))
            f_log.flush()
        f_log.close()

def dump(dirp):
    if (len(dirp) == 0):
        return False, "directory null"
    dirp = os.path.abspath(dirp)
    if not os.path.exists(dirp):
        return False, "directory doses not exist"

    now = datetime.datetime.now()
    yest = now + datetime.timedelta(days=-1)

    query = 'select id,targetcode,ip,area,type,backurl,insertime,remark,user_id,user_name,cookieid from adv_access_%s' % (yest.strftime('%Y%m%d'))
    #query = 'select id,targetcode,ip,area,type,backurl,insertime,remark,user_id,user_name,cookieid from adv_access_temp'

    dbfile = dirp+'/adv_access_%s' % (yest.strftime('%Y%m%d'))
    tailfile = dirp +'/adv_access_%s' % (now.strftime('%Y%m%d'))

    fp = ''
    f_id = 0
    tail_f_id = 0

    try:
        f = open(dbfile, 'r')
        while True:
            data = f.read(65535)
            if not data:
                break
            f_id += data.count('\n')

        f.close()
    except:
        pass

    f_id += 1
    tail_f_id += 1

    f = open(dbfile, 'a')
    tail_f = open(tailfile, 'a')
    try:
        conn = MySQLdb.connect(host=MYSQL_SERVER, user=MYSQL_USER, 
                               passwd=MYSQL_PASSWD, db=MYSQL_DATABASE,
                               charset='utf8')
        cursor = conn.cursor()
    except Exception, e:
        f.close()
        tail_f.close()
        os.unlink(dbfile)
        os.unlink(tailfile)
        print str(e)
        return False, "db connection error(%s)" % (dbfile)

    srow = 0
    while True:
        sql = '%s limit %d,%d' % (query, srow, 1000)

        try:
            ret = cursor.execute(sql)
        except Exception, e:
            f.close()
            tail_f.close()
            os.unlink(dbfile)
            os.unlink(tailfile)
            print str(e)
            return False, "execute query error(%s)" % (dbfile)

        if not ret:
            break

        for row in cursor.fetchall():
            d = '%s' % (row[6])
            if d.split()[0] == yest.strftime('%Y-%m-%d'):
                fp = f
                fp.write('%d\t' % (f_id))
                f_id += 1
            elif d.split()[0] == now.strftime('%Y-%m-%d'):
                fp = tail_f
                fp.write('%d\t' % (tail_f_id))
                tail_f_id += 1
            else:
                f.close()
                tail_f.close()
                os.unlink(dbfile)
                os.unlink(tailfile)
                return False, "data error"

            try:
                fp.write('%s\t' % (row[1].encode('utf8')))
            except:
                fp.write('NULL\t')
            try:
                fp.write('%s\t' % (row[2].encode('utf8')))
            except:
                fp.write('NULL\t')
            try:
                fp.write('%s\t' % (row[3].encode('utf8')))
            except:
                fp.write('NULL\t')
            fp.write('%d\t' % (row[4]))
            try:
                fp.write('%s\t' % (row[5].encode('utf8')))
            except:
                fp.write('NULL\t')
            fp.write('%s\t' % (row[6]))
            try:
                fp.write('%s\t' % (row[7].encode('utf8')))
            except:
                fp.write('NULL\t')
            try:
                fp.write('%s\t' % (row[8].encode('utf8')))
            except:
                fp.write('NULL\t')
            try:
                fp.write('%s\t' % (row[9].encode('utf8')))
            except:
                fp.write('NULL\t')
            try:
                fp.write('%s\n' % (row[10].encode('utf8')))
            except:
                fp.write('NULL\n')
        srow += 1000
        time.sleep(1)

    f.close()
    tail_f.close()
    cursor.close()
    conn.close()
    return True, dbfile

if __name__ == '__main__':
    os.chdir('/www/userdata/tianya_data/ad')
    ret, dbfile = dump('data')
    if (ret):
        print 'download: %s' % (dbfile)
        add('tianya_ad_access', dbfile)
    else:
        print '%s' % (dbfile)
