'''
Created on Sep 24, 2012

@author: peng
'''
import os
import MySQLdb

# Connection settings for the beluga analysis MySQL instance.
# NOTE(review): credentials are hard-coded in source — consider loading them
# from environment variables or a config file kept out of version control.
BELUGA_DB_CONFIG = {'host': '10.42.133.171',
                    'user': 'dev_analysis',
                    'password': 'analysis_dev_pwd',
                    'db': 'beluga_analysis',
                    'port': 3306}


class MySQLdbWrapper:
    """Small wrapper around MySQLdb that keeps one shared connection.

    The connection is opened lazily on first use and transparently
    re-opened if the server has dropped it (MySQL's wait_timeout).
    """

    # Shared connection handle; None until the first cursor() call.
    conn = None

    def connect(self):
        """(Re)open the connection using BELUGA_DB_CONFIG.

        Closes any previous connection first so a reconnect does not leak
        the old socket, then enables utf8 and autocommit on the new one.
        """
        if self.conn is not None:
            try:
                self.conn.close()
            except MySQLdb.Error:
                pass  # stale/broken connection — nothing more we can do
        self.conn = MySQLdb.connect(host=BELUGA_DB_CONFIG['host'],
                                    user=BELUGA_DB_CONFIG['user'],
                                    passwd=BELUGA_DB_CONFIG['password'],
                                    db=BELUGA_DB_CONFIG['db'],
                                    port=BELUGA_DB_CONFIG['port'])
        self.conn.set_character_set('utf8')
        self.conn.autocommit(True)

    def cursor(self):
        """Return a cursor, reconnecting only when necessary.

        The original code reconnected on every call (the `if not self.conn`
        guard was commented out), opening and leaking a fresh connection per
        query. Reuse the existing connection and fall back to a single
        reconnect attempt when the server has gone away.
        """
        try:
            if self.conn is None:
                self.connect()
            return self.conn.cursor()
        except MySQLdb.OperationalError:
            self.connect()
            return self.conn.cursor()

_db = MySQLdbWrapper()


def get_data(sql_list):
    """Execute the space-joined statements in *sql_list* and return all rows.

    :param sql_list: list of SQL fragments joined with ' ' into one statement
    :return: tuple of result rows from ``cursor.fetchall()``
    :raises MySQLdb.Error: propagated from execution; the cursor is always closed
    """
    # Acquire the cursor BEFORE the try block: in the original, a failure in
    # _db.cursor() left `cursor` unbound and the `finally` clause raised
    # UnboundLocalError, masking the real connection error.
    cursor = _db.cursor()
    try:
        cursor.execute(' '.join(sql_list))
        return cursor.fetchall()
    finally:
        cursor.close()


# In-memory buffer mapping a YYYYMMDD date string to the list of output
# rows collected for that date; drained to disk by flush().
data_dict = {}
# Root directory under which per-date subdirectories ('d-<date>') are created.
path = '/mnt/export/new-user-list'


def flush():
    """Append every buffered date bucket to its on-disk file, then clear the buffer.

    For each date key, rows are appended to ``<path>/d-<date>/<date>``,
    creating the per-date directory on first use. The buffer ``data_dict``
    is emptied afterwards so the next page starts fresh.
    """
    for key, lines in data_dict.items():
        dir_path = os.path.join(path, 'd-' + key)
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
        # `with` guarantees the file is closed even if a write fails; the
        # original leaked the handle on any exception between open and close.
        with open(os.path.join(dir_path, key), 'a+') as out:
            out.write('\n'.join(lines))
            out.write('\n')
    data_dict.clear()


# Paged export query: joins all dimension tables onto dev_basic and pulls
# 100000 rows per page. The single %s placeholder is filled with the numeric
# OFFSET by the main loop below (not user input, so %-interpolation is safe here).
sql = \
'''
select b.app_key, b.udid, a.first_launch_time, c.promotion_id, c.app_version, c.channel, d.country, e.os_version, f.`language`, g.resolution, h.access, i.carrier, j.device_model 
from dev_basic a, dim_app_udid_basic b, dim_apcv_basic c, dim_country_basic d, dim_os_version_basic e, dim_language_basic f, dim_resolution_basic g, dim_access_basic h, dim_carrier_basic i, dim_device_model_basic j
where a.dev_id = b.dev_id and a.apcv_id = c.apcv_id and a.country = d.id and a.os_version = e.id and a.`language` = f.id and a.resolution = g.id and a.access = h.id and a.carrier = i.id and a.device_model = j.id
limit 100000 OFFSET %s
'''

# App keys to exclude from the export (Dolphin's own apps). Stored as a
# frozenset so the per-row `app_key in dolphin_app` check in the main loop
# is O(1) instead of an O(n) list scan over 100k-row pages.
dolphin_app = frozenset(['6e1cd3bc2cda139a29b552ae5d742b80',
                         'c193d4184fa743121f63b52786783fd1',
                         '7602204983c15bdc9d571034f1c28202',
                         '0430b1b9a46321c8facdbca3a64763cf',
                         'f63cdbd0d71c89d34c9d345cb73137e9',
                         '7772f9ca1e6710045684d3b4a4f0e65b',
                         '5f932ec16fef7bb1d39ef75d51dd487e',
                         'fc15fda4c481fd309d69df1ff3081bfd',
                         '73e8bd7c89a26510923c6141c976ac40',
                         'd0e577f2bd57e63c33ea1c499334bb56',
                         'a23210004e18709b3c70deaee5302a27'])


if __name__ == '__main__':

    # Page size must match the LIMIT baked into the `sql` template above.
    page_size = 100000
    offset = 0

    while True:
        rows = get_data([sql % offset])
        # Progress marker; parenthesized print works identically on
        # Python 2 (single argument) and Python 3.
        print(offset)
        if not rows:
            break
        for row in rows:
            # Column order matches the SELECT list of `sql`.
            (app_key, udid, first_launch_time, promotion_id, app_version,
             channel, country, os_version, language, resolution, access,
             carrier, device_model) = row

            # Skip malformed keys and Dolphin's own apps; app keys and udids
            # are expected to be exactly 32 characters.
            if len(app_key) != 32 or app_key in dolphin_app:
                continue
            if len(udid) != 32:
                continue
            if first_launch_time is None:
                # Sentinel epoch date for rows with no recorded launch time.
                first_launch_time = '1970-01-01'

            # YYYYMMDD bucket key derived from the launch timestamp.
            date = str(first_launch_time)[:10].replace('-', '')
            data = '%s,%s,%s,%s,%s,uid:%s\t%s,%s,%s,%s,%s,%s,%s' % \
                   (app_key, date, promotion_id, app_version, channel, udid,
                    country, os_version, language, resolution, access, carrier,
                    device_model)
            data_dict.setdefault(date, []).append(data)
        # Flush this page's buffered rows to disk before fetching the next.
        flush()
        offset += page_size