import json
import math
import operator
import random
import sqlite3
import time
from datetime import datetime, timedelta

from flask import Flask, jsonify, make_response, abort, request

# When True, fenter/fleave print call tracing to stdout.
_debug = False 

rest_service = Flask(__name__)

# sqlite database file holding the 'handle' and 'tweets' tables.
database = "data/twitterdata.db"

# Decodes the integer status code stored in each tweet row (column 1).
tweet_status_map = {
    1: 'tweet',
    2: 'tweet_with_url',
    3: 'retweet',
    4: 'retweet_with_url'
}

# Labels for datetime.weekday() values (0=Monday .. 6=Sunday).
weekday_map = {
    0: 'Monday',
    1: 'Tuesday',
    2: 'Wednesday',
    3: 'Thursday',
    4: 'Friday',
    5: 'Saturday', 
    6: 'Sunday'
}

# Offsets from UTC in milliseconds.
# NOTE(review): -14400000 ms is -4 h, which is EDT; true EST is -5 h.
# Confirm which offset the data actually needs.
timezone_offset = {
    'UTC': 0,
    'EST': -14400000
}

# All routes are served under /twitter/api/<project_status>.
base_uri = "/twitter/api/"
project_status = "alpha"
uri = base_uri + project_status

# Module-level connection and cursor shared by every request handler.
# NOTE(review): sqlite3 connections/cursors are not safe to share across
# threads -- confirm the server runs single-threaded.
conn = sqlite3.connect(database)
c = conn.cursor()

# Twitter 'created_at' format, minus the %z offset token (which is
# stripped by remove_timezone_from_date_string before parsing).
date_format = "%a %b %d %H:%M:%S %Y"

def fenter(fname):
    """Print a trace line when entering *fname*, if debug tracing is on.

    Uses the parenthesized single-argument print form, which behaves
    identically under Python 2 (statement) and Python 3 (function); the
    original bare print statement is Python-2-only syntax.
    """
    if _debug:
        print("entering {0}".format(fname))

def fleave(fname):
    """Print a trace line when leaving *fname*, if debug tracing is on.

    Uses the parenthesized single-argument print form, which behaves
    identically under Python 2 and Python 3; the original bare print
    statement is Python-2-only syntax.
    """
    if _debug:
        print("leaving {0}".format(fname))

def timestamp_to_datetime(timestamp):
    """Convert epoch milliseconds (number or numeric string) to a naive
    local-time datetime."""
    millis = float(timestamp)
    return datetime.fromtimestamp(millis / 1000.0)

def datetime_to_timestamp(dt):
    """Convert a naive local-time datetime to epoch milliseconds.

    Uses time.mktime to get local epoch seconds: the original
    strftime("%s") relies on a platform-specific (glibc) format code
    that is undocumented in Python and unavailable on e.g. Windows.
    """
    return int(time.mktime(dt.timetuple())) * 1000

def get_data_by_handle_json(handle):
    """Load a handle's tweets from its JSON file under data/.

    Aborts the request with a 404 if the file cannot be opened.
    """
    fenter("get_data_by_handle")
    path = "data/" + handle + ".json"
    try:
        with open(path, 'r') as source:
            payload = json.load(source)
    except IOError:
        abort(404)

    fleave("get_data_by_handle")
    return payload

def get_data_by_handle(handle):
    """Fetch every tweet row for *handle* from the sqlite database.

    Returns the raw rows, or {'error': ...} when the handle exists but
    has no tweets; any other database failure aborts with 404.
    """
    try:
        c.execute("SELECT * FROM handle WHERE handle=?", (handle,))
        handle_row = c.fetchone()
        c.execute("SELECT * FROM tweets WHERE handle=?", (handle_row[0],))
        rows = c.fetchall()
        if not rows:
            raise ValueError
    except ValueError:
        return {'error': 'no records for this timeframe'}
    except Exception:
        # Unknown handle (handle_row is None) or any sqlite error.
        abort(404)

    return rows

def get_twitter_handles():
    """Return the list of all handle names known to the database."""
    try:
        c.execute("SELECT handle FROM handle")
        rows = c.fetchall()
    except Exception:
        abort(404)

    handles = []
    for row in rows:
        handles.append(row[0])
    return handles

def remove_timezone_from_date_string(date_string):
    """Strip the timezone-offset token (5th space-separated field) from a
    twitter date string.

    datetime.strptime as used here cannot handle the %z offset, so the
    token is removed before parsing with date_format.
    """
    fenter("remove_timezone")
    fields = date_string.split(" ")
    kept = fields[:4] + fields[5:]
    fleave("remove_timezone")
    return " ".join(kept)

def hourly_tuple_key(data_element): 
    """Key a raw tweet row by the (year, month, day, hour) of its date
    string (the row's first column)."""
    fenter("hourly_tuple_key")
    cleaned = remove_timezone_from_date_string(data_element[0])
    parsed = datetime.strptime(cleaned, date_format)
    key = (parsed.year, parsed.month, parsed.day, parsed.hour)
    fleave("hourly_tuple_key")
    return key

def daily_tuple_key(data_element):
    """Key a raw tweet row by the (year, month, day) of its date string
    (the row's first column)."""
    fenter("daily_tuple_key")
    cleaned = remove_timezone_from_date_string(data_element[0])
    parsed = datetime.strptime(cleaned, date_format)
    key = (parsed.year, parsed.month, parsed.day)
    fleave("daily_tuple_key")
    return key

def transform_tuple_keys_to_timestamp(binned_data):
    """Re-key bins from (y, m, d[, h]) tuples to epoch-millisecond ints.

    Uses time.mktime for the local epoch seconds instead of the
    platform-specific strftime("%s") extension, and dict.items() so the
    loop also works under Python 3.
    """
    fenter("transform")
    by_timestamp = {}
    for key, value in binned_data.items():
        dt = datetime(*key)
        by_timestamp[int(time.mktime(dt.timetuple())) * 1000] = value

    fleave("transform")
    return by_timestamp

def filter_and_fill_dates(binned_data, method, start_timestamp, end_timestamp=None):
    """Restrict *binned_data* to the [start, end] window and pad gaps.

    binned_data maps tuple keys -- (y, m, d, h) for method "days",
    (y, m, d) for method "weeks" -- to lists of tweet rows.  Keys outside
    the window are dropped; every expected key inside the window with no
    data gets an empty-list placeholder.  end_timestamp defaults to now.

    Raises ValueError for an unknown *method* (previously this surfaced
    as an UnboundLocalError).
    """
    start_datetime = timestamp_to_datetime(start_timestamp)
    if end_timestamp:
        end_datetime = timestamp_to_datetime(end_timestamp)
    else:
        end_datetime = datetime.now()

    if method == "days":
        step = timedelta(seconds=3600)  # one hour per bin
        key_function = lambda dt: (dt.year, dt.month, dt.day, dt.hour)
    elif method == "weeks":
        step = timedelta(days=1)  # one day per bin
        key_function = lambda dt: (dt.year, dt.month, dt.day)
    else:
        raise ValueError("unknown binning method: {0}".format(method))

    new_data = {}
    for key, value in binned_data.items():
        if method == "days":
            key_datetime = datetime(key[0], key[1], key[2], key[3])
        else:  # "weeks"
            key_datetime = datetime(key[0], key[1], key[2])
            # Shift day keys by the EST offset before the range test
            # (subtracting the negative offset moves them forward).
            key_datetime = key_datetime - timedelta(milliseconds=timezone_offset['EST'])

        if start_datetime <= key_datetime <= end_datetime:
            new_data[key] = value

    # Walk the window one step at a time, dropping in placeholders for
    # bins that had no tweets at all.
    # NOTE(review): membership is tested against binned_data rather than
    # new_data, preserving the original behavior exactly.
    this_time = start_datetime
    while this_time <= end_datetime:
        if key_function(this_time) not in binned_data:
            new_data[key_function(this_time)] = []  # placeholder for 'no data'
        this_time = this_time + step

    return new_data

def bin_by_hour(data):
    """Group raw tweet rows into hourly buckets keyed by (y, m, d, h)."""
    fenter("bin_by_hour")
    buckets = {}
    for row in data:
        buckets.setdefault(hourly_tuple_key(row), []).append(row)

    fleave("bin_by_hour")
    return buckets

def bin_by_day(data):
    """Group raw tweet rows into daily buckets keyed by (y, m, d)."""
    fenter("bin_by_day")
    buckets = {}
    for row in data:
        buckets.setdefault(daily_tuple_key(row), []).append(row)

    fleave("bin_by_day")
    return buckets

def count_tweet_status_in_bins_json(binned_data):
    """Count tweet/retweet (with and without URLs) per bin of raw JSON
    tweet dicts.

    A tweet with any url entities counts as *_with_url; the presence of
    the 'retweeted_status' key marks it as a retweet.  Returns
    {bin_key: {status_name: count}}.

    Fixes the original return statement, which referenced the undefined
    name bin_status_count (NameError) instead of bin_status_counts.
    """
    fenter("count_tweet_statuses")
    bin_status_counts = {}
    for k, tweets in binned_data.items():
        tweet_status_counts = {'tweet': 0, 'tweet_with_url': 0, 'retweet': 0, 'retweet_with_url': 0}
        for tweet in tweets:
            if tweet['entities']['urls']:
                if 'retweeted_status' in tweet:
                    tweet_status_counts['retweet_with_url'] += 1
                else:
                    tweet_status_counts['tweet_with_url'] += 1
            else:
                if 'retweeted_status' in tweet:
                    tweet_status_counts['retweet'] += 1
                else:
                    tweet_status_counts['tweet'] += 1
        bin_status_counts[k] = tweet_status_counts

    fleave("count_tweet_statuses")
    return bin_status_counts  # was: bin_status_count (undefined)

def count_tweet_status_in_bins(binned_data):
    """Tally tweet status types per bin for database tweet rows.

    Each row's second column is an integer status code decoded through
    tweet_status_map.  Returns {bin_key: {status_name: count}}.
    """
    tallies = {}
    for bin_key, tweets in binned_data.items():
        counts = {'tweet': 0, 'tweet_with_url': 0, 'retweet': 0, 'retweet_with_url': 0}
        for tweet in tweets:
            status = tweet_status_map[tweet[1]]
            counts[status] = counts[status] + 1
        tallies[bin_key] = counts

    return tallies

def statistics_by_day(data, start_timestamp, end_timestamp):
    """Group daily status-count bins by weekday and compute per-weekday
    total/mean/std_dev over [start_timestamp, end_timestamp].

    *data* maps (year, month, day) tuples to {status: count} dicts.
    Returns {weekday_int: {'total', 'mean', 'std_dev'}} plus a 'bins'
    entry holding the raw per-weekday lists of count dicts.
    """
    day_bins = {}
    anchor_dt = timestamp_to_datetime(start_timestamp)
    this_dt = timestamp_to_datetime(start_timestamp)
    end_dt = timestamp_to_datetime(end_timestamp)

    # For each of the 7 weekdays: walk the range in 7-day strides (so every
    # visited date shares one weekday), collecting that weekday's count
    # dicts, then restart from the anchor shifted by one more day.
    for i in range(7):
        day_bins[this_dt.weekday()] = []
        while this_dt <= end_dt:
            if (this_dt.year, this_dt.month, this_dt.day) in data:
                day_bins[this_dt.weekday()].append(data[(this_dt.year, this_dt.month, this_dt.day)])
            this_dt = this_dt + timedelta(days=7)
        this_dt = anchor_dt + timedelta(days=i+1)

    stats_per_bin = {}
    for k, v in day_bins.iteritems():
        # Total tweets across all dates that fell on weekday k.
        total = sum([sum(elem.values()) for elem in v])
        if total == 0:
            mean = std_dev = 0
        else:
            mean = float(total) / len(day_bins[k])
            # NOTE(review): the squared deviations are divided by the total
            # tweet count rather than the number of observations (len(v));
            # confirm this is the intended spread measure.
            std_dev = math.sqrt(sum([ (sum(elem.values()) - mean) ** 2 for elem in v]) / float(total))
        stats_per_bin[k] = {'total': total, 'mean': mean, 'std_dev': std_dev}
    
    stats_per_bin['bins'] = day_bins 

    return stats_per_bin 

def statistics_by_hour(data, start_timestamp, end_timestamp):
    """Group hourly status-count bins by hour-of-day and compute per-hour
    total/mean/std_dev over [start_timestamp, end_timestamp].

    *data* maps (year, month, day, hour) tuples to {status: count} dicts.
    Returns {hour_int: {'total', 'mean', 'std_dev'}} plus a 'bins' entry
    holding the raw per-hour lists of count dicts.
    """
    hour_bins = {}
    anchor_dt = timestamp_to_datetime(start_timestamp)
    this_dt = timestamp_to_datetime(start_timestamp)
    end_dt = timestamp_to_datetime(end_timestamp)

    # For each of the 24 hours: walk the range in 1-day strides (so every
    # visited datetime shares one hour-of-day), collecting that hour's
    # count dicts, then restart from the anchor shifted one more hour.
    for i in range(24):
        hour_bins[this_dt.hour] = []
        while this_dt <= end_dt:
            if (this_dt.year, this_dt.month, this_dt.day, this_dt.hour) in data:
                hour_bins[this_dt.hour].append(data[(this_dt.year, this_dt.month, this_dt.day, this_dt.hour)])
            this_dt = this_dt + timedelta(days=1)
        this_dt = anchor_dt + timedelta(seconds=3600*(i+1))

    # Timezone hotfix: relabel each hour bin 4 hours earlier, wrapping
    # mod 24 (UTC toward EST).  NOTE(review): hard-codes a -4 h shift
    # instead of deriving it from timezone_offset -- verify.
    shifted_hour_bins = {(k+24-4)%24 : v for k, v in hour_bins.iteritems()}
    hour_bins = shifted_hour_bins

    stats_per_bin = {}
    for k, v in hour_bins.iteritems():
        # Total tweets across all days at hour k.
        total = sum([sum(elem.values()) for elem in v])
        if total == 0:
            mean = std_dev = 0
        else:
            mean = float(total) / len(hour_bins[k])
            # NOTE(review): same as statistics_by_day -- squared deviations
            # are divided by the total count rather than len(v); confirm.
            std_dev = math.sqrt(sum([ (sum(elem.values()) - mean) ** 2 for elem in v]) / float(total))
        stats_per_bin[k] = {'total': total, 'mean': mean, 'std_dev': std_dev}

    stats_per_bin['bins'] = hour_bins

    return stats_per_bin

def detect_anomaly(mean, std_dev, num_std_devs, data_element):
    """Return True when the bin's total count deviates from *mean* by
    more than *num_std_devs* standard deviations.

    *data_element* is a {status: count} dict; its values are summed to
    get the bin total.  Returns the comparison result directly instead
    of the original if/else True/False ladder.
    """
    return abs(sum(data_element.values()) - mean) > (std_dev * num_std_devs)

@rest_service.after_request
def after_request(response):
    """Attach permissive CORS headers to every outgoing response."""
    cors_headers = [
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers', 'Origin, X-Requested-With'),
    ]
    for header_name, header_value in cors_headers:
        response.headers.add(header_name, header_value)
    return response

@rest_service.errorhandler(404)
def not_found(error):
    """Render 404 errors as a JSON body instead of flask's HTML page."""
    body = jsonify({'error': 'Not found'})
    return make_response(body, 404)

@rest_service.route(uri+'/<twitter_handle>', methods=['GET'])
def get_twitter_data(twitter_handle):
    """Return every stored tweet row for the given handle as JSON."""
    records = get_data_by_handle(twitter_handle)
    return jsonify({'data': records})

@rest_service.route(uri+'/get_handles', methods=['GET'])
def get_handles():
    """List all known twitter handles as [{'name': handle}, ...].

    The original also read request.args.get('handle') into an unused
    local; that dead read has been removed.
    """
    handles = get_twitter_handles()
    return jsonify({'records': [{'name': h} for h in handles]})

@rest_service.route(uri+'/get_statistics', methods=['GET'])
def get_statistics():
    """Return per-weekday (method=weeks) or per-hour (method=days) tweet
    statistics for a handle around the given date.

    Query params: handle, date (epoch milliseconds), method ('weeks' or
    'days', defaulting to 'weeks').  Baseline mean/std_dev come from the
    90 days prior to the window of interest.
    NOTE(review): an unrecognized non-empty 'method' value leaves
    binned_data/stats unbound and the view falls through returning None.
    """
    twitter_handle = request.args.get('handle')
    date = request.args.get('date')
    method = request.args.get('method')

    if not method:
        method = 'weeks'

    data = get_data_by_handle(twitter_handle)
    # 'weeks' view works on daily bins; 'days' view on hourly bins.
    if method == 'weeks':
        binned_data = bin_by_day(data)
    elif method == 'days':
        binned_data = bin_by_hour(data)

    # Shift the requested date by the EST offset before picking the window.
    target_datetime = timestamp_to_datetime(float(date) + timezone_offset['EST'])
    if method == 'weeks':
        weekday = target_datetime.weekday()
        if weekday == 6:  # This fixes a bug which gets the previous week if the given datetime is a Sunday
            weekday = -1

    # Window of interest: the enclosing Sunday-to-Saturday week (weeks)
    # or the single target day (days), re-shifted out of EST.
    if method == 'weeks':
        start_datetime = datetime(target_datetime.year, target_datetime.month, target_datetime.day) - timedelta(days=weekday+1) + timedelta(milliseconds=timezone_offset['EST']*-1)
        end_datetime = start_datetime + timedelta(days=6, hours=23, minutes=59, seconds=59, milliseconds=999)
    elif method == 'days': 
        start_datetime = datetime(target_datetime.year, target_datetime.month, target_datetime.day) + timedelta(milliseconds=timezone_offset['EST']*-1)
        end_datetime = start_datetime + timedelta(hours=23, minutes=59, seconds=59, milliseconds=999)


    # Baseline period: the 90 days leading up to the window of interest.
    three_months_prior = start_datetime - timedelta(days=90)
    start_timestamp = datetime_to_timestamp(start_datetime)
    end_timestamp = datetime_to_timestamp(end_datetime)
    three_months_timestamp = datetime_to_timestamp(three_months_prior)

    # Baseline statistics over the 90-day span.
    filtered_data = filter_and_fill_dates(binned_data, method, three_months_timestamp, end_timestamp)
    tweet_statuses = count_tweet_status_in_bins(filtered_data)
    if method == 'weeks':
        stats = statistics_by_day(tweet_statuses, three_months_timestamp, end_timestamp)
    elif method == 'days':
        stats = statistics_by_hour(tweet_statuses, three_months_timestamp, end_timestamp)

    # Same pipeline restricted to just the window of interest.
    week_of_interest_data = filter_and_fill_dates(binned_data, method, start_timestamp, end_timestamp)
    week_of_interest_statuses = count_tweet_status_in_bins(week_of_interest_data)
    if method == 'weeks':
        woi_stats = statistics_by_day(week_of_interest_statuses, start_timestamp, end_timestamp)
    elif method == 'days':
        woi_stats = statistics_by_hour(week_of_interest_statuses, start_timestamp, end_timestamp)
    
    # 95% band approximated as mean +/- 2 standard deviations.
    upper_95 = lambda m, sd: m + (sd*2)
    lower_95 = lambda m, sd: m - (sd*2)

    if method == 'weeks':
        # range(6,13) with i%7 walks Sunday (6) through Saturday (5).
        return jsonify({'start': str(start_datetime), 
        'end': str(end_datetime),
        'records': [
            {'Time': weekday_map[i%7],
            'Volume': woi_stats[i%7]['total'],
            'Mean': stats[i%7]['mean'],
            'TweetsNoURL': woi_stats['bins'][i%7][0]['tweet'] if len(woi_stats['bins'][i%7]) > 0 else 0,
            'TweetsWithURL': woi_stats['bins'][i%7][0]['tweet_with_url'] if len(woi_stats['bins'][i%7]) > 0 else 0,
            'ReTweetsNoURL': woi_stats['bins'][i%7][0]['retweet'] if len(woi_stats['bins'][i%7]) > 0 else 0,
            'ReTweetsWithURL': woi_stats['bins'][i%7][0]['retweet_with_url'] if len(woi_stats['bins'][i%7]) > 0 else 0,
            '95PercentLowerBound': lower_95(stats[i%7]['mean'], stats[i%7]['std_dev']), 
            '95PercentUpperBound': upper_95(stats[i%7]['mean'], stats[i%7]['std_dev'])
        } for i in range(6,13)]
    })
    elif method == 'days':
        return jsonify({'start': str(start_datetime),
        'end': str(end_datetime),
        'records': [
            {'Time': i,
            'Volume': woi_stats[i]['total'],
            'Mean': stats[i]['mean'],
            'TweetsNoURL': woi_stats['bins'][i][0]['tweet'] if len(woi_stats['bins'][i]) > 0 else 0,
            'TweetsWithURL': woi_stats['bins'][i][0]['tweet_with_url'] if len(woi_stats['bins'][i]) > 0 else 0,
            'ReTweetsNoURL': woi_stats['bins'][i][0]['retweet'] if len(woi_stats['bins'][i]) > 0 else 0,
            'ReTweetsWithURL': woi_stats['bins'][i][0]['retweet_with_url'] if len(woi_stats['bins'][i]) > 0 else 0,
            '95PercentLowerBound': lower_95(stats[i]['mean'], stats[i]['std_dev']),
            '95PercentUpperBound': upper_95(stats[i]['mean'], stats[i]['std_dev'])
        } for i in range(0, 24)]
    })


@rest_service.route(uri+'/get_datetimes', methods=['GET'])
def get_datetimes():
    """Return binned tweet-status counts between 'start' and 'end'
    (epoch milliseconds), each bin flagged as an anomaly when its total
    deviates more than 2 std-devs from the prior-90-day baseline for the
    matching hour (days) or weekday (weeks).

    Query params: handle, start, end, method ('weeks' or 'days';
    defaults to 'days'; any other value 404s).
    """
    twitter_handle = request.args.get('handle')
    data = get_data_by_handle(twitter_handle)

    start = request.args.get('start')
    end = request.args.get('end')
    bin_method = request.args.get('method')

    # Request args arrive as strings; timestamps are epoch milliseconds.
    start = float(start)
    end = float(end)

    # TODO: Abstract this later
    if bin_method:
        if bin_method == "weeks":
            binned_data = bin_by_day(data)
        elif bin_method == "days":
            binned_data = bin_by_hour(data)
        else:
            abort(404)
    else:       # default behavior: hourly bins
        bin_method = "days"
        binned_data = bin_by_hour(data)

    start_datetime = timestamp_to_datetime(start)
    end_datetime = timestamp_to_datetime(end)

    # Baseline: the 90 days before 'start', binned and counted the same way.
    three_months_prior = start_datetime - timedelta(days=90)
    three_months_timestamp = datetime_to_timestamp(three_months_prior)
    three_months_data = filter_and_fill_dates(binned_data, bin_method, three_months_timestamp, end)
    three_months_tweet_statuses = count_tweet_status_in_bins(three_months_data)

    # Counts restricted to the requested window.
    filtered_data = filter_and_fill_dates(binned_data, bin_method, start, end)
    tweet_statuses = count_tweet_status_in_bins(filtered_data)

    # Flag each bin against the baseline stats for its weekday/hour.
    if bin_method == "weeks":
        stats = statistics_by_day(three_months_tweet_statuses, three_months_timestamp, end)
        for k, v in tweet_statuses.iteritems():
            dt = datetime(*k)
            v['anomaly'] = detect_anomaly(stats[dt.weekday()]['mean'], stats[dt.weekday()]['std_dev'], 2, v)
    elif bin_method == "days":
        stats = statistics_by_hour(three_months_tweet_statuses, three_months_timestamp, end)
        for k, v in tweet_statuses.iteritems():
            dt = datetime(*k)
            v['anomaly'] = detect_anomaly(stats[dt.hour]['mean'], stats[dt.hour]['std_dev'], 2, v)
  
    # NOTE(review): the 'anomaly' boolean was just added to each bin
    # dict, so these sums include it (True counts as 1) -- maximum and
    # total may be inflated for anomalous bins; confirm intent.  Also,
    # max() raises on an empty tweet_statuses.
    maximum = max([sum(sbin.values()) for sbin in tweet_statuses.values()])
    total = sum([sum(sbin.values()) for sbin in tweet_statuses.values()])

    return_data = transform_tuple_keys_to_timestamp(tweet_statuses)

    # Records sorted by timestamp; 'anomaly' rides along in each data dict.
    return jsonify( {'total': total, 'maximum': maximum, 'length': len(return_data), 'records': sorted([{'time': k, 'datetime': timestamp_to_datetime(k), 'data': v} for k, v in return_data.iteritems()], key=lambda e: e['time'])} )


if __name__ == "__main__":
    # Listen on all interfaces; flask's debug/reloader mode stays off.
    rest_service.run(host="0.0.0.0", debug=False)
