#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
import bisect
from datetime import datetime, timedelta, date
import time

# 64-character alphabet of the Google Chart API "extended" data encoding.
EXTENDED_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-.'
EXTENDED_MAP_LENGTH = len(EXTENDED_MAP)  # 64
# 1024-based magnitude suffixes for human_units ("" = no suffix).
UNITS = [ "", "K", "M", "G", "T" ]
# Sunday-first weekday names (see generate_weekly_img's index conversion).
DAY_OF_WEEK = [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ]
MONTH = [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ]

def human_units(value):
  """Format a number with a 1024-based unit suffix, e.g. 2048 -> "2.0K".

  Returns the empty string for 0 (so empty chart labels stay blank).
  """
  if value == 0:
    return ""
  count = 0
  value = float(value)
  # Bug fix: the original tested `value / 1024 > 1`, which left exactly 1024
  # formatted as "1024.0" instead of "1.0K". Also stop at the largest known
  # suffix so enormous values cannot index past UNITS.
  while value >= 1024 and count < len(UNITS) - 1:
    value = value / 1024
    count += 1
  return "%.1f%s" % (value, UNITS[count])

def extendedEncode(arrVals, maxVal):
  """Encode arrVals in Chart API extended encoding, scaled to maxVal.

  Each value becomes two characters (4096 levels). Values above the scale
  clip to "..", negative values to "__".
  """
  chartData = ''
  if maxVal == 0:
    maxVal = 1  # avoid division by zero; everything scales to 0 anyway

  for i in arrVals:
    # Scale the value to maxVal (4096 = EXTENDED_MAP_LENGTH ** 2 levels).
    scaledVal = EXTENDED_MAP_LENGTH * EXTENDED_MAP_LENGTH * i / maxVal

    if scaledVal > (EXTENDED_MAP_LENGTH * EXTENDED_MAP_LENGTH) - 1:
      chartData += ".."  # clipped above the top of the scale
    elif scaledVal < 0:
      chartData += '__'  # below range
    else:
      # Calculate first and second digits and add them to the output.
      quotient = int(scaledVal / EXTENDED_MAP_LENGTH)
      remainder = scaledVal - EXTENDED_MAP_LENGTH * quotient
      chartData += EXTENDED_MAP[int(quotient)] + EXTENDED_MAP[int(remainder)]

  return chartData


class DataGraph(db.Model):
  """Datastore entity naming one counter series (e.g. "down" or "up")."""
  # Series identifier; MainHandler looks graphs up by this name.
  name = db.StringProperty()

class DataPoint(db.Model):
  """One sample of a cumulative counter, attached to a DataGraph."""
  # Sample timestamp (built from a unix-seconds form field in post()).
  time = db.DateTimeProperty()
  # Raw counter reading; generate_data() treats it as a 32-bit cumulative
  # counter that may wrap.
  value = db.IntegerProperty()
  # The series this sample belongs to.
  graph = db.ReferenceProperty(DataGraph)

def generate_data(data, cap):
  """Turn cumulative counter samples into per-second rates.

  data: DataPoint-like objects (with .time datetime and .value int),
    assumed sorted by time.
  cap: rates at or above this are discarded as bogus (counter resets etc.).
  Returns a list of [rate, unix_timestamp] pairs.
  """
  rates = []
  prev_value = None
  prev_stamp = None
  for point in data:
    stamp = time.mktime(point.time.timetuple())
    value = point.value
    if prev_value is not None:
      baseline = prev_value
      # A smaller reading means the 32-bit counter wrapped; shift the
      # baseline down so the difference stays positive.
      if value < baseline:
        baseline -= 2 ** 32
      # Identical timestamps would divide by zero; skip those pairs.
      if stamp != prev_stamp:
        rate = (value - baseline) / (stamp - prev_stamp)
        if rate < cap:
          rates.append([rate, stamp])
    prev_value = value
    prev_stamp = stamp
  return rates


def average(values):
  """Return the arithmetic mean of a non-empty sequence as a float.

  (Parameter renamed from `list`, which shadowed the builtin.)
  """
  return float(sum(values)) / len(values)


def generate_data_series(data, duration):
  """Resample rate samples onto a fixed time grid ending now.

  data: [value, unix_time] pairs sorted by time (as from generate_data).
  duration: window length in seconds; the bucket width is duration // 360,
    giving ~360 buckets (exactly 360 for the durations used in this file).
  Returns a list of [value, bucket_time] pairs; a bucket whose nearest
  sample is farther than one bucket width away gets value 0.
  """
  values = [d[0] for d in data]
  times = [d[1] for d in data]
  # `//` matches the original Python 2 integer `/` for the int durations
  # callers pass, and keeps the range() step integral.
  step = duration // 360
  series = []
  last_index = -1
  now = int(time.time())
  for t in range(now - duration, now, step):
    index = bisect.bisect_left(times, t)
    # Empty bucket: no samples left, samples already consumed, or nearest
    # sample too far from this bucket's time.
    if index >= len(values) or index == last_index or abs(times[index] - t) > step:
      v = 0
    else:
      # Average everything that arrived since the previously consumed sample.
      v = average(values[last_index + 1:index + 1])
    series.append([v, t])
    last_index = index

  return series

def generate_daily_img(download_data, upload_data):
  """Build the 30-hour chart tag, with an X label every two hours."""
  start = datetime.fromtimestamp(download_data[0][1])
  # struct_time for the top of the hour following the first sample;
  # mktime normalizes hour overflow at midnight.
  next_hour = time.struct_time((start.year, start.month, start.day, start.hour + 1, 0, 0, start.weekday(), start.toordinal() - date(start.year, 1, 1).toordinal() + 1, -1))
  offset = time.mktime(next_hour) - download_data[0][1]
  # 15 labels spaced two hours apart, positioned as percentages of the
  # 30-hour window.
  x_axis = ["%02d:00" % ((start.hour + 1 + 2 * i) % 24) for i in range(15)]
  x_position = [(offset + 2 * i * 3600) / 30 / 3600 * 100 for i in range(15)]

  return generate_img(download_data, upload_data, "Daily%20Graph", x_axis, x_position, offset / 30 / 3600 * 100, 30)


def generate_weekly_img(download_data, upload_data):
  """Build the 10-day chart tag, with day-of-week X labels."""
  start = datetime.fromtimestamp(download_data[0][1])
  # struct_time for midnight of the day containing the first sample.
  midnight = time.struct_time((start.year, start.month, start.day, 0, 0, 0, start.weekday(), start.toordinal() - date(start.year, 1, 1).toordinal() + 1, -1))
  offset = time.mktime(midnight) - download_data[0][1]
  x_axis = []
  x_position = []
  for i in range(11):
    x = (offset + (12 * 3600) + i * 24 * 3600) / 10 / 24 / 3600 * 100
    # Drop labels that would land on the chart edges.
    if 2 < x < 98:
      x_position.append(x)
      # datetime.weekday() is Monday=0; the +1 converts into the
      # Sunday-first DAY_OF_WEEK table.
      x_axis.append(DAY_OF_WEEK[(start.weekday() + 1 + i) % 7])

  return generate_img(download_data, upload_data, "Weekly%20Graph", x_axis, x_position, offset / 10 / 24 / 3600 * 100, 10)


def generate_monthly_img(download_data, upload_data):
  """Build the 30-day chart tag, with day-of-month X labels."""
  start = datetime.fromtimestamp(download_data[0][1])
  # struct_time for midnight of the day after the first sample; mktime
  # normalizes day overflow at month end.
  next_midnight = time.struct_time((start.year, start.month, start.day + 1, 0, 0, 0, start.weekday(), start.toordinal() - date(start.year, 1, 1).toordinal() + 1, -1))
  # NOTE(review): the halving of the offset here is unexplained — confirm
  # it is intentional before changing.
  offset = (time.mktime(next_midnight) - download_data[0][1]) / 2
  # 15 labels, one every other day, positioned as percentages of the window.
  x_axis = [(start + timedelta(1 + 2 * i)).day for i in range(15)]
  x_position = [(offset + (6 * 3600) + i * 24 * 3600) / 15 / 24 / 3600 * 100 for i in range(15)]

  return generate_img(download_data, upload_data, "Monthly%20Graph", x_axis, x_position, offset / 15 / 24 / 3600 * 100, 30)


def generate_yearly_img(download_data, upload_data):
  """Build the ~year chart tag, with month-name X labels."""
  start = datetime.fromtimestamp(download_data[0][1])
  # struct_time for midnight on the 1st of the first sample's month.
  month_start = time.struct_time((start.year, start.month, 1, 0, 0, 0, start.weekday(), start.toordinal() - date(start.year, 1, 1).toordinal() + 1, -1))
  offset = time.mktime(month_start) - download_data[0][1]
  x_axis = []
  x_position = []
  for i in range(13):
    # Label mid-month (15 days in), assuming 30-day months for positioning.
    x = (offset + (15 * 24 * 3600) + i * 30 * 24 * 3600) / 12 / 30 / 24 / 3600 * 100
    # Drop labels that would land on the chart edges.
    if 2 < x < 98:
      x_position.append(x)
      x_axis.append(MONTH[(start.month - 1 + i) % 12])

  return generate_img(download_data, upload_data, "Yearly%20Graph", x_axis, x_position, offset / 12 / 30 / 24 / 3600 * 100, 12)


def generate_img(download_data, upload_data, title, x_axis, x_position, offset, num_lines):
  """Assemble an <img> tag for a Google Chart API line chart.

  download_data/upload_data: [value, time] pairs (both series share the
    chart, scaled to the larger of the two maxima).
  title: URL-escaped chart title.
  x_axis/x_position: X labels and their percentage positions.
  offset: grid offset percentage; num_lines: number of vertical grid lines.
  """
  # Scale both series against the larger of the two peaks.
  max_delta = max(max(download_data)[0], max(upload_data)[0])
  # Y labels at 0%, 25%, 50%, 75% and 100% of the peak.
  y_labels = "|".join([human_units(i) for i in [0, max_delta / 4, max_delta / 2, max_delta * 3 / 4, max_delta]])
  parts = [
      '<img src="',
      'http://chart.apis.google.com/chart?cht=lc&chma=40&chxl=0:|',
      "|".join([str(x) for x in x_axis]),
      '|1:|',
      y_labels,
      '&chxp=0,',
      ",".join([str(x) for x in x_position]),
      '&chdlp=b&chg=',
      str(100.0 / num_lines),
      ',25,4,1,',
      str(offset),
      ',0&chdl=In|Out&chtt=',
      title,
      '&chs=1000x300&chco=33A640,0000FF&chm=B,66FF3395,0,0,0&chxt=x,y&chxr=1,0,' + str(int(max_delta)) + ',' + str(int(max_delta / 4)) + '&chd=e:',
      extendedEncode([v[0] for v in download_data], max_delta),
      ",",
      extendedEncode([v[0] for v in upload_data], max_delta),
      '">',
  ]
  return "".join(parts)

class MainHandler(webapp.RequestHandler):
  """Serves the bandwidth graphs on GET and stores counter samples on POST."""

  def get(self):
    """Write four chart <img> tags (daily/weekly/monthly/yearly) as HTML.

    Loads up to 200000 points per series, converts the cumulative counters
    to rates, then resamples each time window and renders it.
    """
    self.response.headers['Content-Type'] = 'text/html'
    now = time.time()
    # ~13 months back — enough to cover the yearly (365-day) window.
    year_ago = datetime.fromtimestamp(now - 400 * 24 * 3600)
    down = DataGraph.gql("WHERE name = :1", "down").get()
    down_points = DataPoint.gql("WHERE graph = :1 AND time > :2 ORDER BY time LIMIT 200000", down, year_ago).fetch(200000)
    up = DataGraph.gql("WHERE name = :1", "up").get()
    up_points = DataPoint.gql("WHERE graph = :1 AND time > :2 ORDER BY time LIMIT 200000", up, year_ago).fetch(200000)

    # Rate caps used by generate_data to discard bogus spikes.
    # NOTE(review): 1500000 B/s is ~1.5MB/s, not the "15MB/s" the original
    # comment claimed — confirm which was intended.
    down_cap = 1500000  # bytes/s down cap (labeled "15MB/s"; value is ~1.5MB/s)
    up_cap = 1000000  # ~1MB/s up cap

    download_deltas = generate_data(down_points, down_cap)
    upload_deltas = generate_data(up_points, up_cap)

    # Daily graph: last 30 hours.
    download_data = generate_data_series(download_deltas, 30*3600)
    upload_data = generate_data_series(upload_deltas, 30*3600)
    img = generate_daily_img(download_data, upload_data)
    self.response.out.write(img)
    # Weekly graph: last 10 days.
    download_data = generate_data_series(download_deltas, 10*24*3600)
    upload_data = generate_data_series(upload_deltas, 10*24*3600)
    img = generate_weekly_img(download_data, upload_data)
    self.response.out.write(img)
    # Monthly graph: last 30 days.
    download_data = generate_data_series(download_deltas, 30*24*3600)
    upload_data = generate_data_series(upload_deltas, 30*24*3600)
    img = generate_monthly_img(download_data, upload_data)
    self.response.out.write(img)
    # Yearly graph: last 365 days.
    download_data = generate_data_series(download_deltas, 365*24*3600)
    upload_data = generate_data_series(upload_deltas, 365*24*3600)
    img = generate_yearly_img(download_data, upload_data)
    self.response.out.write(img)


  def post(self):
    """Store one counter sample.

    Expects form fields: 'name' (series), 'date' (unix seconds) and
    'data' (counter value).
    """
    name = self.request.get('name')
    graph = DataGraph.gql("WHERE name = :1", name).get()
    # Lazily create the series the first time it reports.
    if graph == None:
      graph = DataGraph(name=name)
      graph.put()

    point = DataPoint()
    point.time = datetime.fromtimestamp(long(self.request.get('date')))
    point.value = long(self.request.get('data'))
    point.graph = graph
    point.put()

def main():
  """Create the WSGI application and hand it to the App Engine runner."""
  app = webapp.WSGIApplication([('/', MainHandler)], debug=True)
  util.run_wsgi_app(app)


if __name__ == '__main__':
  main()
