#  Copyright (c) 2010
#  The Regents of the University of Michigan
#  All Rights Reserved

#  Permission is granted to use, copy, create derivative works, and
#  redistribute this software and such derivative works for any purpose,
#  so long as the name of the University of Michigan is not used in
#  any advertising or publicity pertaining to the use or distribution
#  of this software without specific, written prior authorization. If
#  the above copyright notice or any other identification of the
#  University of Michigan is included in any copy of any portion of
#  this software, then the disclaimer below must also be included.

#  This software is provided as is, without representation or warranty
#  of any kind either express or implied, including without limitation
#  the implied warranties of merchantability, fitness for a particular
#  purpose, or noninfringement.  The Regents of the University of
#  Michigan shall not be liable for any damages, including special,
#  indirect, incidental, or consequential damages, with respect to any
#  claim arising out of or in connection with the use of the software,
#  even if it has been or is hereafter advised of the possibility of
#  such damages.
import cPickle as pickle
import csv
import gzip
import hashlib
import itertools
import os
import sys

import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot

import libtorrent
import numpy
from util import *

def sizes_with_metadata(metadata_root):
  enc = str.encode
  data = pickle.load(open('content_to_ip.dat'))
  data.reverse()

  metadata_files = []
  for i in range(16):
    metadata_files.extend(os.listdir(os.path.join(metadata_root, hex(i)[2].upper())))

  to_del = len('.torrent.gz')
  def make_sha_rev_tuple(x):
    hsh = x[:-to_del]
    return (hashlib.sha1(hsh.decode('hex')).digest(), hsh)

  metadata_files = dict(make_sha_rev_tuple(x) for x in metadata_files)

  def rev_sha_with(x):
    hsh, count = x
    hsh = metadata_files.get(hsh)
    return count if hsh else None

  def rev_sha_without(x):
    hsh, count = x
    hsh = metadata_files.get(hsh)
    return None if hsh else count

  # haves     = list of counts for torrents where we have .torrent file.
  # have_nots = "    "  "      "   "        "     "  dont have .torrent file.
  haves = filter(lambda x: x is not None,
                 itertools.imap(rev_sha_with, data))
  have_nots = filter(lambda x: x is not None,
                     itertools.imap(rev_sha_without, data))

  haves.sort()
  have_nots.sort()
  overall_max = max(max(haves), max(have_nots))
  overall_min = min(min(haves), min(have_nots))
  print 'min: %d' % overall_min
  print 'total peers: %d' % (sum(haves) + sum(have_nots))
  print 'covered: %d' % (sum(haves))
  print 'uncovered: %d' % sum(have_nots)

  bins = (overall_max - overall_min)

  # Histograms map count of peers -> number of retrieved .torrents.
  haves_hist, edges = numpy.histogram(haves, bins, (overall_min, overall_max))
  have_nots_hist, edges = numpy.histogram(have_nots, bins, (overall_min, overall_max))
  print "counts of retrieved .torrents for 1-20 peers:"
  print haves_hist[:20]
  print sum(haves_hist[:20])  # How many .torrents do we have

  print "counts of missed .torrents for 1-20 peers:"
  print have_nots_hist[:20]
  print sum(have_nots_hist[:20])

  print 'total retrieved:'
  print sum(haves_hist)
  print 'total missed:'
  print sum(have_nots_hist)
  iter = itertools.izip(itertools.count(),
                        itertools.izip_longest(haves_hist, have_nots_hist))

  # pr = []
  # for idx, (have_n, have_not_n) in iter:
  #   if have_n is None:
  #     have_n = 0
  #   if have_not_n is None:
  #     have_not_n = 0
  #   tot = have_n + have_not_n
  #   pr_have = float(have_n) / tot
  #   pr.append(pr_have)
  # pyplot.plot(pr, 'r+')
  # pyplot.savefig('have_pr.png')

#  return 0, 1

def main(argv):
  """Entry point.  argv[0] is the root directory of retrieved metadata."""
  metadata_root = argv[0]
  sizes_with_metadata(metadata_root)


# Script usage: python <this file> <metadata_root>
if __name__ == '__main__':
  main(sys.argv[1:])
