#!/usr/bin/env python
#  Copyright (c) 2010
#  The Regents of the University of Michigan
#  All Rights Reserved

#  Permission is granted to use, copy, create derivative works, and
#  redistribute this software and such derivative works for any purpose,
#  so long as the name of the University of Michigan is not used in
#  any advertising or publicity pertaining to the use or distribution
#  of this software without specific, written prior authorization. If
#  the above copyright notice or any other identification of the
#  University of Michigan is included in any copy of any portion of
#  this software, then the disclaimer below must also be included.

#  This software is provided as is, without representation or warranty
#  of any kind either express or implied, including without limitation
#  the implied warranties of merchantability, fitness for a particular
#  purpose, or noninfringement.  The Regents of the University of
#  Michigan shall not be liable for any damages, including special,
#  indirect, incidental, or consequential damages, with respect to any
#  claim arising out of or in connection with the use of the software,
#  even if it has been or is hereafter advised of the possibility of
#  such damages.

import collections
import cPickle
import glob
import sys

def rollup(data):
  """Merge a list of per-host crawl stat dicts into one summary tuple.

  Each element of `data` is a dict carrying numeric counters
  ('total_stores', 'value_bytes', 'total_bytes', 'bencoded_stores',
  'bencoded_bytes', 'peer_record_stores', 'peer_record_bytes',
  'unvanish_acceptable_stores', 'unvanish_acceptable_bytes'), a
  'found_keys' mapping of key -> earliest-seen value, and a
  'sybil_finds' list.

  Returns an 11-tuple: (found_keys, total_stores, value_bytes,
  total_bytes, bencoded_stores, bencoded_bytes, peer_record_stores,
  peer_record_bytes, unvanish_acceptable_stores,
  unvanish_acceptable_bytes, sybil_finds).  Counters are summed,
  sybil_finds lists are concatenated in input order, and found_keys
  entries are merged keeping the minimum value per key.
  """
  found_keys = {}
  sybil_finds = []
  for d in data:
    sybil_finds.extend(d['sybil_finds'])
    # Keep the smallest recorded value for each key across all hosts.
    for key, value in d['found_keys'].items():
      previous = found_keys.get(key)
      found_keys[key] = value if previous is None else min(previous, value)

  def _total(field):
    # Sum one numeric counter across every per-host dict.
    return sum(d[field] for d in data)

  return (found_keys, _total('total_stores'),
          _total('value_bytes'), _total('total_bytes'),
          _total('bencoded_stores'), _total('bencoded_bytes'),
          _total('peer_record_stores'), _total('peer_record_bytes'),
          _total('unvanish_acceptable_stores'),
          _total('unvanish_acceptable_bytes'), sybil_finds)

if __name__ == '__main__':
  nhops = int(sys.argv[1])
  for hopnum in range(1,nhops + 1):
    data = [cPickle.load(open(filename)) for filename in glob.glob('hop%d.ec2*.pickle' % hopnum)]
    print 'done loading hop %d' % hopnum
    results = rollup(data)
    cPickle.dump({'found_keys': results[0], 'total_stores': results[1],
                #  'unique_keys': results[2],
                  'value_bytes': results[2],
                  'total_bytes': results[3],
                  'bencoded_stores': results[4],
                  'bencoded_bytes': results[5],
                  'peer_record_stores': results[6],
                  'peer_record_bytes': results[7],
                  'unvanish_acceptable_stores': results[8],
                  'unvanish_acceptable_bytes': results[9],
                  'sybil_finds': results[10]},
                 open('hop%02d.rollup.pickle' % hopnum, 'wb'), protocol=cPickle.HIGHEST_PROTOCOL)
