#!/usr/bin/env python

#  Copyright (c) 2010
#  The Regents of the University of Michigan
#  All Rights Reserved

#  Permission is granted to use, copy, create derivative works, and
#  redistribute this software and such derivative works for any purpose,
#  so long as the name of the University of Michigan is not used in
#  any advertising or publicity pertaining to the use or distribution
#  of this software without specific, written prior authorization. If
#  the above copyright notice or any other identification of the
#  University of Michigan is included in any copy of any portion of
#  this software, then the disclaimer below must also be included.

#  This software is provided as is, without representation or warranty
#  of any kind either express or implied, including without limitation
#  the implied warranties of merchantability, fitness for a particular
#  purpose, or noninfringement.  The Regents of the University of
#  Michigan shall not be liable for any damages, including special,
#  indirect, incidental, or consequential damages, with respect to any
#  claim arising out of or in connection with the use of the software,
#  even if it has been or is hereafter advised of the possibility of
#  such damages.
import sys

# Checkout of the clearview DHT crawler; providence.py is the scan driver
# used to regenerate its bootstrap node list.
CLEARVIEW_DIR = '/home/swolchok/clearview'
PROVIDENCE_PATH = CLEARVIEW_DIR + '/pyclearview/providence.py'

# Make the pyclearview modules (dhtselect, generate_bootstrap_list, ...)
# importable below.
sys.path.append(CLEARVIEW_DIR + '/pyclearview')
from dhtselect import dht, dhtutil

import event
import libtorrent

import psycopg2

from log_analysis import bulk_inserter, find_peer_lists, find_torrent_descriptions
import generate_bootstrap_list
import log_parser
from mojito_analysis import scan_parser
import vuze_torrent_downloader

import datetime
import glob
import gzip
import hashlib
import itertools
import logging
import multiprocessing
import os
import random
import shutil
import signal
import subprocess
import sys
import tempfile
import time
import urllib2

# Build the expensive full-text index / popularity ordering for a search engine.
ACTUALLY_SET_UP_SEARCH_ENGINE = False
# Write hex hash dumps to disk after fetching/importing.
DUMP_DATABASE = False
# Deduplicate the imported tables to save disk space.
SAVE_SPACE = True

def get_db_conn():
  """Connect to the dhtsearch database with an enlarged work_mem.

  Returns a psycopg2 connection; the caller is responsible for closing it.
  The temporary cursor used for the session setting is closed here instead
  of being leaked to garbage collection.
  """
  conn = psycopg2.connect('dbname=dhtsearch')
  c = conn.cursor()
  try:
    c.execute('SET work_mem=\'512MB\';')
  finally:
    c.close()
  return conn


def get_num_procs():
  """Return the number of CPUs, parsed from /proc/cpuinfo (Linux only).

  Keeps the original parse — the id of the last 'processor' entry plus
  one — but closes the file promptly instead of leaking the handle.
  """
  with open('/proc/cpuinfo') as cpuinfo:
    procs = [line for line in cpuinfo if line.startswith('processor')]
  return int(procs[-1].split(':')[1]) + 1


EXT_IP = None
def get_external_ip():
  """Return this host's external IP address as a string.

  The address is looked up via whatismyip.com on the first call and cached
  in the module-level EXT_IP thereafter.
  """
  global EXT_IP
  if EXT_IP is not None:
    return EXT_IP
  response = urllib2.urlopen('http://www.whatismyip.com/automation/n09230945.asp')
  EXT_IP = response.read().strip()
  return EXT_IP

IMPORT_CONCURRENCY = get_num_procs()


# Base exception for failures in this script (e.g. the clearview rebuild).
class Error(Exception): pass

# Responsive DHT nodes from the most recent Providence scan; filled in by
# run_providence_and_rebuild_clearview(), or from a scan log passed to main().
PROVIDENCE_LIVE_NODES = None

def run_providence_and_rebuild_clearview():
  global PROVIDENCE_LIVE_NODES
  if int(open('/proc/sys/net/core/rmem_max').read()) < 1000000:
    print >> sys.stderr, 'Need to run prep_providence.sh!'
    sys.exit(1)
#  subprocess.Popen(['sudo',
#                    CLEARVIEW_DIR + '/superseed/modifiedscripts/prep_providence.sh']).wait()

  ext_ip = get_external_ip()
    
  print repr(ext_ip)
  temp_dir = tempfile.mkdtemp()
  old_cwd = os.getcwd()
  os.chdir(temp_dir)

  prov = subprocess.Popen(['python', PROVIDENCE_PATH, dht.NAME.lower(), ext_ip, 'once'], shell=False)
  print 'providence PID:', prov.pid
  prov.wait()

#  gen = subprocess.Popen(['python',
#                          os.path.join(CLEARVIEW_DIR,
#                                       'pyclearview/generate_bootstrap_list.py'),
#                          glob.glob('scan.*.out.gz')[-1], 'partial'])
#  gen.wait()
  scan_log = glob.glob('scan.*.out.gz')[-1]
  generate_bootstrap_list.main(('gbl', scan_log, 'partial'))

  def copy_workaround(src, dest):
    os.remove(os.path.join(dest, src))
    shutil.copy(src, dest)
  copy_workaround('bootstrap_nodes.h', CLEARVIEW_DIR)
  copy_workaround('bootstrap_nodes.c', CLEARVIEW_DIR)

  PROVIDENCE_LIVE_NODES = frozenset(
      scan_parser.slurp_responsive_nodes(scan_log))

  os.chdir(old_cwd)

  build = subprocess.Popen([os.path.join(CLEARVIEW_DIR, 'build.sh')],
                           cwd=CLEARVIEW_DIR)
  if build.wait():
    raise Error('Error rebuilding clearview!')

#  shutil.rmtree(temp_dir)


from cyrecrawl_helpers import do_import

def import_clearview_results(data_dir, descs_table='torrent_descs_new',
                             peers_table='peer_lists_new'):
  # Create IMPORT_CONCURRENCY update tasks, each processing store*.log
  # in their assigned set of hops.
  hops = glob.glob('%s/hop*/stores*.log' % data_dir)
  print 'concurrency is %d' % IMPORT_CONCURRENCY
  hop_groups = [hops[offset::IMPORT_CONCURRENCY]
                for offset in range(IMPORT_CONCURRENCY)]
  conn = get_db_conn()
  c = conn.cursor()
  c.execute('''DROP TABLE IF EXISTS %s''' % descs_table)
  c.execute('''CREATE TABLE %s
  (name TEXT, size BIGINT, hash BYTEA,
   seeders INTEGER, leechers INTEGER, peers INTEGER, dhtkey BYTEA);''' % descs_table)
  c.execute('''DROP TABLE IF EXISTS %s''' % peers_table)
  c.execute('''CREATE TABLE %s
  (dhtkey BYTEA, peer INET, port INTEGER);''' % peers_table)
  conn.commit()

  import_tasks = []
  count = 0
  for group in hop_groups:
    task = multiprocessing.Process(target=do_import, args=(group, descs_table,peers_table ))
    import_tasks.append(task)
    task.start()

  print 'spawned all tasks'
  for task in import_tasks:
    task.join()

  if ACTUALLY_SET_UP_SEARCH_ENGINE or SAVE_SPACE:
    uniqify_query = '''CREATE TABLE %s_uniq AS
                       SELECT * FROM (
                         SELECT DISTINCT ON (name, hash) *
                         FROM %s ORDER BY name, hash) AS res''' % (descs_table, descs_table)
    if ACTUALLY_SET_UP_SEARCH_ENGINE:
      uniqify_query += ' ORDER BY res.peers'
    uniqify_query += ';'
    c.execute('SET work_mem=\'4GB\';')
    c.execute(uniqify_query)
    c.execute('DROP TABLE %s;' % descs_table)
    c.execute('ALTER TABLE %s_uniq RENAME TO %s;' % (descs_table, descs_table))
    conn.commit()
    # Before postgresql-8.4, GROUP BY is faster than DISTINCT. thanks, #postgresql!
    c.execute('''CREATE TABLE %s_uniq AS
                 SELECT * FROM %s
                 GROUP BY dhtkey, peer, port''' % (peers_table, peers_table))
    c.execute('DROP TABLE %s;' % peers_table)
    c.execute('ALTER TABLE %s_uniq RENAME TO %s' % (peers_table, peers_table))
  c.execute('SET maintenance_work_mem=\'4GB\';')
  c.execute('ANALYZE;')
  conn.commit()
  conn.close()


def switch_to_new_torrent_descs_table():
  """Swap the freshly built *_new tables and indices into place.

  Drops the previous generation of torrent_descs/peer_lists and renames
  the new tables and their indices over them, committing at the same
  points the original code did.
  """
  conn = get_db_conn()
  cur = conn.cursor()

  def step(statement, commit=True):
    # Execute one DDL statement, optionally committing right after it.
    cur.execute(statement)
    if commit:
      conn.commit()

  step('DROP TABLE IF EXISTS torrent_descs;')
  step('ALTER TABLE torrent_descs_new RENAME TO torrent_descs;')
  step('ALTER INDEX idx_hash_new RENAME TO idx_hash;')
  if ACTUALLY_SET_UP_SEARCH_ENGINE:
    step('ALTER INDEX idx_name_gin_new RENAME TO idx_name_gin;')
  # The peer-list swap is committed as a single unit.
  step('DROP TABLE IF EXISTS peer_lists;', commit=False)
  step('ALTER TABLE peer_lists_new RENAME TO peer_lists;', commit=False)
  step('ALTER INDEX idx_peers_new RENAME TO idx_peers;')
  conn.close()


def build_cheap_indices(batch_suffix='new'):
  """Create the inexpensive hash/dhtkey indices on the imported tables.

  Args:
    batch_suffix: suffix of the freshly imported tables ('new' by default).
  """
  conn = get_db_conn()
  c = conn.cursor()
  # Large maintenance_work_mem speeds up index builds considerably.
  c.execute('SET maintenance_work_mem=\'4GB\';')
  c.execute('CREATE INDEX idx_hash_%s on torrent_descs_%s(hash);' % (batch_suffix, batch_suffix))
  c.execute('CREATE INDEX idx_peers_%s ON peer_lists_%s(dhtkey);' % (batch_suffix, batch_suffix))
  conn.commit()  # single commit; the original's second commit was redundant
  conn.close()

def build_expensive_indices():
  """Create the costly full-text (GIN) index over torrent names."""
  connection = get_db_conn()
  cursor = connection.cursor()
  # Large maintenance_work_mem makes the index build much faster.
  cursor.execute('SET maintenance_work_mem=\'4GB\';')
  cursor.execute('CREATE INDEX idx_name_gin_new ON torrent_descs_new USING gin(to_tsvector(\'english\', name));')
  connection.commit()
  connection.close()


def populate_related_content_keys():
  """Populate related_content_key on torrent_descs_new and index it.

  The key is computed in SQL as the SHA-1 digest of 'az:rcm:assoc:'
  followed by the upper-case hex encoding of the torrent hash.
  """
  connection = get_db_conn()
  cursor = connection.cursor()
  cursor.execute('''UPDATE torrent_descs_new
            SET related_content_key = digest('az:rcm:assoc:' || 
            upper(encode(hash, 'hex')), 'sha1') WHERE hash is not NULL;''')
  connection.commit()
  cursor.execute('CREATE INDEX idx_rel_key_new ON torrent_descs_new(related_content_key);')
  connection.commit()
  connection.close()

from cyrecrawl_helpers import send_fetch_requests

def fetch_real_torrents(data_dir=None):
  """Fetch the actual .torrent files for every distinct crawled hash.

  Each retrieved torrent is archived (gzipped) in a temp directory; when
  ACTUALLY_SET_UP_SEARCH_ENGINE is set, parsed metadata is bulk-inserted
  into actual_torrents_new.

  Args:
    data_dir: directory to write the hash dump into when DUMP_DATABASE is
      set.  The dump is skipped when it is None (the original code hit a
      NameError on an undefined data_dir in that branch).
  """
  conn = get_db_conn()
  c = conn.cursor()
  # "Expensive", but only takes a couple minutes.
  c.execute('SET work_mem=\'4GB\';')
  start = datetime.datetime.today()
  c.execute('''SELECT DISTINCT hash
               FROM torrent_descs_new WHERE hash is not NULL''')
  hashes = c.fetchall()
  end = datetime.datetime.today()

  logging.info('Unique on hash took %s' % ((end-start),))

  c.execute('''DROP TABLE IF EXISTS actual_torrents_new;''')
  c.execute('''CREATE TABLE actual_torrents_new
  (hash BYTEA PRIMARY KEY, seeders INTEGER, leechers INTEGER, name TEXT,
   files TEXT, title TEXT, description TEXT, comment TEXT)''')
  conn.commit()

  query = '''INSERT INTO actual_torrents_new (hash, seeders, leechers, name,
             files, title, description, comment) VALUES'''
  inserter = bulk_inserter.BulkInserter(conn, query, 8)

  already_got_torrents = set()
  # Mutable container so the nested callback can update it (Python 2 has no
  # 'nonlocal').  The original did 'malformed += 1' on an undefined local,
  # which raised UnboundLocalError on the first info-less torrent.
  stats = {'malformed': 0}

  torrent_dir = tempfile.mkdtemp(time.strftime('.%Y-%m-%d.%H:%M:%S'), 'torrents-')
  def process_torrent(torrent_hash, torrent_file):
    # Callback invoked by the downloader once per retrieved torrent.
    if torrent_hash in already_got_torrents:
      return

    # Archive the raw torrent before attempting to parse it.
    outfyl = gzip.open(torrent_dir + '/' + str(torrent_hash).encode('hex').upper() + '.torrent.gz', 'wb')
    outfyl.write(torrent_file)
    outfyl.close()

    try:
      td = libtorrent.bdecode(torrent_file)
    except RuntimeError:
      print 'bdecode error!'
      return

    if td is None:
      print 'bdecode error!'
      return

    comment = td.get('comment')

    # Vuze/Azureus torrents may carry extra content metadata.
    title = None
    description = None
    if 'azureus_properties' in td and 'Content' in td['azureus_properties']:
      title = td['azureus_properties']['Content'].get('Title')
      description = td['azureus_properties']['Content'].get('Description')

    if 'info' not in td:
      print 'no info dict in %s' % td
      stats['malformed'] += 1
      return

    infodict = td['info']
    name = infodict.get('name')

    filenames = None
    if 'files' in infodict:
      try:
        filenames = '\n'.join(fyl['path'][-1] for fyl in infodict['files'])
      except KeyError:
        pass

    already_got_torrents.add(torrent_hash)
    if ACTUALLY_SET_UP_SEARCH_ENGINE:
      c.execute('''SELECT seeders, leechers  
      FROM torrent_descs_new WHERE hash = %s LIMIT 1;''',
                (psycopg2.Binary(torrent_hash),))
      seeders, leechers = c.fetchone()
      inserter.insert((psycopg2.Binary(torrent_hash), seeders, leechers, name,
                       filenames, title, description, comment))


  downloader = vuze_torrent_downloader.VuzeTorrentDownloader((get_external_ip(),
                                                              random.randint(1024, 65534)),
                                                             process_torrent)

  send_fetch_requests([x[0] for x in hashes], downloader, c,
                      PROVIDENCE_LIVE_NODES, 0, len(hashes))

  # Run the event loop until aborted; SIGTERM/SIGINT end the fetch cleanly.
  event.signal(signal.SIGTERM, event.abort)
  event.signal(signal.SIGINT, event.abort)
  event.dispatch()
  inserter.done()
  conn.commit()
  conn.close()

  if stats['malformed']:
    logging.info('%d torrents had no info dict' % stats['malformed'])

  # Dump hashes.
  if DUMP_DATABASE and data_dir is not None:
    hash_dump = open('%s/related_content_hashes' % data_dir, 'w')
    write = hash_dump.write
    for row in hashes:
      write(str(row[0]).encode('hex'))
      write('\n')
    hash_dump.close()


def dump_peer_hashes(data_dir):
  """Dump hex-encoded DHT keys from peer_lists into files under data_dir.

  Writes 'peer_hashes' (all distinct dhtkeys) and 'peer_with_desc_hashes'
  (torrent hashes whose SHA-1 matches a peer-list dhtkey).  Both files and
  the DB connection are closed instead of leaked as in the original.
  """
  conn = get_db_conn()
  c = conn.cursor()

  c.execute('''SELECT DISTINCT dhtkey
               FROM peer_lists WHERE dhtkey is not NULL;''')
  hashes = c.fetchall()

  peer_hash_dump = open('%s/peer_hashes' % data_dir, 'w')
  write = peer_hash_dump.write
  for row in hashes:
    write(str(row[0]).encode('hex'))
    write('\n')
  peer_hash_dump.close()

  # SQL helper computing a hex SHA-1, used to match torrent hashes against
  # peer-list dhtkeys in the join below.
  c.execute("""CREATE OR REPLACE FUNCTION sha1(bytea) RETURNS text AS $$
               SELECT encode(digest($1, 'sha1'),'hex') AS result
               $$ LANGUAGE SQL STRICT IMMUTABLE;""")
  c.execute('''SELECT DISTINCT D.hash
               FROM torrent_descs D, peer_lists P
               WHERE sha1(D.hash) = encode(P.dhtkey,'hex');''')
  hashes = c.fetchall()
  peer_with_desc_hash_dump = open('%s/peer_with_desc_hashes' % data_dir, 'w')
  write = peer_with_desc_hash_dump.write
  for row in hashes:
    write(str(row[0]).encode('hex'))
    write('\n')
  peer_with_desc_hash_dump.close()
  conn.close()


def grouper(n, iterable, fillvalue=None):
  """grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx

  Collect data into fixed-length chunks, padding the last with fillvalue.
  Reindented to the file's 2-space convention; works on both Python 2
  (izip_longest) and Python 3 (zip_longest) with identical behavior.
  """
  zip_impl = getattr(itertools, 'izip_longest', None) or itertools.zip_longest
  args = [iter(iterable)] * n
  return zip_impl(fillvalue=fillvalue, *args)


def main(argv):
  """Drive the whole recrawl: providence scan, clearview crawl, DB import,
  indexing, torrent fetch, and final table swap.

  argv forms (after the program name):
    ['just_clearview', ...]  -- run only the scan + clearview crawl.
    [data_dir]               -- skip the crawl; import from data_dir
                                ('noimport' skips the import too).
    [data_dir, scan_log]     -- additionally load live nodes from scan_log.

  Returns a process exit code (0 on success).
  """
  # 'just_clearview' is consumed from argv; remaining args shift down.
  if len(argv) > 1 and argv[1] == 'just_clearview':
    argv = [argv[0]] + argv[2:]
    cv_only = True
  else:
    cv_only = False

  if len(argv) >= 2:
    data_dir = argv[1]
  else:
    # No data_dir given: do a fresh providence scan and clearview crawl.
    pre_prov_time = datetime.datetime.today()
    run_providence_and_rebuild_clearview()
    post_prov_time = datetime.datetime.today()
    logging.info('first prov run took %s' % (post_prov_time - pre_prov_time))
    cv = subprocess.Popen(['./start_clearview.sh'], stdout=subprocess.PIPE)
    # The crawl's output directory is the last non-empty stdout line.
    data_dir = cv.communicate()[0].split('\n')[-2].strip()

    logging.info('Clearview finished at %s, got dir %s' % (datetime.datetime.today(),
                                                    repr(data_dir)))

    if cv.returncode:
      sys.stderr.write('Error code %d running ClearView!\n' % cv.returncode)
      return 1

  if cv_only:
    return 0

  if len(argv) >= 3:
    # A providence scan log was supplied; load the live-node set from it.
    global PROVIDENCE_LIVE_NODES
    PROVIDENCE_LIVE_NODES = frozenset(
      scan_parser.slurp_responsive_nodes(argv[2]))
    print "loaded %d live nodes" % len(PROVIDENCE_LIVE_NODES)

  if data_dir != 'noimport':
    post_crawl_time = pre_import_time = datetime.datetime.today()
    import_clearview_results(data_dir)
    post_import_time = pre_rel_con_time = datetime.datetime.today()

    logging.info('Import took %s' % (post_import_time - pre_import_time))
  else:
    pre_rel_con_time = datetime.datetime.today()

#  populate_related_content_keys()
  post_rel_con_time = pre_index_time = datetime.datetime.today()
  logging.info('Populating related content took %s' % (post_rel_con_time - pre_rel_con_time))


  build_cheap_indices()
  post_index_time = datetime.datetime.today()
  logging.info('Cheap indexing took %s' % (post_index_time - pre_index_time))

  # Second scan: refresh the live-node set right before fetching torrents.
  pre_second_prov_time = datetime.datetime.today()
  run_providence_and_rebuild_clearview()
  pre_fetch_time = post_second_prov_time = datetime.datetime.today()
  logging.info('Running providence again took %s' % (post_second_prov_time - pre_second_prov_time))

  
  fetch_real_torrents()
  post_fetch_time = datetime.datetime.today()
  logging.info('Fetching real torrents took %s' % (post_fetch_time - pre_fetch_time))

  if ACTUALLY_SET_UP_SEARCH_ENGINE:
    pre_index_time = datetime.datetime.today()
    build_expensive_indices()
    post_index_time = datetime.datetime.today()

    logging.info('Expensive indexing took %s' % (post_index_time - pre_index_time))

  switch_to_new_torrent_descs_table()

  if DUMP_DATABASE:
    dump_peer_hashes(data_dir)

  return 0

if __name__ == '__main__':
  # Configure the DHT send queue before any crawling starts: silence the
  # slow-down callback and cap the outbound action rate.
  dhtutil.the_send_queue.send_slowed_callback = lambda: None
  dhtutil.the_send_queue.actions_per_sec = 750
  dhtutil.HACK_DISABLE_TIMER = False
  logging.basicConfig(filename="recrawl.log",
                      level=logging.DEBUG,
                      format="%(asctime)s: %(message)s")
  sys.exit(main(sys.argv))
