#!/usr/bin/env python
#  Copyright (c) 2010
#  The Regents of the University of Michigan
#  All Rights Reserved

#  Permission is granted to use, copy, create derivative works, and
#  redistribute this software and such derivative works for any purpose,
#  so long as the name of the University of Michigan is not used in
#  any advertising or publicity pertaining to the use or distribution
#  of this software without specific, written prior authorization. If
#  the above copyright notice or any other identification of the
#  University of Michigan is included in any copy of any portion of
#  this software, then the disclaimer below must also be included.

#  This software is provided as is, without representation or warranty
#  of any kind either express or implied, including without limitation
#  the implied warranties of merchantability, fitness for a particular
#  purpose, or noninfringement.  The Regents of the University of
#  Michigan shall not be liable for any damages, including special,
#  indirect, incidental, or consequential damages, with respect to any
#  claim arising out of or in connection with the use of the software,
#  even if it has been or is hereafter advised of the possibility of
#  such damages.

from .rules_helpers import rules_list, refine_rules_lists
from .rules import *

import libtorrent

import collections
import re

# Splits text on runs of one-or-more non-alphanumeric characters; used by
# tokenize() to break torrent/file names into words.
WORD_SPLIT_RE = re.compile(r'[^a-zA-Z0-9]+')


def tokenize(word):
  """Split *word* into alphanumeric runs.

  Returns an empty list when *word* is None; otherwise returns the pieces
  produced by splitting on runs of non-alphanumeric characters (which may
  include empty strings at the boundaries).
  """
  return [] if word is None else WORD_SPLIT_RE.split(word)


def get_extension(filename):
  """Return the text after the last '.' in *filename*, or '' if it has none."""
  if '.' not in filename:
    return ''
  # rpartition splits on the last '.'; index 2 is everything after it.
  return filename.rpartition('.')[2]


def classify(torrent):
  """Classify a bencoded torrent into a category.

  Decodes *torrent* (raw bencoded bytes), derives word tokens from the
  torrent name and contained file names plus the dominant file extension
  (by total byte size), then runs the rule chain from .rules_helpers.

  Returns the category string produced by the first matching rule (after
  refinement), or None when the torrent cannot be decoded, lacks an 'info'
  dict, or no rule matches.
  """
  td = libtorrent.bdecode(torrent)
  if td is None:
    return None
  try:
    infodict = td['info']
  except (KeyError, TypeError):
    # Decoded value was not a dict, or had no 'info' entry.
    return None
  name = infodict.get('name')
  files = infodict.get('files')

  if files is not None:
    # Multi-file torrent: the last path component is the file's basename.
    filenames = [fyl['path'][-1] for fyl in files]
  else:
    filenames = None

  if files:
    # Pick the extension accounting for the most bytes in the torrent.
    extensions_and_sizes = collections.defaultdict(int)
    for fyl in files:
      extensions_and_sizes[get_extension(fyl['path'][-1])] += fyl['length']
    # .items() (not Python-2-only .iteritems()) so this runs on 2 and 3.
    biggest_extension = max(extensions_and_sizes.items(),
                            key=lambda item: item[1])[0]
  elif name is not None:
    # Single-file torrent (or an empty 'files' list): fall back to the name.
    biggest_extension = get_extension(name)
  else:
    biggest_extension = None

  # 'name' may be absent; tokenize() tolerates None, .lower() would not.
  name_tokens = tokenize(name.lower() if name is not None else None)
  tokens = name_tokens[:]
  if filenames is not None:
    for filename in filenames:
      tokens.extend(tokenize(filename.lower()))

  if biggest_extension is not None:
    biggest_extension = biggest_extension.lower()

  for rule in rules_list:
    category = rule(name_tokens, tokens, biggest_extension)
    if category is not None:
      # Give the category's refinement rules a chance to override it.
      for refine_rule in refine_rules_lists[category]:
        replacement = refine_rule(name_tokens, tokens, biggest_extension)
        if replacement is not None:
          return replacement
      return category
  return None

