#!/usr/bin/env python

# pybtrss - Downloads bittorrent files from RSS feeds
#
# Copyright (C) 2006 Riku 'Shrike' Lindblad
# Copyright (C) 2007 Marko Koivusalo
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# 
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

import urllib
import os, os.path
import re
import sys
import logging

import socket
# global network timeout: torrent/feed fetches must not hang the whole run
socket.setdefaulttimeout(10) # time out in 10 seconds

# optparse was originally distributed separately as "Optik"; bail out with a
# readable message on ancient Pythons that lack it
try:
    from optparse import OptionParser, SUPPRESS_HELP
except ImportError:
    print "Please install Optik 1.4.1 (or higher) or update your Python"
    sys.exit(1)

# PyYAML parses both the configuration file and the session files
try:
    import yaml
except ImportError:
    print "Please install PyYAML from http://pyyaml.org/wiki/PyYAML or from your distro repository"
    sys.exit(1)

class Torrent:
    """Represents a single .torrent file fetched from a feed match."""

    def __init__(self, raw_torrent, match):
        """Accepts the torrent file contents as a string.

        raw_torrent -- bencoded torrent file data
        match       -- match dict from a source module; must contain at least
                       'title' and 'torrent' (the torrent url)

        Raises Exception if the data does not look like a torrent file and
        SyntaxError if the bencoded payload cannot be decoded.
        """
        # valid torrent files start with an announce block
        if not raw_torrent.startswith("d8:announce"):
            raise Exception("Invalid torrent: %s" % match['torrent'])

        self.valid = False

        # decoding validates that the payload is well-formed bencode;
        # the parsed data itself is currently unused (see disabled code below)
        data = self.decode(raw_torrent)

        # provide easier access to torrent file list
        self.files = []
        self.single_file = False
        
        # calculate md5 sum from pieces, this can be used as rudimentary duplicate indicator
        # this is NOT guaranteed to be same in two exact same files (due to different piece lengths in torrent)
        """
        import md5
        m = md5.new()
        m.update(data["info"]["pieces"])
        self.meta_sum = m.hexdigest()

        # single file torrent
        if data["info"].has_key("length"): 
            t = {}
            t["name"] = data["info"]["name"]
            t["size"] = data["info"]["length"]
            t["path"] = "/"
            self.single_file = True
            self.files.append(t)
        else:
            # multifile torrent
            for item in data["info"]["files"]:
                t = {}
                f = item["path"]
                if f.find("/") != -1:
                    f = f[f.rfind("/")+1:]
                t["name"] = f
                t["size"] = item["length"]
                self.files.append(t)
        """

        # grab the title (user readable string) and filename (the name we save as)
        self.title, self.filename = self.parse_title(match)

        # this is a valid torrent
        self.valid = True

    def __str__(self):
        return self.title
        
    def parse_title(self, match):
        """Figure out a title and filename for the torrent

        Returns a tuple of (title, filename)"""
        
        # in case of single file torrent, use the filename from inside the
        # torrent.  NOTE(review): single_file is always False while the file
        # inspection code in __init__ stays disabled, so the else branch is
        # the only one taken in practice.
        title = ""
        if self.single_file:
            title = self.files[0]["name"]
        else:
            # create a proper filename from crap we got from the feed
            # TODO: Add feed name here too?
            title = match['title']
            title = title.replace("/", "_")

        # neatify further
        title = title.replace(" ", "_")
        title = title.encode('iso8859-1', 'ignore') # Damn \u200b -character, how I loathe thee

        return (title, title+".torrent")
        
    def tokenize(self, text, match=re.compile("([idel])|(\d+):|(-?\d+)").match):
        """Generate bencode tokens from text.

        Yields single-character markers ("i", "l", "d", "e"), integer values
        as strings, and strings as a virtual "s" token followed by the value.
        """
        i = 0
        while i < len(text):
            m = match(text, i)
            s = m.group(m.lastindex)
            i = m.end()
            if m.lastindex == 2:
                # length-prefixed string: emit a virtual "s" token, then slice
                # out the value itself
                yield "s"
                yield text[i:i+int(s)]
                i = i + int(s)
            else:
                yield s

    def decode_item(self, next, token):
        """Recursively decode one bencoded item from the token stream.

        next  -- callable returning the next token
        token -- the already-consumed leading token of this item

        Raises ValueError on malformed input."""
        if token == "i":
            # integer: "i" value "e"
            data = int(next())
            if next() != "e":
                raise ValueError
        elif token == "s":
            # string: "s" value (virtual tokens)
            data = next()
        elif token == "l" or token == "d":
            # container: "l" (or "d") values "e"
            data = []
            tok = next()
            while tok != "e":
                data.append(self.decode_item(next, tok))
                tok = next()
            if token == "d":
                # dict items arrive as a flat key/value list; pair them up
                data = dict(zip(data[0::2], data[1::2]))
        else:
            raise ValueError
        return data

    def decode(self, text):
        """Decode a full bencoded document; raises SyntaxError on bad input."""
        try:
            src = self.tokenize(text)
            data = self.decode_item(src.next, src.next())
            for token in src: # look for more tokens
                raise SyntaxError("trailing junk")
        except (AttributeError, ValueError, StopIteration):
            raise SyntaxError("syntax error")
        return data

class PyBtDl:

    moduledir = os.path.join(sys.path[0], "modules/")

    def __init__(self, options):
        try:
            # hack, since stdout is limited to basic conf level ...
            lev = logging.INFO
            if options.debug:
                lev = logging.DEBUG
            logging.basicConfig(level=lev,
                                format='%(asctime)s %(levelname)-8s %(message)s',
                                filename=os.path.join(sys.path[0], 'pybtdl.log'),
                                filemode="a",
                                datefmt='%Y-%m-%d %H:%M:%S')
        except TypeError:
            # For pre-2.4 python
            logger = logging.getLogger() # root logger
            handler = logging.FileHandler(os.path.join(sys.path[0], 'pybtdl.log'))
            formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)

        if options.verbose or options.debug or options.test or options.learn or options.reset or options.try_pattern:
            # define a Handler which writes to the sys.stderr
            console = logging.StreamHandler()
            if options.verbose or options.test or options.learn:
                console.setLevel(logging.INFO)
            if options.debug:
                console.setLevel(logging.DEBUG)
            # set a format which is simpler for console use
            formatter = logging.Formatter('%(levelname)-8s %(message)s')
            # tell the handler to use this format
            console.setFormatter(formatter)
            # add the handler to the root logger
            logging.getLogger().addHandler(console)
            
        if options.test and options.learn:
            print "--test and --learn are mutually exclusive"
            sys.exit(1)

        self.options = options

        self.configname = "confignamenotset";
        self.config = {}
        self.session = {}
        self.modules = {}
        self.modules["sources"] = {}
        self.modules["triggers"] = {}
        self.modules["filters"] = {}

        self.load_modules()
        self.load_filters()
        self.load_triggers()
        self.load_config()

        # on try mode, disable downloading trough test mode
        if options.try_pattern:
            options.test = True
        
        # reset is essentially learn with session wipe
        if not options.reset:
            self.load_session()
        else:
            logging.info("Reseting session")
            options.learn = True

    def load_config(self):
        """Load the configuration file"""
        possible = [os.path.join(sys.path[0], self.options.config), self.options.config]
        for config in possible:
            if os.path.exists(config):
                self.config = yaml.safe_load(file(config))
                self.configname = os.path.basename(config)[:-4]
                return
        logging.warning("ERROR: No configuration file found!")
        sys.exit(0)

    def load_session(self):
        # sessions are config-specific
        sessionfile = os.path.join(sys.path[0], 'session-%s.yml' % self.configname)
        if os.path.exists(sessionfile):
            try:
                self.session = yaml.safe_load(file(sessionfile))
            except yaml.scanner.ScannerError:
                logging.error("Session file corrupted! Creating a new one.")
                self.session = {}
            except yaml.parser.ParserError:
                logging.error("Session file corrupted! Creating a new one.")
                self.session = {}
        else:
            self.session = {}

    def save_session(self):
        try:
            sessionfile = os.path.join(sys.path[0], 'session-%s.yml' % self.configname)
            f = file(sessionfile, 'w')
            yaml.dump(self.session, f)
            f.close()
        except:
            logging.error("Failed to save session data!")

    def load_modules(self):
        """Load all source modules"""
        for module in self.find_module(self.moduledir, "source"):
            logging.debug("Loading module %s" % module)
            ns = {}
            execfile(os.path.join(self.moduledir, module), ns, ns)
            # check that module has required configuration
            if not ns.has_key("__type__") or not ns.has_key("__instance__"):
                logging.error("Module %s is missing __type__ or __instance__ definition" % module)
                continue
            logging.debug("Creating instance %s for module %s" % (ns["__instance__"], ns["__type__"]))
            self.modules["sources"][ ns["__type__"] ] = ns[ ns["__instance__"] ]()

    def load_filters(self):
        """Load all filter modules"""
        for filter in self.find_module(self.moduledir, "filter"):
            logging.debug("Loading filter %s" % filter)
            ns = {}
            execfile(os.path.join(self.moduledir, filter), ns, ns)
            # check that module has required configuration
            if not ns.has_key("__type__") or not ns.has_key("__instance__"):
                logging.error("Filter %s is missing __type__ or __instance__ definition" % filter)
                continue
            logging.debug("Creating instance %s for filter %s" % (ns["__instance__"], ns["__type__"]))
            self.modules["filters"][ ns["__type__"] ] = ns[ ns["__instance__"] ]()

    def load_triggers(self):
        """Load all trigger modules"""
        for trigger in self.find_module(self.moduledir, "trigger"):
            logging.debug("Loading tigger %s" % trigger)
            ns = {}
            execfile(os.path.join(self.moduledir, trigger), ns, ns)
            self.modules["triggers"][trigger] = ns

    def find_module(self, directory, type):
        """Find all modules of certain type"""
        modules = [m for m in os.listdir(directory) if m.startswith(type+"_") and m.endswith(".py")]
        return modules


    def get_sources(self):
        return self.config.get('sources', [])
        
    def get_patterns(self, source):
        """Get patterns for a source"""
        
        # on try, return only our try_match
        if options.try_pattern:
            n = {}
            n['pattern'] = options.try_pattern;
            n['dl'] = "irrelevant"
            return [n]
        
        import types
        l = self.config['sources'][source].get('patterns', [])
        l += self.config.get('global_patterns', [])
        patterns = []
        for p in l:
            try:
                n = {}
                # dict type specified download location
                if type(p) == types.DictType:
                    key = p.keys()[0]
                    n['pattern'] = key
                    n['dl'] = p[key]['location']
                else:
                    n['pattern'] = p
                    n['dl'] = self.config['sources'][source]['dl']['location']
            except KeyError, e:
                logging.error("Source %s is missing required element %s" % (source, e))
                continue
            patterns.append(n)
        return patterns

    def download_torrent(self, match):
        """Download torrent from match

        Returns None or Exception on failure, a Torrent object on success
        """

        # match =
        # {'pattern': 'b\\d\\d\\d', 'dl': '~/tmp/', 'torrent': u'http://www.dattebayo.com/t/b124.torrent', 'title': u'b124'}
        
        # get torrent
        # urllib2 is too smart here, it borks on basic auth urls
        try:
            f = urllib.urlopen(match['torrent'])
            content = f.read()
            f.close()
        except IOError:
            raise Exception("IOError when loading torrent %s", match['torrent'])

        # create torrent object from torrent & match data
        torrent = Torrent(content, match)

        if not torrent.valid:
            raise Exception("Invalid torrent")

        # if we have same metasum in seen list, skip the download
#        if torrent.meta_sum in self.session['seen']:
#            logging.debug("Skipping %s because meta_sum has been seen before" % (match['torrent']))
#            return
        
#        self.session['seen'].append(torrent.meta_sum)

        # write torrent to a file
        destfile = os.path.join(os.path.expanduser(match['dl']), torrent.filename)
        try:
            if not os.path.exists(destfile):
                f = file(destfile, 'w')
                f.write(content)
                f.close()
            else:
                raise Exception("File '%s' already exists" % destfile)
        except IOError:
            raise Exception("IOError when saving torrent %s" % destfile)

        return torrent

    def get_session(self, module_type, name):
        """Initialize session for module_type with specified name"""
        self.session.setdefault(module_type, {})
        self.session[module_type].setdefault(name, {})
        return self.session[module_type][name]

    def run(self):
        sources = self.get_sources()

        try:

            # Go through all the torrent sources
            for src_name in sources:
                logging.debug("<===== Checking source %s" % src_name)
                src_config = self.config['sources'][src_name]
                patterns = self.get_patterns(src_name)
                src_type = src_config.get('type', 'type_missing')
                if not self.modules['sources'].has_key(src_type):
                    logging.error("Source %s has unknown type '%s'" % (src_name, src_type))
                    continue

                session = self.get_session('modules', src_type)

                # if source has interval specified
                if src_config.has_key('interval') and not self.options.learn:
                    session.setdefault(src_name, {})
                    session[src_name].setdefault('count', 0)
                    session[src_name]['count'] = session[src_name]['count'] + 1
                    # if count exceeded interval
                    if session[src_name]['count'] >= src_config['interval']:
                        session[src_name]['count'] = 0
                    else:
                        logging.debug("Source '%s' has not exceeded interval count (%i/%i) -> abort" % (src_name, session[src_name]['count'], src_config['interval']))
                        continue
                
                # get module by type and run it with patterns
                module_source = self.modules['sources'][src_type]
                try:
                    matches = module_source.run(src_name, patterns, src_config, session)
                    if matches == None:
                        continue
                except Exception, e:
                    logging.error("Module %s: %s" % (src_type, e))
                    if options.debug:
                        import traceback
                        print '-'*60
                        traceback.print_exc(file=sys.stdout)
                        print '-'*60                            
                    continue

                # remove global ignores from matches
                for match in matches[:]:
                    for ignore in self.config.get('global_ignores', []):
                        if re.search(ignore, match['title'], re.IGNORECASE|re.UNICODE) or re.search(ignore, match['torrent'], re.IGNORECASE|re.UNICODE):
                            logging.debug("Removing '%s' due global ignore '%s' from matches" % (match['title'], ignore))
                            matches.remove(match)

                #
                # run filters for source
                #
                
                filters = src_config.get('filters', [])
                # check for improper configuration (easy to use dict-form instead)
                import types
                if (type(filters) != types.ListType):
                    logging.error("Source %s filters are configured improperly, should be in list form." % (src_name))
                    filters = []
                # add seen filter for all sources, except when it's being disabled explicitly
                if self.options.noskip == False and src_config.get('disable_seen_filter', False) == False:
                    filters.insert(0, 'seen')
                    
                for filter_cfg in filters:
                    filter_name = None
                    filter_config = {}
                    if (type(filter_cfg) == types.DictType):
                        # check for invalid configuration
                        if len(filter_cfg.keys())>1:
                            logging.error("Source '%s' has improperly configured filter! Check yml syntax (parameters identation).", src_name)
                            continue
                        # first key filter name (weirdish structure)
                        filter_name = filter_cfg.keys()[0]
                        filter_config = filter_cfg[filter_name]
                    else:
                        filter_name = filter_cfg
                    if not self.modules['filters'].has_key(filter_name):
                        logging.error("Source '%s' has unknown filter type '%s'" % (src_name, filter_name))
                        continue
                    module_filter = self.modules['filters'][filter_name]
                    filter_session = self.get_session('filters', filter_name)
                    try:
                        module_filter.run(src_name, matches, filter_config, filter_session)
                    except Exception, e:
                        logging.error("Module %s: %s" % (src_type, e))
                        if options.debug:
                            import traceback
                            print '-'*60
                            traceback.print_exc(file=sys.stdout)
                            print '-'*60
                        continue

                fetched_torrents = []
                logging.debug("=====> Remaining matches %s" % (len(matches)))
                
                for match in matches:
                    if match.get('filtered', False):
                        continue

                    logging.info("%s matched %s" % (match['title'], match['pattern']))

                    # download match
                    if self.options.test == False and self.options.learn == False:
                        try:
                            result = self.download_torrent(match)
                            if result:
                                fetched_torrents.append(result.title)
                                logging.info("Downloaded torrent: '%s'" % urllib.unquote(match['torrent']))
                        except Exception, e:
                            logging.error("Error while downloading torrent: '%s'" % urllib.unquote(match['torrent']), e.args)
                            if options.debug:
                                import traceback
                                print '-'*60
                                traceback.print_exc(file=sys.stdout)
                                print '-'*60
                            continue

                    # verbose learned (happends actually in filter_seen)
                    if self.options.learn:
                        logging.info("Learned torrent '%s' (%s)" % (match['title'], src_name))

                # run triggers for new fetched torrents
                if fetched_torrents or True:
                    for trigger in self.config.get('triggers', {}):
                        logging.debug("should run trigger %s" % (str(trigger)))
                        #for trigger in self.modules["triggers"]:
                        #    self.modules["triggers"][trigger]['fetched_torrent'](src_name, fetched_torrents)
        finally:
            if not self.options.test:
                self.save_session()

    def moduleDoc(self, name):
	"""Prints module <name> documentation (all types)"""
        module_types = ['sources', 'filters']
        found = False
        for module_type in module_types:
            module = self.modules[module_type].get(name, None)
            if module!=None:
                found = True
                if module.__doc__ == None:
                    print "Module %s does not have documentation" % name
                else:
                    print module.__doc__
        if not found:
            print "Could not find module %s" % name

    def moduleList(self):
	"""Prints all modules"""
        module_types = ['sources', 'filters']
        print "Type:\t\tName:"
        print "-"*60
        for module_type in module_types:
            for module_name in self.modules[module_type]:
                print "%s\t\t%s" % (module_type, module_name)

if __name__ == "__main__":
    # Build the command line interface.  All boolean switches share the same
    # optparse keyword arguments, collected once here.
    flag = dict(action="store_true", default=0)

    parser = OptionParser()

    parser.add_option("--test", dest="test",
                      help="Do not download any torrent files. Verboses progress.", **flag)

    parser.add_option("--try", action="store", dest="try_pattern",
                      help="Test match and show what it would find from sources. Verboses progress.")

    parser.add_option("--learn", dest="learn",
                      help="Matches are not downloaded but will be skipped in the future. Verboses progress.", **flag)

    parser.add_option("--no-skip", dest="noskip",
                      help="Disable previously downloaded skipping (session).", **flag)

    parser.add_option("--reset-session", dest="reset",
                      help="Forgets everything that has been downloaded and learns current matches.", **flag)

    parser.add_option("--doc", action="store", dest="doc",
                      help="Display module / filter documentation (example: --doc seen). Name is given without type. See --list.")

    parser.add_option("--list", dest="list",
                      help="List all available modules.", **flag)

    parser.add_option("-c", action="store", dest="config", default="config.yml",
                      help="Specify configuration file. Default is config.yml")

    parser.add_option("-v", dest="verbose",
                      help="Verbose operation. Default is off.", **flag)

    parser.add_option("-d", dest="debug", help=SUPPRESS_HELP, **flag)

    options, args = parser.parse_args()

    # Dispatch: documentation display and module listing are informational
    # modes; everything else runs the normal download cycle.
    bt = PyBtDl(options)
    if options.doc:
        bt.moduleDoc(options.doc)
    elif options.list:
        bt.moduleList()
    else:
        bt.run()
