﻿import HTMLParser
import database
from components import tools
import logging
import re

# Configure root logging for the whole plugin; DEBUG so the page-by-page
# crawl progress below is visible.
logging.basicConfig(level=logging.DEBUG)

logging.info("Loading nyaatorrents crawler")

def get(min_nid):
    """Return an iterable of history rows with nid greater than *min_nid*.

    If *min_nid* is 0 (or otherwise falsy) every stored row is returned.

    NOTE(review): the original query was truncated ("... WHERE") and the
    function returned nothing; completed here against the 'nid' column,
    matching the "id_key" declared in ``options`` below — confirm the
    intended comparison direction against callers.
    """
    with database.SQLiteCursor("nyaa") as cur:
        if min_nid:
            # Parameterized query: never interpolate values into SQL text.
            cur.execute("SELECT * FROM history WHERE nid > ?;", (min_nid,))
        else:
            cur.execute("SELECT * FROM history;")
        return cur.fetchall()
# Interface description consumed by the plugin loader: the entry point for
# fetching stored rows plus the column names used for torrent ids ("nid")
# and anime ids ("aid").
options = {
    "function": get,
    "id_key": "nid",
    "aid_key": "aid",
}
# Maps the parser's internal data keys onto history-table column names,
# so parsed rows can be stored without renaming fields by hand.
tags = {
    "url": "url",
    "category": "category",
    "tlistname": "name",
    "size": "size",
    "nid": "nid",
}

# Schema declaration for the sqlite3 "nyaa" database.  Each column entry
# carries pragma-style metadata (type / pk / notnull / default) plus the
# literal "def" fragment used in the CREATE TABLE statement.
TABLES = {
    "history": {
        "id": {
            "type": "integer",
            "pk": 1,
            "def": "integer primary key autoincrement",
        },
        "name": {
            "type": "text",
            "notnull": 1,
            "def": "text not null collate nocase",
        },
        "aid": {
            "type": "integer",
            "dflt_value": "0",
            "pk": 0,
            "notnull": 0,
            "def": "integer default 0",
        },
        "url": {
            "type": "text",
            "notnull": 1,
            "def": "text not null collate nocase",
        },
        "nid": {
            "type": "integer",
            "pk": 0,
            "notnull": 1,
            "def": "integer not null unique",
        },
    },
}
logging.info("Loading nyaatorrents database")
# Import-time side effect: create/verify the 'nyaa' sqlite database
# against the schema declared above.
database.initialize_database("nyaa", TABLES)

def crawling_generator():
    """Yield (nid, url, name) tuples for torrents not yet in the database.

    Two modes, chosen from the current table contents:
      * 'fill'   — fewer than 600 rows stored: scrape until ~600 collected.
      * 'update' — otherwise: scrape until the highest stored nid is seen.
    Pages are fetched newest-first, so reaching target_nid means everything
    newer has been yielded.
    """
    with database.SQLiteCursor("nyaa") as cur:
        cur.execute("SELECT count(nid) AS count, max(nid) AS last FROM history;")
        row = cur.fetchone()
        if row['count'] < 600:
            logging.debug("Method: filling")
            method = 'fill'
            target_nid = 0
        else:
            logging.debug("Method: updating")
            method = 'update'
            target_nid = row['last']
        counter = 0
        url = 'http://www.nyaa.eu/?page=torrents&cats=1_37&offset='
        page = 1
        logging.info("Target NID: " + str(target_nid))
        while True:
            logging.debug("Page retrieving: " + str(page))
            content = tools.urlopen(url + str(page))
            page += 1
            parser = Parser()
            parser.feed(content.read())
            parser.close()
            inner_counter = 0
            if len(parser.result) == 0:
                # Empty page: ran past the last listing page.
                break
            # Note: named 'entry' (not 'row') so the DB row above isn't
            # shadowed by the parser results.
            for entry in parser.result:
                nid = entry.get('nid')
                if nid is None:
                    # The parser failed to extract a torrent id for this row.
                    # Skip it: comparing None against target_nid would
                    # (None < int in py2) falsely end the crawl here.
                    logging.debug("Skipping row without nid: " + repr(entry))
                    continue
                if nid <= target_nid:
                    logging.info("Reached target")
                    inner_counter = 0
                    break
                inner_counter += 1
                yield (nid, entry['url'], entry['name'])
            if inner_counter == 0:
                # Either the target was reached or nothing new on this page.
                break
            counter += inner_counter
            logging.debug("Retrieved " + str(counter) + " so far")
            if (counter >= 600) and (method == 'fill'):
                logging.info("Reached target")
                break
            
def crawl():
    """Run one crawl pass and persist every newly found torrent."""
    logging.info("Starting crawler")
    # The generator is lazy; rows are fetched page by page as executemany
    # consumes them.
    rows = crawling_generator()
    with database.SQLiteCursor("nyaa") as cursor:
        cursor.executemany(
            "INSERT INTO history (nid, url, name) VALUES (?, ?, ?);", rows)
    logging.info("Ending crawler")
class Parser(HTMLParser.HTMLParser):
    """Extracts torrent rows from a nyaa listing page.

    Walks the <table class="tlist"> element; for each <tr class="tlistrow">
    it collects the torrent url, the numeric id (nid, taken from the href's
    tid= query parameter), the display name and the size, appending one
    dict per finished row to ``self.result``.
    """

    # Pulls the numeric tid out of hrefs like "...&tid=12345".
    _tid_regex = re.compile(r"(?<=&)tid=(?P<nid>[0-9]+)(&|$)")

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        # State flags: inside the tlist table / a listing row / the name cell.
        self.tlist = False
        self.tlistrow = False
        self.tlistname = False
        # Fields collected for the current row.
        self.data = {}
        # Which field the next text node belongs to (key into ``tags``).
        self.data_key = None

        self.result = []

    def handle_starttag(self, tag, attrs):
        attrs = self._resolve_attrs(attrs)
        _class = attrs.get("class", None)
        # BUGFIX: default to an empty list when the tag has no class
        # attribute; previously _class stayed None and the "in" tests
        # below raised TypeError on any class-less <table>/<tr>/<td>.
        _class = _class.split(" ") if _class else []
        if (tag == "table") and ("tlist" in _class):
            self.tlist = True
        elif (self.tlist):
            if (tag == "tr") and ("tlistrow" in _class):
                self.data = {}
                self.tlistrow = True
            elif (self.tlistrow):
                if (tag == "td") and ("tlistname" in _class):
                    self.tlistname = True
                    self.data_key = "tlistname"
                elif (tag == "a") and (self.tlistname):
                    # NOTE(review): assumes the <a> carries an href — a
                    # bare anchor would make the regex search raise.
                    self.data["url"] = attrs.get("href")
                    nid = self._tid_regex.search(self.data['url'])
                    if (not nid):
                        logging.debug("Something went horrible wrong: " + self.data['url'])
                    else:
                        try:
                            nid = int(nid.group("nid"))
                        except (TypeError):
                            nid = None
                        self.data['nid'] = nid
                elif (tag == "td") and ("tlistsize" in _class):
                    self.data_key = "size"

    def handle_endtag(self, tag):
        if (tag == "table") and (self.tlist):
            self.tlist = False
        elif (tag == "tr") and (self.tlistrow):
            # Row finished: keep whatever fields were collected.
            self.result.append(self.data)
            self.tlistrow = False
        elif (tag == "td") and (self.tlistname):
            self.tlistname = False

    def handle_data(self, data):
        # Store the text node under the column name mapped by ``tags``.
        if (self.data_key):
            if (self.data_key == "size"):
                # e.g. "123 MiB" -> ("123", "MiB")
                data = tuple(data.split(" "))
            else:
                data = data.decode("utf-8", "replace")
            self.data[tags[self.data_key]] = data
            self.data_key = None

    def _resolve_attrs(self, attrs):
        # Turn HTMLParser's [(name, value), ...] attr list into a dict.
        d = {}
        for x, y in attrs:
            d[x] = y
        return d
    
    
# Module import side effects (schema initialization above) completed.
logging.info("Completed nyaatorrents loading")