import time
import traceback
import re
import datetime
import logging
log = logging.getLogger("spider")
import appparser
import appdb

from google.appengine.api import urlfetch
from google.appengine.ext import db

class SpiderCommand(appdb.BatchCommand):
  """Base batch command: fetch one URL per entity, clean the HTML and
  feed it to a parser.  Subclasses supply getEntityName(), getParser()
  and saveContent()."""

  def entity(self, e):
    # Fetch the entity's URL and record how many items were saved.
    e.count = self.fetch(e)
  
  def getEntityKeyName(self, e):
    # Key name is derived from the entity kind name plus its URL.
    return appdb.GenURLKeyName(self.getEntityName(), e.url)
  
  def fetch(self, target):
    """Download target.url; on HTTP 200, strip noisy inline styles from
    the raw bytes, decode with the sniffed charset, de-trash and parse.
    Returns whatever saveContent() reports, or 0 on a non-200 status."""
    log.debug("start fetch: %s" %(target.url))
    result = urlfetch.fetch(target.url)
    log.debug("end fetch: status[%d]" %(result.status_code))
    if 200 == result.status_code:
      parser = self.getParser(target)
      # Strip font-family / mso-number-format style fragments BEFORE
      # decoding -- these run on the raw byte content.
      content = re.sub("font-family:[^;]+;", "", result.content)
      content = re.sub("mso-number-format:\"[^\"]+\";", "", content)
      content = self.checkTrash(content.decode(self.getCharset(result)))
      parser.feed(content)
      parser.close()
      return self.saveContent(target, parser)
    return 0
  
  def getCharset(self, result):
    """Best-effort charset sniffing: prefer the Content-Type header,
    fall back to a <meta ... charset=...> tag in the body, and default
    to gb2312 when neither matches."""
    charset = "gb2312"
    m = re.match(".* charset=(?P<charset>.*)", result.headers["Content-Type"], re.IGNORECASE)
    if m: charset = m.group("charset")
    else:
      m = re.match(".*<meta[^<^>^0-9]*charset=(?P<charset>[a-zA-Z0-9\-]*)\"?>",
                   result.content, re.DOTALL|re.IGNORECASE)
      if m: charset = m.group("charset")
    return charset
    
  def repl2(self, m):
    # For repl1's 3-char match (word char, quote, word char): keep the
    # first two chars and re-insert a space before the trailing one.
    return m.group("trash1")[0:2]+" "+m.group("trash1")[2]

  def repl1(self, m):
    # Inside a matched HTML tag, find quote characters glued between
    # two word characters and space them apart via repl2.
    return re.sub('(?P<trash1>\w[\'"]\w)', self.repl2, m.group())
    
  def checkTrash(self, content):
    """Apply the (pattern, replacement) cleanup list to the decoded
    page content and return the cleaned text."""
    rTrash = [("(<[^<^>]*>)", self.repl1),
              ("style=&{head};", ""),
              ]
    for r, repl in rTrash:
      content = re.sub(r, repl, content)
    return content
    
class SiteSpider(SpiderCommand):
  """Spider over ScanSite entities: collects every URL found on the
  page and queues each one as a ScanTable entity for later scanning."""

  def getEntityName(self):
    return "ScanSite"

  def getParser(self, target):
    return appparser.URLParser(target.url)

  def saveContent(self, target, parser):
    # Queue one ScanTable per discovered URL, keyed by the URL itself.
    for link in parser.urls:
      keyName = appdb.GenURLKeyName("Table", link)
      self.tobeAdd[keyName] = appdb.ScanTable(key_name = keyName, url = link)
    return len(parser.urls)

class TableSpider(SpiderCommand):
  """Spider over ScanTable entities: parses a data table page and
  queues each new record as a ScanRecord entity."""

  def getEntityName(self):
    return "ScanTable"
  
  def getParser(self, target):
    """Build a TableParser pre-seeded with the table's default date,
    unit and global tags.

    target.tags is a "+"-separated string; the "None" and empty
    placeholders are dropped from the resulting tag set."""
    parser = appparser.TableParser(target.url)
    parser.dates.date = (target.date, target.fmt)
    parser.units.unit = target.unit
    parser.tags.globalTags = set(unicode(target.tags).split("+"))
    parser.tags.globalTags.discard("None")
    parser.tags.globalTags.discard("")
    return parser
    
  def saveContent(self, target, parser):
    """Queue a ScanRecord for every parsed record that is not already
    stored or already queued in this batch; returns the number of
    records actually queued.  Also queues a RefreshTable marker when
    the table still has blank dates."""
    count = len(parser.records)
    for rec in parser.records:
      # rec layout: (value, unit, (date, date_fmt), tags).
      # Compute the key once -- it was previously recomputed per use.
      key = appdb.GenRecordKeyName(rec[2][0], rec[3])
      # "key in dict" replaces the deprecated dict.has_key() (removed
      # in Python 3, equivalent in Python 2).
      if appdb.GetRecord(rec[2][0], rec[3]) or key in self.tobeAdd:
        log.warn("The Record[%s, %s] already exists, ignored",
                 rec[2][0].strftime("%Y.%m.%d"), "+".join(rec[3]))
        count -= 1
        continue
      self.tobeAdd[key] = appdb.ScanRecord(key_name = key,
                                           value = float(rec[0]),
                                           unit = rec[1],
                                           date = rec[2][0],
                                           dfmt = rec[2][1],
                                           tags = "+".join(rec[3]),
                                           source = parser.source)
    parser.checkDateUnit()
    # Tables with blank dates need a later refresh pass; remember each
    # such table once via a RefreshTable marker entity.
    if parser.hasBlankDate() \
    and appdb.RefreshTable.get_by_key_name(appdb.GenURLKeyName("RT", parser.source)) is None:
      self.tobeAdd[appdb.GenURLKeyName("RT", parser.source)] = \
          appdb.RefreshTable(key_name = appdb.GenURLKeyName("RT", parser.source),
                             value = appdb.GenURLKeyName("Table", parser.source),
                             url = parser.source)
    return count
  
class RefreshTableSpider(TableSpider):
  """TableSpider variant that re-scans the tables listed in self.tables
  and removes the RefreshTable marker once a table no longer has blank
  dates."""

  def __init__(self, pagesize):
    TableSpider.__init__(self, pagesize)
    self.tables = []

  def entities(self):
    return self.tables

  def saveContent(self, target, parser):
    if not parser.hasBlankDate():
      # All dates resolved: schedule the RefreshTable marker for
      # deletion so this source is not refreshed again.
      markerKey = appdb.GenURLKeyName("RT", parser.source)
      self.tobeDel[markerKey] = \
          appdb.RefreshTable(key_name = markerKey,
                             value = appdb.GenURLKeyName("Table", parser.source),
                             url = parser.source)
    return TableSpider.saveContent(self, target, parser)

  def scanned(self, e):
    # Refresh runs keep no scanned-state bookkeeping.
    return
  
class LiteTableSpider(TableSpider):
  """Single-table debug spider: builds one ScanTable from HTTP request
  parameters and dumps the parsed records back as plain text instead of
  storing them."""

  def __init__(self, pagesize, reqHdlr):
    TableSpider.__init__(self, pagesize)
    self.tables = []
    self.reqHdlr = reqHdlr

  def entities(self):
    """Build the one test table from the request's url / date / dfmt /
    unit / tags parameters; absent parameters keep the model defaults."""
    table = appdb.ScanTable(key_name = "Test_Table",
                            url = self.reqHdlr.request.get("url"))
    if len(self.reqHdlr.request.get("date")):
      table.date = datetime.datetime.strptime(self.reqHdlr.request.get("date"), "%Y%m%d%H%M%S")
    if len(self.reqHdlr.request.get("dfmt")):
      # BUG FIX: "dfmt" was assigned to table.date, clobbering the
      # datetime parsed above; getParser() reads the format from
      # target.fmt, so store it there.
      table.fmt = self.reqHdlr.request.get("dfmt")
    if len(self.reqHdlr.request.get("unit")):
      table.unit = self.reqHdlr.request.get("unit")
    if len(self.reqHdlr.request.get("tags")):
      # NOTE(review): getParser() reads target.tags (a "+"-joined
      # string), not globalTags -- confirm this attribute is consumed.
      table.globalTags = self.reqHdlr.request.get("tags").split("+")
    return [table]

  def saveContent(self, target, parser):
    """Write the parsed records to the HTTP response as comma-separated
    plain text; returns the record count."""
    self.reqHdlr.response.headers["Content-Type"] = "text/plain"
    self.reqHdlr.response.out.write("BlankDate: %s\n" %(parser.hasBlankDate()))
    for rec in parser.records:
      self.reqHdlr.response.out.write("%s,%s,%s,%s,%s\n" %(rec[2][0].strftime("%Y-%m-%d"),
                                                      rec[2][1],
                                                      str(rec[0]),
                                                      rec[1],
                                                      "+".join(rec[3])))
    return len(parser.records)
  
class CheckTableSpider(SpiderCommand):
  """Dry-run spider over "Table" entities: parses each table without
  persisting anything, while installing a hook that records tables
  whose global date got updated."""

  def getEntityName(self):
    return "Table"

  def getParser(self, target):
    """Build a TableParser seeded like TableSpider's, plus install the
    global-date-update hook."""
    p = appparser.TableParser(target.url)
    p.dates.date = (target.date, target.fmt)
    p.units.unit = target.unit
    p.tags.globalTags = set(unicode(target.tags).split("+"))
    # Drop the "None"/empty placeholders from the "+"-split tag string.
    p.tags.globalTags.discard("None")
    p.tags.globalTags.discard("")
    # Route global-date updates into the TableTBF bookkeeping hook.
    appparser.HookUpdateGlobalDate = CheckTableSpider_HookUpdateGlobalDate
    return p

  def saveContent(self, target, parser):
    # Check-only run: report the record count, persist nothing.
    return len(parser.records)

  def scanned(self, e):
    return
  
def CheckTableSpider_HookUpdateGlobalDate(parser):
  """Hook called when a table's global date changes: tag the source
  table in TableTBF with the "UpdateGlobalDate" reason and persist it."""
  keyName = appdb.GenURLKeyName("CT", parser.source)
  table = appdb.TableTBF.get_by_key_name(keyName)
  if table is None:
    table = appdb.TableTBF(key_name = keyName, url = parser.source)
  # Record the reason at most once per table.
  if "UpdateGlobalDate" not in table.reason:
    table.reason.append("UpdateGlobalDate")
  appdb.DBBatch(db.put, [table])
  