#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Monitors a source for anyything new. The source can be a number of things, usually it would be a RSS/Atom feed, located either one the web or created locally. When something new is found it will store all the contents in the db and run applicable processors on it
"""
#import external modules
import warnings; warnings.filterwarnings('ignore')
import threading, datetime, time, sys, os, imp
from sqlalchemy import *
from sqlalchemy.exceptions import * #Import sqlalchemy exceptions
#import helios modules
import feedparser, logger

###Make sure we're in the right directory
os.chdir("/home/joel/Documents/Projects/helios/heliosreader")

##Initialise settings DB: local sqlite file holding key/value configuration rows
settingsDB = create_engine('sqlite:///settings.db')
settingsDB.echo = False
settingsMetadata = MetaData(settingsDB)
settings = Table('settings', settingsMetadata, autoload=True)
# Which backend the main helios DB lives on ("sqlite3" or "mysql")
dbType = settingsDB.execute(settings.select(settings.c.settingName == "databaseType")).fetchone()

##Initialise helios DB
if dbType.settingValue == "sqlite3":
    db = create_engine('sqlite:///data/opencalais.db')
    db.echo = False
    metadata = MetaData(db)
elif dbType.settingValue == "mysql":
    # Pull MySQL connection credentials from the settings table
    username = settingsDB.execute(settings.select(settings.c.settingName == "mysqlUsername")).fetchone().settingValue
    password = settingsDB.execute(settings.select(settings.c.settingName == "mysqlPassword")).fetchone().settingValue
    host = settingsDB.execute(settings.select(settings.c.settingName == "mysqlHost")).fetchone().settingValue
    name = settingsDB.execute(settings.select(settings.c.settingName == "mysqlName")).fetchone().settingValue
    db = create_engine('mysql://' + username + ':' + password + '@' + host + '/' + name)
    db.echo = False
    metadata = MetaData(db)
else:
    # Fail fast with a clear message instead of an opaque NameError on 'metadata' below
    raise RuntimeError("Unsupported databaseType setting: " + str(dbType.settingValue))
sources = Table('sources', metadata, autoload=True)
items = Table('items', metadata, autoload=True)

class ItemPreprocessor():
    """
    Run all preprocessors (plugins that prepare the itemBody for processing,
    usually by fetching the full article content).
    Searches the Plugins/Preprocessors directory for plugins; the first
    applicable plugin wins.
    """
    def __init__(self):
        # Collect candidate plugin filenames, skipping compiled/backup files
        self.modules = []
        for mod in os.listdir('./Plugins/Preprocessors'):
            if ".pyc" not in mod and ".py~" not in mod:
                self.modules.append(mod)

    def findRelevantModule(self, entryGuid, entryLink):
        """
        Try each preprocessor plugin against entryLink.
        Returns True if a plugin scraped content and the DB was updated,
        False if a plugin matched but scraping or the DB update failed,
        None if no plugin was applicable.
        """
        for plugin in self.modules:
            preprocessor = imp.load_source(plugin.replace('.py', ''), './Plugins/Preprocessors/' + plugin)  ##Load plugin
            # Plugin contract (per the branches below): False = not applicable,
            # None = applicable but scrape failed, otherwise the scraped body text
            newItemBody = preprocessor.isApplicable(entryLink)
            if newItemBody == False:  ##Not applicable so move on
                continue
            elif newItemBody == None:  ##Applicable, but failed to scrape content
                logger.logError("ItemPreprocessor: " + preprocessor.name() + " failed to scrape content. Link: " + entryLink)
                return False
            else:  ##Applicable, and returned content: store it against the item row
                result = db.execute(select([items], items.c.itemGuid==entryGuid))
                item = result.fetchone()
                result.close()
                try:
                    db.execute(items.update().where(items.c.itemID==item['itemID']).values(itemBody=newItemBody.encode('utf-8')))
                    return True
                except Exception:  # narrowed from bare except: DB update failed, record in error.log
                    logger.logError("ItemPreprocessor: Database update failure using " + preprocessor.name() + " : " + entryLink)
                    return False
        ##Nothing relevant found
        return None

class ItemProcessor():
    """
    Processes all new articles found by the monitor thread.
    Loads each plugin from Plugins/Processors and records whatever
    entities/concepts/facts they return as linked-data rows keyed to the item.
    """
    def __init__(self):
        # Collect candidate plugin filenames, skipping compiled/backup/db files
        self.modules = []
        for mod in os.listdir('./Plugins/Processors'):
            if ".pyc" not in mod and ".py~" not in mod and ".db" not in mod:
                self.modules.append(mod)

    def loadModules(self, entryGuid):
        """Fetch the item identified by entryGuid and run every processor plugin on its body."""
        try:
            result = db.execute(select([items], items.c.itemGuid==entryGuid))
            item = result.fetchone()
            result.close()
        except Exception:  # narrowed from a silent bare except: log and skip this item
            logger.logError("ItemProcessor: Database read failure. Guid: " + entryGuid)
            return

        ## Load each plugin, one by one
        for plugin in self.modules:
            processor = imp.load_source(plugin.replace('.py', ''), './Plugins/Processors/' + plugin)  ##Load plugin
            module = processor.Processor()  #Initialise module
            # processRecord returns a (success, results-dict) pair per the branches below
            moduleResult = module.processRecord(item['itemBody'])

            if moduleResult[0] == False:  #Module reported failure: log it
                logger.logError("ItemProcessor: " + moduleResult[1] + " : " + item.itemGuid)
            elif moduleResult[1] != {}:  #Module returned some entities/concepts/facts
                ##Begin linking the found data to the item in linkedData.db
                linkedDataDB = create_engine('sqlite:///data/linkedData.db')
                linkedDataDB.echo = False
                linkedDataMetadata = MetaData(linkedDataDB)

                for tableName in moduleResult[1].keys():  #one link table per result type
                    table = "items2" + module.databasePrefix + tableName  #form the table name
                    table2update = Table(table, linkedDataMetadata, autoload=True)
                    for data2update in moduleResult[1][tableName]:
                        ##Build a query testing whether this exact link already exists
                        query = table2update.select()
                        query = query.where(table2update.c.items_itemID == item['itemID'])
                        for dataKey in data2update.keys():  #AND a WHERE clause per key
                            query = query.where(table2update.c[module.databasePrefix + dataKey] == data2update[dataKey])

                        result = linkedDataDB.execute(query)  #Check for existence
                        if result.fetchone() == None:  ##Not present yet: insert the link row
                            prefixedValues = {'items_itemID': item['itemID']}
                            for dataKey in data2update.keys():  #apply the module's column prefix
                                prefixedValues[module.databasePrefix + dataKey] = data2update[dataKey]
                            table2update.insert().execute(prefixedValues)

	    
class fetchSource():

    def run(self, source):
	self.source = source
	if self.source['sourceLastChecked'] != '': #Check that a lastChecked value is set
	    #Check the fetch interval, only fetch feed if it's passed
	    #dueToCheck = datetime.datetime.now() > (self.source['sourceLastChecked'] + datetime.timedelta(seconds=self.source['sourceUpdateInterval']))
	    dueToCheck = datetime.datetime.now() > (self.source['sourceLastChecked'] + datetime.timedelta(seconds=900))
	    if dueToCheck == True: self.fetch() #Fetch the feed
	else: self.fetch #Fetch the feed
    
    def fetch(self):
        parsedSource = feedparser.parse(self.source['sourceURL'], self.source['sourceEtag'])#,self.source['sourceLastModified'])
	
	##status checking doesn't work
	#if parsedSource.has_key('status') == False:
	parsedSource.status = 200
        if str(parsedSource.status) == "200": #See if there's anything new
	    print "Checking: " + self.source['sourceTitle']
            try: x = parsedSource.modified #Check to see if a last-modified value has been recieved
            except: parsedSource.modified = None
            
            #update etag / last-modified / last time checked in db
	    db.execute(sources.update().where(sources.c.sourceID==self.source.sourceID).values(sourceEtag=self.source['sourceEtag'],sourceLastModified=self.source['sourceLastModified'],sourceLastChecked=datetime.datetime.now())) 
            
            for entry in parsedSource.entries:
		if entry.has_key('guid') == False:
		    if entry.has_key('link') == True:
			entry.guid = entry.link + ":" + entry.title
		    else: break
			##Should have another backup for the guid....
		
		#See if an entry exists in DB
		try: 
		    result = db.execute(items.select(items.c.itemGuid == entry.guid))
		    dbFailure = False
		except OperationalError, e:
		    dbFailure = True
		    if 'Could not decode to UTF-8' in e.message:
			logger.logError("UTF-8 Error when retreiving item. Guid: " + entry.guid)
		    else:
			logger.logError("Uknown Error when retreiving item. Guid: " + entry.guid)
		    
		if dbFailure == True: pass
		elif result.fetchone() == None:
                    #if not, begin preparing to add it
                    entry.sourceID = self.source['sourceID']

		    ##Check for existence, and either replace contents, or ignore item
		    itemOK = True
		    if entry.has_key('title') == False:
			entry.title = u"((Title UNKOWN))"
		    if entry.title == "":
			entry.title = u"((Title UNKOWN))"
		    if entry.has_key('author') == False:
			entry.author = u"No Author"
		    if entry.has_key('link') == False:
			itemOK = False #Item has no link, so is classed as useless
		    if entry.has_key('date_parsed') == False:
			entry.date_parsed = datetime.datetime.now() ## If there is no date attribute, use the current datetime
		    else:
			try: entry.date_parsed = datetime.datetime(entry.date_parsed[0], entry.date_parsed[1], entry.date_parsed[2], entry.date_parsed[3], entry.date_parsed[4], entry.date_parsed[5])
			except: entry.date_parsed = datetime.datetime.now()
			
		    ###TODO: This needs to be reworked to ensure that we always store the main article body
		    if entry.has_key('content') == True:
			entry.summary = entry.content[0].value
		    elif entry.has_key('summary') == False:
			itemOK = False #Item has no body, so is classed as useless
			
		    if itemOK == True:
			##Item has all the nec. attributes, so add item to database
			try: 
			    i = items.insert()
			    i.execute(itemGuid=entry.guid, itemTitle=entry.title, itemBody=entry.summary, itemAuthor=entry.author, itemLink=entry.link, itemDatePublished=entry.date_parsed, itemDateParsed=datetime.datetime.now(), sources_sourceID=self.source['sourceID'], itemProcessFlag=0)
			    print "Added Link: " + entry.title
			    itemAdded = True
			except: 
			    itemAdded = False
			    logger.logError("Failed to add item. GUID: " + entry.guid)
			
			##If Item is successfully added, run the processing modules
			if itemAdded == True:
			    ##Run PreProcessors
			    x = ItemPreprocessor()
			    x.findRelevantModule(entry.guid,entry.link)
			    ##Run Processors
			    x = ItemProcessor()
			    x.loadModules(entry.guid)
			    ##Run PostProcessors
			    #x = None
			

class main(object):
    """Top-level monitor loop: repeatedly walks every source row and fetches each one."""
    def __init__(self):
        #Loop forever; each pass re-reads the sources table so new sources are picked up
        while True:
            result = db.execute(sources.select())
            allSources = result.fetchall()
            for currentSource in allSources:
                fetcher = fetchSource()
                fetcher.run(currentSource)
                time.sleep(1)  #brief pause between sources
            result.close()
            #time.sleep(300)
x = main()