# -*- coding: utf-8 -*-
#    Copyright 2005 Spike^ekipS <spikeekips@gmail.com>
#
#       This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import time, pickle, datetime, logging
from twisted.web import xmlrpc
from twisted.internet import reactor

from model.channel import Channel as model_channel
from model.status import Status as model_status
from model.channel_entry_enclosures \
	import ChannelEntryEnclosures as model_channel_entry_enclosures
from model import channel_entry as model_channel_entry

import database, indexing
from util import datetime__
from rss import feed
from channel_entry import get_latest_posts

# module-level logger; handlers and level are configured by the application.
logger = logging.getLogger("zzim.channel")

import config

def addChannelByPageURL (href) :
	"""Register the channel found at the given page URL.

	Returns the existing channel id when the URL is already registered,
	otherwise the Deferred from Channel.add (fires with the new id, or
	False on failure).
	"""
	# BUG FIX: Channel.__init__ accepts keyword arguments only, so the
	# original positional call `Channel(href)` always raised TypeError.
	__channel = Channel(href=href)
	return __channel.add()

def addChannelByRSSURL (href) :
	"""Register the channel for the given RSS feed URL.

	Returns the existing channel id when the URL is already registered,
	otherwise the Deferred from Channel.add (fires with the new id, or
	False on failure).
	"""
	# BUG FIX: Channel.__init__ accepts keyword arguments only, so the
	# original positional call `Channel(href)` always raised TypeError.
	__channel = Channel(href=href)
	return __channel.add()

def is_exists (href) :
	"""Return the id of the channel registered under `href`, or False."""
	__rows = list(model_channel.select(model_channel.q.href == href))
	if __rows :
		return __rows[0].id

	return False

def get_uid_channel_entry (obj_entry) :
	"""Return the indexing uid of an entry: "<channelID>.<entryID>"."""
	__channel_id = obj_entry.channelID
	__entry_id = obj_entry.id
	return "%s.%s" % (__channel_id, __entry_id, )

def get_uid_channel (obj_channel) :
	"""Return the indexing uid of the channel itself: "<id>.0"."""
	__channel_id = obj_channel.id
	return "%s.0" % (__channel_id, )

class Channel :
	"""One feed channel: lookup, registration, refresh and indexing.

	An instance is bound to a `model_channel` row either by `id` or by
	`href`; `self.channel` stays None while no matching row exists.
	"""

	channel = None	# bound model_channel row, or None when unresolved
	href = None	# feed URL the instance was created from, if any
	id = None	# channel id the instance was created from, if any

	def __init__ (self, **kwargs) :
		"""Resolve the channel row from an `id` and/or `href` keyword."""
		if "id" in kwargs :
			self.id = kwargs["id"]
			self.getByID(kwargs["id"])

		if "href" in kwargs :
			self.href = kwargs["href"]
			self.getByHref(kwargs["href"])

	def getByID (self, id) :
		"""Bind the channel row by primary key; None when missing."""
		try :
			self.channel = model_channel.get(id)
		except Exception :
			self.channel = None

	def getByHref (self, href) :
		"""Bind the channel row by feed URL; None when missing."""
		try :
			self.channel = list( \
				model_channel.select(model_channel.q.href == href) \
			)[0]
		except Exception :
			self.channel = None

	##################################################
	# add channel
	def add (self) :
		"""Register self.href as a new channel.

		Returns the existing channel id when the URL is already known,
		otherwise a Deferred firing with the new id (or False).
		"""
		__existing = is_exists(self.href)
		if __existing :
			return __existing

		__feed = feed.Feed(str(self.href))
		__defer_parsed = __feed.get()
		__defer_parsed.addCallback(self.__cb_addChannel)
		__defer_parsed.addErrback(self.__eb_addChannel)

		return __defer_parsed

	def __cb_addChannel (self, retval) :
		"""Deferred callback: store the parsed feed; new id or False."""
		if not retval :
			return False

		try :
			self.__updateChannel(retval)
		except Exception :
			logger.exception("failed to update channel, '%s'", self.href)
			return False

		try :
			self.__updateEntry(retval)
		except Exception :
			logger.exception("failed to update entries, '%s'", self.href)
			return False

		return self.channel.id

	def __eb_addChannel (self, f) :
		"""Deferred errback: log the failure and report False."""
		logger.error("failed to add channel: %s", f)
		return False

	##################################################
	# update channel
	def update (self) :
		"""Re-fetch the bound channel's feed.

		Returns False when no channel is bound, otherwise a Deferred
		firing with True/False.
		"""
		if not self.channel :
			return False

		__feed = feed.Feed(str(self.channel.href))
		__defer_parsed = __feed.get()
		__defer_parsed.addCallback(self.__cb_updateChannel)
		__defer_parsed.addErrback(self.__eb_updateChannel)

		return __defer_parsed

	def __cb_updateChannel (self, retval) :
		"""Deferred callback: apply the parsed feed; True on success."""
		if not retval :
			return False

		if not self.__updateChannel(retval) :
			return False

		if not self.__updateEntry(retval) :
			return False

		return True

	def __eb_updateChannel (self, f) :
		"""Deferred errback: log the failure and report False."""
		logger.error("failed to update channel: %s", f)
		return False

	##################################################
	# common API
	def __updateChannel (self, feed_parsed) :
		"""Create the channel row, or fill its still-empty fields.

		The parser puts the descriptive fields under `feed`, while
		`version`, `encoding` and `namespaces` sit on the result itself
		(that is where this method reads them from).
		"""
		__feed_info = feed_parsed["feed"]

		if self.channel :
			# fill only the fields that are still empty.
			if not self.channel.title and "title" in __feed_info :
				self.channel.title = __feed_info["title"]

			if not self.channel.language and "language" in __feed_info :
				self.channel.language = __feed_info["language"]

			if not self.channel.link and "link" in __feed_info :
				self.channel.link = __feed_info["link"]

			if not self.channel.subtitle and "subtitle" in __feed_info :
				self.channel.subtitle = __feed_info["subtitle"]

			# BUG FIX: the values are read from the top-level result, but
			# the original guarded them with `feed_parsed["feed"].has_key`,
			# so version/encoding were effectively never filled.
			if not self.channel.version and "version" in feed_parsed :
				self.channel.version = feed_parsed["version"]

			if not self.channel.encoding and "encoding" in feed_parsed :
				self.channel.encoding = feed_parsed["encoding"]

			if not self.channel.namespace and "namespaces" in feed_parsed :
				try :
					self.channel.namespace = list(feed_parsed["namespaces"].keys())[0]
				except IndexError :
					pass
		else :
			# first sight of this feed: create the row.
			__kwargs = { \
				"href" : self.href, \
				"language" : __feed_info.get("language", ""), \
				"link" : __feed_info.get("link", ""), \
				"subtitle" : __feed_info.get("subtitle", ""), \
				"title" : __feed_info.get("title", ""), \
				"version" : feed_parsed.get("version", ""), \
				"encoding" : feed_parsed.get("encoding", "utf-8"), \
				"namespace" : "", \
			}

			if "namespaces" in feed_parsed :
				try :
					__kwargs["namespace"] = list(feed_parsed["namespaces"].keys())[0]
				except IndexError :
					pass

			self.channel = model_channel(**__kwargs)

			# queue indexing off the current call stack.
			# BUG FIX: __indexing expects a *list* of (uid, document)
			# pairs; the original passed the bare tuple, which made the
			# loop iterate over the uid string's characters.
			__data_for_indexing = \
				( \
					get_uid_channel(self.channel), \
					indexing.serialize("channel", self.channel, True) \
				)
			reactor.callLater(0.1, self.__indexing, [__data_for_indexing, ])

		# record the fetch status of this parse.
		self.__updateStatus(feed_parsed.status)

		return True

	def __updateStatus (self, status_code) :
		"""Insert or refresh the Status row for the bound channel."""
		if not self.channel :
			return False

		__current_datetime = datetime__.now()
		try :
			__status = model_status.get(self.channel.id)
		except Exception :
			# no Status row yet: insert one.
			model_status( \
				id=self.channel.id, \
				channelID=self.channel.id, \
				status=str(status_code), \
				timeAdded=__current_datetime, \
				timeUpdated=__current_datetime, \
			)
		else :
			__status.status = str(status_code)
			__status.timeUpdated = __current_datetime

		return True

	def __updateEntry (self, feed_parsed) :
		"""Store the feed's new entries and queue them for indexing.

		Returns True on success (including "nothing new"), False on
		failure.
		"""
		if not self.channel :
			return False

		if len(feed_parsed.entries) < 1 :
			return True

		# if the per-channel entry table does not exist, create it.
		__model_entry = model_channel_entry.get_entry(self.channel.id)

		try :
			__model_entry.createTable( \
				ifNotExists=True, \
				connection=database.get_connection(config.database["entry"]) \
			)
		except Exception :
			logger.exception( \
				"failed to create entry table, channel %s", self.channel.id)
			return False

		# compare against the latest stored entry to find what is new.
		__latest = list( \
			__model_entry.select( \
				orderBy=__model_entry.q.id, reversed=True \
			).limit(1) \
		)
		if not __latest : # empty table: every entry is new
			__new_entries = feed_parsed.entries
		else :
			__new_entries = get_latest_posts(__latest[0], feed_parsed.entries)

		if len(__new_entries) < 1 :
			return True

		# insert oldest-first so database ids follow publication order
		# (replaces the original dict/sort/reverse detour).
		__new_entries = list(reversed(__new_entries))

		__list_entry_for_indexing = list()
		for __parsed in __new_entries :
			__kwargs = {"channelID" : self.channel.id, }

			for __key in ("author", "link", "publisher", "subtitle", \
					"title", "summary", ) :
				if __key in __parsed :
					__kwargs[__key] = __parsed[__key]

			if "links" in __parsed :
				__kwargs["links"] = pickle.dumps(__parsed["links"])

			# full content, when present, replaces the plain summary.
			if "content" in __parsed and len(__parsed["content"]) > 0 and \
					"value" in __parsed["content"][0] :
				__kwargs["summary"] = __parsed["content"][0]["value"]

			# the last present key of id/gid/uid wins (original order).
			for __key in ("id", "gid", "uid", ) :
				if __key in __parsed :
					__kwargs["uid"] = __parsed[__key]

			if "updated_parsed" in __parsed and \
					type(__parsed["updated_parsed"]) is time.struct_time :
				__kwargs["timeUpdated"] = \
					datetime__.get_datetime(__parsed["updated_parsed"])
			else :
				__kwargs["timeUpdated"] = datetime__.now()

			__entry = __model_entry(**__kwargs)

			self.__addEnclosures(__entry, __parsed)

			logger.debug("added entry, (%s, %s)", self.channel.id, __entry.id)

			__list_entry_for_indexing.append( \
				( \
					get_uid_channel_entry(__entry), \
					indexing.serialize("channel_entry", __entry, True) \
				) \
			)

		reactor.callLater(0.1, self.__indexing, __list_entry_for_indexing)

		# BUG FIX: the original fell off the end returning None, so the
		# caller reported every successful refresh as a failure.
		return True

	def __addEnclosures (self, obj_entry, entry_parsed) :
		"""Save the enclosures of one parsed entry for the given row."""
		if "enclosures" not in entry_parsed or \
				type(entry_parsed["enclosures"]) not in (tuple, list, ) :
			return

		for __enclosure in entry_parsed["enclosures"] :
			# an enclosure without an href is useless: skip it.
			if not __enclosure.get("href") :
				continue

			__kwargs = { \
				"channelID" : self.channel.id, \
				"entryID" : obj_entry.id, \
				"href" : __enclosure["href"], \
			}

			if "type" in __enclosure :
				__kwargs["type"] = __enclosure["type"]
			if "length" in __enclosure :
				try :
					__kwargs["length"] = int(__enclosure["length"])
				except (TypeError, ValueError, ) :
					pass

			# save.
			model_channel_entry_enclosures(**__kwargs)

	def __indexing (self, list_entry) :
		"""Send (uid, document) pairs to the local indexing XML-RPC server."""
		# one proxy serves the whole batch.
		__server = xmlrpc.Proxy("http://localhost:%d/RPC2" % config.nokcene_port)

		for (__uid, __document, ) in list_entry :
			try :
				__d = __server.callRemote( \
					"indexDocument", __uid, __document, True)
				__d.addCallback(self.__cb_indexing)
				__d.addErrback(self.__eb_indexing)
			except Exception :
				logger.exception("failed to index, '%s'", __uid)
				continue

		# (the original ended with an unreachable `return __d`.)
		return

	def __cb_indexing (self, retval) :
		"""Deferred callback for one indexed document."""
		logger.debug("indexed.")
		return True

	def __eb_indexing (self, f) :
		"""Deferred errback: log the indexing failure."""
		logger.error("indexing failed: %s", f)
		return False


"""
Description
-----------

ChangeLog
---------


Usage
-----


"""

__author__ =  "Spike^ekipS <spikeekips@gmail.com>"
__version__=  "0.1"
__nonsense__ = ""

__file__ = "Channels.py"

