#!/usr/bin/python
# -*- coding: utf-8 -*-

import urllib
import urllib2
import ClientCookie
from parsers import *
from dbEngine import *
from wykops import *

# Verbose tracing of every fetched URL and DB decision (1 = on).
DEBUG = 1
# URL suffixes appended to a find's link to reach its sub-pages.
POWIAZANE = '/powiazane'   # related links
WYKOPALI = '/wykopali'     # users who dug the find up
ZAKOPALI = '/zakopali'     # users who buried the find
# NOTE(review): credentials hard-coded in source and now public —
# rotate this account and load them from config/environment instead.
USER = 'spambot'
PASSWD = 'qk26pff74b'

class WykopBot:
	def __init__(self, dbEngine, user, passwd):
		self.dbEngine = dbEngine
		self.headers = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-GB; rv:1.8.1.12) Gecko/20080201 Firefox/2.0.0.12',
	    	'Accept': 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
	    	'Accept-Language': 'en-gb,en;q=0.5',
	    	'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
	    	'Connection': 'keep-alive'
	    	}
		site = urllib2.Request('http://www.wykop.pl/zaloguj','username=' + user + '&password=' + passwd, self.headers)
		#req = urllib2.Request(url, data, headers)
		response = ClientCookie.urlopen(site)
		##można sprawdzić czy się zalogował
		#f = open('f.html','w')
		#f.write(self.__getUrl('http://www.wykop.pl/ludzie/postaldude'))
		#f.close()
		## usuwasz # w  powyższym kodzie i sprawdzasz co jest w pliku 
		#(jak są dane użytkownika to działa)

	def updateFind(self, linkStr, home):
		wParser = WykopHTMLParser()
		wParser.feed(self.__getUrl(linkStr))
		self.find = wParser.getWykop()
		self.find.setLink(linkStr)
		if home:
                        pub = datetime.now()
                        self.find.setPublished(pub)
	
		self.updateUser(self.find.getUser())
		
		if DEBUG: print " Znalezisko", linkStr
		
		if not self.dbEngine.containsFind(linkStr):
			if DEBUG: print 'dodano'
			self.dbEngine.addFind(self.find, linkStr)
		else:
                        if not seld.dbEngine.isPublished(linkStr):
                                self.dbEngine.updateFind(self.find)
		
		self.updateComments(linkStr)
		self.updateLinks(linkStr + POWIAZANE)
		self.updateWykopali(linkStr + WYKOPALI)
		self.updateZakopali(linkStr + ZAKOPALI)
		del(wParser)

	def updateComments(self, linkCommentStr):
		cParser = CommentHTMLParser()
		cParser.feed(self.__getUrl(linkCommentStr))
		commentList = cParser.getCommList()
		for comment in commentList:
			self.updateUser(comment.user)
			if not self.dbEngine.containsComment(self.find.getLink(), comment):
				self.dbEngine.addComment(self.find.getLink(), comment)
		del(cParser)

	def updateUser(self, linkUserStr):
		uParser = UserHTMLParser()
		if not self.dbEngine.containsUser(linkUserStr):
			uParser.feed(self.__getUrl(linkUserStr))
			print 'dodajemy użytkownika!!!!!!!!! ' , linkUserStr
			self.dbEngine.addUser(uParser.getUser(),linkUserStr)
		else: print 'Przeszło kiedykolwiek', linkUserStr
		del(uParser)
		
	def updateLinks(self, linkPowiazaneStr):
		lParser = ConnectedLinksHTMLParser()
		lParser.feed(self.__getUrl(linkPowiazaneStr))
		for link in lParser.getLinkList():
                        if link.getAutor():
                                self.updateUser(link.getAutor())
                        if not self.dbEngine.containsLink(self.find.getLink(),link):
        			self.dbEngine.addLink(self.find.getLink(), link)
		del(lParser)

	def updateWykopali(self, linkWykopaliStr):
		wyParser = WykopaliHTMLParser()
		wyParser.feed(self.__getUrl(linkWykopaliStr))
		for wykopali in wyParser.getWykopaliList():
                        self.updateUser(wykopali.getUser())
                        raParser = RankHTMLParser()
                        raParser.feed(urllib.urlopen(wykopali.getUser()).read())
                        rank = raParser.getRank()
                        if rank != -1:
                                wykopali.setUserRank(rank)
			if not self.dbEngine.containsWykopali(self.find.getLink(), wykopali):
				self.dbEngine.addWykopali(self.find.getLink(), wykopali)
		del(wyParser)

	def updateZakopali(self,linkZakopaliStr):
                zaParser = ZakopaliHTMLParser()
		zaParser.feed(self.__getUrl(linkZakopaliStr))
		for zakopali in zaParser.getZakopaliList():
                        zakopali.toString()
                        self.updateUser(zakopali.getUser())
                        raParser = RankHTMLParser()
                        raParser.feed(urllib.urlopen(zakopali.getUser()).read())
                        rank = raParser.getRank()
                        if rank != -1:
                                zakopali.setUserRank(rank)
			if not self.dbEngine.containsZakopali(self.find.getLink(), zakopali):
				self.dbEngine.addZakopali(self.find.getLink(), zakopali)
		del(zaParser)

	def __getUrl(self, urlStr):
		if DEBUG: print urlStr
		return ClientCookie.urlopen(urllib2.Request(urlStr, '', self.headers)).read()


