# -*- coding: utf8 -*-
"""
Function attributes:
* load: boolean. If False the function will not load. Optional.
* name: string. Optional.
* thread: boolean. If True, Function will work by starting a thread. Optional.
* rule: string or regexp object. Required.
* aliases: list of strings or regexp objects. Optional.
"""
#
# functions to parse incoming messages from rc (recent changes bot user).
#
import re, time
from util.functions import *
from core.ircfuncs import quiet_nick
from core.optimizers import fmt, File
from util.wm_funcs import *
from util.pywikilib import Site, Page, User

"""
bot.ctrl_msg		=	bot.main_msg(evt, "##bots-ca", msg)
bot.vnd_msg			=	bot.main_msg(evt, "#wikipedia-ca-vandalisme", msg)
bot.patrol_msg      =	bot.main_msg(evt, "#wikipedia-ca-patrol", msg)
bot.newpages_msg	=	bot.main_msg(evt, "#wikipedia-ca", msg)
"""

# Primary project this bot instance watches (Portuguese Wikipedia).
_main = {"code": "pt", "gent": u"português"}

# Module-level side effect: each project's site info is loaded at import
# time.  Site()/load_siteinfo() presumably contact the wiki — TODO confirm.
print "[%s] loading data from:" % time.strftime("%H:%M:%S")
projects = (_main,)
for proj in projects:
	proj['chan'] = "#%s.wikipedia" % proj['code']  # rc feed channel, e.g. "#pt.wikipedia"
	s = Site(proj['code'])
	print "\t%s" % s.sitename()
	proj['site'] = s.load_siteinfo()

def join_social_channels(bot, evt, cmd):
	"""When the bot itself joins its default channel, also join every
	project's rc channel and announce the list on the control channel."""
	if evt.source != bot.nickname or evt.msg != bot.defchan:
		return
	chans = ",".join([p['chan'] for p in projects])
	bot.join(chans)
	bot.ctrl_msg(evt, "wmBot joined: %s" % chans)
join_social_channels.rule = "join"
join_social_channels.evttypes = ("join",)

def get_site(string):
	"""Map an rc channel name (e.g. "#pt.wikipedia"), usually sent by the
	irc.wikimedia bot, to the Site object of the related project.

	Preloaded projects are served from the module-level cache; any other
	language code is loaded on demand.
	"""
	code = string.lstrip("#").split(".")[0]
	cached = [p['site'] for p in projects if p['code'] == code]
	if cached:
		return cached[0]
	return Site(code).load_siteinfo()

def check_changes(page, oldid, diff):
	"""Compare two revisions of *page* and flag suspicious changes.

	Returns a (wrong, missing) tuple:
	* wrong   -- offending strings added by the new revision (runs of
	             upper-case shouting, blacklisted expressions).
	* missing -- whitelisted elements present in the old main-namespace
	             revision but gone from the new one.
	"""
	wrong=[]
	missing=[]

	# Wikitext of the new and old revision respectively.
	newv, oldv = page.revisions(oldid, diff)

	"""
	added = removed = ''
	for word in difflib.ndiff(oldv.split(), newv.split()):
		if word.startswith('+'):
			added += word[2:] + ' '
		elif word.startswith('-'):
			removed += word[2:] + ' '
		added = added.strip()
		removed = removed.strip()
	print "noves",added.encode("ascii","replace")
	print "tretes",removed.encode("ascii","replace")
	print
	"""

	# Detect text written entirely in capitals (three or more consecutive
	# upper-case words) that was not already present in the old revision.
	# Skipped for pages containing a table comment or bibliography heading,
	# which legitimately hold capitalised runs.  NOTE(review): the `or`
	# makes the guard true unless BOTH markers appear — confirm intended.
	if not re.search(ur'(?:[A-ZÑÇÁÀÉÈÍÏÓÒÚÙÜ]+ ){3,}', oldv, re.U) and \
	(u"<!-- comença la taula" not in newv.lower() or "==bibliografia==" not in newv.lower()):
		m  = re.search(ur'([A-ZÑÇÁÀÉÈÍÏÓÒÚÙÜ]+ ){3,}', newv, re.U)
		if m:
			wrong.append(m.group(1))

	# Lower-case both texts so the remaining searches are case-insensitive.
	oldv=oldv.lower()
	newv=newv.lower()

	# Flag blacklisted expressions appearing in the new text that were not
	# already in the old one (blacklist is defined elsewhere in this file).
	for regexp in blacklist:
		if not re.search(regexp, oldv): #, re.U):
			m = re.search(ur"(%s)" % regexp, newv) #, re.U)
			if m:
				word = re.sub(ur"\n"," ",m.group(1))
				wrong.append(word)

	# Un-flag false positives of the consonant-run rule
	# (ur"[bcçdefghjklmnñpqrstvwxz]{5,}"): long consonant runs inside URLs
	# and hex colour codes from HTML markup.  group(2) is the inner capture
	# of each pattern after the ur"(%s)" wrapping.
	regexps = [ur"http://.*?([bcdefghjklmnpqrstvwxz]{5,})", ur"#([abcdef]{6})"]
	for regexp in regexps:
		m = re.search(ur"(%s)" % regexp, newv)
		if m and m.group(2) in wrong:
			#print u"retirada: %s"%m.group(2).encode("ascii","replace")
			wrong.remove(m.group(2))

	# Greetings ("hola") are acceptable on discussion pages
	# (odd-numbered namespaces).
	if "hola" in wrong and page.namespace() % 2 != 0:
	   wrong.remove("hola")

	# On non-blanked main-namespace pages, report whitelisted elements that
	# this edit removed (whitelist is defined elsewhere in this file).
	if page.namespace() == 0 and newv:
		for tpl in whitelist:
			element1  = re.search(ur"(%s)" % tpl, oldv)
			element2  = re.search(ur"(%s)" % tpl, newv)
			plant1=plant2=""
			if element1:
				plant1 = element1.group(1)
			if element2:
				plant2 = element2.group(1)

			if not plant2 and plant1:
				missing.append(plant1)

	return (wrong, missing)

# Watchlist of page titles to announce, persisted in a per-wiki log file
# (exact name depends on abredged_sitename() — e.g. "watch-<site>.log").
_watchlist = File("watch-%s.log" % _main['site'].abredged_sitename())
watchlist = _watchlist.get()
def social_edit(bot, evt, cmd):
	#Edició
	evt.lang = _main['code']
	chan = "##wikipedia-pt-bots"
	flags = cmd.dict['flags']
	if "B" in flags:
		return True
	if bot._debug[4]: print u"[%s] <%r> [[%r]] o:%s d:%s out:" % (time.strftime("%H:%M:%S"), cmd.dict['user'], cmd.dict['page'], cmd.dict['oldid'], cmd.dict['diff'])

	site = get_site(evt.target)
	user = User(site, cmd.dict['user'])
	page = Page(site, cmd.dict['page'])
	summary = cmd.dict['summary']
	diffsize = cmd.dict['diffsize']
	diff = cmd.dict['diff']
	oldid = cmd.dict['oldid']
	title = wikiquote(page.title())
	section = "#%s" % re.search("/\* *(.+) *?\*/", summary).group(1) if re.search("/\* *(.+) *\*/", summary) else ""
	#bot.main_msg(evt, chan, fmt("&t;[[$a$b]]&N;", a=page.title(), b=section.strip()))

	output=""

	diffL = "http://%s/?diff=%s" % (site.sitename(), diff)
	botlogs = bot.bots(("master", 0)).logs
	if page.canonical_ns() == "project" and page.title() in watchlist:
		users = bot.bots(("master", 0)).channels[chan].users()
		bot.main_msg(evt,
			chan,
			"&+e;$user&N; has edited &t;[[$page$section]]&N; $diff",
			user = quiet_nick(user.nick(), users),
			page = page.title(),
			diff = diffL,
			section = section.strip()
		)
social_edit.rule = RE_IRC_RC['edit']
social_edit.fullmsg = True
#social_edit.thread = True
social_edit.evttypes = ("pubmsg",)
social_edit.onchan = _main['chan']
