#!coding: utf-8
from httpget import get
from urlparse import urljoin
import urllib
import json
import pickle
import settings

def to_unicode_or_bust(obj, encoding='utf-8'):
	""" Decode obj to unicode if it is an encoded string; pass everything else through unchanged. """
	if isinstance(obj, basestring) and not isinstance(obj, unicode):
		return unicode(obj, encoding)
	return obj
toU = to_unicode_or_bust

def Uquote(s):
	""" URL-quote a (possibly unicode) string, encoding it as UTF-8 first. """
	encoded = to_unicode_or_bust(s).encode('utf-8')
	return urllib.quote(encoded)

def Ucmp(a,b):
	""" Return True when the two strings are equal after URL-quoting both (normalises encoding differences). """
	qa = Uquote(a)
	qb = Uquote(b)
	return qa == qb


class QueriesCache():
	""" Disk-backed query cache persisted with pickle.

	Loaded from settings.cache_file_name on creation; written back when
	the object is destroyed.
	"""
	def __init__(self):
		# A missing, empty or truncated cache file just means "empty cache".
		try:
			f = open(settings.cache_file_name,"rb")
			try:
				self.queries = pickle.load(f)
			finally:
				f.close()
		except (IOError, EOFError):
			# BUG FIX: 'except IOError, EOFError:' only caught IOError and
			# bound it to the name EOFError; a tuple catches both types.
			self.queries = {}
	def __del__(self):
		# NOTE(review): saving from __del__ is fragile — module globals such
		# as 'settings' may already be torn down at interpreter shutdown.
		# Opened in binary mode: pickle data is not text.
		f = open(settings.cache_file_name,"wb")
		pickle.dump(self.queries,f)
		f.close()

	def get(self,idx):
		""" Return the cached value stored under idx, or None when absent. """
		return self.queries.get(idx)
	def put(self,idx, obj):
		""" Store obj in the cache under key idx. """
		self.queries[idx]=obj
# Module-level cache instance, created only when caching is enabled in settings.
if settings.cached:
	qc = QueriesCache()
# Running count of HTTP queries issued through get_adapter().
qcounter = 0

def get_adapter(domain,query):
	global qcounter
	qcounter +=1 
	if not settings.silent: print "Query number", qcounter
	if settings.cached:
		res = qc.get((domain,query))
		if res:
			if not settings.silent: print "Cached!:",query
			return res
	res = get(urljoin(domain,query))
	if settings.cached:
		qc.put((domain,query),res)
	return toU(res)


def get_json(domain,query, cached = True):
	""" Fetch query from the wiki at domain and return the parsed JSON object.
	When cached is False the request bypasses the query cache and counter. """
	if cached:
		raw = get_adapter(domain, query)
	else:
		raw = get(urljoin(domain, query))
	return json.loads(raw)

def page_source(domain, name):
	""" Return the raw wikitext of page "name" from the wiki at domain. """
	quoted = Uquote(name)
	return get_adapter(domain, '/w/index.php?title=' + quoted + '&action=raw')


def page_categories(domain,name):
	""" Return the list of category titles of page "name" on the wiki at domain,
	following API continuation until all categories are collected. """
	def _harvest(res, catlist):
		# Append all category titles found in one API result to catlist.
		for page in res["query"]["pages"].values():
			for cat in page.get("categories", []):
				catlist.append(cat["title"])
	name = Uquote(name)
	base = '/w/api.php?titles='+name+'&action=query&prop=categories&format=json'
	res = get_json(domain, base)
	catlist = []
	_harvest(res, catlist)
	while "query-continue" in res:
		# Continuation parameters come back ready to be urlencoded as-is.
		res = get_json(domain, base+'&'+urllib.urlencode(res["query-continue"]["categories"]))
		_harvest(res, catlist)
	return catlist


def all_pages(domain,only_articles = False, **kvargs):
	""" Yield the titles of all pages of the wiki on domain.
	If start="page name" is given, begin enumeration from that page.
	If only_articles is True, restrict to the main (article) namespace. """
	if only_articles:
		# BUG FIX: list=allpages parameters are ap-prefixed (cf. apfrom/aplimit
		# below); the previous plain 'namespace=0' was ignored by the API.
		oa="&apnamespace=0"
	else:
		oa=""
	cntfrom = ""
	if "start" in kvargs:
		cntfrom = "&apfrom="+Uquote(kvargs['start'])
	res=get_json(domain,'/w/api.php?action=query&list=allpages'+oa+'&aplimit=200&format=json'+cntfrom)
	for page in res["query"]["allpages"]:
		yield page['title']
	while "query-continue" in res:
		cntfrom = res['query-continue']['allpages']['apfrom']
		res=get_json(domain,'/w/api.php?action=query&list=allpages'+oa+'&aplimit=200&format=json&apfrom='+Uquote(cntfrom))
		for page in res["query"]["allpages"]:
			yield page['title']

def random_pages(domain,count,only_articles = False):
	""" Return a list of titles of "count" random pages; optionally only articles
	(main namespace). Always queried live, never served from the cache. """
	if only_articles:
		ns = "&rnnamespace=0"
	else:
		ns = ""
	query = "w/api.php?action=query&list=random&rnlimit=%d" %count + ns + "&format=json"
	js = get_json(domain, query, False)
	return [entry['title'] for entry in js["query"]["random"]]

def follow_redirect(domain,name):
	""" If page "name" is a redirect, return the title it redirects to;
	otherwise return the original name unchanged. """
	quoted = Uquote(name)
	res=get_json(domain,'/w/api.php?action=query&titles='+quoted+'&redirects&format=json')
	for redirect in res['query'].get('redirects', []):
		if Ucmp(redirect['from'], name):
			return redirect['to']
	return name

def category_pages(domain,name):
	""" Return the list of titles of the members of category "name",
	following API continuation until the category is exhausted. """
	name = Uquote(name)
	res=get_json(domain,'/w/api.php?cmtitle='+name+'&action=query&list=categorymembers&format=json&cmlimit=100')
	titles = [member['title'] for member in res["query"]["categorymembers"]]
	while "query-continue" in res:
		cont = res['query-continue']['categorymembers']['cmcontinue']
		res=get_json(domain,'/w/api.php?cmtitle='+name+'&cmcontinue='+Uquote(cont)+'&action=query&list=categorymembers&format=json&cmlimit=100')
		titles.extend(member['title'] for member in res["query"]["categorymembers"])
	return titles


def user_touched_pages(domain,name):
	""" Return a dict mapping page title -> number of edits user "name" made to it,
	following API continuation through the user's contributions. """
	def _tally(res, touches):
		# Fold one API result's contributions into the per-title counters.
		for contrib in res["query"]["usercontribs"]:
			title = contrib["title"]
			touches[title] = touches.get(title, 0) + 1
	name = Uquote(name)
	res = get_json(domain,'/w/api.php?action=query&uclimit=300&list=usercontribs&ucuser='+name+'&format=json')
	touches = {}
	_tally(res, touches)
	while "query-continue" in res:
		cntfrom = res['query-continue']['usercontribs']['ucstart']
		res = get_json(domain,'/w/api.php?action=query&uclimit=300&list=usercontribs&ucuser='+name+'&format=json&ucstart='+cntfrom)
		_tally(res, touches)
	return touches

def page_authors(domain,name):
	""" Return a dict mapping editor name -> number of revisions of page "name",
	following API continuation through the revision history.
	Revisions without a 'user' key (e.g. hidden authors) are skipped. """
	def _count_users(res, authors):
		# Fold one API result's revisions into the per-author counters.
		for rev in res["query"]["pages"].values()[0]['revisions']:
			if 'user' not in rev:
				continue
			usr = rev['user']
			authors[usr] = authors.get(usr, 0) + 1
	name=Uquote(name)
	res=get_json(domain,'/w/api.php?action=query&prop=revisions&titles='+name+'&rvprop=user&rvlimit=100&format=json')
	authors = {}
	_count_users(res, authors)
	while 'query-continue' in res:
		cntfrom= str(res['query-continue']['revisions']['rvstartid'])
		res=get_json(domain,'/w/api.php?action=query&prop=revisions&titles='+name+'&rvprop=user&rvlimit=100&format=json&rvstartid='+cntfrom)
		_count_users(res, authors)
	return authors

def user_friends(domain,name):
	""" Return list of users which had edited same pages as "name" """
	my_interests=user_touched_pages(domain,name).keys()
	friends = {}
	my_pages_count = len(my_interests)
	count = 0
	for page in my_interests:
		count+=1
		print "In page %d/%d :" %(count,my_pages_count),page
		authors = page_authors(domain,page).keys()
		for user in authors:
			if friends.has_key(user):
				friends[user]+=1
			else:
				friends[user]=1
	return friends

def page_info(domain, name):
	""" Return the API 'info' record (dict) for page "name" on the wiki at domain. """
	quoted = Uquote(name)
	res = get_json(domain,'/w/api.php?action=query&prop=info&titles='+quoted+'&format=json')
	pages = res['query']['pages']
	return pages.values()[0]

class NameSpaces():
	""" Numeric ids of MediaWiki namespaces. """
	page = 0       # main/article namespace
	category = 14  # category namespace
namespaces = NameSpaces()

#To check whether a page is a category, test: page_info(dom, name)['ns'] == namespaces.category


def category_tree(domain,name,level=0):
	if page_info(domain,name)['ns'] != namespaces.category:
		return None
	cats = category_pages(domain,name)
	res = {}
	for i in cats:
		if not settings.silent: print "\t"*level,i
		res[i]=category_tree(domain,i,level+1)
	return res

def print_tree(tree,level=0):
	if tree == None:
		return
	for k,v in tree.iteritems():
		node = '[[:'+k+']]'
		if level <2:
			print '='*(level+1),node,'='*(level+1)
		else:
			print '*'*(level-1),node
		print_tree(v,level+1)
visited = set()

def print_DOT(tree,parent=None):
	if parent in visited:
		return
	visited.add(parent)
	for k,v in tree.iteritems():
		#if parent!=None:
		if v!=None:
			print '"'+parent+'"','->','"'+k+'";'
			print_DOT(v,k)

def page_iwiki(domain,name, langs=['uk','en','ru']):
	""" Return the interlanguage links of page "name" as a dict
	{lang: title}, restricted to the languages listed in "langs".
	Returns {} when the page or its langlinks are absent. """
	quoted = Uquote(name)
	data = get_json(domain,'/w/api.php?action=query&titles='+quoted+'&prop=langlinks&format=json&lllimit=300')
	query = data['query']
	if 'pages' not in query:
		return {}
	page = query['pages'].values()[0]
	if 'langlinks' not in page:
		return {}
	return dict((link['lang'], link['*'])
	            for link in page['langlinks'] if link['lang'] in langs)

def translation_by_iwiki(entry,from_lang,to_lang):
	""" Translate article title "entry" from one Wikipedia language edition to
	another via its interwiki links; returns None when no link exists. """
	wiki = 'http://'+from_lang+'.wikipedia.org/'
	entry = follow_redirect(wiki, entry)
	links = page_iwiki(wiki, entry, [to_lang])
	return links.get(to_lang)

if __name__=="__main__":
	if not settings.silent:
		print "Something kinda like unit testing... """
	cat_tree= category_tree("http://uk.wikibooks.org/","Категорія:Головна")
	print_DOT(cat_tree,"Категорія:Головна")
	#for i in all_pages("http://uk.wikiquote.org/",True):
	#for i in random_pages("http://uk.wikipedia.org/",10,True):
	#	print i
