# -*- coding:UTF-8 -*-
'''
Copyright 2009-2010 http://code.google.com/p/mygaepy/. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following
    disclaimer in the documentation and/or other materials provided
    with the distribution.
  * Neither the name of http://code.google.com/p/mygaepy/ nor the names of its
    contributors may be used to endorse or promote products derived
    from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''

from google.appengine.api import memcache
from google.appengine.runtime import apiproxy_errors
from google.appengine.ext import db
import logging
import random
import time
from models import *
import pickle
import calendar
import heapq
import settings

def getBlogById(id):
	blog = None
	try:
		blog = memcache.get("blog_by_%d"%id)
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	if blog is not None:
		return blog
	query = Blog.gql("WHERE bid=:1 ORDER BY pubday DESC",id)
	blogs = query.fetch(1)
	if blogs:
		blog = blogs[0]
		try:
			r=memcache.set("blog_by_%d"%id, blog)
			if not r:
				memcache.flush_all()
		except apiproxy_errors.OverQuotaError, message:
			logging.error(message)
		return blog
	else:
		return None

def getBlogs(page = 0, limit = settings.BLOGPERPAGE):
    """Return roughly the *page*-th page of blogs, *limit* per page.

    Pagination is approximate: it assumes bids are dense and walks
    down from the current maximum blog id.
    """
    upper_bound = getMaxBlogId() - page * limit
    gql = "WHERE bid<=:1 ORDER BY bid DESC LIMIT %d" % (limit)
    return Blog.gql(gql, upper_bound).fetch(limit)

def getNextBlogId():
	"""Reserve and return the next blog id from the SysInfo counter.

	NOTE(review): the read-increment-write is not transactional, so
	two concurrent callers could receive the same id -- confirm this
	is acceptable for the deployment.
	"""
	info = getSysInfo()
	reserved = info.nextbid
	info.nextbid = reserved + 1
	setSysInfo(info)
	return reserved

def getMaxBlogId():
	"""Return the largest bid in the datastore, or 0 when no blogs exist."""
	newest = Blog.gql("ORDER BY bid DESC").fetch(1)
	if not newest:
		return 0
	return newest[0].bid

def getBlogsByTag(tag):
	"""Return the list of bloginfo objects filed under *tag*.

	getTag() yields (bid, bloginfo) pairs; only the bloginfo half of
	each pair is returned.
	"""
	return [pair[1] for pair in getTag(tag)]

def getSysInfo():
	sysinfo=None
	try:
		sysinfo=memcache.get("SysInfo")
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	if sysinfo is not None:
		return sysinfo
	query = SysInfo.gql("")
	info=query.fetch(1)
	if info:
		sysinfo=info[0]
		try:
			r=memcache.set("SysInfo",sysinfo)
			if not r:
				logging.error("Memcache set sysinfo failed.")
				memcache.flush_all()
		except apiproxy_errors.OverQuotaError, message:
			logging.error(message)
	else:
		#initial the sysinfo
		sysinfo=SysInfo()
		sysinfo.nextbid=1
		sysinfo.visited=0
		sysinfo.blognum=0
		sysinfo.put()
	return sysinfo

def setSysInfo(sysinfo):
	sysinfo.put()
	try:
		r=memcache.set("SysInfo",sysinfo)
		if not r:
			logging.error("Memcache set sysinfo failed.")
	except apiproxy_errors.OverQuotaError, message:
			logging.error(message)

def setTag(tag,bid,bloginfo):
    """Record (bid, bloginfo) under *tag* in the pickled TagInfo map.

    Appends to the first TagInfo row already holding the tag; when no
    row holds it, the tag is added to the last row scanned, or a
    brand-new TagInfo when none exist.  Always returns True.
    """
    query = TagInfo.gql("")
    blogids=[]
    for t in query:
        tags=pickle.loads(t.map)
        if tag in tags:
            tags[tag].append((bid,bloginfo))
            try:
                t.map=pickle.dumps(tags)
                t.put()
                return True
            except Exception, message:
                # put() failed (presumably the row grew too large --
                # TODO confirm); evict this tag's list from the row so
                # a smaller row can be stored.
                logging.error(message)
                blogids=tags.pop(tag)
                blogids.append((bid,bloginfo))
                t.map=pickle.dumps(tags)
                t.put()
    # NOTE(review): when no TagInfo rows exist, `t` is unbound here and
    # the bare except below relies on the resulting NameError to create
    # the first TagInfo entity.  Also note `blogids` collected above is
    # never re-stored -- an eviction followed by this fallback loses
    # the tag's previous entries.
    try:
        tags=pickle.loads(t.map)
        tags[tag]=[(bid,bloginfo)]
        t.map=pickle.dumps(tags)
        t.put()
    except:
        info=TagInfo()
        tags={}
        tags[tag]=[(bid,bloginfo)]
        info.map=pickle.dumps(tags)
        info.put()
    return True

def getTag(tag):
    """Return the list of (bid, bloginfo) pairs stored under *tag*,
    or [] when no TagInfo row contains the tag."""
    for row in TagInfo.gql(""):
        mapping = pickle.loads(row.map)
        if tag in mapping:
            return mapping[tag]
    return []

def deleteTag(tag, bid):
	"""Remove blog *bid* from *tag* in every TagInfo row, then drop the
	tag from the blog entity itself.

	Fixes vs. the original: a missing blog no longer crashes with
	AttributeError at blog.put(), and the bare except around the tag
	removal now catches only the expected ValueError.
	"""
	query = TagInfo.gql("")
	for t in query:
		tags = pickle.loads(t.map)
		if tag in tags:
			# keep only entries that do not reference this blog id
			tags[tag] = [x for x in tags[tag] if x[0] != bid]
			t.map = pickle.dumps(tags)
			t.put()
	blog = getBlogById(bid)
	if blog is None:
		# nothing to update on the blog side
		return
	try:
		blog.tags.remove(tag)
	except ValueError:
		# blog did not carry the tag; still re-put, as before
		pass
	blog.put()

def getAllTags(limit=0):
    """Return the names of all tags across every TagInfo row.

    limit: when > 0, at most that many names are returned.  The
    default of 0 returns everything, which matches the original
    behavior -- the parameter used to be silently ignored.
    """
    names = []
    for t in TagInfo.gql(""):
        names.extend(pickle.loads(t.map).keys())
        if limit > 0 and len(names) >= limit:
            return names[:limit]
    return names

def DeleteTagByName(tag):
	"""Delete *tag* entirely: detach it from every blog carrying it,
	then drop it from the TagInfo map.

	NOTE(review): deleteTag() rewrites TagInfo rows while this loop is
	iterating the same kind and holding a stale pickled copy of
	`tags`; the final put() below may overwrite changes deleteTag made
	to this row -- verify against a multi-row TagInfo dataset.
	"""
	query = TagInfo.gql("")
	for t in query:
		tags=pickle.loads(t.map)
		if tag in tags:
			blogs=tags[tag]
			for id in blogs:
				# each entry is a (bid, bloginfo) pair
				deleteTag(tag,id[0])
			del tags[tag]
			t.map=pickle.dumps(tags)
			t.put()

def getBlogEditAuth(bid):
	try:
		r=memcache.get("blogeditauth%d"%bid)
		memcache.delete("blogeditauth%d"%bid)
		return r
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
		return None

def setBlogEditAuth(bid):
	auth=str(random.random())
	try:
		r=memcache.set("blogeditauth%d"%bid,auth)
		return auth
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
		return None

def setArchieve(year, month, bloginfo):
	query = Archieve.gql("WHERE year=:1 AND month=:2",year, month)
	ars=query.fetch(1)
	if len(ars)==1:
		ar=ars[0]
		bloginfos=pickle.loads(ar.blogs)
		bloginfos.append(bloginfo)
		ar.blogs=pickle.dumps(bloginfos)
		ar.put()	
	else:
		ar=Archieve()
		ar.year=year
		ar.month=month
		ar.blogs=pickle.dumps([bloginfo])
		ar.put()
	#delete cache
	try:
		memcache.delete("year-month-%d"%year)
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	

def delArchieve(year,month, blogid):
	"""Remove the bloginfo entry for *blogid* from the (year, month)
	archive, if the archive exists.

	Fix vs. the original: when no entry matched, `bloginfo` was left
	unbound and a bare `except: pass` silently swallowed the resulting
	NameError.  The match is now tracked explicitly and the bare
	except is gone.
	"""
	query = Archieve.gql("WHERE year=:1 AND month=:2", year, month)
	ars = query.fetch(1)
	if len(ars) != 1:
		return
	ar = ars[0]
	bloginfos = pickle.loads(ar.blogs)
	# find the (last) matching entry, as the original loop did
	bloginfo = None
	for bi in bloginfos:
		if bi.bid == blogid:
			bloginfo = bi
	if bloginfo is not None:
		bloginfos.remove(bloginfo)
	# re-put unconditionally, matching the original behavior
	ar.blogs = pickle.dumps(bloginfos)
	ar.put()
		
	
def getBlogByMonth(year, month):
    """Return the archived bloginfo list for (year, month), sorted
    descending, or [] when that month has no archive."""
    rows = Archieve.gql("WHERE year=:1 AND month=:2", year, month).fetch(1)
    if len(rows) != 1:
        return []
    blogs = pickle.loads(rows[0].blogs)
    blogs.sort(reverse=True)
    return blogs

def calcMostPopTag(limit=settings.POPTAGNUM):
	"""Return up to *limit* TagCount objects for the most-used tags.

	Maintains a min-heap of size *limit*.  Bug fix: the original
	popped the heap BEFORE pushing, so a newcomer less popular than
	the current minimum could evict a more popular tag.
	heappushpop() pushes first and then removes the true minimum,
	preserving the top-k invariant.
	"""
	poptags = []
	for t in TagInfo.gql(""):
		tags = pickle.loads(t.map)
		for tag in tags:
			tc = TagCount(tag, len(tags[tag]))
			if len(poptags) >= limit:
				heapq.heappushpop(poptags, tc)
			else:
				heapq.heappush(poptags, tc)
	return poptags

def getMostPopTags():
	try:
		tags=memcache.get("MostPopTags")
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	if tags is None:
		tags=calcMostPopTag(settings.POPTAGNUM)
		try:
			r=memcache.set("MostPopTags",tags)
			if not r:
				logging.error("Memcache set sysinfo failed.")
		except apiproxy_errors.OverQuotaError, message:
			logging.error(message)
	return tags

def getMonths(year):
	"""
	return a list  [year-month] of year
	"""
	m = None
	try:
		m = memcache.get("year-month-%d"%year)
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	if m is not None:
		return m
	query = Archieve.gql("WHERE year=:1 ORDER BY month DESC",year)
	ars=query.fetch(12)
	m=[]
	for ar in ars:
		m.append("%d-%d"%(ar.year,ar.month))
	try:
		r=memcache.set("year-month-%d"%year, m)
		if not r:
			memcache.flush_all()
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	return m
 
def getPickled(name):
	"""Return the Pickled entity named *name*, or None when absent."""
	matches = Pickled.gql("WHERE name=:1", name).fetch(1)
	if matches:
		return matches[0]
	return None

def getEBooks(offset=0,limit=20):
	"""Return up to *limit* eBookIndex entities with bookid >= *offset*.

	NOTE(review): dead code -- this function is immediately shadowed
	by the zero-argument getEBooks() defined just below it, so this
	version is unreachable under this name.
	"""
	query=db.GqlQuery ("SELECT * FROM eBookIndex WHERE bookid>=:1 ORDER BY bookid",offset)
	return query.fetch(limit)

def getEBooks():
    """Return the sorted title list from the last eBookTitle row.

    NOTE(review): this definition shadows the getEBooks(offset, limit)
    declared above it.

    Fix vs. the original: when no eBookTitle rows exist, `title`
    stayed None and title.bookindex raised AttributeError; an empty
    list is returned instead.
    """
    title = None
    for row in db.GqlQuery("SELECT * FROM eBookTitle"):
        # keep only the last row, as the original loop did
        title = row
    if title is None:
        return []
    titles = pickle.loads(title.bookindex)
    titles.sort()
    return titles
    

def getNextBookId():
	"""Return a generated id for the next book: max existing bookid
	plus one, or 1 when there are no books.

	Known issue (carried over from the original): not safe under
	concurrent callers.
	"""
	newest = db.GqlQuery("SELECT * FROM eBookIndex ORDER BY bookid DESC LIMIT 1").fetch(1)
	if newest:
		return newest[0].bookid + 1
	return 1

def getBookIndexById(bookid):
	"""
	return a dict of the book which id is bookid:
	{
	"pagename": zip_id
	}
	"""
	index=None
	try:
		index=memcache.get("ebook%dindexcache"%bookid)
		if index is None:
			query=db.GqlQuery ("SELECT * FROM eBookIndex WHERE bookid=:1", bookid)
			bookindex=query.fetch(1)
			if len(bookindex)!=0:
				bookindex=bookindex[0]
			else:
				return None
			index=pickle.loads(bookindex.bookindex)
			memcache.set("ebook%dindexcache"%bookid,index)
	except apiproxy_errors.OverQuotaError, message:
		logging.error(message)
	return index

def getEbookData(bookid,partid):
    """Return the zipped payload for (bookid, partid), or '' when the
    part does not exist."""
    query = db.GqlQuery("SELECT * FROM eBookData WHERE bookid=:1 AND partid=:2",
                        bookid, partid)
    rows = query.fetch(1)
    if rows:
        return rows[0].zipdata
    return ''


def getDictInfo(dictname):
    """Return the DictInfo entity for *dictname*, falling back to the
    default "langdaoec" dictionary when the name is unknown.

    Bug fix: the original wrote to memcache only on the fallback
    branch, so lookups of valid dictionary names never populated the
    cache and always hit the datastore.  The cache key spelling
    ("dictonary") is preserved for compatibility with existing keys.
    """
    cache_key = "dictonary_info_%s" % dictname
    dictinfo = memcache.get(cache_key)
    if dictinfo is None:
        query = db.GqlQuery("SELECT * FROM DictInfo WHERE dictname=:1", dictname)
        dictinfo = query.get()
        if dictinfo is None:
            query = db.GqlQuery("SELECT * FROM DictInfo WHERE dictname=:1", "langdaoec")
            dictinfo = query.get()
        # cache whichever entity was resolved (found or fallback)
        memcache.set(cache_key, dictinfo)
    return dictinfo
"""
"""
def ELFHash(p):
    """Classic ELF string hash of *p*, constrained to 31 bits.

    Fixes vs. the original: the Python-2-only long literal
    0xF0000000L (a syntax error on Python 3, and unnecessary on
    Python 2) is now 0xF0000000; stray semicolons are removed; the
    accumulator no longer shadows the builtin hash().
    """
    h = 0
    for ch in p:
        h = (h << 4) + ord(ch)
        overflow = h & 0xF0000000
        if overflow != 0:
            h ^= (overflow >> 24)
            h &= ~overflow
    return h & 0x7FFFFFFF

def getExplaination(word, dictname, BUCKETSIZE):
    """Look up *word* in the named dictionary.

    The dictionary is sharded into BUCKETSIZE pickled buckets keyed by
    ELFHash(word); the relevant bucket is cached in memcache.  Returns
    the entry for *word*, or None when the word (or its bucket) is
    absent.
    """
    dictid = abs(ELFHash(word)) % BUCKETSIZE
    cache_key = "dictonary_%s_%d_cache" % (dictname, dictid)
    bucket = memcache.get(cache_key)
    if bucket is not None:
        return bucket.get(word, None)
    query = db.GqlQuery("SELECT * FROM DictData WHERE dictid=:1 and dictname=:2",
                        dictid, dictname)
    rows = query.fetch(1)
    if not rows:
        return None
    bucket = pickle.loads(rows[0].data)
    memcache.set(cache_key, bucket)
    return bucket.get(word, None)