#coding:utf-8
#author wendyeq@gmail.com
# Create your views here.
from __future__ import division#must be top
from django.shortcuts import render_to_response
from django.http import HttpResponse,HttpResponseForbidden
from django.utils import simplejson
from django.core.serializers.json import DateTimeAwareJSONEncoder
from google.appengine.ext import db
from google.appengine.api import urlfetch
from google.appengine.api import images
from models import *
from StringIO import StringIO
import random
import logging
import datetime
import time
import itertools
import color
import flickrapi
import png

def search_by_url(request):
    """Fetch the URL given in the POST body and return a JSON acknowledgement.

    NOTE(review): the fetched content is never used, non-200 statuses are
    silently ignored, and the response body is always the constant string
    "response" -- this looks like stub/debug code; confirm before extending.
    """
    target = request.POST.get('url')
    fetched = urlfetch.fetch(target)
    if fetched.status_code == 200:
        logging.info(target)
    return HttpResponse(simplejson.dumps("response"),
                        mimetype='application/json')

def find_by_photo(request):
    """Look up a Photo by its URL key and return its similar-photo list as JSON.

    Each entry of 'similars' is a "<score>##<url>" string; the list is
    sorted in descending (lexicographic) order before being returned.
    Responds with stat='fail' when no Photo exists under that key.
    """
    started = time.time()
    src_url = request.POST.get('url')
    logging.info(src_url)
    photo = Photo.get_by_key_name(src_url)
    payload = {}
    if isinstance(photo, Photo):
        similars = photo.similars
        similars.sort(reverse=True)
        payload['stat'] = 'ok'
        payload['size'] = len(similars)
        payload['similars'] = similars
    else:
        payload['stat'] = 'fail'
    payload['src'] = src_url
    payload['method'] = 'find_by_photo'
    payload['time'] = time.time() - started
    body = simplejson.dumps(payload, cls=DateTimeAwareJSONEncoder)
    return HttpResponse(body, mimetype='application/json')

def random_photos(request):
    """Return up to 10 photos as JSON, drawn from the most-similar/newest.

    When fewer than 10 photos exist the top of the query is returned
    directly; otherwise 10 photos are sampled at random from the first 100.
    The cached Count entity is (re)built on first use via photo_count().
    """
    started = time.time()
    payload = {}
    counter = Count.get_by_key_name("Photo")
    if not isinstance(counter, Count):
        # Counter entity missing -- build it, then reload.
        photo_count()
        counter = Count.get_by_key_name("Photo")
    payload['count'] = counter.counts
    query = db.Query(Photo).order('-similar_times').order('-create_time')
    photo_list = []
    if counter.counts == 0:
        payload['stat'] = "No Photo!"
    else:
        payload['stat'] = "Ok"
        if counter.counts < 10:
            chosen = query.fetch(10)
        else:
            chosen = random.sample(query.fetch(100), 10)
        for photo in chosen:
            photo_list.append({
                'link': photo.key().name(),
                'width': photo.width,
                'create_time': photo.create_time,
            })
    payload['photos'] = photo_list
    payload['method'] = 'random_photos'
    payload['time'] = time.time() - started
    body = simplejson.dumps(payload, cls=DateTimeAwareJSONEncoder)
    return HttpResponse(body, mimetype='application/json')

def geturls(request):
    """Cron-style endpoint: pull "Small" image URLs for the next batch of
    Flickr photo ids and store them as PhotoURL entities.

    Walks `hope` consecutive photo ids starting at FlickrInfo.photo_id,
    asks Flickr for the available sizes of each, and records the "Small"
    size's source URL.  Progress counters are persisted back onto the
    FlickrInfo singleton, then photo_url_count() refreshes the cached count.

    NOTE(review): the X-AppEngine-Cron header guard was commented out in the
    original, so anyone can invoke this handler -- confirm that is intended.
    """
    test_start = time.time()
    flickrinfo = FlickrInfo.all().get()
    if flickrinfo == None:
        # First run: seed the singleton.
        # SECURITY NOTE(review): API key (the key_name) and secret are
        # hard-coded in source; they should live in config, not code.
        flickrinfo = FlickrInfo(key_name='eac7e7fcb5aed451e7b49622f01d8965')
        flickrinfo.secret = '4f7faa3d166c0445'
        flickrinfo.put()
    flickr = flickrapi.FlickrAPI(flickrinfo.key().name(), flickrinfo.secret)
    hope = 50  # number of consecutive photo ids to try this run
    succ = 0
    fail = 0
    photo_urls = []
    for i in range(hope):
        try:
            p_id = str(flickrinfo.photo_id + i)
            p = flickr.photos_getSizes(photo_id=p_id)
            if p.attrib['stat'] == 'ok':
                sizes = p.find('sizes')
                for s in sizes.getiterator('size'):
                    if s.attrib['label'] == 'Small':
                        p_url = PhotoURL(key_name=p_id)
                        p_url.link = db.Link(s.attrib['source'])
                        photo_urls.append(p_url)
            succ += 1
        except Exception:
            # BUG FIX: was a bare `except:` -- narrowed so SystemExit /
            # KeyboardInterrupt are no longer swallowed.
            fail += 1
            logging.info("getSizes fail photo_id = %s." % (flickrinfo.photo_id + i))
        finally:
            logging.info("finally")  # debug trace kept from the original
    flickrinfo.photo_id += hope
    flickrinfo.fail_id += fail
    flickrinfo.update_time = datetime.datetime.now()
    flickrinfo.put()
    db.put(photo_urls)  # batch write of all collected URLs
    photo_url_count()
    logging.info('done!Get URLS cost %s s , succ %s, fail %s' % (time.time() - test_start, succ, fail))
    return HttpResponse('done!Get URLS cost %s s , succ %s, fail %s' % (time.time() - test_start, succ, fail))

#old version
def geturla(request):
    """Old-style fetcher: page through the configured Flickr user's photo
    stream and store each photo's "_m" (small) URL as a PhotoURL entity.

    Kept for reference; geturls() is the current implementation.
    """
    flickrinfos = FlickrInfo.all().fetch(1)
    if len(flickrinfos) == 0:
        # First run: seed the singleton (key_name doubles as the API key).
        flickrinfo = FlickrInfo(key_name='eac7e7fcb5aed451e7b49622f01d8965')
        flickrinfo.secret = '4f7faa3d166c0445'
        flickrinfo.user_id = '73509078@N00'
        flickrinfo.perpage = 100
        flickrinfo.put()
    else:
        flickrinfo = flickrinfos[0]
    last_page = (flickrinfo.total // flickrinfo.perpage) + 1
    if flickrinfo.total != 0 and last_page > flickrinfo.page:
        page = flickrinfo.page + 1
    elif flickrinfo.total != 0 and last_page == flickrinfo.page:
        # BUG FIX: the original had `pass` here, leaving `page` unbound and
        # crashing with a NameError at the photos_search call below.  All
        # pages have already been fetched, so stop cleanly instead.
        return HttpResponse('done!')
    else:
        page = 1
    flickr = flickrapi.FlickrAPI(flickrinfo.key().name(), flickrinfo.secret)
    allphoto = flickr.photos_search(user_id=flickrinfo.user_id, page=page, per_page=flickrinfo.perpage)
    if allphoto.attrib['stat'] == 'ok':
        photos = allphoto.find('photos')
        # Persist paging state reported back by Flickr.
        flickrinfo.total = int(photos.attrib['total'])
        flickrinfo.page = int(photos.attrib['page'])
        flickrinfo.perpage = int(photos.attrib['perpage'])
        flickrinfo.update_time = datetime.datetime.now()
        flickrinfo.put()
        start = time.time()
        ps = []
        for p in photos.getiterator('photo'):
            # Static-farm URL for the "_m" size, built from the photo attrs.
            link = "http://farm" + p.attrib['farm'] + ".static.flickr.com/" \
                   + p.attrib['server'] + "/" + p.attrib['id'] + "_" \
                   + p.attrib['secret'] + "_m.jpg"
            pt = PhotoURL(key_name=p.attrib['id'])
            pt.link = db.Link(link)
            ps.append(pt)
        db.put(ps)  # single batch write
        logging.info(time.time() - start)
    else:
        logging.info('error')
    return HttpResponse('done!')

def prep_photo(request):
    """Download up to 20 pending PhotoURLs and turn each image into a Photo
    entity carrying its normalized 72-bin HSV color histogram.

    URLs that fail (network error, bad image data, non-200 status) get
    their fail_count bumped; URLs with fail_count >= 5 drop out of the
    query and are never retried.  All entity writes are batched at the end.
    """
    test_start = time.time()
    url_query = db.Query(PhotoURL)
    url_query.filter('fail_count < ', 5).filter('isReaded =', False).order('fail_count').order('-create_time')
    photos_url = url_query.fetch(20)
    new_photos = []
    old_photos_url = []
    succ = 0
    fail = 0
    for p_url in photos_url:
        url = p_url.link
        if isinstance(Photo.get_by_key_name(url), Photo):
            continue  # already processed under this key
        try:
            result = urlfetch.fetch(url)
            if result.status_code == 200:
                img = images.Image(result.content)
                new_photo = Photo(key_name=url, width=img.width, height=img.height)
                # rotate(360) is a deliberate no-op: GAE requires at least
                # one transform before execute_transforms() can re-encode.
                img.rotate(360)
                img_png = img.execute_transforms(output_encoding=images.PNG)
                reader_png = png.Reader(file=StringIO(img_png))
                width, height, pixel_array, metadata = reader_png.asDirect()
                total = width * height
                logging.info("total %s, width %s, height %s" % (total, width, height))
                bins = {}
                for row in pixel_array:
                    j = 0
                    while j < len(row):
                        # GAE's PNG output is RGBA, hence the stride of 4
                        # (a plain RGB source would use 3).
                        h, s, v = color.rgb_to_hsv(row[j], row[j + 1], row[j + 2])
                        k = color.hsv_vector(h, s, v)  # histogram bin index
                        bins[k] = bins.get(k, 0) + 1
                        j += 4
                # Normalize to fractions (true division via __future__ import).
                hsv_histogram = []
                for k in range(0, 72):
                    hsv_histogram.append(bins[k] / total if k in bins else 0.0)
                new_photo.hsv_histogram = hsv_histogram
                new_photos.append(new_photo)
                p_url.isReaded = True
                p_url.update_time = datetime.datetime.now()
                old_photos_url.append(p_url)
                logging.info("end")
                succ += 1
            else:
                # BUG FIX: a non-200 response previously left the entity
                # untouched, so the same URL was re-fetched forever without
                # fail_count ever advancing.  Count it as a failure.
                p_url.fail_count += 1
                p_url.update_time = datetime.datetime.now()
                old_photos_url.append(p_url)
                fail += 1
        except Exception:
            # BUG FIX: was a bare `except:` -- narrowed so SystemExit /
            # KeyboardInterrupt are no longer swallowed.
            p_url.fail_count += 1
            p_url.update_time = datetime.datetime.now()
            old_photos_url.append(p_url)
            fail += 1
            logging.info("download")
    db.put(new_photos)
    db.put(old_photos_url)
    photo_count()
    logging.info("prep_photo succ %s , fail %s " % (succ, fail))
    return HttpResponse('done!Prep Photo cost %s s, succ %s, fail %s' % (time.time() - test_start, succ, fail))

def similar_photo(request):
    """Compare the 50 least-compared photos pairwise and record matches.

    Similarity is histogram intersection: sum over all 72 bins of
    min(h1[i], h2[i]), which lies in [0, 1] for normalized histograms.
    Pairs scoring >= 0.5 are appended to the photo's `similars` list as
    "<score>##<url>" strings.  Photos with 100+ recorded similars or 30+
    comparisons already done are skipped by the cap / the query filter.
    """
    logging.info("similarphoto")
    test_start = time.time()
    query = db.Query(Photo).filter('similar_times < ', 30).order('similar_times').order('-create_time')
    photos = query.fetch(50)
    for p in photos:
        if len(p.similars) >= 100:
            continue
        dirty = False
        for other in photos:
            if other is p:
                continue
            # Renamed from `sum`, which shadowed the builtin.
            similarity = 0
            for i in range(0, 72):
                similarity += min(other.hsv_histogram[i], p.hsv_histogram[i])
            if similarity >= 0.5:
                logging.info(other.key().name() + str(similarity))
                entry = str(similarity) + "##" + other.key().name()
                if entry not in p.similars:
                    p.similars.append(entry)
                    p.similar_times += 1
                    p.update_time = datetime.datetime.now()
                    dirty = True
        if dirty:
            # PERF FIX: the original called p.put() once per matching pair;
            # one write per photo reaches the same final state.
            p.put()
    return HttpResponse('done!Similar Photo cost %s s' % (time.time() - test_start))

def photo_count():
    """Incrementally maintain the cached Photo row count in a Count entity.

    Only photos created after the previous update are counted and added to
    the running total.  NOTE(review): a photo whose create_time falls
    between the count query and the new update_time stamp could be missed
    or double-counted on the next run -- confirm this is acceptable.
    """
    counter = Count.get_by_key_name("Photo")
    query = Photo.all()
    if isinstance(counter, Count):
        query.filter('create_time > ', counter.update_time)
    else:
        # First run: seed the counter and count everything created since
        # the project's fixed start date.
        counter = Count(key_name="Photo")
        counter.create_time = datetime.datetime(2010, 9, 1, 8, 00, 00)
        counter.put()
        query.filter('create_time > ', counter.create_time)
    counter.counts += query.count()
    counter.update_time = datetime.datetime.now()
    counter.put()

def photo_url_count():
    """Incrementally maintain the cached PhotoURL row count in a Count entity.

    Mirrors photo_count(): only rows created after the previous update are
    counted and added to the running total.
    """
    counter = Count.get_by_key_name("PhotoURL")
    query = PhotoURL.all()
    if isinstance(counter, Count):
        query.filter('create_time > ', counter.update_time)
    else:
        # First run: seed the counter and count everything created since
        # the project's fixed start date.
        counter = Count(key_name="PhotoURL")
        counter.create_time = datetime.datetime(2010, 9, 1, 8, 00, 00)
        counter.put()
        query.filter('create_time > ', counter.create_time)
    counter.counts += query.count()
    counter.update_time = datetime.datetime.now()
    counter.put()
