
import os
# Point Django at the project settings BEFORE importing any home.* models
# below -- Django model imports fail without DJANGO_SETTINGS_MODULE set.
os.environ[ 'DJANGO_SETTINGS_MODULE' ] = 'lads.settings'

from celery import task
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from home.models import Lad, Image, ProxyAddress
from lads.imageripper import ImageRipper, download
from lads.image import init_thumbnail
from lads.utils import get_face_request, get_body_request, get_nude_request

#import os
#os.environ['CELERY_CONFIG_MODULE'] = 'home.celeryconfig'

from celery import Celery
# Celery application instance; talks to a local RabbitMQ broker as guest.
celery = Celery('tasks', broker='amqp://guest@localhost//')

# Total number of photos fetched per lad; split between face / body / nude
# queries inside add_photos_task (20% / 50% / remainder).
_NUM_PHOTOS_TO_ADD = 32

from celery.task import Task

def _save_image_thumbnail( lad, downloaded, i ):
	# Persist *downloaded* as Image number *i* for *lad*; if the lad has no
	# thumbnail yet, build one from this image and save the lad.
	#
	# Image.create2 returns None when the payload was not a valid image
	# (html error page, truncated download, ...), in which case nothing
	# is stored.
	image_info = Image.create2( lad, downloaded, i )
	if image_info is not None:
		path, w, h = image_info
		if lad.thumbnail is None:
			lad.thumbnail = init_thumbnail( path )
			lad.save()

@task( name = 'home.tasks.add_photos_task' )
def add_photos_task():
	print "got a task"
	while True:
		lads = Lad.objects.filter( image = None )

		if lads.count() == 0:
			return

		lads2 = lads.order_by( '-id' )
		proxy = {}
		proxy[ 'get' ] = ProxyAddress.get2
		proxy[ 'delete' ] = ProxyAddress.delete2
		proxy[ 'count' ] = ProxyAddress.count2
		image_ripper = ImageRipper( image_size = 'large', proxy = proxy )#.rip_one_image( name_surname.encode( "utf-8" ) )

		for lad in lads2:
			# find lad with no photo		
			name_surname = lad.name_surname

			if image_ripper.is_proxy_list_empty():
				return
			
			num_face_photos = int( _NUM_PHOTOS_TO_ADD * 0.2 )
			num_body_photos = int( _NUM_PHOTOS_TO_ADD * 0.5 )
			num_nude_photos = _NUM_PHOTOS_TO_ADD - num_face_photos - num_body_photos
		
			urls = []
				
			image_ripper.use_proxy()
	
			urls += image_ripper.get_images_urls( get_face_request( name_surname ).encode( "utf-8" ), num_face_photos )
			urls += image_ripper.get_images_urls( get_body_request( name_surname ).encode( "utf-8" ), num_body_photos )
			urls += image_ripper.get_images_urls( get_nude_request( name_surname ).encode( "utf-8" ), num_nude_photos )

			if urls == []:
				# bad proxy
				# let's have another chance
				continue
	
			image_ripper.restore_proxy()
		
			while len( urls ) > 0 and urls[ 0 ] == None:
				urls = urls[ 1: ]
			
			
			if len( urls ) > 0:
				urls0 = urls[ 0 ]	
				
				# probably urls0 here is invalid
				#print "urls0={0}".format( urls0 )
				#print "lad={0}".format( lad )
				downloaded = download( urls0 )
				#print "downloaded={0}".format( downloaded )

				_save_image_thumbnail( lad, downloaded, 0 )
			
			if len( urls ) > 1:				
				urls = urls[ 1: ]			
				i = 1		
				
				for url in urls:
					image = download( url )
					# check: is there a valid image, not html or other?
					if image != None:
						_save_image_thumbnail( lad, image, i )
						i += 1
				print u"added for '{0}'".format( lad.slug )