# -*-coding: utf-8 -*-
'''
image crawler worker
input:
	original_url, dest_path
run command:
	celery -A image worker -l info -Q image
'''
import os
import sys
import shutil
import random
import string
import collections
import HTMLParser
import urllib
import hashlib
import xml.etree.ElementTree as ET
import logging
import requests
from config import celery

# Python 2 hack: re-expose sys.setdefaultencoding (hidden by site.py on
# startup; reload(sys) brings it back) and force the process-wide default
# encoding to UTF-8, presumably so implicit str<->unicode conversions of
# non-ASCII URLs/paths don't raise UnicodeDecodeError.
# NOTE(review): this changes encoding behavior for EVERY module in the
# process and is widely discouraged -- confirm it is still required.
reload(sys)
sys.setdefaultencoding('utf-8')


# Console logging: show INFO and above as "name: LEVEL message".
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
# BUG FIX: the handler was created but never attached to any logger, so
# this whole setup previously had no effect.  Attach it to the root
# logger so worker log output actually reaches the console.
logging.getLogger('').addHandler(console)


@celery.task(ignore_result=True, throw=True, bind=True)
def crawl(self, original_url, dest_path):
	'''
	Download ``original_url`` and write the raw bytes to ``dest_path``.

	original_url: URL of the image to fetch.
	dest_path: absolute path of the destination file; missing parent
		directories are created on demand.

	On any failure the task re-queues itself via ``self.retry``.
	'''
	# Seconds to wait before a failed download is retried.
	# BUG FIX: ``retry_delay`` was previously referenced but never
	# defined, so the except branch died with NameError and the task
	# was never actually retried.
	retry_delay = 60
	try:
		response = requests.get(original_url, stream=True)
		# Fail fast on 4xx/5xx instead of saving an HTML error page
		# to disk as if it were the image.
		response.raise_for_status()
		dest_dir = os.path.dirname(dest_path)
		if dest_dir and not os.path.isdir(dest_dir):
			try:
				os.makedirs(dest_dir)
			except OSError:
				# Another worker may have created it concurrently;
				# only propagate if the directory still doesn't exist.
				if not os.path.isdir(dest_dir):
					raise
		with open(dest_path, 'wb') as out_file:
			shutil.copyfileobj(response.raw, out_file)
		# Drop the reference to the underlying urllib3 response so the
		# connection/buffer can be reclaimed promptly.
		del response.raw
	except Exception as e:  # py2/py3-compatible syntax (was: except Exception, e)
		print(e)
		self.retry(countdown=retry_delay)


def async_crawl(original_url, dest_path):
	'''
	Fire-and-forget helper: enqueue a ``crawl`` task for ``original_url``
	on the dedicated 'image' queue.
	'''
	task_args = (original_url, dest_path)
	routing = dict(queue='image', routing_key='image')
	crawl.apply_async(args=task_args, **routing)

