# -*- coding: utf-8 -*-
from django.core.management.base import NoArgsCommand
from django.conf import settings
from urllib2 import Request, urlopen
from urllib import quote
from urlparse import urljoin
import re, threading, thread, os, time
from Queue import Queue, Empty

from dramas.main.BeautifulSoup import BeautifulSoup
from dramas.main.utils import url_read
from dramas.main.views import video_image_obj
from dramas.main.settings import category_image_group
# Base URL pattern for xiaoli.cc category listing pages; '%s' is filled with a
# numeric category id.  Also used as the base for urljoin() on relative links.
url_template = 'http://www.xiaoli.cc/category/%s'

def download_cover(cate, page_url):
    ext = '.jpg'
    id = page_url.split('/')[-1]
    download_path = os.path.join(settings.MEDIA_ROOT, 'upload', category_image_group[cate], "%s%s" %(id, ext), )
    print download_path
    if not os.path.exists(download_path):
        content = url_read(page_url)
        video_image_m = video_image_obj.search(content)
        if video_image_m:
            video_image = video_image_m.group(1)
            url_path = urljoin(url_template, quote(video_image))
            headers = {'Referer':'http://www.xiaoli.cc/video/detail/%s' % id,
                       'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1',
                       }
            url_path_request = Request(url_path, headers = headers)
            result = urlopen(url_path_request)
            download_dir = os.path.dirname(download_path)
            if not os.path.exists(download_dir):
                os.makedirs(download_dir)
            f = open(download_path,'wb')
            f.write(result.read())
            f.close()
        else:
            if len(content) == 941:
                print 'retry download video_image'
                download_cover(cate, page_url)
            else:
                print 'no video_image'
    else:
        print 'already exist'
        
class WorkThread(threading.Thread):
    """Worker that drains (category, url) tasks from a CoverQueue.

    Each task is handed to ``download_cover``; the task tuple is echoed onto
    ``result_queue`` once processed so the producer can track completion.
    The thread exits when the queue is empty AND the producer has set
    ``cover_queue.no_more_items``.
    """

    def __init__(self, cover_queue, result_queue, *args, **kwargs):
        # cover_queue: CoverQueue of pending (cate, url) tuples.
        # result_queue: plain Queue receiving each finished (cate, url).
        self.cover_queue = cover_queue
        self.result_queue = result_queue
        super(WorkThread, self).__init__(*args, **kwargs)

    def run(self):
        while True:
            try:
                cate, url = self.cover_queue.get(timeout = 0.1)
            except Empty:
                # Only stop once the producer has declared the stream closed;
                # an Empty during production just means we polled too early.
                if self.cover_queue.no_more_items:
                    break
                continue
            # download_cover returns None; the original bound it to an unused
            # local.  We report completion by echoing the task itself.
            download_cover(cate, url)
            self.result_queue.put((cate, url))
class CoverQueue(Queue):
    """Task queue that lazily spawns one WorkThread per enqueued item.

    Thread creation stops permanently once the OS refuses to start another
    thread (``thread.error``); the items themselves are still queued and get
    drained by the threads already running.  ``_init``/``_put`` are the
    Queue-subclass hooks, invoked under the queue's internal lock.
    """

    def _init(self, maxsize):
        Queue._init(self, maxsize)
        self.no_more_items = False      # producer sets this when done feeding
        self.stop_new_threads = False   # latched once thread creation fails
        self.work_threads = []          # every WorkThread ever started
        self.threads_count = 0          # snapshot taken at join time
        self.result_queue = Queue()     # completed (cate, url) tuples

    def _put(self, item):
        Queue._put(self, item)
        if self.stop_new_threads:
            return
        try:
            worker = WorkThread(self, self.result_queue)
            worker.start()
        except thread.error:
            # Hit the platform thread limit: keep queuing, stop spawning.
            self.stop_new_threads = True
        else:
            self.work_threads.append(worker)

    def work_threads_join(self):
        """Block until every spawned worker has finished."""
        self.threads_count = len(self.work_threads)
        for worker in self.work_threads:
            worker.join()

def put_urls(cates, cover_queue):
    """Feed every (category, url) pair into *cover_queue*, then wait.

    :param cates: iterable of ``(category_id, [task_url, ...])`` pairs.
    :param cover_queue: CoverQueue-like object; after feeding, its
        ``no_more_items`` flag is raised and ``work_threads_join()`` blocks
        until all workers drain the queue.
    """
    tasks = ((cate, url) for cate, urls in cates for url in urls)
    for task in tasks:
        cover_queue.put(task)

    # Signal end-of-stream so idle workers may exit, then wait for them.
    cover_queue.no_more_items = True
    cover_queue.work_threads_join()
     
class Command(NoArgsCommand):
    def handle_noargs(self, **options):
        cates = []
        for i in range(1,2):
            url = url_template % i
            print url
            content = url_read(url)
            soup = BeautifulSoup(content)
            rows = soup.find('table', attrs = {'id':'categoryTable'}).findAll('tr')[1:]
            print len(rows)
            task_urls = []
            for row in rows:
                task_url = urljoin(url_template,row.a['href'])
                task_urls.append(task_url)
            cates.append((i, task_urls))
            
        total_length = sum(len(c[1]) for c in cates)
        count = 0
        for cate,task_urls in cates:
            length = len(task_urls)
            for i,task_url in enumerate(task_urls):
                print '%s/%s  : %s/%s' % (i, length, count, total_length)
                download_cover(cate, task_url)
                count = count + 1
#                
#        cover_queue = CoverQueue(10000)
#        t = threading.Thread(target = put_urls, args = [cates, cover_queue])
#        t.start()
#        t.join()
#        result_queue = cover_queue.result_queue.queue