import re,sys,os,socket
from urllib2 import *
from threading import Thread
from Queue import Queue
import time,gzip,tempfile,string,random

class GD:
  """Multi-threaded gallery downloader driven by a site-specific handler.

  The *handler* object supplies all site-specific logic; it must provide:
    get_dir_name(dir, url) -> target directory (returned with a trailing
                              path separator -- the code concatenates paths),
    extract_url_lst(url)   -> list of image URLs to fetch,
    download_img(url, file_name) -> fetch one image to file_name,
    write_log(msg)         -> progress/diagnostic logging,
    ext                    -> file extension (without dot) for saved images.
  """
  def __init__(self, handler, url, dir=".\\", thread_num=10):
    # Ask the handler where files should go; create the directory if needed.
    dir = handler.get_dir_name(dir, url)
    if not os.path.exists(dir):
      os.mkdir(dir)
    # Record the source page URL next to the downloaded images.
    # with + open() replaces the py2-only file() and guarantees the handle
    # is closed even if the write fails.
    with open(dir + "url.txt", "w") as f:
      f.write(url)
    self.url = url
    self.handler = handler
    self.thread_num = thread_num
    # Assumed to end with the path separator -- glob below relies on it.
    self.dir = dir
    self.q = Queue()

  def work(self):
    """Worker loop: drain (sub_url, file_name) pairs until the queue is empty."""
    # Queue.Empty moved modules between py2 and py3.
    try:
      from Queue import Empty
    except ImportError:
      from queue import Empty
    while 1:
      try:
        # The queue is fully populated before workers start, so a
        # non-blocking get raising Empty means there is no more work.
        sub_url, file_name = self.q.get(block=False)
      except Empty:
        break
      try:
        self.handler.download_img(sub_url, file_name)
      except Exception as e:
        # A failed download must not wedge q.join() in start_download():
        # log the failure and keep going.
        self.handler.write_log("failed to download %s: %s" % (sub_url, e))
      finally:
        # Always balance the get(), even on error, or join() deadlocks.
        self.q.task_done()

  def build_index_page(self):
    """Write index.html listing the downloaded images in numeric order."""
    from glob import glob
    # self.dir ends with a separator, so dir + "*" globs its contents.
    names = [os.path.basename(p) for p in glob(self.dir + "*")]
    img_exts = ('.jpg', '.jpeg', '.gif', '.png', '.bmp')
    names = [n for n in names if os.path.splitext(n)[1].lower() in img_exts]
    if not names:
      return
    def _extract_number(s):
      # Numeric sort so "10.jpg" comes after "2.jpg"; files with no
      # digits sort first as 0.
      m = re.search(r"\d+", s)
      return int(m.group()) if m else 0
    names.sort(key=_extract_number)
    with open(self.dir + "index.html", "w") as f:
      for name in names:
        f.write('<img src="%s">\n' % name)

  def start_download(self):
    """Enqueue all image URLs, run the worker pool, then build the index."""
    url_lst = self.handler.extract_url_lst(self.url)
    self.handler.write_log("%d images to download" % len(url_lst))
    self.handler.write_log("downloading to %s" % self.dir)
    for ct, url in enumerate(url_lst, 1):
      file_name = self.dir + "%d.%s" % (ct, self.handler.ext)
      # Skip files left by a previous (partial) run so downloads resume.
      if os.path.exists(file_name):
        continue
      self.q.put((url, file_name))
    for _ in range(self.thread_num):
      t = Thread(target=self.work)
      t.daemon = True  # never block interpreter exit on a hung download
      t.start()
    # Block until every queued item has been task_done()'d by a worker.
    self.q.join()
    self.build_index_page()
    self.handler.write_log("Finish downloading from %s" % self.url)
