# -*- coding: utf-8 -*-
import sys
import os.path
import urllib
from urllib import urlencode
import urllib2
import cookielib
import thread
import threading

# Make the vendored BeautifulSoup 3 distribution importable: it is shipped in
# a sibling directory next to this script rather than installed site-wide.
PATH_BEAUTIFULSOUP = "./BeautifulSoup-3.2.1/"
if not PATH_BEAUTIFULSOUP in sys.path:
    sys.path.append(PATH_BEAUTIFULSOUP)
from BeautifulSoup import BeautifulSoup


class HttpSocket():
    """Small urllib2-based HTTP client (Python 2).

    Bundles cookie handling, caching of ASP.NET hidden form fields
    (__VIEWSTATE etc.) for form posts, and single- / multi-threaded
    file downloads via ranged requests.
    """

    # Default headers attached to every request.
    # NOTE(review): 'Accept-Encoding: gzip,deflate,sdch' is advertised but no
    # decompression is performed anywhere in this class -- if a server actually
    # compresses the body, read() returns compressed bytes. Confirm against
    # the servers this is used with.
    headers = {
        'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Origin': '',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.17 (KHTML, like Gecko)  Chrome/24.0.1312.57 Safari/537.17',
        'Content-Type': 'application/x-www-form-urlencoded',
        'Referer': '',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Accept-Language': 'en-US,en;q=0.8',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'
    }

    # cookielib.CookieJar instance; created in __init__.
    cookie_jar = None

    # urllib2 opener wired to the cookie jar; created in __init__.
    opener = None

    # Last-seen ASP.NET hidden form fields, refreshed by open(getvstate=True)
    # and merged into subsequent POST bodies by merger_post_fields().
    dotnet_viewstate = {
        '__VIEWSTATE': '',
        '__EVENTVALIDATION': '',
        '__LASTFOCUS': '',
        '__VIEWSTATEENCRYPTED':'',
        '__EVENTTARGET': '',
        '__EVENTARGUMENT': ''
    }

    # Hard cap on the number of worker threads download() will spawn.
    thread_max = 10

    def __init__(self):
        """Create the cookie jar and the opener that uses it.

        HTTPSHandler(debuglevel=1) makes urllib2 print HTTPS traffic --
        presumably left on deliberately for debugging.
        """
        self.cookie_jar = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie_jar), urllib2.HTTPSHandler(debuglevel=1))


    def merger_post_fields(self, fields):
        """Merge caller-supplied form fields over the cached viewstate.

        Returns a dict. If no __VIEWSTATE has been cached yet, or fields is
        None, the viewstate fields are dropped entirely. (The name 'merger'
        looks like a typo for 'merge' -- kept for caller compatibility.)
        """
        postfields = self.dotnet_viewstate.copy()

        # No cached __VIEWSTATE yet -> nothing worth merging in.
        if not self.dotnet_viewstate['__VIEWSTATE']:
            postfields = {}

        if fields is None:
            fields = {}
            postfields = {}
        else:
            postfields.update(fields)
        return postfields

    def request(self,url, fields=None):
        """Build a urllib2.Request for url with the default headers.

        With fields: a POST whose body is the urlencoded merge of fields and
        the cached viewstate. Without: a plain GET. Cookie headers from the
        jar are attached in both cases.
        """
        if fields != None:
            postfields = self.merger_post_fields(fields)
            postfields = urlencode(postfields)
            req = urllib2.Request(url, postfields, self.headers)
        else:
            req = urllib2.Request(url, None, self.headers)
        self.cookie_jar.add_cookie_header(req)
        return req

    def get(self, request, deep = 0):
        """Open the request via the shared opener, retrying on any error.

        'deep' counts attempts through the recursion; after the third
        consecutive failure the exception info is printed and the whole
        process exits.
        NOTE(review): the bare except swallows every exception type, and
        sys.exit(0) reports success to the shell even on failure.
        """
        deep +=1
        ret = None
        try:
            ret = self.opener.open(request)
        except :
            if deep>2:
                print "error occurred: "
                print sys.exc_info()
                sys.exit(0)
            ret = self.get(request, deep)
        return ret
        #return self.opener.open(request)

    def open(self,url, getvstate = False, fields=None, request=None):
        """Fetch url (POST when fields is given, else GET).

        getvstate=True: read the whole body, harvest the __VIEWSTATE /
        __EVENTVALIDATION hidden inputs into self.dotnet_viewstate with
        BeautifulSoup, and return the page text.
        getvstate=False: return the open response object unread.
        NOTE(review): the 'request' parameter is accepted but never used.
        """
        req = None
        if fields != None:
            # NOTE(review): postfields computed here is discarded --
            # self.request() redoes the merge/encode itself.
            postfields = self.merger_post_fields(fields)
            postfields = urlencode(postfields)
            req = self.request(url,fields)
        else:
            req = self.request(url)

        s =   self.get(req)

        if getvstate:
            context = s.read()
            soup = BeautifulSoup(context)

            if soup.find(id = "__VIEWSTATE"):
                self.dotnet_viewstate['__VIEWSTATE'] = soup.find(id = "__VIEWSTATE")['value']
            if soup.find(id = "__EVENTVALIDATION"):
                self.dotnet_viewstate['__EVENTVALIDATION'] = soup.find(id = "__EVENTVALIDATION")['value']
            return context
        else:
            return s

    def download( self, url, save_file, thread_cnt=5, buffer_size=10240):
        """Download url to save_file with thread_cnt ranged-request workers
        (capped at thread_max).

        Each Downloader fetches one byte slice and writes it to the shared
        file handle at its own offset, serialized by the module-level lock.
        """

        thread_cnt = thread_cnt if thread_cnt <= self.thread_max else self.thread_max

        # Open File Stream
        fout = open(save_file, 'wb')#The extra 'b' indicates to write in binary mode. Currently you are writing a binary file in ASCII/text mode.

        # Get the Length of the file
        req = self.open(url, False)
        file_size = int(req.info().getheaders('Content-Length')[0])

        # Compute the average length for the threads
        avg_size, pad_size = divmod(file_size, thread_cnt)

        threads = []
        for i in xrange(thread_cnt):
            start_size = i*avg_size
            end_size = start_size + avg_size - 1
            # Add the pad_size to the last thread.
            # NOTE(review): the extra +1 makes the last range end at
            # file_size, one past the final byte index (file_size-1); most
            # servers clamp the range, but confirm this is intended.
            if i == thread_cnt - 1:
                end_size = end_size + pad_size + 1
            # Create a thread
            t = Downloader(url, start_size, end_size, buffer_size, self, fout )
            threads.append(t)

        #  Start all
        for t in threads:
            t.start()

        # Wait all threads to Close file stream
        for t in threads:
            t.join()
        fout.close()

        print os.path.basename(save_file) + ' is ready.'

    def download_( self, url, save_file):
        """Single-threaded variant of download(): stream url to save_file
        in 8 KiB chunks.

        NOTE(review): fout is never closed here (relies on interpreter
        cleanup), and the 'status' progress string is computed but its
        print is commented out.
        """
        # print save_file
        fout = open(save_file, 'wb') #The extra 'b' indicates to write in binary mode. Currently you are writing a binary file in ASCII/text mode.

        stream = self.open(url, False)
        meta = stream.info()
        file_size = int(meta.getheaders("Content-Length")[0])

        file_size_dl = 0
        block_sz = 8192
        while True:
            buffer = stream.read(block_sz)
            if not buffer:
                break

            file_size_dl += len(buffer)
            fout.write(buffer)
            status = r"%s %10d  [%3.2f%%]" % ('',file_size_dl, file_size_dl * 100. / file_size)
            status = status + chr(8)*(len(status)+1)
        #     print status,
        # print


# Module-level re-entrant lock: serializes the seek+write pairs that
# Downloader threads perform on the shared output file.
lock = threading.RLock()
class Downloader(threading.Thread):
    """Worker thread that fetches one byte range of a remote file and
    writes it into a shared output stream at the matching offset."""

    def __init__(self, url, start_size, end_size, buffer_size, httpsocketer, fstream):
        threading.Thread.__init__(self)
        self.url = url
        self.start_size = start_size
        self.end_size = end_size
        self.buffer_size = buffer_size
        self.httpsocketer = httpsocketer
        self.fstream = fstream

    def run(self):
        """Request bytes [start_size, end_size] of url and copy them to
        fstream at the corresponding file offset."""
        ranged_req = self.httpsocketer.request(self.url, None)

        # Ask the server for just this thread's slice of the file.
        ranged_req.headers['Range'] = 'bytes=%s-%s' % (self.start_size, self.end_size)
        stream = self.httpsocketer.get(ranged_req)

        write_pos = self.start_size
        while True:
            chunk = stream.read(self.buffer_size)
            # An empty read means this slice is exhausted.
            if not chunk:
                break
            # Hold the module-level lock across seek+write so concurrent
            # workers cannot interleave between positioning and writing
            # ('with' on a lock needs at least Python 2.5).
            with lock:
                self.fstream.seek(write_pos)
                self.fstream.write(chunk)
                write_pos += len(chunk)

if __name__ == '__main__':
    # Demo / manual entry point: fetch a captcha image from the Carrefour
    # supplier site using a single download thread.
    print "Downloading.........."
    http = HttpSocket()
    #http.download('http://www.eclipse.org/downloads/download.php?file=/technology/epp/downloads/release/luna/R/eclipse-standard-luna-R-win32.zip', 'eclipse-standard-luna-R-win32.zip', 10, 10240000)
    img_url = 'https://supplierweb.carrefour.com.cn/includes/image.jsp'
    # NOTE(review): hard-coded absolute Windows path -- adjust per machine.
    img_store_path = 'C:/Users/temp/Desktop/carrefour/certificationcode.jpg'
    http.download( img_url, img_store_path, 1 )
