# coding=utf-8
#############################################################################
#                                                                           #
#   File: threadedprocess.py                                                #
#   GNU v3                                                                  #
#############################################################################

import threading
import queue
import proxy
import urllib.request, urllib.parse, urllib.error
import common
import time
import re

# Give up a ranged fetch after this many failed attempts (see doFetch).
FETCH_FAIL_LIMIT = 5
# Shared application-wide state; provides FETCH_CORE used by the worker threads.
globalVars = common.getGlobalVars()

class ThreadedLargeResponseDaemonProcess(threading.Thread):
    """Daemon worker thread that fetches byte ranges of a large response.

    Tasks arrive on ``queue_get`` as ``(queue_give, params_dict, range_range)``
    tuples; each fetched response is handed back through ``queue_give``
    together with the ``(start, end)`` range it covers, so several of these
    workers can download a large file in parallel.
    """

    def __init__(self, queue_get):
        """queue_get: queue.Queue of (reply_queue, params_dict, (start, end))."""
        threading.Thread.__init__(self)
        self.queue_get = queue_get
        # Shared fetch backend configured at module load time.
        self.fetch_core = globalVars.FETCH_CORE

    def fetch(self, params):
        """Fetch ``params`` through the fetch core.

        Returns the response object, or None when the fetch raised
        (best-effort: the error is logged, never propagated).
        """
        try:
            return self.fetch_core.fetch(params)
        except Exception as e:
            common.logger(1, "threadedprocess.fetch", 'tlrdp.f:%s' % e)
            return None

    def run(self):
        """Consume tasks forever; log and re-raise if the loop ever dies."""
        try:
            while True:
                (queue_give, task, range_range) = self.queue_get.get()
                common.logger(3, "threadedprocess.run", "run %d, %d." % range_range)
                resp = self.doFetch(task, range_range)
                common.logger(3, "threadedprocess.run", "finished %d, %d." % range_range)
                queue_give.put((resp, range_range))
                self.queue_get.task_done()
        except Exception as e:
            common.logger(1, "threadedprocess.run.exception", 'tlrdp: %s' % e)
            raise

    def getRespCode(self, resp):
        """Return resp.status, or the sentinel 999 when resp is None."""
        if resp is None:
            return 999  # sentinel error code: fetch failed outright
        try:
            status = resp.status
            common.logger(3, "threadedprocess.getRespCode", 'status %d' % status)
            return status
        except Exception as e:
            # BUG FIX: the old handler logged the undefined name ``line``,
            # which raised NameError and masked the original exception.
            common.logger(3, "threadedprocess.getRespCode", 'tlrdp.gRC: %s' % e)
            raise

    def _insert_range(self, params, range_range):
        """Splice a percent-quoted ``Range:bytes=start-end`` pseudo-header
        into the urlencoded params just before the '&range' key.

        Mirrors the original ad-hoc wire format exactly, including the
        (unchanged) quirk that a missing '&range' key splices before the
        last character (str.find returning -1).
        """
        pos = params.find('&range')
        range_str = urllib.parse.quote("Range:bytes=%d-%d\n" % range_range)
        return params[0:pos] + range_str + params[pos:]

    def redirect(self, resp, params_dict, range_range):
        """Follow a 302: read headers from ``resp`` for ``Location`` and
        rebuild the fetch params against the new URL.

        Raises ValueError when no Location header is present (the old code
        crashed with TypeError concatenating None instead).
        """
        url = None
        while True:
            # NOTE(review): assumes resp.readline() yields str, not bytes —
            # confirm against the fetch core's response type, otherwise the
            # "" comparison below never terminates the loop.
            line = resp.readline().strip()
            if line == "":
                break  # blank line ends the header section
            (name, _, value) = line.partition(":")
            if name.strip().lower() == "location":
                url = value.strip()
                break  # only the redirect target is needed
        if url is None:
            raise ValueError("302 response without a Location header")
        common.logger(3, "threadedprocess.redirect", "302redirect to" + url)
        params_dict['path'] = url
        return self._insert_range(urllib.parse.urlencode(params_dict), range_range)

    def doFetch(self, params_dict, range_range):
        """Fetch the byte range ``range_range`` of ``params_dict['path']``.

        Retries up to FETCH_FAIL_LIMIT times; returns the response on
        HTTP 200 (whole body) or 206 (partial content), else None.
        """
        params = self._insert_range(urllib.parse.urlencode(params_dict), range_range)
        allowed_failed = FETCH_FAIL_LIMIT
        while allowed_failed > 0:
            resp = self.fetch(params)
            status = self.getRespCode(resp)
            if status in (200, 206):
                return resp
            # 302 redirect handling is currently disabled; any non-2xx
            # status (including 302) just burns one retry with the same params.
            # params = self.redirect(resp, params_dict, range_range)
            allowed_failed -= 1
        return None
###for testing
            
        
if __name__ == "__main__":
    # Ad-hoc smoke test: build a fetch params string by hand.
    # BUG FIX: the test URL used backslashes ("http:\\www.google.com"),
    # which is a malformed URL; corrected to forward slashes.
    params = urllib.parse.urlencode({"method": "GET",
                                     "path": "http://www.google.com",
                                     "headers": "",
                                     "postdata": "",
                                     "version": '2',
                                     "range": "on"})
    print('start fetch')




#------------------------- range change with in thread
## 
##
##def doFetch(self, task):
##        allowed_failed = FETCH_FAIL_LIMIT
##        while allowed_failed > 0:
##            
##            resp = self.fetch(param)
##            
##            if not resp:
##                m = re.match(r"bytes[ \t]+([0-9]+)-([0-9]+)/([0-9]+)", param['Range'])
##                part_length = int(m.group(2)) - int(m.group(1))
##                if part_length > 65536:
##                    part_length /= 2
##                allowed_failed -= 1
##                continue
##            while True:
##                line = resp.readline().strip()
##                # end header?
##                if line == "":
##                    break
##                # header
##                (name, _, value) = line.partition(":")
##                name = name.strip()
##                value = value.strip()
##                # get total length from Content-Range
##                if name.lower() == "content-range":
##                    m = re.match(r"bytes[ \t]+([0-9]+)-([0-9]+)/([0-9]+)", value)
##                    if not m or int(m.group(1)) != cur_pos:
##                         # Content-Range error, fatal error
##                        return
##                    next_pos = int(m.group(2)) + 1
##                    continue
##                
##            if next_pos == content_length:
##                return
##            cur_pos = next_pos
