# -*- coding:utf-8 -*-
'''
Created on 2012/4/11

@author: austin_kau
######################## Submission Tool V1.0 ###########################
1.[4/19] v0.5    modify DTAS test program, supporting sample uploaded and report retrieved
2.[4/20] v0.5.1  bug fixed
3.[4/23] v0.5.2  create log file for PI integration, support DTAS 1021
4.[4/25] v0.5.3  unicode availability, 60MB limitation, port configurable, extend condition
5.[4/25] v0.5.4  remove file type detecting      
6.[5/1]  v1.0    Bug: 1.fix unicode problem      2.
              update: 1.un-worked case handler   2. sourceid:106    
7.[5/13] v1.0.2 update: 1.handle update and retrieve exception, 2.report timeout
8.[5/29] v1.0.3 update: 1.support single file submission 2. Affected Entity Field       
9.[5/31] v1.0.4 Bug: 1. subject less than 512 2.affectentity support '='
10.[6/15] v1.0.5 Bug: 1. add the http message
11.[8/8] v1.0.6 Request: return code 102 keepalive 15 seconds
######################## Submission Tool V1.1 ###########################
1.[11/7] v1.1.0 for DDA WebProtocol 1.1
     (1) debug mode: 
            i. test DDA web service, 
     (2) support new Web protocol

######################## Submission Tool V1.2 ###########################
1.[02/06] v1.2.0 for DDA WebProtocol 1.2
######################## Submission Tool V1.2.1 ###########################
1.[04/26] add parameter -a for object archive
2.[04/26] implement windows event logs  
######################## Submission Tool V1.2.2 ###########################
1.[08/02] add risklevel for sample retrieving
######################## Submission Tool V1.2.3 ###########################
1.[10/14] enforce to use SSL with default port 443
######################## Submission Tool V1.2.4 ###########################
1.[11/12] fixed -a with interval arguments issue
2.[11/18] fixed recovery failed submission sample
######################## Submission Tool V1.2.5 ###########################
1. support customized Extra_Info  
######################## Submission Tool V1.2.6 ###########################
1. Get result only
2. Add flow control
######################## Submission Tool V1.2.7 ###########################
1. Solve clientuuid conflict issue
2. Protocol version to 1.3
######################## Submission Tool V1.2.8 ###########################
1. Add URL support
2. Add "force" and "Prioritize" option
'''
try:
    import rc
except:
    print 'Fail to load resource file'
    CURRENT_VERSION = "1.2.0000"
else:
    CURRENT_VERSION = rc.version

import os, sys, getopt, uuid, time, logging, hashlib, StringIO, shutil, urllib
import ConfigParser
import myLib, response
import traceback 
from calendar import timegm
import  base64
import httplib
import ssl


'''URL'''
# REST endpoint paths of the DDA/DTAS sample-upload web service.  Keys are
# the logical service names passed to send_package(); values are the URL
# paths appended to https://<Host>:<Port>.
WEB_HOME = r'/web_service/sample_upload'
WebService = {
            'Register':r'{0}/register'.format(WEB_HOME),
            'Unregister':r'{0}/unregister'.format(WEB_HOME),
            'Upload':r'{0}/upload_sample'.format(WEB_HOME),
            'Retrieve':r'{0}/get_report'.format(WEB_HOME),
            'PCAP':r'{0}/get_pcap'.format(WEB_HOME),
            'ScreenShot':r'{0}/get_sandbox_screenshot'.format(WEB_HOME),
            'EventLog':r'{0}/get_event_log'.format(WEB_HOME),
            'Sample':r'{0}/get_sample'.format(WEB_HOME),
            
            # list/aggregate retrieval services
            'BlackList':r'{0}/get_black_lists'.format(WEB_HOME),
            'SampleList':r'{0}/get_sample_list'.format(WEB_HOME),
            'ProductLog':r'{0}/get_product_log'.format(WEB_HOME),
            
            # auxiliary services
            'check_duplicate_sample':r'{0}/check_duplicate_sample'.format(WEB_HOME),
            'get_brief_report':r'{0}/get_brief_report'.format(WEB_HOME),
            'test_connection':r'{0}/test_connection'.format(WEB_HOME),
            }
'''support extract info'''
# D_ExtraInfo collects the user-supplied Extra_Info key/value pairs at
# runtime (customized Extra_Info support, see changelog v1.2.5).
D_ExtraInfo = {}
# Field names accepted as Extra_Info keys.
L_EXTRA_INFO = [
                "ProtocolGroup" 
                , "Protocol" 
                , "Direction"
                , "DstIP"
                , "DstPort"
                , "SrcIP"
                , "SrcPort"
                , "HostName"
                , "DetectionName"
                , "RiskTypeGroup"
                , "RiskType"
                , "FileName"
                , "TrueFileType"
                , "Recipient"
                , "Sender"
                , "UserName"
                , "DstHost"
                , "SrcHost"
]
''' Internal vars'''
# Bracket characters used when composing bracketed log/affect-entity text.
_BL = "["
_BR = "]"

'''Parameters'''
# Long-option names recognized by the command-line parser.
P_INDIR = 'indir'
P_URL_PATH_FILENAME = 'url.txt'
P_OUTDIR = 'outdir'
P_VERBOSE = 'verbose';
P_SLEEPTIME = 'sleep'
P_DEBUG = 'debug'
P_TIMER = 'timer'
P_TIMEOUT = 'timeout'
P_AFFECT = 'affectentity'
P_SAMPLE = 'sample'
P_SHA1 = 'sha1'
P_BLACKLIST = 'blacklist'
P_END = 'end'
P_START = 'start'
P_INTERVALTYPE = 'interval-type'
P_PROTOCOL = 'protocol'
P_METHOD = 'method'
P_BODY = 'body'
P_RISK = 'risklevel'
# NOTE(review): 'ROCESSING' is a long-standing misspelling of PROCESSING,
# kept as-is for compatibility with existing option handling.
P_ROCESSINGLIMIT = 'processing-limit'
P_IMMEDIATE = 'immediate'
P_SKIPPREFILTER = 'skipprefilter'

# Boolean switches toggled by command-line flags.
B_QUIET = False;
B_VERBOSE = False;
B_IsMutilOS = False;
B_IsFull = False;
B_IsIncludeSample = False;
B_Get_BL = False;
B_Get_SampleList = False;
B_Archive = False;
B_IsURL = False;
B_IMMEDIATE = True;
B_SKIPPREFILTER = False;
'''value'''
# Values captured from command-line arguments (None until parsed).
V_AFFECT = None;
V_INTERVALEND = None
V_INTERVALSTART = None
V_INTERVALTYPE = '1'
V_SHA1 = None
V_PROTOCOL = None
V_METHOD = None
V_BODY = None
V_RISK = None
V_SAMPLE = None
# Default cap on concurrently processing samples (see FlowControl).
V_ROCESSINGLIMIT = 100

'''Default dir'''
# Working-directory layout, all rooted at the current working directory.
D_WORKDIR = os.path.join(os.getcwd(), 'work');
D_INDIR = os.path.join(D_WORKDIR, 'indir');
D_INDIR_CACHE = os.path.join(D_INDIR, '.working');  # compress file and folder then upload
D_OUTDIR = os.path.join(D_WORKDIR, 'outdir');
D_OUTDIR_CACHE = os.path.join(D_OUTDIR, '.working');  # retrieve report waiting for xml parsing
D_BLDIR = os.path.join(D_WORKDIR, 'blacklist');
D_LOG = os.path.join(os.getcwd(), 'log');
'''Config File'''
F_CONFIG = os.path.join(os.getcwd(), 'config.ini');
# Persisted last-query timestamp used by LastQuery for interval retrieval.
F_LASTQUERYTIME = os.path.join(os.getcwd(), 'lastquerytime.txt');
'''Flow control file @2.21.2014'''
# Semicolon-separated SHA1 list of in-flight submissions (see FlowControl).
F_PROCESSING = os.path.join(os.getcwd(), '.processing');

'''URL TEMP file PREFIX '''
URL_FILENAME_PREFIX='____DTAS_SUBMISSION_TOOL_URL_TEMP_'
###########################################################
'''Log Handler'''
# Root logger: everything at DEBUG, handlers filter down to INFO.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
# console
ch = logging.StreamHandler()
ch.setFormatter(logging.Formatter('%(asctime)s\t%(levelname)s\t%(message)s'))
ch.setLevel(logging.INFO)
logger.addHandler(ch)
# file -- create the log directory first: logging.FileHandler raises when
# the target directory does not exist, which would abort the whole tool.
if not os.path.isdir(D_LOG):
    os.makedirs(D_LOG)
logname = os.path.join(D_LOG, 'dtascli_{0}.log'.format(time.strftime("%Y-%m-%d", time.localtime())))
logHandler = logging.FileHandler(logname)
logHandler.setFormatter(logging.Formatter('%(asctime)s\t%(levelname)s\t%(message)s [%(filename)s(%(lineno)d)]'))
logHandler.setLevel(logging.INFO)
logger.addHandler(logHandler)

class dtasException(Exception):
    """Application-level error raised by the DTAS submission tool."""
###########################################################
class Header:
    '''Container for the X-DTAS-* HTTP request headers.

    The class attributes hold the wire names of the protocol headers; the
    instance attributes carry the per-request values that
    _real_header_maker() serializes into the outgoing request.  All
    instance fields default to None; callers populate only the ones
    relevant to a particular web-service call.
    '''
    Out_ProtocolVersion = 'X-DTAS-ProtocolVersion'
    Out_Time = 'X-DTAS-Time'
    Out_Challenge = 'X-DTAS-Challenge'
    Out_Checksum = 'X-DTAS-Checksum'
    Out_ProductName = 'X-DTAS-ProductName'
    Out_ClientHostname = 'X-DTAS-ClientHostname'
    Out_ClientUUID = 'X-DTAS-ClientUUID'
    Out_SourceID = 'X-DTAS-SourceID'
    Out_SourceName = 'X-DTAS-SourceName'
    Out_ArchiveSHA1 = 'X-DTAS-Archive-SHA1'
    Out_ArchiveFilename = 'X-DTAS-Archive-Filename'
    Out_SHA1 = 'X-DTAS-SHA1'
    Out_LastQueryID = 'X-DTAS-LastQueryID'
    Out_MaxGetNumber = 'X-DTAS-MaxGetNumber'
    Out_Duration = 'X-DTAS-Duration'
    Out_ArchiveType = 'X-DTAS-ArchiveType'
    Out_ArchiveEncrypted = 'X-DTAS-ArchiveEncrypted'
    Out_ReportType = 'X-DTAS-ReportType'
    Out_ImageTypeID = 'X-DTAS-ImageTypeID'
    Out_AfterTime = 'X-DTAS-IntervalStartingPoint'
    Out_BeforeTime = 'X-DTAS-IntervalEndPoint'
    Out_IntervalType = 'X-DTAS-IntervalType'
    # (the duplicate second Out_Time assignment from the original has been
    # removed; it re-bound the identical value)
    Out_ContentType = 'Content-Type'
    Out_ContentLength = 'Content-Length'

    def __init__(self):
        self.ContentType = None
        self.ContentLength = None
        self.ProductName = None
        self.ClientHostname = None
        self.ArchiveSHA1 = None
        self.ArchiveFilename = None
        self.SHA1 = None
        self.ReportType = None
        self.OrigFileName = None
        self.ArchiveType = None
        self.ArchiveEncrypted = None
        self.AfterTime = None
        self.BeforeTime = None
        self.IntervalType = None
        self.Time = None
        self.Body = None
               
class LastQuery:
    '''Tracks the last query timestamp used for interval-based retrieval.

    The timestamp is persisted in F_LASTQUERYTIME; when that file exists
    and holds a value, it overrides the V_INTERVALSTART default and
    DecidedByFile is set to True.
    '''
    def __init__(self):
        self._Time = V_INTERVALSTART
        self.DecidedByFile = False
        if os.path.exists(F_LASTQUERYTIME):
            # 'with' guarantees the handle is closed even if readline fails
            # (the original leaked it on error).
            with open(F_LASTQUERYTIME, "r") as ofile:
                _time = ofile.readline()
            if _time:
                self._Time = str(_time).rstrip()
                self.DecidedByFile = True

    @property
    def Time(self):
        '''Last-query time (UTC string) decided at construction.'''
        return self._Time

    def showTime(self):
        logger.info('[LastQueryTime][Found] {0} (UTC)'.format(self._Time))

    @staticmethod
    def setTime(value):
        '''Persist *value* as the new last-query time.

        Raises dtasException when the file cannot be written.
        '''
        try:
            with open(F_LASTQUERYTIME, "w") as ofile:
                ofile.write(value)
            logger.info("[LastQueryTime][Update] %s (UTC)" % value)
        except Exception:
            logger.error(traceback.format_exc());
            raise dtasException('setTime faile')

class FlowControl:
    '''Caps the number of samples concurrently submitted to the server.

    The SHA1s of in-flight samples are persisted in F_PROCESSING as a
    semicolon-separated list.  On construction the list is reloaded,
    cleaned of malformed entries, and narrowed to the samples the server
    still reports with status 2 or 3 (Arrived, Processing).
    '''
    def __init__(self):
        if not _isfile(F_PROCESSING):
            _create_file(F_PROCESSING, "")

        # 'with' replaces the manual open/close pairs of the original so
        # the handle cannot leak on error.
        with open(F_PROCESSING, 'r+b') as processing_file:
            samplelist = self.__clean_sha1_list(str(processing_file.read()).strip('\n'))
        res = _retrieve_sampleList_by_status(samplelist, "2,3")  # status is Arrived, Processing
        self.ProcessingSHA1 = res if res else []

        self.__checkAvaibleNumber()
        self.__saveProcessingSample()

    def addSubmisstion(self, sha1):
        '''Record a new submission (if quota remains) and return the remaining quota.'''
        if self.AvailableNumber > 0:
            self.ProcessingSHA1.append(sha1)
            self.__saveProcessingSample()
            self.__checkAvaibleNumber()
        return self.AvailableNumber

    def __checkAvaibleNumber(self):
        # Remaining quota = configured limit minus in-flight samples.
        self.AvailableNumber = V_ROCESSINGLIMIT - len(self.ProcessingSHA1) if self.ProcessingSHA1 else V_ROCESSINGLIMIT

    def __saveProcessingSample(self):
        with open(F_PROCESSING, 'wb') as processing_file:
            processing_file.write(";".join(self.ProcessingSHA1))

    def __clean_sha1_list(self, sha1):
        # Drop anything that is not a 40-character SHA1 (partial/failed writes).
        return ";".join(x for x in str(sha1).split(';') if len(x) == 40)

""" its purpose is for temp solution"""
class NTevent:
    def __init__(self, appName="dtascli", eventCategory=0, strings=None, data=None, sid=None):
        import win32evtlog, win32evtlogutil as ntEvent
        eventType = ntEvent.win32evtlog.EVENTLOG_ERROR_TYPE,
        eventID = ntEvent.win32evtlog.RegisterEventSource(None, appName)
        print eventID
        sys.exit(-1)
        ntEvent.ReportEvent(appName=appName, eventID=eventID, eventCategory=eventCategory, eventType=eventType, strings=strings, data=data, sid=sid)

'''Fixed the EventLogHandler without win32 extension in py2exe'''
class AustinHandler(logging.Handler):  # Inherit from logging.Handler
        def __init__(self):
                # run the regular Handler __init__
                logging.Handler.__init__(self)
        def emit(self, record):
                # record.message is the log message
                print "Report an event log: %s " % record.message
                NTevent(strings=[record.message])
  
###########################################################


'''
Config Loading
'''
def _get_config(section, option):
    '''Return a single value from the tool's config.ini.'''
    config_parser = ConfigParser.SafeConfigParser()
    config_parser.read(F_CONFIG)
    value = config_parser.get(section, option)
    return value
    
def _parse_config(oHeader):
    '''Parse config.ini and build the request description for *oHeader*.

    Populates the module-level Host/ApiKey/Port from the [DTAS] section
    and collects:
      * [Header] options -> serialized header lines via _real_header_maker()
      * [Body]   options -> raw value keyed by the section name
      * [File]   options -> raw values keyed by the option name

    Returns (True, dict) on success; raises dtasException on any error.
    '''
    try:
        get_config_value = {}
        option_dict = {}
        parser = ConfigParser.SafeConfigParser()
        logger.debug(F_CONFIG)
        parser.read(F_CONFIG)

        global Host, ApiKey, Port
        Host = parser.get('DTAS', 'Host')
        ApiKey = parser.get('DTAS', 'ApiKey')
        # SSL is enforced, so the port defaults to 443 when not configured.
        Port = parser.get('DTAS', 'Port') if parser.has_option('DTAS', 'Port') else "443"

        for section in parser.sections():
            if section == "Header":
                for option in parser.options(section):
                    option_dict.update({option: parser.get(section, option)})
                header_list = _real_header_maker(option_dict, oHeader)
                get_config_value.update({section: header_list})
            elif section == "Body":
                for option in parser.options(section):
                    get_config_value.update({section: parser.get(section, option)})
            elif section == "File":
                for option in parser.options(section):
                    get_config_value.update({option: parser.get(section, option)})

        return (True, get_config_value)
    except Exception:
        # Log the real cause before collapsing it into a dtasException
        # (the original bare except silently discarded it).
        logger.error(traceback.format_exc())
        raise dtasException('parsconfig fail')

def _isfile(file_path):
    return os.path.isfile(file_path)
def _isdir(dir_path):
    return os.path.isdir(dir_path)


'''
Send package            
'''
def _real_header_maker(header_dict, oHeader):
    '''Build the list of "name: value" header strings for one request.

    header_dict maps lower-cased X-DTAS header names (from the [Header]
    section of config.ini) to their configured values; per-request values
    carried on oHeader override the configured ones.  The X-DTAS-Checksum
    header is a SHA1 over ApiKey + the selected header values in a fixed
    order + the staged archive file content + the request body.

    NOTE(review): protocol_version is appended unconditionally below, so a
    config.ini without x-dtas-protocolversion would raise NameError --
    presumably the shipped config always defines it; verify.
    '''
    challenge = uuid.uuid4()
    timestamp = int(time.time())
    version = "1.0"
    checksum = []
    return_value = []
    
    last_query_id = None
    client_uuid = None
    source_id = None
    source_name = None
    
    # Per-request values supplied by the caller on the Header object.
    product_name = oHeader.ProductName
    product_host_name = oHeader.ClientHostname
    file_sha1 = oHeader.SHA1;
    reporttype = oHeader.ReportType;
    archive_sha1 = oHeader.ArchiveSHA1;
    archive_name = oHeader.ArchiveFilename;
    
        
    # Merge configured values with the per-request overrides; where oHeader
    # already carries a value it wins over the config.ini one.
    for header, value in header_dict.items():
        if header == Header.Out_ProtocolVersion.lower():
            version = value;
            protocol_version = header + ": " + version
        elif header == Header.Out_ProductName.lower():
            header_product_name = header + ": " + str(product_name)
        elif header == Header.Out_ClientHostname.lower():
            header_product_host_name = header + ": " + str(product_host_name)
        elif header == Header.Out_ClientUUID.lower():
            client_uuid = value
            header_client_uuid = header + ": " + str(client_uuid)
        elif header == Header.Out_LastQueryID.lower():
            last_query_id = value
            header_last_query_id = header + ": " + str(last_query_id)
        elif header == Header.Out_SHA1.lower():
            file_sha1 = file_sha1 if file_sha1 else value
            header_file_sha1 = header + ": " + str(file_sha1)
        elif header == Header.Out_ReportType.lower():
            reporttype = reporttype if reporttype else value
            header_reporttype = header + ": " + str(reporttype)
        elif header == Header.Out_SourceID.lower():
            source_id = value;
            header_source_id = header + ": " + str(source_id)
        elif header == Header.Out_SourceName.lower():
            source_name = value
            header_source_name = header + ": " + str(source_name)
        elif header == Header.Out_ArchiveSHA1.lower():
            # Recompute the SHA1 from the staged archive when one is queued.
            archive_sha1 = _file_sha1(os.path.join(D_INDIR_CACHE, archive_name)) if archive_name else value
            header_archive_sha1 = header + ": " + str(archive_sha1)
        elif header == Header.Out_ArchiveFilename.lower():
            archive_name = archive_name if archive_name else value
            header_archive_name = header + ": " + str(archive_name)
    
    # APIKey must be the first value
    checksum.append(ApiKey)           
        
    # Header not in config.ini    
    if oHeader.ContentType:
        return_value.append(Header.Out_ContentType.lower() + ":" + oHeader.ContentType)
    if oHeader.ArchiveType:
        return_value.append(Header.Out_ArchiveType.lower() + ":" + oHeader.ArchiveType)
        checksum.append(oHeader.ArchiveType)
    if oHeader.ArchiveEncrypted:
        return_value.append(Header.Out_ArchiveEncrypted.lower() + ":" + oHeader.ArchiveEncrypted)
        checksum.append(oHeader.ArchiveEncrypted)
    

    # The order of the appends below defines the checksum layout the server
    # verifies -- do not reorder.
    return_value.append(protocol_version)
    checksum.append(version)
    
    if product_name:
        checksum.append(str(product_name))
        return_value.append(header_product_name)
    
    if product_host_name:
        checksum.append(str(product_host_name))
        return_value.append(header_product_host_name)
    
    if client_uuid:
        checksum.append(str(client_uuid))
        return_value.append(header_client_uuid)

    if last_query_id:
        checksum.append(str(last_query_id))
        return_value.append(header_last_query_id)
    
    if file_sha1:
        checksum.append(str(file_sha1))
        return_value.append(header_file_sha1)
        
    if reporttype:
        checksum.append(str(reporttype))
        return_value.append(header_reporttype)
    
    if source_id:
        checksum.append(str(source_id))
        return_value.append(header_source_id)
    
    if source_name:
        checksum.append(str(source_name))
        return_value.append(header_source_name)
    
    if archive_sha1:
        checksum.append(str(archive_sha1))
        return_value.append(header_archive_sha1)
    
    if archive_name:
        checksum.append(str(archive_name))
        return_value.append(header_archive_name)
    
    
    # Finish the checksum process
    checksum.append(str(timestamp))
    return_value.append(Header.Out_Time.lower() + ": " + str(timestamp))
    
    # Optional interval headers for list/report retrieval requests.
    if oHeader.BeforeTime:
        return_value.append(Header.Out_BeforeTime.lower() + ": " + oHeader.BeforeTime)
        checksum.append(oHeader.BeforeTime)
    if oHeader.AfterTime:
        return_value.append(Header.Out_AfterTime.lower() + ": " + oHeader.AfterTime)
        checksum.append(oHeader.AfterTime)
    if oHeader.IntervalType:
        return_value.append(Header.Out_IntervalType.lower() + ": " + oHeader.IntervalType)
        checksum.append(oHeader.IntervalType)
    
    checksum.append(str(challenge))
    return_value.append(Header.Out_Challenge.lower() + ": " + str(challenge))
    
    # print 'checksum:', checksum 
    logger.debug("checksum = " + str(checksum))
    checksum_sha1 = _checksum_sha1(checksum, archive_name, oHeader.Body)
    return_value.append(Header.Out_Checksum + ": " + checksum_sha1)
    
    # avoid keep alive too long
    return_value.append("Connection: close")
    
    logger.debug("return = " + str(return_value))
    # print 'return_value:', return_value 
    return return_value   

def _checksum_sha1(checksum, ArchiveFilename, Body=None):
    string = ""
    sha1 = ""
    for value in checksum:
        string = string + value
    if ArchiveFilename:
        file_path = os.path.join(D_INDIR_CACHE, ArchiveFilename);
        if os.path.exists(file_path):
            f = open(file_path, 'rb')
            string = string + f.read()  # Add file
    if Body:
        string = string + Body;
    sha1 = hashlib.sha1(string).hexdigest()
    return sha1

def _file_sha1(filename):
    sha1 = hashlib.sha1(open(filename, "rb").read()).hexdigest()
    return str(sha1).upper()

def _dir_sha1(directory):
    SHAhash = hashlib.sha1()
    try:
        for root, dirs, files in os.walk(directory):
            for names in files:
                filename = os.path.join(root, names)
                SHAhash.update(hashlib.sha1(open(filename, "rb").read()).hexdigest())
    except Exception, e:
        logger.error(e);
    return SHAhash.hexdigest()

def _send_package_to_server(url, package_info, oHeader, Specific):
    '''Perform one HTTPS request against the DTAS server.

    url is the list ["https://", Host, ":", Port, path] built by
    send_package(); package_info['Header'] holds the serialized header
    lines.  Returns a 4-tuple: (True, StringIO(body), '', headers) on
    HTTP 200, otherwise (False, StringIO(body), status, headers).
    Exits the process with code 3 when the request itself fails.
    '''
    header = package_info['Header']
    logger.debug("connect %s with header %s" % (''.join(url), header))
    
    exp_return_code = 200
    file_path = os.path.join(D_INDIR_CACHE, oHeader.ArchiveFilename) if oHeader.ArchiveFilename else '';  

    method = 'GET'
    body = None
    # Certificate verification is deliberately disabled; the except branch
    # covers ssl modules without _create_unverified_context.
    try:
        conn = httplib.HTTPSConnection(url[1], timeout=600, context=ssl._create_unverified_context())
    except:
        conn = httplib.HTTPSConnection(url[1], timeout=600)
    if B_VERBOSE:
        conn.set_debuglevel(4)
    
    # Debug mode (-t) may force PUT and supply a custom body.
    if Specific:
        if str(V_METHOD).upper() == 'PUT':
            method = 'PUT'
        if V_BODY:
            body = V_BODY
    
    '''get_brief_report'''
    if oHeader.Body:
        method = 'PUT'
        body = oHeader.Body
            
    # A staged archive file always wins: upload it as the PUT body.
    if _isfile(file_path) != False:
        method = 'PUT'
        body = open(file_path, 'rb').read()

    conn.putrequest(method, url[4])
    for i in header:
        # NOTE(review): split(':') raises if a header value ever contains a
        # colon -- the values built by _real_header_maker do not today.
        k,v = i.split(':')
        conn.putheader(k,v)
    if body is not None:
        conn.putheader('Content-Length',len(body))
    conn.endheaders()

    if body is not None:
        conn.send(body)

    try:
        logger.debug("=====>Start performing request")
        result = conn.getresponse()
        logger.debug("=====>Finish performing request")
    except Exception, e:
        logger.error(e)
        print traceback.format_exc()
        sys.exit(3)

    return_code = result.status
    return_message = result.msg
    body_out = result.read()
    conn.close()

    if str(exp_return_code) == str(return_code):
        logger.debug("Return body = " + str(body_out))
        logger.debug(str(exp_return_code) + "=" + str(return_code))
        return True,StringIO.StringIO(body_out), '', return_message
    else:
        logger.debug(str(exp_return_code) + "!=" + str(return_code))
        return False, StringIO.StringIO(body_out), return_code, return_message

def send_package(service, oHeader, Specific=False):
    '''Send one request to the named WebService endpoint.

    Returns (ok, payload): payload is the response body as a plain string
    for the 'Retrieve' service and a StringIO for every other service;
    (False, None) on configuration or transport errors.  Known non-200
    statuses are mapped to log messages (201 already-registered counts as
    success; 102 sample still processing; 421 queued / not found).
    '''
    try:
        return_code, package_info = _parse_config(oHeader)
        if not return_code:
            logger.error("Some wrongs happened while parsing config file")
            return False, None;
        elif package_info != {}:
            logger.debug("Config Parser Successful")
            url = ["https://",Host,":",Port,WebService[service]]
            result, info, info_code, info_message = _send_package_to_server(url, package_info, oHeader, Specific)
            
            if result:
                logger.debug("%s is success" % service)
            elif str(info_code) == '201':
                # Server reports the client is already registered.
                result = True
                logger.info("Already registered")
            elif str(info_code) == '102':
                logger.info("%s(Sample:%s) is processing" % (oHeader.SHA1, oHeader.OrigFileName))
            elif str(info_code) == '421':
                if service == 'Retrieve':
                    logger.info("%s(Sample:%s) is in line" % (oHeader.SHA1, oHeader.OrigFileName))
                else:
                    logger.info("%s %s not found " % (oHeader.OrigFileName if oHeader.OrigFileName else 'The', service))
            else:
                logger.error("%s is failed [%s]: %s" % (service, info_message, info.getvalue()))
                
            if service == 'Retrieve':
                return result, str(info.getvalue());
            else:    
                return result, info;
        else:
            logger.debug("Configuration file parsing failed, please check config.ini")
            return False, None;
    except Exception:
        logger.error(traceback.format_exc());
        return False, None;

'''
Control API
'''
def runDTAS(opts):
    '''Dispatch the parsed getopt options to the upload/retrieve/test actions.

    -b performs both upload and retrieve; -u upload only (also freeing the
    cached archive afterwards); -r retrieve only; -t protocol test.  The
    --group, --full, --includesample and --url flags toggle the
    corresponding module-level switches before any action runs.
    '''
    try:

        _B_Upload = False;
        _B_Retrive = False;
        _B_Test = False;
        _B_IsFreeArchieve = False;
        
        """parse the opts for upload or retrieve"""
        for o, a in opts:
            if o in ['-b', '-u']:
                _B_Upload = True;
                if o in [ "-u"]:
                    # delete work\outdir\.working temp file
                    _B_IsFreeArchieve = True
            if o in ['-b', '-r']:
                _B_Retrive = True;
            if o in ['-t']:
                _B_Test = True;
            if o in ['--group']:
                global B_IsMutilOS;
                B_IsMutilOS = True;
            if o in ['--full']:
                global B_IsFull;
                B_IsFull = True;
            if o in ['--includesample']:
                global B_IsIncludeSample;
                B_IsIncludeSample = True;
            if o in ['--url']:
                global  B_IsURL;
                B_IsURL = True;
        if _B_Upload:
            upload(_B_IsFreeArchieve);
        if _B_Retrive:
            retrieve();
        if _B_Test:
            testProtocol(V_PROTOCOL);
            
        # list-style retrievals run after the plain retrieve pass
        if _B_Retrive and B_Get_BL:
            retrieveBL();
        if _B_Retrive and B_Get_SampleList:
            retrive_list_report()
        if B_Archive and B_Get_SampleList:
            retrive_archive()    
    except Exception, e:
        logger.error(e)
        logger.error(traceback.format_exc());
        
##### private function ######
def _createResultFolder(OrigFileName):
    '''Create (if needed) and return a timestamped report folder under D_OUTDIR.'''
    reportDir = os.path.join(D_OUTDIR, "Report_" + time.strftime("%Y%m%d-%H%M%S", time.localtime()) + "_" + OrigFileName)
    if not os.path.exists(reportDir):
        # makedirs also creates a missing D_OUTDIR (os.mkdir would fail).
        os.makedirs(reportDir)
    logger.debug("create report folder %s" % reportDir)
    return reportDir
    
def _createArchiveFolder(OrigFileName):
    '''Create the per-sample archive layout and return its three sub-folders.

    Layout: <D_OUTDIR>/<YYYYMMDD>/<OrigFileName>/{sample,report,integrity}
    Returns (sampleDir, reportDir, integrityDir).
    '''
    todayDir = os.path.join(D_OUTDIR, time.strftime("%Y%m%d", time.localtime()))
    resultDir = os.path.join(todayDir, OrigFileName)
    sampleDir = os.path.join(resultDir, "sample")
    reportDir = os.path.join(resultDir, "report")
    integrityDir = os.path.join(resultDir, "integrity")

    for directory in (todayDir, resultDir, sampleDir, reportDir, integrityDir):
        if not os.path.exists(directory):
            # makedirs creates intermediate directories too, so a missing
            # D_OUTDIR no longer aborts the archive run (os.mkdir would).
            os.makedirs(directory)

    return sampleDir, reportDir, integrityDir
            
def _retrieve_report_by_sha1(sha1):
    '''Fetch the analysis report for *sha1* and store it under D_OUTDIR.

    Writes <sha1>.xml into a fresh timestamped report folder.  When
    B_IsFull is set, also downloads the screenshot / pcap / event-log
    objects; when B_IsIncludeSample is set, the sample archive too.
    Raises dtasException on failure; returns None silently when the
    report is not (yet) available.
    '''
    try:            
        retrieveHeader = Header();
        retrieveHeader.SHA1 = sha1;
        
        if B_IsMutilOS:
            retrieveHeader.ReportType = "1";
        
        result, XML_Content = send_package("Retrieve", retrieveHeader);
        
        if not result:
            return;
        elif not XML_Content:
            return;
        
        retrieveHeader.OrigFileName = retrieveHeader.SHA1;
        reportDir = _createResultFolder(retrieveHeader.OrigFileName);
        xmlFinename = os.path.join(reportDir, retrieveHeader.SHA1 + ".xml")
        ofile = open(xmlFinename, "w")
        ofile.write(XML_Content)
        ofile.close()
        
        # transform xml repot to HTML
        htmlFileName = os.path.join(reportDir, retrieveHeader.OrigFileName + ".html")
        installdir = sys.path[0] if _isdir(sys.path[0]) else os.path.dirname(sys.path[0])
        styleFileName = os.path.join(installdir, 'xslt_style.xml')
        
        #oXMLtoReport = XMLtoReport(xmlFinename, styleFileName, htmlFileName)
        #oXMLtoReport.createHTML();
        
        logger.info("[Found] Retrieve report: {0}".format(sha1))
        
        if B_IsFull:
            _retrive_object("ScreenShot", os.path.join(reportDir, "ScreenShot_{0}.zip".format(sha1)), sha1)
            _retrive_object("PCAP", os.path.join(reportDir, "Pcap_{0}.zip".format(sha1)), sha1) 
            # NOTE(review): "event.xml".format(sha1) is a no-op format call;
            # the file is always named event.xml.
            _retrive_object("EventLog", os.path.join(reportDir, "event.xml".format(sha1)), sha1) 
        
        if B_IsIncludeSample:
            _retrive_object("Sample", os.path.join(reportDir, "Sample_{0}.zip".format(sha1)), sha1) 
    except Exception:
        raise dtasException('get report fail')

def _retrieve_sampleList_by_risk(_SampleList, _RiskList):
    try:            
        # return sample list without risk level
        if(_RiskList == None):
            return str(_SampleList).split(';');

        retrieveHeader = Header();
        retrieveHeader.Body = _SampleList
        retrieveHeader.ContentType = "text/plain"
        
        result, XML_Content = send_package("get_brief_report", retrieveHeader);
        
        if not result or not XML_Content:
            logger.error("[Cannot get brief report]")
            return None;
        
        oXMLer = myLib.XMLer(XML_Content.getvalue(), "BRIEF_REPORT")
        return oXMLer.getDataList("SHA1", "RiskLevel", str(_RiskList).split(','))
        
    except Exception:
        raise dtasException('_retrieve_sampleList_by_risk faile')

def _retrieve_sampleList_by_status(_SampleList, _StatusList):
    try:
        # return sample list without risk level
        if not _StatusList  :
            return str(_SampleList).split(';');
        elif not _SampleList:
            return []

        retrieveHeader = Header();
        retrieveHeader.Body = _SampleList
        retrieveHeader.ContentType = "text/plain"
        
        result, XML_Content = send_package("get_brief_report", retrieveHeader);
        
        if not result or not XML_Content:
            logger.error("[Cannot get brief report]")
            return None;
        
        oXMLer = myLib.XMLer(XML_Content.getvalue(), "BRIEF_REPORT")
        return oXMLer.getDataList("SHA1", "STATUS", str(_StatusList).split(','))
        
    except Exception:
        print traceback.format_exc()
        raise dtasException('_retrieve_sampleList_by_status fail')

def _retrieve_briefreport_by_risk(sha1):
    '''Print the brief report (status and risk level) for each given SHA1.

    *sha1* is a ';'-separated list; every entry must be a 40-character SHA1
    or the whole request is rejected.  Raises dtasException on unexpected
    errors.
    '''
    try:            
        for s in sha1.split(';'):
            if len(s) != 40:
                logger.error('"%s" is not correct sha1 format' % s)
                return 

        retrieveHeader = Header()
        retrieveHeader.Body = sha1
        retrieveHeader.ContentType = "text/plain"

        result, XML_Content = send_package("get_brief_report", retrieveHeader)

        if not result or not XML_Content:
            logger.error("[Cannot get brief report]")
            return None

        oXMLer = myLib.XMLer(XML_Content.getvalue(), "BRIEF_REPORT")

        for breifReport in oXMLer.Elements:
            _sha1 = oXMLer.getDatabyTag(breifReport, 'SHA1')
            # Map raw codes to readable names; fall back to 'None' for an
            # unknown status and to the raw value for an unknown risk level
            # (same output as before, 'in' replaces the py2-only has_key()).
            _rawstatus = oXMLer.getDatabyTag(breifReport, 'STATUS')
            _status = response.RES_BREIFREPORT_STATUS[_rawstatus] if _rawstatus in response.RES_BREIFREPORT_STATUS else 'None'
            _rawrisk = oXMLer.getDatabyTag(breifReport, 'RiskLevel')
            _risklevel = response.RES_RISKLEVEL_MAP[_rawrisk] if _rawrisk in response.RES_RISKLEVEL_MAP else _rawrisk
            print("SHA1:[{0}], Stauts:[{1}], RiskLevel:[{2}]".format(_sha1, _status, _risklevel))

    except Exception:
        # Preserve the real cause in the log before re-raising.
        logger.error(traceback.format_exc())
        raise dtasException('_retrieve_briefreport_by_risk fail')

def _retrieve_archive_by_sha1(sha1):
    # Archive-mode download: fetch the XML report, the sample archive and the
    # product log for *sha1* into the per-sample archive folder layout.
    # Existing files are left untouched (idempotent re-runs).
    try:            
        retrieveHeader = Header();
        retrieveHeader.SHA1 = sha1;
        
        # Multi-OS deployments request the aggregated report type.
        if B_IsMutilOS:
            retrieveHeader.ReportType = "1";
        
        result, XML_Content = send_package("Retrieve", retrieveHeader);
        
        # Report not ready (or request failed): try again on a later run.
        if not result or not XML_Content:
            return;
        
        retrieveHeader.OrigFileName = retrieveHeader.SHA1;
        
        sampleDir, reportDir, integrityDir = _createArchiveFolder(retrieveHeader.OrigFileName);
        
        # get the report
        xmlFinename = os.path.join(reportDir, retrieveHeader.SHA1 + ".xml")
        if not os.path.exists(xmlFinename):
            ofile = open(xmlFinename, "w")
            # NOTE(review): XML_Content is written directly here, while other
            # call sites use XML_Content.getvalue() -- confirm send_package
            # returns a plain string for the "Retrieve" service.
            ofile.write(XML_Content)
            ofile.close()
            logger.info("[Found] Retrieve a report: {0}".format(sha1))
        
        # get the sample
        sampleFilename = os.path.join(sampleDir, "Sample_{0}.zip".format(sha1))
        if not os.path.exists(sampleFilename):
            _retrive_object("Sample", sampleFilename, sha1)
            logger.info("[Found] Retrieve a sample: {0}".format(sha1)) 
        
        # get the product log
        productlogFilename = os.path.join(integrityDir, "{0}.txt".format(sha1))
        if not os.path.exists(productlogFilename):
            # Interval start is the last archive query time (UTC).
            JSON_Content = _retrieve_ProductLog_by_SHA1(INTERVALSTART=LastQuery().Time, SHA1=sha1, INTERVALEND=V_INTERVALEND)
            
            # Always create the file so the next run does not re-query.
            ofile = open(productlogFilename, "w")
            if JSON_Content:
                ofile.write(JSON_Content)
            else:
                ofile.write("Didn't find product log information")
            ofile.close()
        
    except Exception:
        raise dtasException('_retrieve_archive_by_sha1 fail')
        
def _retrieve_report_by_submission(item):
    """Poll the server for the analysis report of a previously submitted
    sample (*item* is the SHA-1-named folder in the output cache) and store
    the result.

    Returns the report folder path on success, or None when the report is
    not ready yet.  Quiet mode only writes a .rating file and returns None.
    """
    try:
        retrieveHeader = Header();
        
        _Out_Sample_Folder = os.path.join(D_OUTDIR_CACHE, item)
        _OrigNameFile = os.path.join(_Out_Sample_Folder, 'OrigFileName.txt')
        _OrigSampleFile = os.path.join(_Out_Sample_Folder, item + '.tgz')
        
        if B_IsMutilOS:
            retrieveHeader.ReportType = "1";
        # URL submissions recorded their original URL in OrigFileName.txt.
        if _isfile(_OrigNameFile):
            retrieveHeader.OrigFileName = open(_OrigNameFile, 'r').read()
        
        retrieveHeader.SHA1 = item;
        
        # Retrieve report
        result, XML_Content = send_package("Retrieve", retrieveHeader);
        
        # get report successful
        if not result:
            return None;
        else:
            logger.info("{0}[Found] report: %s %s".format('[MutilOS] ' if B_IsMutilOS else '') % (retrieveHeader.OrigFileName, retrieveHeader.SHA1))
        
        logger.debug(XML_Content)

        if not retrieveHeader.OrigFileName:  # can't find OrigFileName
            # Fall back to the name embedded in the report, then to the SHA-1.
            retrieveHeader.OrigFileName = myLib.XMLer(XML_Content, 'OrigFileName').getData() if myLib.XMLer(XML_Content, 'OrigFileName').getData() else retrieveHeader.SHA1;

        TrueFileType = myLib.XMLer(XML_Content, 'TrueFileType').getData()

        # URL samples get a distinguishable report-folder name.
        reportDirName= retrieveHeader.OrigFileName
        if TrueFileType == 'URL':
            reportDirName='URL_'+retrieveHeader.SHA1
        # Map the overall rating code to a readable risk level.
        risklevel = response.RES_RISKLEVEL_MAP[myLib.XMLer(XML_Content, 'OverallROZRating').getData()] if response.RES_RISKLEVEL_MAP.has_key(myLib.XMLer(XML_Content, 'OverallROZRating').getData()) else 'ERROR';
        
        # Quiet mode will just create brief report in specified folder 
        if B_QUIET: 
            # create risklevel file
            briefreportdir = os.path.join(D_OUTDIR, "BriefReport")
            if not _isdir(briefreportdir):
                os.mkdir(briefreportdir)
            riskFileName = os.path.join(briefreportdir, retrieveHeader.SHA1 + ".rating")
            ofile = open(riskFileName, "w")
            ofile.write(risklevel)
            ofile.close()
            print "Get a result: SHA1:[{0}], RiskLevel:[{1}]".format(retrieveHeader.SHA1, risklevel)
            logger.debug("generate risk status %s" % riskFileName)
            
            shutil.rmtree(_Out_Sample_Folder);
            logger.debug("Remove temp folder %s" % _Out_Sample_Folder)
        
        else:
            # create report folder
            reportDir = _createResultFolder(reportDirName);
            
            # move sampler to report folder
            shutil.move(_OrigSampleFile, os.path.join(reportDir, os.path.basename(_OrigSampleFile)))
            shutil.rmtree(_Out_Sample_Folder);
            logger.debug("Remove temp folder %s" % _Out_Sample_Folder)
            
            # create the xml report
            xmlFinename = os.path.join(reportDir, retrieveHeader.SHA1 + ".xml")
            ofile = open(xmlFinename, "w")
            ofile.write(XML_Content)
            ofile.close()
            logger.debug("generate XML report %s" % xmlFinename)
            
            # transform xml repot to HTML
            #htmlFileName = os.path.join(reportDir, retrieveHeader.OrigFileName + ".html")
            #installdir = sys.path[0] if _isdir(sys.path[0]) else os.path.dirname(sys.path[0])
            #styleFileName = os.path.join(installdir, 'xslt_style.xml')
            
            
            #logger.debug("Using the style file:%s" % styleFileName)
            
            #oXMLtoReport = XMLtoReport(xmlFinename, styleFileName, htmlFileName)
            #oXMLtoReport.createHTML();
            #logger.debug("generate HTML report %s" % htmlFileName)
        
            return reportDir;
        
    except Exception:
        raise dtasException('_retrieve_report_by_submission fail')

def _retrieve_by_submission():
    _remove_notfound_sample();
    
    for item in os.listdir(D_OUTDIR_CACHE):
        try:
            if not _isdir(os.path.join(D_OUTDIR_CACHE, item)):
                continue;
            
            reportDir = _retrieve_report_by_submission(item);
            
            if not reportDir:
                continue;
            
            '''Get all objects that can download'''
            _B_Get_ScreenShot = B_IsFull;
            _B_Get_PCAP = B_IsFull;
            _B_Get_EventLog = B_IsFull;
            
            
            if _B_Get_ScreenShot:
                _retrive_object("ScreenShot", os.path.join(reportDir, "ScreenShot_{0}.zip".format(item)), item)
            if _B_Get_PCAP:
                _retrive_object("PCAP", os.path.join(reportDir, "Pcap_{0}.zip".format(item)), item) 
            if _B_Get_EventLog:
                _retrive_object("EventLog", os.path.join(reportDir, "event.xml".format(item)), item) 
            if B_IsIncludeSample:
                _retrive_object("Sample", os.path.join(reportDir, "Sample_{0}.zip".format(item)), item) 
            
            
        except Exception, e:
            logger.error(traceback.format_exc());

def _retrive_object(Service, filename, SHA1):
    """Download an auxiliary object (ScreenShot / PCAP / EventLog / Sample /
    BlackList) for *SHA1* and store it at *filename*.

    :param Service:  server service name to call
    :param filename: destination path for the downloaded object
    :param SHA1:     sample digest (may be None for BlackList)
    Raises dtasException on unexpected failure; returns silently when the
    server delivers nothing.
    """
    try:
        retrieveHeader = Header();
        retrieveHeader.SHA1 = SHA1;
        retrieveHeader.ArchiveType = 'zip'

        # Samples are delivered as password-protected archives.
        if Service == 'Sample':
            retrieveHeader.ArchiveEncrypted = '1'

        result, objectfile = send_package(Service, retrieveHeader);

        # BUG FIX: the original tested "not object" -- the builtin type,
        # which is always truthy -- instead of the returned file object.
        if not result or not objectfile:
            return;

        objectfile.seek(0)
        output = open(filename, 'wb')
        output.write(objectfile.read())
        output.close()

        logger.info("[Found] {0}".format(os.path.basename(filename)))

    except Exception:
        raise dtasException('_retrive_object fail')

def _retrieveSL(INTERVALEND, INTERVALSTART, RISKLEVEL):
    """Ask the server for the sample list inside the given UTC interval and
    return the SHA-1 list filtered by RISKLEVEL (None = no filtering)."""
    try:
        retrieveSLHeader = Header();

        # Interval bounds are transmitted as epoch seconds (UTC).
        if INTERVALEND:
            retrieveSLHeader.BeforeTime = str(int(timegm(time.strptime(INTERVALEND, "%Y-%m-%dT%H:%M:%S"))))
        if INTERVALSTART:
            retrieveSLHeader.AfterTime = str(int(timegm(time.strptime(INTERVALSTART, "%Y-%m-%dT%H:%M:%S"))))

        retrieveSLHeader.IntervalType = V_INTERVALTYPE

        result, info = send_package("SampleList", retrieveSLHeader);

        if not result or not info:
            return "";

        logger.debug("[Sample List] {0}".format(info.getvalue()))

        # Translate symbolic risk names into the numeric codes the filter
        # helper expects (same substitution order as before).
        _RiskList = None
        if RISKLEVEL != None:
            _RiskList = str(RISKLEVEL)
            for _name, _code in (("high", "3"), ("medium", "2"), ("low", "1"), ("norisk", "0"), ("error", "-1")):
                _RiskList = _RiskList.replace(_name, _code)

        return _retrieve_sampleList_by_risk(str(info.getvalue()), _RiskList)

    except Exception:
        logger.error(traceback.format_exc())

def _retrieve_ProductLog_by_SHA1 (INTERVALSTART, SHA1, INTERVALEND=None):
    """Fetch the product-log text for *SHA1* inside the given UTC interval.

    Returns the raw log string, or None when nothing was found or an error
    occurred (errors are logged, not raised)."""
    try:
        retrievePLHeader = Header();

        # Interval bounds travel as epoch seconds (UTC).
        if INTERVALEND:
            retrievePLHeader.BeforeTime = str(int(timegm(time.strptime(INTERVALEND, "%Y-%m-%dT%H:%M:%S"))))
        if INTERVALSTART:
            retrievePLHeader.AfterTime = str(int(timegm(time.strptime(INTERVALSTART, "%Y-%m-%dT%H:%M:%S"))))
        if SHA1:
            retrievePLHeader.SHA1 = SHA1

        result, info = send_package("ProductLog", retrievePLHeader);

        # The service answers the literal string "false" when no log exists.
        found = result and info and info.getvalue() != "false"
        if not found:
            logger.warn("[Product Log] {0} log not found".format(SHA1))
            return;
        logger.info("[Product Log] {0}".format(info.getvalue()))
        return info.getvalue()
    except Exception:
        logger.error(traceback.format_exc())
 
def _remove_notfound_sample():
    """Purge output-cache folders whose name is not a SHA-1, then drop the
    ones whose brief report says status 1 (sample not found on server)."""
    # Anything whose folder name is not a 40-char SHA-1 was cached in error.
    for entry in os.listdir(D_OUTDIR_CACHE):
        if len(entry) != 40:
            shutil.rmtree(os.path.join(D_OUTDIR_CACHE, entry))

    # Ask the server which of the remaining entries are "not found".
    __NotFoundSampleList = _retrieve_sampleList_by_status(';'.join(os.listdir(D_OUTDIR_CACHE)), '1')
    if __NotFoundSampleList is None:
        return
    for entry in __NotFoundSampleList:
        target = os.path.join(D_OUTDIR_CACHE, entry)
        if _isdir(target):
            shutil.rmtree(target)
           
   
##### public function #####
def testProtocol(Service):
    """Debug helper: send V_BODY to *Service* with the configured method and
    protocol, then log the raw server answer (info on success, error on
    failure)."""
    if not (V_METHOD and V_PROTOCOL):
        exit("Method and Protocol are both required!", False)
    testHeader = Header();
    testHeader.ContentType = r'text/plain'
    testHeader.Body = V_BODY;
    # Example: dtascli.exe -t method=get protocol=test_connection
    result, info = send_package(Service, testHeader, True);
    log = logger.info if result else logger.error
    log("[Test Result] {0}".format(info.getvalue()))
        
def register():
    """Register this client (product name + hostname) with the server.

    Returns the server's boolean result so callers can abort on failure."""
    hdr = Header()
    hdr.ProductName = _get_config('Header', Header.Out_ProductName)
    hdr.ClientHostname = _get_config('Header', Header.Out_ClientHostname)
    ok, _unused = send_package("Register", hdr)
    if ok:
        logger.info("Register is success")
    return ok

def unregister():
    """Unregister this client from the server; the result is only logged."""
    hdr = Header()
    ok, _unused = send_package("Unregister", hdr)
    if ok:
        logger.info("Unregister is success")

def retrieveSL():
    """When an interval was given on the command line, fetch the matching
    sample list into the module-level SampleList.  Archive mode (-a) uses
    the persisted last-query timestamp as the interval start instead."""
    # Guard clause: nothing to do without an interval bound.
    if not (V_INTERVALEND or V_INTERVALSTART):
        return
    try:
        logger.info('[{2} Time(UTC)] {0} ~ {1} '.format(V_INTERVALSTART, V_INTERVALEND, 'Submission' if V_INTERVALTYPE == '0' else 'Completion'))
        global B_Get_SampleList
        B_Get_SampleList = True
        global SampleList
        if B_Archive:
            LastQuery().showTime()
            SampleList = _retrieveSL(INTERVALEND=None, INTERVALSTART=LastQuery().Time, RISKLEVEL=V_RISK)
        else:
            SampleList = _retrieveSL(INTERVALEND=V_INTERVALEND, INTERVALSTART=V_INTERVALSTART, RISKLEVEL=V_RISK)
    except Exception:
        raise dtasException('retrieveSL fail')
        
def retrive_list_report():
    _FinishSHA1 = []
    for sha1 in SampleList:
        try:
            if not sha1:
                continue
            _retrieve_report_by_sha1(sha1)
            _FinishSHA1.append(sha1)       
        except Exception, e:
            logger.error("[Failed] can't get report {0} : {1}".format(sha1, e))
            logger.error(traceback.format_exc())
    for sha1 in _FinishSHA1:
        SampleList.remove(sha1)
 
def retrive_archive():
    # Archive-mode retrieval: download the full archive (report, sample,
    # product log) for every SHA-1 in SampleList.  Successful entries are
    # removed so the remainder can be retried; any failure aborts the whole
    # batch so the backup service stops cleanly.
    _FinishSHA1 = []
    for sha1 in SampleList:
        try:
            if not sha1:
                continue
            _retrieve_archive_by_sha1(sha1)
            _FinishSHA1.append(sha1)  
        except Exception, e:
            logger.error("[Failed] can't get report {0} : {1}".format(sha1, e))
            logger.error(traceback.format_exc())
            raise dtasException('retrive_archive fail')     # stop the backup servie
    for sha1 in _FinishSHA1:
        SampleList.remove(sha1)
    # Persist "now" (UTC) as the next interval start.
    # NOTE(review): setTime is invoked on the LastQuery class itself, while
    # other call sites use LastQuery() instances -- confirm setTime is a
    # class/static method, otherwise this call fails in Python 2.
    LastQuery.setTime(time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()))

def retrieveBL():
    try:
        filename = os.path.join(D_BLDIR, "blacklist.xml")
        if _isfile(filename):
            os.remove(filename)
        _retrive_object("BlackList", filename, None)
               
    except Exception, e:
        logger.error(e)
        logger.error(traceback.format_exc())

def retrieve():
    try:
        if V_SHA1:
            if B_QUIET:
                _retrieve_briefreport_by_risk(V_SHA1)
            else:
                _retrieve_report_by_sha1(V_SHA1)
        else:
            _retrieve_by_submission()
    except Exception, e:
        logger.error(e.message)
        logger.error(traceback.format_exc())

def preProcessURL():
    """Read the URL batch file from the input folder and return the URLs to
    submit; the batch file is deleted once it has been consumed.

    Returns an empty list when URL mode (--url) is disabled or the file is
    missing.
    """
    urls = []  # renamed from "list": never shadow the builtin
    if not B_IsURL:
        return urls
    urlFile = os.path.join(D_INDIR, P_URL_PATH_FILENAME)

    if os.path.exists(urlFile):
        with open(urlFile, 'rb') as f:
            for line in f:
                url = line.strip()
                if len(url) < 3:
                    # Use the module logger, not the root logger, so the
                    # configured handlers and level apply.
                    logger.error('wrong format url[%s]' % url)
                    continue
                urls.append(url)
        # The batch file is one-shot: remove it after reading.
        os.remove(urlFile)
    return urls

def restore_url(item):
    """Append a URL back into the batch file so it is re-submitted on the
    next run (used when submission failed or was throttled)."""
    urlFile = os.path.join(os.path.join(D_INDIR), P_URL_PATH_FILENAME)
    with open(urlFile, 'a') as f:
        f.write(item + '\n')

def is_url(item, totalURL):
    """True when *item* came from the URL batch and is not also the name of
    an existing file (URL mode must be enabled)."""
    return B_IsURL and (item in totalURL) and (not os.path.isfile(item))

def upload(IsFreeArchieve):
    """Submit every sample (file or URL) found in the input folder.

    Each sample is packed together with a .meta and an extra-info .log file
    into a timestamped .tgz and uploaded.  On success the archive is kept in
    the output cache (unless *IsFreeArchieve* is true) so the report can be
    retrieved later; on failure the original sample/URL is restored so the
    next run retries it.  Per-sample errors are logged and skipped.
    """
    # Flow control: initial flow control
    oFlowControl = FlowControl();
    totalURL = preProcessURL()
    for item in os.listdir(D_INDIR)+totalURL:
        isUrl = False
        try:
            isUrl = is_url(item,totalURL)
            # Flow Control: check available number first
            if V_ROCESSINGLIMIT > 0 and oFlowControl.AvailableNumber <= 0:
                # Throttled: URLs go back into the batch file for next run.
                if isUrl:
                    restore_url(item)
                continue
            elif item == os.path.basename(D_INDIR_CACHE):  # don't submit remain folder
                continue;
            elif V_SAMPLE and item != V_SAMPLE:
                continue;  # just submit the specific

            sample = None
            addUrl = None
            if isUrl:
                #write url to tmp file
                sample = os.path.join(D_INDIR_CACHE, URL_FILENAME_PREFIX)
                _create_file(sample, item)
                # The URL travels base64-encoded in the extra-info file.
                addUrl = base64.b64encode(item)
                logger.info("Find URL sample: %s" % item)
            else:
                sample = os.path.join(D_INDIR, item)
                if _isdir(sample):
                    _handle_err_file(sample, "The sample %s should be a file not a folder" % sample)
                    continue;
                elif os.path.getsize(sample) > 60 * 1024 * 1024 :
                    # Hard protocol limit: 60MB per sample.
                    _handle_err_file(sample, "The sample size %s bytes are over than 60MB" % os.path.getsize(sample))
                    continue;
                else:
                    logger.info("Find FILE sample: %s" % item)
                
            # create tarname: timestamp_sha1.tar.gz
            timestamp = time.strftime("%Y%m%d-%H%M%S", time.localtime())
            samplesha1 = ''

            samplesha1 = _file_sha1(sample)
            # Working copies live in the input cache under the sample's SHA-1.
            samlepfilename_working = os.path.join(D_INDIR_CACHE, samplesha1 + ".dat")
            metafinename_working = os.path.join(D_INDIR_CACHE, samplesha1 + ".meta")
            extrafilename_working = os.path.join(D_INDIR_CACHE, samplesha1 + ".log")
            
            tarname = "{0}_{1}.tgz".format(timestamp, samplesha1)
            
            # put value into package_info
            uploadHeader = Header();
            uploadHeader.ArchiveFilename = tarname
            uploadHeader.ArchiveSHA1 = samplesha1;
            uploadHeader.ContentType = r'application/x-compressed'

            if not isUrl:
                uploadHeader.OrigFileName = item;
            
            # move sample, meta file to working folder

            shutil.move(sample, samlepfilename_working)
            _create_meta(metafinename_working, uploadHeader,isUrl);
            _create_extra_info(extrafilename_working,addUrl);
            
            # compress sample,meta data
            oTar = myLib.Tarfiler(samlepfilename_working, tarname)
            oTar.gz([metafinename_working, extrafilename_working]);
            logger.debug("create sample file %s" % oTar.absfile)
            
            result, info = send_package("Upload", uploadHeader);
            
            if result:
                # Add sample to flow contorl
                oFlowControl.addSubmisstion(samplesha1)
                # GetResultOnly just display the submitted sample's sha1
                if B_QUIET:
                    print "Submit a sample: SHA1:[{0}]".format(samplesha1)
                
                # move the sample to outdir working space
                _Out_Sample_Temp_Folder = os.path.join(D_OUTDIR_CACHE, samplesha1)
                _Out_Sample_Temp_File = os.path.join(_Out_Sample_Temp_Folder, samplesha1 + ".tgz")
                _OrigFileName = os.path.join(_Out_Sample_Temp_Folder, "OrigFileName.txt")
                
                if not _isdir(_Out_Sample_Temp_Folder) and not IsFreeArchieve:
                    os.mkdir(_Out_Sample_Temp_Folder)
                    logger.debug("Create Temp Sample Working Folder %s" % _Out_Sample_Temp_Folder)
                
                # Free Archive without storing the submitted sample
                if(IsFreeArchieve):
                    os.remove(oTar.absfile)
                    logger.debug("remove %s" % (oTar.absfile))
                else:
                    shutil.move(oTar.absfile, _Out_Sample_Temp_File);
                    if isUrl:
                        # Remember the real URL so retrieval can name the report.
                        _create_file(_OrigFileName, item)
                    logger.debug("move %s to %s" % (oTar.absfile, _Out_Sample_Temp_File))
                 
            else:
                logger.error("can't handler file %s trying to recover the sample... " % item)

                # Recovery: put the URL back in the batch file, or copy the
                # working .dat back to its original location.
                if isUrl:
                    restore_url(item)
                else:
                    shutil.copy(samlepfilename_working, sample)
                os.remove(oTar.absfile)
                if _isfile(sample):
                    logger.info("Recovered successfully")
                else:
                    logger.error("Failed to recover")
                
            # remove temp file
            os.remove(metafinename_working)
            os.remove(extrafilename_working)
            os.remove(samlepfilename_working)
            
        except Exception, e:
            logger.error(e)
            logger.error(traceback.format_exc())
    if V_ROCESSINGLIMIT > 0 and oFlowControl.AvailableNumber <= 0:
        logger.error("Too many samples are in processing, the upper limit is {0}".format(V_ROCESSINGLIMIT))
        print "Too many samples are in processing, the upper limit is {0}".format(V_ROCESSINGLIMIT)

def _handle_err_file(OrigFilePath, errormsg, OrigFileName=None, isTar=True):
    """Move a rejected sample into its result folder, optionally tar it, and
    append the error message to error.log in that folder.

    :param OrigFilePath: full path of the offending sample
    :param errormsg:     human-readable reason for the rejection
    :param OrigFileName: result-folder name; defaults to the file's basename
    :param isTar:        compress the moved file and delete the uncompressed copy
    Raises dtasException on any failure.
    """
    try:
        logger.error(errormsg);

        FileName = os.path.basename(OrigFilePath)
        if OrigFileName == None:
            OrigFileName = FileName;

        reportDir = _createResultFolder(OrigFileName);
        errLog = os.path.join(reportDir, 'error.log')

        targetName = os.path.join(reportDir, FileName)
        try:
            shutil.move(OrigFilePath, targetName)
        except:
            # BUG FIX: log before raising -- the original raised first,
            # leaving its logger.error call unreachable.
            logger.error('Fail to move file');
            raise dtasException('Fail to move file');

        if isTar:
            oTar = myLib.Tarfiler(targetName)
            oTar.gz();
            if _isdir(targetName):
                shutil.rmtree(targetName)
            elif _isfile(targetName):
                os.remove(targetName)

        # Append (binary mode) so earlier errors for this sample are kept.
        ofile = open(errLog, "ab")
        ofile.write("%s\t%s" % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), errormsg))
        ofile.close()
    except:
        # BUG FIX: the original passed the traceback as a %-format argument
        # to a message with no placeholder, which the logging module rejects
        # at format time; format it explicitly instead.
        logger.debug('ERROR %s' % traceback.format_exc());
        raise dtasException('_handle_err_file fail');

def _create_meta(metafile, uploadHeader, isUrl):
    """Write the submission .meta file (URL-encoded key=value pairs) that
    accompanies a sample inside the upload archive."""
    try:
        _section = 'Header'
        ClientUUID = _get_config(_section, Header.Out_ClientUUID)
        SourceID = _get_config(_section, Header.Out_SourceID)
        SampleFileSHA1 = uploadHeader.ArchiveSHA1
        # A user-supplied Extra_Info "FileName" overrides the real file name.
        OrigFileName = D_ExtraInfo.get("FileName", uploadHeader.OrigFileName)

        parts = ["ClientUUID={0}&SourceID={1}&SampleFileSHA1={2}&SampleFileExist={3}".format(ClientUUID, SourceID, SampleFileSHA1, 1)]
        if isUrl:
            parts.append("&SampleType=1")
        else:
            parts.append("&OrigFileName=%s" % urllib.quote(myLib.toUTF8(OrigFileName)))
        parts.append("&SkipPrefilter=1" if B_SKIPPREFILTER else "&SkipPrefilter=0")
        parts.append("&immediate=1" if B_IMMEDIATE else "&immediate=0")
        content = "".join(parts)
        logger.debug("[meta info] %s %s " % (metafile, content))

        _create_file(metafile, content)
    except:
        raise dtasException('_create_meta fail');

def _create_extra_info(extrafile, addUrl):
    """Write the .log "extra info" file shipped with the sample archive:
    UTC timestamp, detector description, optional subject, any user-defined
    Extra_Info pairs and (for URL samples) the base64-encoded URL."""
    try:
        _Date = time.strftime(r"%m/%d/%Y %H:%M:%S", time.gmtime(time.mktime(time.localtime())))  # utc format
        _Detectedby = _get_config('Header', Header.Out_SourceName)
        _Subject = r"&Subject={0}".format(V_AFFECT) if V_AFFECT else ''

        content = u'Date={0}&Description={1}{2}'.format(_Date, _Detectedby, _Subject)
        # Customized Extra_Info pairs come after the mandatory fields.
        for key in D_ExtraInfo.keys():
            content += u"&{0}={1}".format(key, urllib.quote(myLib.toUTF8(D_ExtraInfo[key])))
        if addUrl:
            content += u"&{0}={1}".format('URL', urllib.quote(myLib.toUTF8(addUrl)))

        logger.debug("[extra info] %s %s " % (extrafile, content))

        _create_file(extrafile, content)
    except:
        raise dtasException('_create_extra_info fail');
    
def _create_file(filename, info):
    try:        
        ofile = open(filename, "w")
        ofile.write(info)
        ofile.close()        
    except:
        raise dtasException('_create_file fail');

'''
Prepare the runtime environment (working folders)
'''
def setEnv():
    """Validate and prepare the working directories.

    Ensures the in/out folders exist, creates their .working caches and the
    blacklist folder, and refuses to run when the output folder lies inside
    (or equals) the input folder.  Raises dtasException on any problem.
    """
    try:
        if not _isdir(D_OUTDIR):
            logger.error("No Output Folder:%s" % D_OUTDIR)
            raise dtasException("No Output Folder:%s" % D_OUTDIR)
        elif not _isdir(D_OUTDIR_CACHE):
            logger.debug("Create Output Cache Folder:%s" % D_OUTDIR_CACHE)
            os.mkdir(D_OUTDIR_CACHE)

        if not _isdir(D_INDIR):
            logger.error("No Input Folder:%s" % D_INDIR)
            raise dtasException("No Input Folder:%s" % D_INDIR)
        elif not _isdir(D_INDIR_CACHE):
            logger.debug("Create Input Cache Folder:%s" % D_INDIR_CACHE)
            os.mkdir(D_INDIR_CACHE)

        if not _isdir(D_BLDIR):
            os.mkdir(D_BLDIR)

        # outdir is inside (or equal to) indir exactly when every path
        # component of indir matches the corresponding component of outdir
        # (compared case-insensitively for Windows paths).
        indir = D_INDIR.strip(os.sep).split(os.sep)
        outdir = D_OUTDIR.strip(os.sep).split(os.sep)

        cnt = 0
        # BUG FIX: bound the loop by both lists -- the original indexed
        # outdir[i] and crashed with IndexError (reported as "setEnv fail")
        # whenever outdir had fewer components than indir.
        for i in range(min(len(indir), len(outdir))):
            if indir[i].upper() == outdir[i].upper():
                cnt += 1
        logger.debug('%d == %d' % (cnt, len(indir)))
        if cnt == len(indir):
            logger.error('The [D_OUTDIR] directory is located in the [D_INDIR] directory')
            raise dtasException('Same [D_OUTDIR] and [D_INDIR]')
    except:
        logger.error('Fail to initialize the environment')
        raise dtasException('setEnv fail');

def _global_setting():
    # Placeholder for future global configuration; intentionally a no-op.
    pass;

def _check_test_case_env():
    # Placeholder for test-environment validation; intentionally a no-op.
    pass;


'''
Main function and CLI entry points
'''
def exit(err, withUsage=False):
    """Log *err* (when given), optionally print the CLI usage text, and
    terminate the process with exit code -1.

    NOTE: this shadows the builtin exit(); callers in this module rely on
    this signature.
    """
    # Only log a non-empty message.
    if (not None == err) and len(str(err)) > 0:
        #print "ERROR: " + str(err)
        logger.error(str(err));
    if withUsage: 
        # _BL/_BR wrap text in terminal bold markers.
        print _BL + "DESCRIPTION:" + _BR
        print "\t" + _BL + "dtascli" + _BR + " is a utility for sample submission and report retrieval."
        print " "
        print _BL + "SYNOPSIS:" + _BR
        print "\t" + _BL + "dtascli" + _BR + " [-b|-u|-r][--full][--includesample][--url][--quiet]"
        print "                     [indir=<path>|outdir=<path>][blacklist=true|false]"
        print "                     [immediate=true|false][skipprefilter=true|false]"
        print "                     [verbose=true|false][debug=true|false][sleep=<minutes>]"
        print "                     [affectentity=<affectentity>][sample=<path>][sha1=<sha1>]"
        print "                     [processing-limit=<int>]"
        print " "
        
        print _BL + "INTERVAL:" + _BR
        print "\t" + _BL + "dtascli" + _BR + " [-b|-r] [end=<YYYY-MM-DDThh:mm:ss>]"
        print "                          [start=<YYYY-MM-DDThh:mm:ss>][interval-type=[0|1]]"
        # NOTE(review): "riskleve" below looks like a typo for "risklevel";
        # left untouched because usage text is runtime output.
        print "                          [riskleve=[high[,medium[,low[,norisk[,error]]]]]]"
        print "\t" + _BL + "end" + _BR + " the end of interval(UTC)"
        print "\t" + _BL + "start" + _BR + " the start of interval(UTC)"
        print "\t" + _BL + "interval-type" + _BR + " interval of: 0 (submission time)"
        print "\t                             1 (completion time)<default>"
        print "\t" + _BL + "risklevel" + _BR + " indicate rozrating of samples"
        print " "
        
        print _BL + "COMMANDS:" + _BR
        print "\tdtascli [-b] indir=<path> outdir=<path>"
        print "\tdtascli [-b] (indir and outdir are default folders)"
        print " "
        print _BL + "PARAMETER:" + _BR
        print "\t" + _BL + "-b" + _BR + " submit sample and retrieve report"
        print "\t" + _BL + "-u" + _BR + " submit sample"
        print "\t" + _BL + "-r" + _BR + " retrieve report"
        print "\t" + _BL + "--full" + _BR + " retrieve screenshot,pcap and eventlog"
        print "\t" + _BL + "--includesample" + _BR + " retrieve the archived sample with password 'Virus'"
        print "\t" + _BL + "--url" + _BR + "Process url samples in indir's %s(format:one line, one url)"%P_URL_PATH_FILENAME
        print "\t" + _BL + "indir" + _BR + " full path of submission sample folder"
        print "\t" + _BL + "outdir" + _BR + " full path of retrieve report folder"
        print "\t" + _BL + "verbose" + _BR + " http verbose output"
        print "\t" + _BL + "debug" + _BR + "  debug output"
        print "\t" + _BL + "sleep" + _BR + " loop's sleeping time "
        print "\t" + _BL + "timer" + _BR + " sleep seconds, the default is 60s "     
        print "                 (real sleep time = sleep * timer)"
        print "\t" + _BL + "affectentity" + _BR + " the affect entity "
        print "\t" + _BL + "sha1" + _BR + " retrieve a report by sha1 "
        print "\t" + _BL + "sample" + _BR + " specific sample "
        print "\t" + _BL + "skipprefilter" + _BR + "url sample do or not do pre-filter in Usandbox. True, not do URL pre-filter. False, will do URL pre-filter."
        print "\t" + _BL + "immediate" + _BR + "analyze the samples immediately or based on weight. True, analyze samples immediately; False, analyze samples based on weight."
        print "\t" + _BL + "processing-limit" + _BR + " Limit the submission sample count in a batch, the default is 100"
        print " "
        print _BL + "ATTENTION:" + _BR
        print "\t All samples in [indir] folder will be removed after submission"
    sys.exit(-1)

def main():
    # xml.MyXML();
    opts, args = getopt.getopt(sys.argv[1:], "burta", ["group", "full", "includesample", "quiet","source", "submittername", "filename","url","build"])
    # print  opts, args
    
    if len(opts) not in (1, 2, 3, 4) :
        exit(None, True)
    
    _sleeptime = 0;
    _timer = 60;
    
    # parse the opts
    for o, a in opts:
        if o in ['-a']:
            global B_Archive
            B_Archive = True;
        # if o in ['--ntevent']:
        #     ntHandler = logging.handlers.NTEventLogHandler('dtascli')
        #     # ntHandler = AustinHandler();
        #     ntHandler.setFormatter(logging.Formatter(formatter))
        #     ntHandler.setLevel(logging.ERROR)
        #     logger.addHandler(ntHandler)
        if o in ['--quiet']:
            global B_QUIET;
            B_QUIET = True;
            logger.setLevel(logging.ERROR)  # just show the error message to avoid noisy
    
    # parse the args
    for arg in args:
        if len(str(arg).split('=')) > 0 and str(arg).split('=')[0] == P_AFFECT:
            n = P_AFFECT;
            m = '='.join(str(arg).split('=')[1:len(str(arg).split('='))])
            if len(m) > 510:
                logger.error('{0} length must be less than 512: the current length is '.format(P_AFFECT, len(m)))
                return;
        elif len(str(arg).split('=')) == 2:
            (n, m) = str(arg).split('=');    
        else:
            exit(None, True)
        
        # paremeters
        if n == P_INDIR:  # assign indir if user inupt
            global D_INDIR, D_INDIR_CACHE
            D_INDIR = m;
            D_INDIR_CACHE = os.path.join(D_INDIR, '.working');
        elif n == P_OUTDIR:  # assign outdir if user input
            global D_OUTDIR, D_OUTDIR_CACHE
            D_OUTDIR = m;
            D_OUTDIR_CACHE = os.path.join(D_OUTDIR, '.working');
        elif n == P_AFFECT:  # assign affectentity if user input
            global V_AFFECT
            V_AFFECT = urllib.quote(myLib.toUTF8(m));     
        elif n == P_SLEEPTIME:  # assign sleep time for loop if user input
            if not str(m).isdigit(): 
                exit("sleep should be a number", False)
            _sleeptime = int(m);
        elif n == P_TIMER:  # assign sleep timer(seconds) for loop if user input
            if not str(m).isdigit(): 
                exit("timer should be a number", False)
            _timer = int(m);           
        elif n == P_BLACKLIST:  # retrieve blacklist if user ask
            global B_Get_BL;
            if str(m).lower() == 'true':
                B_Get_BL = True;
            elif str(m).lower() == 'false':
                B_Get_BL = False;
            else:
                exit("blacklist should be 'true' or 'false'", False)   
        elif n == P_INTERVALTYPE:  # retrieve by interval if user ask
            if m not in ('0', '1'):
                exit("interval-type should be 0 or 1", False)
            global V_INTERVALTYPE
            V_INTERVALTYPE = m    
        elif n == P_END:  # retrieve by interval if user ask
            if time.mktime(time.strptime(m, "%Y-%m-%dT%H:%M:%S")):
                global V_INTERVALEND
                V_INTERVALEND = m      
        elif n == P_START:  # retrieve by interval if user ask
            if time.mktime(time.strptime(m, "%Y-%m-%dT%H:%M:%S")):
                global V_INTERVALSTART
                V_INTERVALSTART = m      
        elif n == P_VERBOSE:  # display version for http request and response if user enable it
            global B_VERBOSE;
            if str(m).lower() == 'true':
                B_VERBOSE = True;
            elif str(m).lower() == 'false':
                B_VERBOSE = False;
            else:
                exit("verbose should be 'true' or 'false'", False)
        elif n == P_DEBUG:  # show debug message if user enable it
            global B_DEBUG;
            if str(m).lower() == 'true':
                logHandler.setLevel(logging.DEBUG)
            elif str(m).lower() != 'false':
                exit("debug should be 'true' or 'false'", False)
        elif n == P_SHA1:  # retrieve the specified SHA1 information
            global V_SHA1;
            V_SHA1 = m
        elif n == P_SAMPLE:  # copy the specified sample to indir and submit it, it can prevent delete the sample
            global V_SAMPLE
            V_SAMPLE = os.path.basename(m)
            logger.info('The specified sample is {0}'.format(m))
            for o, a in opts:
                if o in ['-b', '-u']:
                    if(_isfile(m)):                        
                        inFile = os.path.join(D_INDIR, V_SAMPLE)
                        logger.info('copy {0} to {1}'.format(m, inFile))
                        if not (_isfile(inFile)):
                            shutil.copy(m, inFile)                     
                    else:
                        logger.error('can not find {0}'.format(m))
        elif n == P_PROTOCOL:  # use the specified web protocol
            global V_PROTOCOL;
            V_PROTOCOL = m 
        elif n == P_METHOD:  # use the specified http method
            global V_METHOD;
            V_METHOD = m  
        elif n == P_BODY:  # provide the specified body
            global V_BODY;
            V_BODY = m 
        elif n == P_RISK:  # retrieve by risk
            global V_RISK;
            V_RISK = m 
        elif n in L_EXTRA_INFO:  # submit with the sepeicfied extra info
            global D_ExtraInfo;
            D_ExtraInfo[n] = m
        elif n == P_ROCESSINGLIMIT:  # assign concurrent processing limit, 0 is unlimit
            if not str(m).isdigit(): 
                exit("{0} should be a number".format(P_ROCESSINGLIMIT), False)
            global V_ROCESSINGLIMIT
            V_ROCESSINGLIMIT = int(m)
        elif n == P_IMMEDIATE:
            global B_IMMEDIATE;
            if str(m).lower() == 'true':
                B_IMMEDIATE = True;
            elif str(m).lower() == 'false':
                B_IMMEDIATE = False;
            else:
                exit("immediate should be 'true' or 'false'", False)
        elif n == P_SKIPPREFILTER:
            global B_SKIPPREFILTER;
            if str(m).lower() == 'true':
                B_SKIPPREFILTER = True;
            elif str(m).lower() == 'false':
                B_SKIPPREFILTER = False;
            else:
                exit("skipprefilter should be 'true' or 'false'", False)
        else:
            logger.error("unreconized parameter: %s" % n)
            exit(None, True)
    
    '''Print the configuration'''
    logger.info('**** welcome to use submission tool v%s ****' % CURRENT_VERSION)
    logger.info('indir: {0}'.format(D_INDIR))
    logger.info('outdir: {0}'.format(D_OUTDIR))
    logger.info('Server: {0}'.format(_get_config('DTAS', 'Host')));
    logger.info('API Key: {0}'.format(_get_config('DTAS', 'ApiKey')));
    
    """register your tool"""
    if not register():
        sys.exit(1)
    """get smaple list if required"""
    retrieveSL();
    
        
    try:
        """set evironment"""
        setEnv();
        while 1:
            runDTAS(opts)
            if not _sleeptime:
                break;   
            print '==========='
            time.sleep(_sleeptime * _timer)
                     
    except:
        logger.error('Error when do main loop, and Exit')
        raise dtasException('main fail') ;
    finally:
        """unregister your tool"""
        unregister();

if __name__ == '__main__':
    try:
        sys.exit(main())
    except (SyntaxError, getopt.GetoptError) as err:
        # bad command line: both handlers were identical, so the two
        # clauses are merged; True asks exit() to print the usage text
        exit(err, True)
    except Exception as err:
        # 'as' replaces the deprecated 'except E, err' comma syntax
        # (valid since Python 2.6, required by Python 3)
        exit(err)

