# Copyright (C) 2013 Robby Zeitfuchs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import logging
import re

#Automater-Imports
import httplib2
import urllib
import urllib2

from lib.cuckoo.common.config import Config

log = logging.getLogger(__name__)

class Automator:
    """Enriches network indicators extracted from a PCAP (domains and IP
    addresses) with reputation data scraped from several public services:
    FortiGuard, URLVoid, IPVoid, AlienVault and Robtex.
    """

    def __init__(self, input):
        """Creates a new instance.
        @param input: input from PCAP file; may contain a "domains" list of
            dicts (with "domain" and "ip" keys) and a "hosts" list of IPs.
        """
        self.input = input
        # IP addresses collected (from URLVoid reports) for further analysis.
        self.ip = []
        # Per-service result accumulators.
        self.fortiGuard = []
        self.urlvoid = []
        self.ipvoid = []
        self.alienvault = []
        self.robtex = []
        # Dictionary containing all the results of this processing.
        self.results = {}

    def get_proxy(self):
        """Builds the proxy mapping for urllib2 from the community config.
        @return: dict usable with urllib2.ProxyHandler.
        """
        community = Config().community
        pserver = community.automator_proxy_server
        pport = str(community.automator_proxy_port)
        puser = community.automator_proxy_user
        ppass = community.automator_proxy_pass
        return {'http': 'http://' + puser + ':' + ppass + '@' + pserver + ':' + pport}

    def _prepare_connection(self):
        """Enables HTTP debug output and, when a proxy is configured,
        installs a global urllib2 opener that routes through it.

        Extracted into a helper because every lookup method previously
        duplicated this setup verbatim.
        """
        httplib2.debuglevel = 4
        if Config().community.automator_proxy:
            proxy = urllib2.ProxyHandler(self.get_proxy())
            opener = urllib2.build_opener(proxy, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler)
            urllib2.install_opener(opener)

    def _fortiURL(self, urlInput):
        """Fetches the FortiGuard web-filter category for a domain/URL and
        appends the result to self.fortiGuard.
        @param urlInput: domain or URL to categorize.
        """
        self._prepare_connection()

        conn = urllib2.urlopen("http://www.fortiguard.com/ip_rep.php?data=" + urlInput + "&lookup=Lookup")
        content2String = conn.read()

        rpd = re.compile('h3\sstyle\=\"float:\sleft\"\>Category:\s(.+)\<\/h3', re.IGNORECASE)
        categories = sorted(re.findall(rpd, content2String))

        if categories:
            for category in categories:
                self.fortiGuard.append(urlInput + " Categorization: " + category)
        else:
            # No category element found on the page.
            self.fortiGuard.append(urlInput + " Categorization: Uncategorized")

    def _urlvoid(self, urlInput):
        """Queries urlvoid.com for a domain and appends the parsed report
        (host IP, country, HTTP response code, website status, blacklist
        hits) to self.urlvoid.  Discovered host IPs are also queued in
        self.ip for the later IP-based lookups.
        @param urlInput: domain to scan.
        """
        self._prepare_connection()

        conn = urllib2.urlopen("http://urlvoid.com/scan/" + urlInput)
        content2String = conn.read()

        # When urlvoid serves an error page instead of a cached report,
        # submit the domain for a fresh scan.  (The previous check looked
        # for the literal upper-case "ERROR" in the findall result, which
        # the matched text "An Error occurred" never contains, so the
        # resubmission effectively never happened.)
        rpderr = re.compile('An\sError\soccurred', re.IGNORECASE)
        if re.findall(rpderr, content2String):
            urlvoid = ('http://www.urlvoid.com/')
            raw_params = {'url': urlInput, 'Check': 'Submit'}
            params = urllib.urlencode(raw_params)
            request = urllib2.Request(urlvoid, params, headers={'Content-type': 'application/x-www-form-urlencoded'})
            page = urllib2.urlopen(request)
            content2String = str(page.read())

        rpd = re.compile('title=\"Find\swebsites\shosted\shere\"\><strong\>(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}).+', re.IGNORECASE)
        hostIPs = sorted(re.findall(rpd, content2String))

        rpd = re.compile('color..red..DETECTED..font...td..td..a.rel..nofollow..href.\"(.{6,120})\"\stitle', re.IGNORECASE)
        blacklists = sorted(re.findall(rpd, content2String))

        rpd = re.compile('alt\=\"flag\".+\>(.+)\<\/td\>', re.IGNORECASE)
        countries = sorted(re.findall(rpd, content2String))

        rpd = re.compile('HTTP\sResponse\sCode\<\/td\>\<td\>\<img\ssrc=.{1,100}\salt=".{5}"\s\/\>(.+)\<\/td\>\<\/tr\>', re.IGNORECASE)
        responseCodes = sorted(re.findall(rpd, content2String))

        rpd = re.compile('\<h3\sclass=\"detected_website\"\>(.+)\<\/h3\>', re.IGNORECASE)
        websiteStatus = sorted(re.findall(rpd, content2String))

        urlResult = []

        if hostIPs:
            for address in hostIPs:
                urlResult.append({"IP" : address})
                # Queue the host IP for the later IP reputation lookups.
                self.ip.append(address)
        else:
            urlResult.append({"IP" : "Host IP Address is not listed"})

        if countries:
            for country in countries:
                urlResult.append({"CountryCode" : 'Country: ' + country})
        else:
            urlResult.append({"CountryCode" : "No Country listed"})

        if responseCodes:
            for code in responseCodes:
                urlResult.append({"HTTPResponseCode" : 'HTTP-Response-Code: ' + code})
        else:
            urlResult.append({"HTTPResponseCode" : 'HTTP-Response-Code not listed.'})

        if websiteStatus:
            for status in websiteStatus:
                urlResult.append({"WebsiteStatus" : status})
        else:
            urlResult.append({"WebsiteStatus" : 'The website is not blacklisted and looks safe to use.'})

        if blacklists:
            for blacklist in blacklists:
                urlResult.append({"Blacklist" : 'Host is listed in blacklist at: ' + blacklist})
        else:
            urlResult.append({"Blacklist" : 'Host is not listed in a blacklist'})

        self.urlvoid.append({"url" : urlInput, "urlResult" : urlResult})

    def _ipvoid(self, ipInput):
        """Queries ipvoid.com for an IP address and appends the parsed
        report (blacklist status/hits, ISP, geographic location) to
        self.ipvoid.
        @param ipInput: IP address to scan.
        """
        self._prepare_connection()

        conn = urllib2.urlopen("http://ipvoid.com/scan/" + ipInput)
        content2String = conn.read()

        # Resubmit the IP for a fresh scan when the cached report page is
        # an error page (same fix as in _urlvoid: the old upper-case
        # "ERROR" comparison never matched "An Error occurred").
        rpderr = re.compile('An\sError\soccurred', re.IGNORECASE)
        if re.findall(rpderr, content2String):
            ipvoid = ('http://www.ipvoid.com/')
            raw_params = {'ip':ipInput,'go':'Scan Now'}
            params = urllib.urlencode(raw_params)
            request = urllib2.Request(ipvoid, params, headers={'Content-type':'application/x-www-form-urlencoded'})
            page = urllib2.urlopen(request)
            content2String = str(page.read())

        rpd = re.compile('Detected\<\/font\>\<\/td..td..a.rel..nofollow..href.\"(.{6,70})\"\stitle\=\"View', re.IGNORECASE)
        blacklists = sorted(re.findall(rpd, content2String))

        rpd = re.compile('\<tr\>\<td\>Blacklist\sStatus\<\/td\>\<td\>\<span\sclass=\"blacklist_status_.{3,4}\">(.+)\<\/span\>\<\/td\>\<\/tr\>', re.IGNORECASE)
        blacklistStatus = sorted(re.findall(rpd, content2String))

        rpd = re.compile('ISP\<\/td\>\<td\>(.+)\<\/td\>\<\/tr\>\<tr\>\<td\>Continent', re.IGNORECASE)
        isps = sorted(re.findall(rpd, content2String))

        rpd = re.compile('Country\sCode.+flag\"\s\/\>\s(.+)\<\/td\>\<\/tr\>\<tr\>\<td\>Latitude', re.IGNORECASE)
        geoLocations = sorted(re.findall(rpd, content2String))

        ipResult = []

        if blacklistStatus:
            for status in blacklistStatus:
                ipResult.append({"BlacklistStatus" : status})
        else:
            ipResult.append({"BlacklistStatus" : 'No Blacklist status'})

        if blacklists:
            for blacklist in blacklists:
                ipResult.append({"Blacklist" : 'Host is listed in blacklist at: ' + blacklist})
        else:
            ipResult.append({"Blacklist" : 'Host is not listed in a blacklist'})

        if isps:
            for isp in isps:
                ipResult.append({"ISP" : 'The ISP for this IP is: ' + isp})
        else:
            ipResult.append({"ISP" : 'No ISP listed'})

        if geoLocations:
            for location in geoLocations:
                ipResult.append({"GEOLocation" : 'Geographic Location: ' + location})
        else:
            ipResult.append({"GEOLocation" : 'No GEO location listed'})

        self.ipvoid.append({"ip" : ipInput, "ipResult" : ipResult})

    def _alienvault(self, ipInput):
        """Checks whether an IP is listed in the AlienVault IP reputation
        database and appends a human-readable verdict to self.alienvault.
        @param ipInput: IP address to look up.
        """
        self._prepare_connection()

        url = "http://labs.alienvault.com/labs/index.php/projects/open-source-ip-reputation-portal/information-about-ip/?ip=" + ipInput
        conn = urllib2.urlopen(url)
        content2String = conn.read()

        # The page reports "IP not found" when the address is unknown.
        if re.search('.*IP not found.*', content2String) is None:
            self.alienvault.append(ipInput + ' is listed in AlienVault-Database: ' + url)
        else:
            self.alienvault.append(ipInput + ' is not listed in AlienVault IP reputation database')

    def _robtex(self, ipInput):
        """Collects DNS names (A records) pointing at an IP from robtex.com
        and appends them to self.robtex.
        @param ipInput: IP address to look up.
        """
        self._prepare_connection()

        conn = urllib2.urlopen("http://robtex.com/" + ipInput)
        content2String = conn.read()

        rpd = re.compile('host\.robtex\.com.+\s\>(.+)\<\/a\>', re.IGNORECASE)
        candidates = sorted(re.findall(rpd, content2String))

        ipResult = []
        for candidate in candidates:
            # Skip short scrape artifacts and the queried address itself.
            if len(candidate) > 4 and candidate != ipInput:
                ipResult.append({"ARecord" : candidate})
        if not ipResult:
            # Default entry; previously it was only added when findall
            # returned nothing at all, not when every match was filtered.
            ipResult.append({"ARecord" : "This IP does not resolve to a domain"})

        self.robtex.append({"ip" : ipInput, "ipResult" : ipResult})

    def _checkIP(self, ipAdress):
        """Decides whether an IP address should be submitted to the
        reputation services.  Excludes empty strings, 192.168.x.x
        addresses and Google's public DNS 8.8.8.8.
        @param ipAdress: IP address string.
        @return: True if the address should be analyzed, False otherwise
            (the old implementation implicitly returned None here).
        """
        # TODO: move the exclusion list into the configuration.
        if not ipAdress:
            return False
        return "192.168." not in ipAdress and "8.8.8.8" not in ipAdress

    def _isInURLVoid(self, ipAdress):
        """Returns True if the IP was already collected from a URLVoid
        report (and therefore already analyzed via self.ip).
        @param ipAdress: IP address string.
        """
        return ipAdress in self.ip

    def _analyzeIP(self, ipInput):
        """Runs all three IP reputation lookups for a single address."""
        self._ipvoid(ipInput)
        self._alienvault(ipInput)
        self._robtex(ipInput)

    def _processIP(self):
        """Analyzes every collected/supplied IP address and stores the
        IPVoid, AlienVault and Robtex results in self.results.
        """
        # IPs harvested from URLVoid reports.
        for address in self.ip:
            self._analyzeIP(address)
        # IPs resolved for the PCAP's domains.
        if "domains" in self.input and isinstance(self.input["domains"], list):
            for entry in self.input["domains"]:
                address = entry.get("ip")
                # Guard: str(None) would otherwise be submitted as the
                # literal string "None", which passes _checkIP.
                if address is None:
                    continue
                targetInput = str(address)
                if self._checkIP(targetInput) and not self._isInURLVoid(targetInput):
                    self._analyzeIP(targetInput)
        # Hosts contacted in the PCAP.
        if "hosts" in self.input and isinstance(self.input["hosts"], list):
            for entry in self.input["hosts"]:
                targetInput = str(entry)
                if self._checkIP(targetInput) and not self._isInURLVoid(targetInput):
                    self._analyzeIP(targetInput)
        self.results["IPVoid"] = self.ipvoid
        self.results["Alienvault"] = self.alienvault
        self.results["Robtex"] = self.robtex

    def _processDomain(self):
        """Analyzes every domain seen in the PCAP via FortiGuard and
        URLVoid and stores the results in self.results.
        """
        if "domains" in self.input and isinstance(self.input["domains"], list):
            for entry in self.input["domains"]:
                domain = entry.get("domain")
                # Guard: skip entries without a domain name (str(None)
                # would otherwise be queried as the literal "None").
                if domain is None:
                    continue
                targetInput = str(domain)
                self._fortiURL(targetInput)
                self._urlvoid(targetInput)

            self.results["FortiGuard"] = self.fortiGuard
            self.results["URLVoid"] = self.urlvoid

    def run(self):
        """Process Automator.
        @return: dict with Automator analysis data.
        """
        self._processDomain()
        self._processIP()

        return self.results
