#!/usr/bin/env python
# http://code.google.com/p/beaconbits/
#
# This is the analyzer; it can be used to evaluate a REDIS database of flows for beacon-like timing
#
# Once you have validated that the collector has collected and populated data, you can run this script;
# optionally add "-t top" after the script name

# This code is released under license GPL2
# http://www.gnu.org/licenses/gpl-2.0.html


import sys
import redis
import formatter

options = None
worklist = []
tempset = []
magic_values = 15,29,30,31,59,60,61,89,90,91,119,120,121,239,240,241,299,300,301,400,514,600,720,900,1200,1600,1800,2400,3600
mvalue = False
toplist1 = ()
toplist = []   


#  minimal count to consider for beaconbits
# Do not set below 3 as it will cause a failure
set_minvalue = 12

# Maximum number for consideration
# large sets cause performance issues
# this threshold is set at 30000 for demo purposes but you might want to set really high for a first run
# set low, to 5000 if you have confidence in checking through netflow or other top analytical methods
set_maxvalue = 5000

# This is the minimal duration estimate from first to last packet within the dataset as calculated.
# 900 seconds in 15 minutes, aggressive would be 300
set_duration_estimate = 900

#  Allowance for highest visitor count
# This variable estimates how many host might be connecting to a given IP
# this is only an estimate by taking the total attempts by any given host against the total for all host
# this assumes that popular sites get more host visiting, say more then 5, 10, or 100
# This assumes also that attackers don't completely own your network and are below a threshold for beacons
# say only 5 host might be compromised in the last attack, thus set it to 6 or 7 to be safe
set_visitor = 3

# TOP value for highest compensated variance
# componsated variance is the maximum acceptable variance for consideration
# setting to 10 really gives the most idealistic beacons
# setting to 300 is fairly broad but useful while evaluating effectiveness
set_comp_var = 30 

# TOP values for compvar
# compansated variance divided by time gives a factor that allows for tolerance that is quite different then standard deviation
# set to 140 to get a large factor of beacons, and divide by half as needed, probably don't want to drop below 35
# if you find the top 
set_compvar_time_factor = 70

# An interesting factor is taking the componsated variance and dividing it by the number of seconds in the duration estimate



''' Useful algorithms'''
def compensated_variance(data):
    # sourced from HTTP://en.wikipedia.org/w/index.php?title=Algorithms_for_calculating_variance
    n = 0
    sum1 = 0
    for x in data:
        n = n + 1
        sum1 = sum1 + int(x)
    mean = sum1/n
 
    sum2 = 0
    sum3 = 0
    for x in data:
        sum2 = sum2 + (int(x) - mean)**2
        sum3 = sum3 + (int(x) - mean)
    variance = (sum2 - sum3**2/n)/(n - 1)
    return variance

def online_variance(data):
    # sourced from HTTP://en.wikipedia.org/w/index.php?title=Algorithms_for_online_variance    
    n = 0
    mean = 0
    M2 = 0
 
    for x in data:
        n = n + 1
        delta = int(x) - mean
        mean = mean + delta/n
        M2 = M2 + delta*(int(x) - mean)
 
    variance_n = M2/n
    variance = M2/(n - 1)
    return (variance, variance_n)

def population_fix(data):
        goods=[]
        diff = int(data[1]) - int(data[0])
        for each in data:
                workvalue = int(each) - int(diff)
                goods.append(workvalue)
                diff = each
        goods.pop(0)
        return(goods)


def main():
        global options, mvalue
        print 'executing with the following values'
        print 'Minimal number of sessions (must be three or more)',set_minvalue
        print 'Maximum number of sessions to consider',set_maxvalue
        print 'Minimal number of seconds between first and last for consideration',set_duration_estimate
        print 'Maximum number of host visiting a given external IP address (estmated)',set_visitor
        print 'Maximum compensated Variance for consideration',set_comp_var
        print 'Maximum time for consideration where Variance is a factor',set_compvar_time_factor
        print 'All M values',magic_values
        '''open a connection to the local redis database'''        
        r = redis.StrictRedis(host='localhost', port=6379, db=0)
        roundone = r.keys('SET:*')
        for each in roundone:
            newcount = r.scard(each)
            if newcount >= set_minvalue and newcount <= set_maxvalue:
                worklist.append(each)


        #print r.info()


        for each in worklist:
            tab_queue = 0
            cummulative_value = 0
            inner_count = 0
            visitors = 0
            pair_count = r.scard(each)
            tempset = r.sort(each, alpha=True)

            ''' HIGH MATH, statistical functions here'''
            if int(tempset[1]) - int(tempset[0]) >0:
                compvar = compensated_variance(population_fix(tempset))
                onlinevar = online_variance(population_fix(tempset))
            else:
                print 'broken set', each

            sets_sub = each.split(':')
            set_src_ip = sets_sub[1]
            set_dst_ip = sets_sub[2]
            set_dst_port = sets_sub[3]
            set_date = sets_sub[4]
            src_count = r.get('ip_src:'+set_src_ip)
            dst_count = r.get('ip_dst:'+set_dst_ip)

            first_seen = tempset[0]
            next_value = tempset[1]
            work_value = int(next_value) - int(first_seen)

            for tempset_sub in tempset:
                if tempset_sub == first_seen:
                    tab_queue = int(tempset_sub)
                    cummulative_value = work_value
                    inner_count = 1

                else:
                    key_value = int(tempset_sub) - tab_queue                                                                
                    cummulative_value = cummulative_value + key_value
                    tab_queue = int(tempset_sub)
                    work_mean = cummulative_value
                    inner_count = inner_count + 1

            ''' LOW MATH and intial condition testing '''           
            mean = cummulative_value / pair_count
            duration_est = (mean * pair_count) / 60
            if duration_est == 0:
                    duration_est = 1
            if dst_count != None:
                visitors =   int(dst_count) / int(pair_count)                
            else:
                visitors = 1
            if duration_est >= set_duration_estimate and visitors <= set_visitor:
                ''' LOW MATH CONDITIONAL TESTING'''
                if int(mean) in magic_values:
                    mvalue = True
                    toplist1 = (set_src_ip,set_dst_ip,set_dst_port,set_date,pair_count,mean,duration_est,compvar,onlinevar[0],onlinevar[1],src_count,dst_count,visitors,mvalue)
                    toplist.append(toplist1)
                else:
                        mvalue = False

                if compvar <= set_comp_var:
                    toplist2 = (set_src_ip,set_dst_ip,set_dst_port,set_date,pair_count,mean,duration_est,compvar,onlinevar[0],onlinevar[1],src_count,dst_count,visitors,mvalue)
                    if toplist2 not in toplist:
                        toplist.append(toplist2)
                else:
                    # This factor was experimental and remains useful
                    if int(compvar) / duration_est  <= set_compvar_time_factor:
                        toplist2 = (set_src_ip,set_dst_ip,set_dst_port,set_date,pair_count,mean,duration_est,compvar,onlinevar[0],onlinevar[1],src_count,dst_count,visitors,mvalue)
                        if toplist2 not in toplist:
                            toplist.append(toplist2)
                # flush key_value
                key_value = []

        ''' printing top values'''
        toplist.sort()
        for each in toplist:
            print "{0:15} {1:15} {2:7} {3:8} {4:6} {5:6} {6:4} {7:8} {8:8} {9:8} {10:6} {11:6} {12:6} {13:6}".format(each[0],each[1],each[2],each[3],each[4],each[5],each[6],each[7],each[8],each[9],each[10],each[11],each[12],each[13])

        print "Finished"
if __name__ == '__main__':
        main()
