#!/usr/local/bin/python

# Compute per-vertex and whole-graph metrics for input graphs (igraph) and
# store the results either in a MySQL database or in flat text files.

import igraph as ig
import numpy
#from numpy import *
#from pylab import *
import threading
import sys,getopt
import db,os, glob
import math
import time
from optparse import OptionParser
from multiprocessing import Pool
from multiprocessing import Lock

options = None
graphs = {}
graphs_lock = Lock()

skipped_files = []

methods = ["degree","betweenness","closeness","shell_index","evcent","eccentricity","constraint","pagerank","transitivity_local_undirected"]
titles = ["degree","betweenness","closeness","shell_index","evcent","eccentricity","constrain","pagerank","cc"]
post_processor = { "betweenness": "vcount_normalizer(g,'betweenness')"}
#methods = ["transitivity_local_undirected"]
#titles = ["cc"]


    
def timeit(method):

    def timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()

        print '%r (%r, %r) %2.2f sec' % \
              (method.__name__, args, kw, te-ts)
        return result

    return timed

def assortativity(graph, degrees=None):
    if degrees is None: degrees = graph.degree()
    degrees_sq = [deg**2 for deg in degrees]
        
    m = float(graph.ecount())
    num1, num2, den1 = 0, 0, 0
    for source, target in graph.get_edgelist():
        num1 += degrees[source] * degrees[target]
        num2 += degrees[source] + degrees[target]
        den1 += degrees_sq[source] + degrees_sq[target]

    num1 /= m
    den1 /= 2*m
    num2 = (num2 / (2*m)) ** 2

    return (num1 - num2) / (den1 - num2)

def run_graph_method( g, i ):
    ts = time.time()
    gr = graphs[g]
    m=getattr(gr,methods[i])
    print '%r...' % (m.__name__)
    v=m()
    te = time.time()
    print '...%r %2.2f sec' % \
              (m.__name__, te-ts)
    return v
        
    
# this method normalizes the results by (n - 1)(n - 2) / 2
def vcount_normalizer( g, method ):
    vcount = g.vcount()
    val = float((vcount-1)*(vcount-2))/2
    print "normalizing %s by %.3f..."%(method,val)
    l = g.vs[method]
    for i in xrange(len(l)):
        l[i] = float(l[i])/val
    g.vs[method] = l
    print "normalizing %s...completed"%method
    

def create_table( tblName, methods ):
    #print "create table %s in datasource %s"%(tblName,datasource)
    dbdata=options.db.split(":")
    conn,cursor = db.create_cursor(host=dbdata[0],port=dbdata[1],user=dbdata[2],passwd=dbdata[3])
    #tblName=dbdata[4]
    
    createQuery = "CREATE TABLE IF NOT EXISTS %s (name VARCHAR(30) NOT NULL, year INT NOT NULL, month INT NOT NULL, asn INT NOT NULL, "%tblName
    for method in methods:
        createQuery += "%s FLOAT, "%method
    
    createQuery += "PRIMARY KEY (name,year,month,asn) ) ENGINE=MyISAM;"

    print "creating table: %s"%createQuery	
    cursor.execute( createQuery )
    db.close_cursor(conn,cursor)

def create_graph_table( tblName, methods ):
    dbdata=options.db.split(":")
    conn,cursor = db.create_cursor(host=dbdata[0],port=dbdata[1],user=dbdata[2],passwd=dbdata[3])
    #tblName=dbdata[4]
    
    createQuery = "CREATE TABLE IF NOT EXISTS %s (name VARCHAR(40) NOT NULL, year INT NOT NULL, month INT NOT NULL, "%tblName
    for method in methods:
        createQuery += "%s FLOAT, "%method
    
    createQuery += "PRIMARY KEY (name,year,month) ) ENGINE=MyISAM;"

    print "creating table: %s"%createQuery	
    cursor.execute( createQuery )
    db.close_cursor(conn,cursor)


def store_results_in_db( g, voutput, filename, methods, titles ):
    dbdata=options.db.split(":")
    conn,cursor = db.create_cursor(host=dbdata[0],port=dbdata[1],user=dbdata[2],passwd=dbdata[3])
    tblName=voutput
    
    # filename: bla/bla/dimes_ases_2007_1.ncol
    (head,nopath)=os.path.split(filename)
    (name,ext)=os.path.splitext(nopath)
    desc = name.split("_")
    print "breaking path: ",nopath
    print "desc: ",desc
    print "tblName: ",tblName
    if len(desc)>=4:
        name = desc[0]
        year = desc[2]
        month=desc[3]
    else:
        name = filename
        year = "0"
        month = "0"
    
    vs=g.vs
    
    query = "REPLACE INTO %s (name, year, month, asn, "%tblName
    for title in titles:
        query += "%s,"%title
    query= query[:-1]
    query += ") VALUES "
            
    batch = 10
    for i in xrange(0,len(vs),batch):
        insertQuery = query
        for j in xrange(i,min(len(vs),i+batch)):
            values = "( '%s',%s,%s,"%(name,year,month)
            values += "%s,"%vs[j]["name"]
            #print "asn=%s"%(vs[j]["name"])
            for method in methods:
                try:
                    val = vs[j][method]
                    if val!=None:  
			# we mark nan as -1 to differentiate from 0
			if math.isnan(val):
			    values += "-1,"
			else:
                            values += "%s,"%val
                    else:
                        values += "0,"
			print "error?!"
                except:
		    print "wtf?! error!!"
                    values += "0,"
            values = values[:-1]
            insertQuery +=values
            insertQuery += "),"
        
        insertQuery = insertQuery[:-1]

        if options.verbose:
            print "insert query: %s"%insertQuery
        cursor.execute(insertQuery)
    
    db.close_cursor(conn,cursor)

def store_results_in_file( g, voutput, filename, methods, titles ):
    
    if options.verbose:
        print "storing to file %s"%voutput
    f = open( voutput,"w" )
    (head,nopath)=os.path.split(filename)
    (name,ext)=os.path.splitext(nopath)
    desc = name.split("_")
    print "breaking path: ",nopath
    print "desc: ",desc

    if len(desc)>=4:
        name = desc[0]
        year = desc[2]
        month=desc[3]
    else:
        name = filename
        year = "0"
        month = "0"
    """ 
    # filename: bla/bla/dimes_ases_2007_1.ncol
    nopath = filename.split("/")[-1]
    desc = nopath.split("_");
    if len(desc)==4:
        name = desc[0]
        year = desc[2]
        month=desc[3].split(".")[0]
    else:
        name = filename
        year = "0"
        month = "0"
    """    
    vs=g.vs
    
    query = "name, year, month, asn, "
    for title in titles:
        query += "%s,"%title
    query= query[:-1]
    f.write( query+"\n" )
            
    batch = 10
    for i in xrange(0,len(vs),batch):
        insertQuery = ""
        for j in xrange(i,min(len(vs),i+batch)):
            values = "'%s',%s,%s,"%(name,year,month)
            values += "%s,"%vs[j]["name"]
            #print "asn=%s"%(vs[j]["name"])
            for method in methods:
                try:
                    val = vs[j][method]
                    if val!=None and not math.isnan(val):
                        values += "%s,"%val
                    else:
                        values += "0,"
                except:
                    values += "0,"
            values = values[:-1]
            insertQuery +=values
            insertQuery += "\n"
        
        insertQuery = insertQuery[:-1]

        if options.verbose:
            print insertQuery
        f.write( insertQuery )
    
    f.close()


def store_results_in_graph_db( g, goutput, filename, values ):
    dbdata=options.db.split(":")
    conn,cursor = db.create_cursor(host=dbdata[0],port=dbdata[1],user=dbdata[2],passwd=dbdata[3])
    tblName=goutput
    (head,nopath)=os.path.split(filename)
    (name,ext)=os.path.splitext(nopath)
    desc = name.split("_")
    print "breaking path: ",nopath
    print "desc: ",desc

    if len(desc)>=4:
        name = desc[0]
        year = desc[2]
        month=desc[3]
    else:
        name = filename
        year = "0"
        month = "0"
 
    vs=g.vs
    
    query = "REPLACE INTO %s (name, year, month, "%tblName
    for key in values:
        query += "%s,"%key
    query= query[:-1]
    query += ") VALUES ('%s',%s,%s,"%(name,year,month)
            
    for key in values:
        query += "%s,"%(values[key])
        

   
    query = query[:-1]
    query += ")"
    if True:#options.verbose:
        print "insert query: %s"%query
    cursor.execute(query)
    
    db.close_cursor(conn,cursor)


def store_results_in_graph_file( g, goutput, filename, values ):
    f = open( goutput,"w" )
    (head,nopath)=os.path.split(filename)
    (name,ext)=os.path.splitext(nopath)
    desc = name.split("_")
    print "breaking path: ",nopath
    print "desc: ",desc

    if len(desc)>=4:
        name = desc[0]
        year = desc[2]
        month=desc[3]
    else:
        name = filename
        year = "0"
        month = "0"
        
    vs=g.vs
    
    query = "name, year, month, "%tblName
    for key in values:
        query += "%s,"%key
    query= query[:-1]
    f.write( query+"\n" )
    
    query = "'%s',%s,%s,"%(name,year,month)
            
    for key in values:
        query += "%s,"%(values[key])
        
    query = query[:-1]

    f.write( query+"\n" )
    
    f.close()

def analysis_exists( filename, tblName ):
    if options.db is not None:
    	dbdata=options.db.split(":")
	conn,cursor = db.create_cursor(host=dbdata[0],port=dbdata[1],user=dbdata[2],passwd=dbdata[3])
    	(head,nopath)=os.path.split(filename)
    	(name,ext)=os.path.splitext(nopath)
    	desc = name.split("_")

    	if len(desc)>=4:
        	name = desc[0]
        	year = desc[2]
        	month=desc[3]
    	else:
        	name = filename
        	year = "0"
        	month = "0"
 
    	query = "SELECT COUNT(*) FROM %s WHERE name='%s' AND year=%s AND month=%s"%(tblName,name,year,month)
    	num = db.get_value( cursor, query )
	#print num
    	if num is not None and num>0:
		print "skipping %s cause %s/%s is already stored"%(filename,month,year)
		result = True
    	else:
		result = False
    	db.close_cursor(conn,cursor)
    return result
    

def process_graph( filename, vertices_output, graph_output,type="ncol" ):
    # skipping is a bit risky, cause threads might collide, or more common, the vertices tables will be a mess
    if not options.skip or (options.skip and not analysis_exists( filename, graph_output )):
    	process_graph_vertices( filename,vertices_output,type )
    if not options.skip or (options.skip and not analysis_exists( filename, vertices_output )):
        process_complete_graph( filename, graph_output)

def vertex_exists( g, v_name ):
    try:
        v = g.vs[v_name]
        return True
    except:
        return False
        

@timeit
def read_graph( filename, dg = None ):
    print "...reading graph %s"%filename
    g=None
    got_names=False
    
    # barabsi-n-m
    if filename.startswith("barabasi"):
        v = filename.split("-")
        g = ig.Graph.Barabasi(int(v[1]),int(v[2]))
        
    # erdos-n-p-m
    elif filename.startswith("erdos"):
        v = filename.split("-")
        g = ig.Graph.Erdos_Renyi(int(v[1]),float(v[2]),int(v[3]))
    
    # random-n-m
    elif filename.startswith("random"):
        v = filename.split("-")
        g = ig.Graph.GRG(int(v[1]),int(v[2]))
    
    else:
        try:
            g = ig.read( filename )
            got_names=True
        except:
            g=None
        
    # just give simple names
    if g is not None and not got_names:
        i=1
        print "...generating vertices names"
        for v in g.vs:
            v["name"] = str(i)
            i+=1
       
    # making the graph undirected
    num_h = 0
    try:
        g.to_undirected() 
        if dg:
            # so actually, we already have a directed graph, let's go over the complete edge list, and verify each!
            undirected_edges = g.get_edgelist()

            for (as1,as2) in undirected_edges:
                need_to_add = False

                if not vertex_exists( dg, as1 ):
                    dg.add_vertex( as1 )
                    need_to_add = True
                if not vertex_exists( dg, as2 ):
                    dg.add_vertex( as2 )
                    need_to_add = True
                
                # if a vertex is missing (need_to_add), or the edge is completely missing from the directed graph
                if need_to_add or ( not dg.are_connected(as1,as2) and not dg.are_connected(as2,as1)):
                    # we need to add - use degree heuristics
                    d1 = g.degree( as1 )
                    d2 = g.degree( as2 )
                    ratio = float(d1)/float(d2)
                    dist = d1-d2
                    
                    # there was a heuristic inference
                    num_h += 1
                    
                    if ratio>1.5 and dist>100:
                        print "Heuristic edge: %d (%d) --> %d (%d)"%(as1,d1,as2,d2)
                        dg.add_edge( (as1,as2) )
                    if ratio<=1.5 and dist<=100:
                        dg.add_edge( (as2,as1) )
                        print "Heuristic edge: %d (%d) <-- %d (%d)"%(as1,d1,as2,d2)
                    else:
                        # p2p
                        dg.add_edge( (as1,as2) )
                        dg.add_edge( (as2,as1) )
                        print "Heuristic edge: %d (%d) <--> %d (%d)"%(as1,d1,as2,d2)
                    
            # overwrite g with dg
            g=dg
            
            print "directed graph completed, heuristically inferred %d links"%num_h
            
            
    except:
        print "reading of graph %s failed"%filename
        return None
    
    print "graph %s ready, holding %d vertices and %d edges"%(filename, g.vcount(),g.ecount())
    return g

@timeit
def read_relationships( filename ):
    print "reading relationships from %s"%filename
    f=open( filename, "r" )
    v = []
    e = []
    
    dg=ig.Graph(directed=True)
    
    for line in f:
        if not line.startswith("#"):
            link = line.strip().split("|")
            as1 = int(link[0])
            as2 = int(link[1])
            link_type = int(link[2])
            
            if as1 not in v:
                v.append( as1 )
            if as2 not in v:
                v.append( as2 )
                
            if link_type==-1:
                e.append( (as2,as1) )
            elif link_type==1:
                e.append( (as1,as2) )
            elif link_type==0 or link_type==2:
                e.append( (as1,as2) )
                e.append( (as2,as1) )
            
    f.close()
    print "...completed reading file."
        
    try:
        print "vertices to add: %d"%len(v)
        for vv in v:
            dg.add_vertices( vv )
        print "...vertices ok"
        print "edges to add: %d"%len(e)
        dg.add_edges( e[:30000] )
        dg.add_edges( e[30000:60000] )
        dg.add_edges( e[60000:] )
        print "...edges ok"
    except Exception as inst:
        print type(inst)
        print inst.args
        print inst
        
    
    print "...graph built."

    print "relationships graph %s ready, holding %d vertices and %d edges"%(filename, dg.vcount(),dg.ecount())
    return dg

def process_graph_vertices( filename, output, type="ncol" ):
    
    g=graphs[filename]
    pool = Pool(processes=2)
    
    results = {}
    for i in xrange(len(methods)):
        results[ methods[i] ]=pool.apply_async( run_graph_method, (filename,i) )
            
            
    if options.verbose:
        print "waiting for methods to complete..."
        
    for method in results:
        v = results[method].get()
        g.vs[method]=v
        if method in post_processor:
                # nasty but working!
                exec post_processor[method]

    if options.verbose:
        print "all methods are done."
        
    if options.db:
        try:	
            create_table( output, titles )
        except:
            print "failed creating table!"

	print "calling: store_results_in_db, with output ",output
        store_results_in_db( g,output,filename,methods,titles  )
    else:
        store_results_in_file( g,output,filename,methods,titles  )
        
    return g


def process_complete_graph( filename, output ):
    
    g=graphs[filename]
    
    vs = g.vs
    
    values={}
    """    
    methods = ["vertices","edges","density","diameter","max_degree","num_clusters","cc","avg_cc","assortativity",#"omega","alpha",
"avg_bc","max_bc","max_bc_vertex","max_kcore","kcore_size","avg_path_len"]
    """
    if options.verbose:
        print "...processing graph methods"
    
    # calc stuff graph
    values["vertices"]=g.vcount()
    if options.verbose:
        print "\t...edges"
    values["edges"]=g.ecount()
    if options.verbose:
        print "\t...density"
    values["density"]=g.density()
    
    if options.verbose:
        print "\t...max-degree"
    values["max_degree"]=g.maxdegree()
    if options.verbose:
        print "\t...num clusters"
    c = g.clusters()
    values["num_clusters"]=len(c)
    if options.verbose:
        print "\t...modularity"
    values["modularity"]=c.q
    if options.verbose:
        print "\t...cc"
    values["cc"]=g.transitivity_undirected()
    if options.verbose:
        print "\t...avg cc"
    values["avg_cc"]=g.transitivity_avglocal_undirected()
    if options.verbose:
        print "\t...assortativity"
    values["assortativity"]=assortativity(g)
    """
    if options.verbose:
        print "\t...omega"
    values["omega"]=g.omega()
    """
    """
    if options.verbose:
        print "\t...alpha"
    values["alpha"]=g.alpha()
    """
    if options.verbose:
        print "\t...diameter"
    values["diameter"]=g.diameter()
    
    if options.verbose:
        print "\t...betweenness"
    bc = g.vs["betweenness"]
    max_bc = max(bc)
    max_bc_vertices = [g.vs[idx] for idx, vb in enumerate(bc) if vb == max_bc]
    
    values["avg_bc"]=numpy.mean(bc)
    values["max_bc"]=max_bc
    values["max_bc_vertex"]=max_bc_vertices[0]["name"]
    
    if options.verbose:
        print "\t...pagerank"
    pr = g.vs["pagerank"]
    max_pr = max(pr)
    max_pr_vertices = [g.vs[idx] for idx, vb in enumerate(pr) if vb == max_pr]
    
    values["avg_pr"]=numpy.mean(pr)
    values["max_pr"]=max_pr
    values["max_pagerank_vertex"]=max_pr_vertices[0]["name"]
    

    if options.verbose:
        print "\t...cores"
    kc = g.vs["shell_index"]
    max_k = max(kc)
    max_k_vertices = [g.vs[idx] for idx, ks in enumerate(kc) if ks == max_k]
    
    values["max_kcore"]=max_k
    values["kcore_size"]=len(max_k_vertices)
    values["avg_path_len"]=g.average_path_length()

    if options.db:
        try:	
            create_graph_table( output, values.keys() )
        except:
            print "failed creating table!"

        store_results_in_graph_db( g,output,filename,values  )
    else:
        store_results_in_graph_file( g, output, filename, values  )

 

def file_processor(filename, voutput, goutput):
    global graphs
    global graphs_lock
    global skipped_files

    print "processing file %s..."%filename
    try:
    	if filename not in graphs:
            dg = None
            if options.directed:
                dg = read_relationships( options.directed )
       		g = read_graph(filename, dg)
		if g is not None:
			graphs_lock.acquire()
        		graphs[filename] = g
 			graphs_lock.release()
			print "graphs dict now has %d graphs in it"%(len(graphs))
    	print "processing graph..." 
    	process_graph( filename, voutput, goutput )
    	print "done graph %s."%filename
	return True
    except:
	print "skipping file %s"%filename
	skipped_files.append( filename )
	return False

def path_processor( folder, voutput_file, goutput_file ):
    global graphs
    global graphs_lock
    files = os.listdir(folder)    
    files = sorted([os.path.join(folder,file) for file in files])
        
    #pool = Pool(processes=10)

    for filename in files:
        #pool.apply_async( file_processor, ( filename, voutput_file,goutput_file ) )
        file_processor( filename, voutput_file,goutput_file ) 
    #pool.close()
    #pool.join()
    print "that's it."
    if len(skipped_files)>0:
	print "the following files were not processed correctly:"
        for filename in skipped_files:
    		print filename

def process( options, args ):
        if options.output is None:
            vout = options.voutput
            gout = options.goutput
        else:
            vout = options.output +"_vertices"
            gout = options.output +"_graph"
    
        if options.verbose:
            print vout,gout
            
        if vout is None or gout is None:
            print "must specify output names, either using --output, or by specifying both --voutput and --goutput"
            return 0
        else:
            if os.path.isdir(args[0]):
                path_processor( args[0], vout, gout )
            elif os.path.isfile(args[0]):
                file_processor( args[0], vout, gout )
            else:
                file_processor( args[0], vout, gout )
            
        return 1

def main(argv=None):
        global options
        if argv is None:
                argv = sys.argv[1:]
        
        parser = OptionParser(usage="""usage: %prog [options] graphFile""")
        parser.add_option("--voutput", default=None, type="string", help="Output file for vertex analysis")
        parser.add_option("--goutput", default=None, type="string", help="Output file for graph analysis")
        parser.add_option("--output", default=None, type="string", help="Name of output file")
        parser.add_option("--skip", default=False, action="store_true", help="Whether to skip an analysis if already present in output")
        parser.add_option("--verbose", default=False, action="store_true", help="Verbosity")
        parser.add_option("--db", default=None, type="string", help="Parameters for storing in a db: [server:port:user:passwd]")
        parser.add_option("--directed", default=None, type="string", help="Use directed graph, values is edge direction")

        (options, args) = parser.parse_args(argv)
        
        if len(args) < 1:
                parser.print_help()
                return 1

        if not process(options, args):
                parser.print_help()
                return 1
    
if __name__ == "__main__":
    sys.exit(main())




# ./graph_analysis.py  --db=b4:3306:codeUnlimited:superCode --output=DIMES_PLAYGROUND.dimes_ases data/topology/dimes/
# ./graph_analysis.py  --db=b4:3306:codeUnlimited:superCode --output=DIMES_PLAYGROUND.iplane_ases data/topology/iplane/
# ./graph_analysis.py  --db=b7:3306:codeUnlimited:superCode --output=ANALYSIS.iplane_ases data/topology/iplane_ases_2006_6_fast.ncol
