'''
Created on Feb 14, 2010

@author: Roni
'''
import graph
import logging
import sys
import cliques.randomCliqueSearch
import orchid
import re
from cliques.randomCliqueSearch import randomCliqueSearch
from cliques.knownDegreeSearch import knownDegreeSearch
from cliques.cliqueStarSearch import cliqueStarSearch
from cliques.cliqueSearchLowerbound import CliqueSearchLowerbound
from cliques.mdp.mdpBasedSearch import mdpBasedSearch
from cliques.mdp.limitedSampling import LimitedSampling
from cliques.closest.closestCliqueSearch import ClosestCliqueSearch
from cliques.closest.breadthFirstSearch import BreadthFirstSearch
import timerManager
import os
import urllib2
import time
import random
#import urllib2.HTTPError
import onlineCliqueSearch
from cliques.closest.ongoingCliqueSearch import OngoingCliqueSearch


# File-system layout for experiment inputs/outputs, relative to the working
# directory the script is launched from.
RESOURCE_RESULTS_DIR = "../resources/results"      # result tables, clique dumps, exported graphs
RESOURCE_NANOTOXIC_DIR = "../resources/nanotoxic"  # input data set directory
RESOURCE_ROOT_PAPERS_FILE = "%s/papersOver3.txt" % RESOURCE_NANOTOXIC_DIR       # paper titles used as search roots
RESOURCE_ROOT_NODES_FILE = "%s/SourceNodes40.txt" % RESOURCE_NANOTOXIC_DIR      # "<index>\t<node>" lines, ordered by index
RESOURCE_ROOT_CKG_FILE = "%s/SourceNodes40.dot" % RESOURCE_NANOTOXIC_DIR        # partially explored graph in DOT format
RESOURCE_ROOT_EXPLORED_FILE = "%s/ExploredNodes40.txt" % RESOURCE_NANOTOXIC_DIR # ids of nodes already explored

class OnlineClosestCliqueSearch(onlineCliqueSearch.OnlineCliqueSearch):
    ''' A class that implements an online search using the unknown graph clique algorithms.

    Thin subclass: all behaviour comes from OnlineCliqueSearch; it only gives
    a module-local name to the searcher these scripts construct.
    '''
    def __init__(self, clique_algorithm):
        # clique_algorithm: the clique-search strategy object (e.g.
        # OngoingCliqueSearch, BreadthFirstSearch), forwarded unchanged.
        onlineCliqueSearch.OnlineCliqueSearch.__init__(self,clique_algorithm)
   
def main():
    """Set up DEBUG logging to stdout and resume the clique search from the
    pre-explored 40-node snapshot files."""
    logging.basicConfig(level=logging.DEBUG)
    stdout_handler = logging.StreamHandler(sys.stdout)
    logging.root.addHandler(stdout_handler)
    continue_search(
        RESOURCE_ROOT_NODES_FILE,
        RESOURCE_ROOT_CKG_FILE,
        RESOURCE_ROOT_EXPLORED_FILE,
    )

def load_old_results(old_configurations, result_file_name):
    """Append previously recorded configurations from a results file.

    Each data row of the tab-separated results file yields a tuple
    (root, k, iteration, algorithm) appended to old_configurations, so
    already-completed experiment configurations can be skipped on re-runs.

    :param old_configurations: list to append (root, k, iteration, alg) tuples to
    :param result_file_name: path of the tab-separated results file
    """
    # 'with' guarantees the handle is closed even if a row fails to parse
    # (the original leaked the file object on exception).
    with open(result_file_name, 'r') as in_file:
        next(in_file, None)  # skip the header row
        for line in in_file:
            line_parts = line.strip().split("\t")
            if len(line_parts) < 4:
                continue  # tolerate blank/truncated rows instead of raising IndexError
            configuration = (line_parts[0].strip(),
                             int(line_parts[1].strip()),
                             int(line_parts[2].strip()),
                             line_parts[3].strip())
            old_configurations.append(configuration)

def continue_search(root_nodes_filename, root_ckg_filename, root_explored_filename):
    ''' Search for a k-clique starting from a partial exploration of the unknown graph.

    Resumes the online search from snapshot files: the ordered root nodes, the
    partially explored graph (DOT), and the list of already-explored node ids.
    Runs each algorithm for k in 3..7, appending one row per configuration to
    the results file and any clique found to the cliques file.  Configurations
    already present in the results file are skipped.

    :param root_nodes_filename: file of "<index>\t<node>" lines, ordered by index
    :param root_ckg_filename: DOT file with the partially explored graph
    :param root_explored_filename: file whose first tab field per line is an explored node id
    '''
    # cliques.utils is used below but only cliques sub-modules are imported at
    # module level; import it explicitly so the attribute is guaranteed to exist.
    import cliques.utils

    old_configurations = []
    result_file_name = '%s/closestTo40.txt' % RESOURCE_RESULTS_DIR
    if not os.path.exists(result_file_name):
        # First run: create the results file with its column header.
        out_file = open(result_file_name, 'w')
        try:
            out_file.write('root\t k\t iteration\t alg\t runtime\t iterations\t explore.Runtime\t ch.Runtime\t expand.Runtime\t clique\t Success\n')
        finally:
            out_file.close()
    else:
        logging.info("Loading old results file...")
        load_old_results(old_configurations, result_file_name)

    out_file = open(result_file_name, 'a')

    # Open cliques output file (append mode keeps earlier findings).
    cliques_file_name = '%s/cliquesFoundUsing40.txt' % RESOURCE_RESULTS_DIR
    cliques_out_file = open(cliques_file_name, 'a')

    # Load root pages; each line is "<index>\t<node>" and must be in index order.
    logging.info("Loading root pages from file...")
    root_nodes = []
    root_nodes_file = open(root_nodes_filename, 'r')
    try:
        for index, line in enumerate(root_nodes_file):
            line_parts = line.strip().split('\t')
            if index != int(line_parts[0]):  # sanity check on snapshot ordering
                raise ValueError("Root nodes are not ordered - indexing problem")
            root_nodes.append(line_parts[1])
    finally:
        # BUG FIX: the reader handles were never closed in the original.
        root_nodes_file.close()

    logging.info("Loading root ckg from file...")
    root_ckg = cliques.utils.import_graph(root_ckg_filename, 'dot')

    # Load the list of nodes that have already been explored.
    logging.info("Loading root explored from file...")
    root_explored = []
    root_explored_file = open(root_explored_filename, 'r')
    try:
        for line in root_explored_file:
            root_explored.append(int(line.split('\t')[0]))
    finally:
        root_explored_file.close()

    #algorithm_tuple = [BreadthFirstSearch(), ClosestCliqueSearch()]
    algorithm_tuple = [OngoingCliqueSearch()]

    graph_counter = 0
    exploration_limit = 500
    iteration = 0
    for k in xrange(3, 8):
        for clique_algorithm in algorithm_tuple:
            graph_counter = graph_counter + 1
            configuration = ('Configuration', k, iteration, clique_algorithm.__str__())
            if configuration in old_configurations:
                continue  # already completed in a previous run

            # BUG FIX: the original instantiated OnlineOngoingCliqueSearch,
            # which is not defined anywhere in this module (NameError).
            searcher = OnlineClosestCliqueSearch(clique_algorithm)
            try:
                searcher.timer_manager.create_timer('total_runtime')
                # BUG FIX: these three timers were never created here, although
                # their totals are written to the results file below (mirrors
                # the timer setup in run_search).
                searcher.timer_manager.create_timer('exploration_runtime')
                searcher.timer_manager.create_timer('choose_node_runtime')
                searcher.timer_manager.create_timer('expand_runtime')
                logging.info("Starting %d-clique search with %s !!" % (k, clique_algorithm))
                searcher.continue_run(root_nodes, root_ckg, root_explored, k, exploration_limit)
            except urllib2.HTTPError as error:
                # Remote host blocked us: log the response and abort the run.
                logging.exception(error)
                logging.error(error.read())
                logging.info("Blocked after %d nodes" % len(searcher.ckg.nodes()))
                out_file.close()
                cliques_out_file.close()
                sys.exit()

            # Gather results
            record = dict()
            if searcher.algorithm.done:
                record['clique'] = searcher.output_clique_members_titles()
            else:
                record['clique'] = []
            record['iterations'] = searcher.algorithm.iteration
            record['total_runtime'] = searcher.timer_manager.get_total_time('total_runtime')
            # BUG FIX: these record keys were read below but never written.
            record['exploration_runtime'] = searcher.timer_manager.get_total_time('exploration_runtime')
            record['choose_node_runtime'] = searcher.timer_manager.get_total_time('choose_node_runtime')
            record['expand_runtime'] = searcher.timer_manager.get_total_time('expand_runtime')

            # One tab-separated result row per configuration.
            out_file.write("%s\t " % 'Configuration')
            out_file.write("%d\t " % k)
            out_file.write("%d\t " % iteration)
            out_file.write("%s\t " % clique_algorithm)
            out_file.write("%d\t " % record['total_runtime'])
            out_file.write("%d\t " % record['iterations'])
            out_file.write("%d\t " % record['exploration_runtime'])
            out_file.write("%d\t " % record['choose_node_runtime'])
            out_file.write("%d\t " % record['expand_runtime'])
            out_file.write("%s\t" % record['clique'])
            out_file.write("%s\n" % searcher.algorithm.done)
            out_file.flush()

            # Output the clique if found
            cliques_out_file.write("%d,%d,%s,%s\n" % (k, iteration, clique_algorithm, searcher.algorithm.done))
            cliques_out_file.write("-----------\n")
            if not searcher.algorithm.done:
                cliques_out_file.write("%d-clique not found\n" % k)
            else:
                for member in record['clique']:
                    cliques_out_file.write("%s\n" % member)
            cliques_out_file.write("-----------\n")
            cliques_out_file.flush()

            # Output the graph (note: the upstream method name is 'explort_ckg').
            searcher.explort_ckg("%s/Graph%d.dot" % (RESOURCE_RESULTS_DIR, graph_counter),
                                 host_file_name="%s/Graph%d.nodes.txt" % (RESOURCE_RESULTS_DIR, graph_counter))

            # Random 10-20 minute pause between configurations so the remote
            # service does not throttle/block us.
            logging.info("Sleeping...")
            sleep_time = (600 + random.Random().random() * 600)
            time.sleep(sleep_time)
            logging.info("Awake!")
    out_file.close()
    cliques_out_file.close()
        
def run_search():
    ''' Search for a k-clique starting from a given list of nodes.

    Builds Google Scholar query URLs from the paper-title file, then for each
    k in 3..7 and 10 iterations runs each algorithm from a rotated start-node
    list, appending one row per configuration to the results file and any
    clique found to the cliques file.  Configurations already present in the
    results file are skipped.
    '''
    old_configurations = []
    result_file_name = '%s/onlineClosestCliqueSearch.txt' % RESOURCE_RESULTS_DIR
    if not os.path.exists(result_file_name):
        # First run: create the results file with its column header.
        out_file = open(result_file_name, 'w')
        try:
            out_file.write('root\t k\t iteration\t alg\t runtime\t iterations\t explore.Runtime\t ch.Runtime\t expand.Runtime\t clique\t Success\n')
        finally:
            out_file.close()
    else:
        # CONSISTENCY FIX: the original duplicated load_old_results() inline;
        # reuse the shared helper instead.
        load_old_results(old_configurations, result_file_name)
    out_file = open(result_file_name, 'a')

    # Open cliques output file (append mode keeps earlier findings).
    cliques_file_name = '%s/cliquesFound.txt' % RESOURCE_RESULTS_DIR
    cliques_out_file = open(cliques_file_name, 'a')

    #algorithm_tuple = [ClosestCliqueSearch()]#,randomCliqueSearch(),knownDegreeSearch(),cliqueStarSearch()]
    #algorithm_tuple = [randomCliqueSearch(),knownDegreeSearch(),cliqueStarSearch()]

    # Load root pages: one paper title per line, turned into a Scholar query URL.
    start_nodes = []
    source_pages_file = open(RESOURCE_ROOT_PAPERS_FILE, 'r')
    try:
        for line in source_pages_file:
            line = line.strip()
            if line.endswith(".pdf"):
                line = line[:-4]  # drop the extension so the title is queried
            start_nodes.append("http://scholar.google.com/scholar?q=%s&lr=lang_en&num=100" % line)
    finally:
        # BUG FIX: the handle was never closed in the original.
        source_pages_file.close()

    algorithm_tuple = [BreadthFirstSearch(len(start_nodes) + 1)]

    graph_counter = 0
    exploration_limit = 500
    for k in xrange(3, 8):
        for iteration in xrange(10):  # several iterations to ignore caching
            # Shuffle the start nodes by shifting all items one place.
            start_nodes = start_nodes[1:] + start_nodes[0:1]
            for clique_algorithm in algorithm_tuple:
                graph_counter = graph_counter + 1
                configuration = ('Configuration', k, iteration, clique_algorithm.__str__())
                if configuration in old_configurations:
                    continue  # already completed in a previous run

                searcher = OnlineClosestCliqueSearch(clique_algorithm)
                try:
                    searcher.timer_manager.create_timer('total_runtime')
                    searcher.timer_manager.create_timer('exploration_runtime')
                    searcher.timer_manager.create_timer('choose_node_runtime')
                    searcher.timer_manager.create_timer('expand_runtime')
                    logging.info("Starting %d-clique search with %s !!" % (k, clique_algorithm))
                    searcher.run(start_nodes[0], k, exploration_limit, start_nodes[1:])
                except urllib2.HTTPError as error:
                    # Remote host blocked us: log the response and abort the run.
                    logging.exception(error)
                    logging.error(error.read())
                    logging.info("Blocked after %d nodes" % len(searcher.ckg.nodes()))
                    out_file.close()
                    cliques_out_file.close()
                    sys.exit()

                # Gather results
                record = dict()
                if searcher.algorithm.done:
                    record['clique'] = searcher.output_clique_members_titles()
                else:
                    record['clique'] = []
                record['iterations'] = searcher.algorithm.iteration
                record['total_runtime'] = searcher.timer_manager.get_total_time('total_runtime')
                record['exploration_runtime'] = searcher.timer_manager.get_total_time('exploration_runtime')
                record['choose_node_runtime'] = searcher.timer_manager.get_total_time('choose_node_runtime')
                record['expand_runtime'] = searcher.timer_manager.get_total_time('expand_runtime')

                # One tab-separated result row per configuration.
                out_file.write("%s\t " % 'Configuration')
                out_file.write("%d\t " % k)
                out_file.write("%d\t " % iteration)
                out_file.write("%s\t " % clique_algorithm)
                out_file.write("%d\t " % record['total_runtime'])
                out_file.write("%d\t " % record['iterations'])
                out_file.write("%d\t " % record['exploration_runtime'])
                out_file.write("%d\t " % record['choose_node_runtime'])
                out_file.write("%d\t " % record['expand_runtime'])
                out_file.write("%s\t" % record['clique'])
                out_file.write("%s\n" % searcher.algorithm.done)
                out_file.flush()

                # Output the clique if found
                cliques_out_file.write("%d,%d,%s,%s\n" % (k, iteration, clique_algorithm, searcher.algorithm.done))
                cliques_out_file.write("-----------\n")
                if not searcher.algorithm.done:
                    cliques_out_file.write("%d-clique not found\n" % k)
                else:
                    for member in record['clique']:
                        cliques_out_file.write("%s\n" % member)
                cliques_out_file.write("-----------\n")
                cliques_out_file.flush()

                # Output the graph (note: the upstream method name is 'explort_ckg').
                searcher.explort_ckg("%s/Graph%d.dot" % (RESOURCE_RESULTS_DIR, graph_counter),
                                     host_file_name="%s/Graph%d.nodes.txt" % (RESOURCE_RESULTS_DIR, graph_counter))

                # Random 10-20 minute pause between configurations so the
                # remote service does not throttle/block us.
                logging.info("Sleeping...")
                sleep_time = (600 + random.Random().random() * 600)
                time.sleep(sleep_time)
                logging.info("Awake!")
    out_file.close()
    cliques_out_file.close()
# Script entry point: run main() only when executed directly, not on import.
if __name__ == '__main__':
    main()