#!/usr/bin/python

'''
Description: This script is a driver/wrapper around the ldappm executable (built from ldappm.cpp)

Created on: Jun 27, 2011

@author: Clint P. George 
'''

import os
import glob 
import subprocess

from collections import defaultdict


def build_all(source_dir, recompile=False):
    """Build the ldappm binary by invoking make in *source_dir*.

    Args:
        source_dir: directory that contains the Makefile.
        recompile: when True, run ``make clean`` first to force a
            full rebuild.

    Returns:
        The sum of the exit statuses of the make commands that were
        run (0 when everything succeeded).
    """
    status = 0

    if recompile:
        # Drop previous build artifacts so everything is rebuilt.
        status = subprocess.Popen(['make', 'clean'], cwd=source_dir).wait()

    status += subprocess.Popen(['make'], cwd=source_dir).wait()

    return status


def run_Metropolis_search(source_dir, data_dir, data_file, vocab_file, saved_beta_file, max_iter, burn_in_period, output_prefix):
    """Run the ldappm binary with the 'ts_hrw' (Metropolis search) algorithm.

    Args:
        source_dir: directory containing the built ``ldappm`` binary;
            used as the working directory of the subprocess.
        data_dir: directory passed as ``--output_dir``.
        data_file: full path to the LDA-C formatted corpus (``--data``).
        vocab_file: full path to the vocabulary file (``--vocab``).
        saved_beta_file: full path to a previously saved beta matrix
            (``--saved_beta``).
        max_iter: number of sampler iterations (``--max_iter``).
        burn_in_period: burn-in iteration count (``--burn_in``).
        output_prefix: prefix for the files ldappm writes
            (``--output_prefix``).

    Returns:
        The exit status of the ldappm process.
    """
    # Build the full argument vector in one literal instead of a long
    # chain of append() calls; numeric options must be stringified.
    lda_cmd = [
        './ldappm',
        '--algorithm', 'ts_hrw',
        '--data', data_file,                # full path
        '--vocab', vocab_file,              # full path
        '--data_format', 'ldac',
        '--saved_beta', saved_beta_file,
        '--max_iter', str(max_iter),
        '--burn_in', str(burn_in_period),
        '--output_prefix', output_prefix,
        '--output_dir', data_dir,
    ]

    # print() call form works identically on Python 2 and 3 for a
    # single argument (the original `print lda_cmd` statement is a
    # SyntaxError under Python 3).
    print(lda_cmd)

    exit_status = subprocess.Popen(lda_cmd, cwd=source_dir).wait()

    return exit_status

def fetch(root_dir, batch_dir, query_dir, saved_beta):
    """Collect valid topic-search inputs from *query_dir*.

    Scans *query_dir* for files with a '.supertweets' or '.query'
    extension.  A candidate is valid only when its batch directory
    ``root_dir/batch_dir/<basename>/`` contains both
    ``<basename>.vocabulary`` and ``<basename>.<saved_beta>``;
    otherwise a diagnostic is printed and the candidate is skipped.

    Args:
        root_dir: root directory that holds the batch directories.
        batch_dir: name of the batch subdirectory under *root_dir*.
        query_dir: directory containing the query/supertweets files.
        saved_beta: extension suffix of the saved beta files.

    Returns:
        A list of dicts with keys 'query_dir', 'document_file',
        'base_name', 'extension', 'vocab_file' and 'saved_beta_file'.
    """
    ts_inputs = []

    for infile in os.listdir(query_dir):

        basename, extension = os.path.splitext(infile)

        if extension not in ('.supertweets', '.query'):
            continue

        # Compute each batch path once instead of re-joining it for
        # every existence check and assignment.
        batch_path = os.path.join(root_dir, batch_dir, basename)
        vocab_file = os.path.join(batch_path, basename + '.vocabulary')
        beta_file = os.path.join(batch_path, basename + '.' + saved_beta)

        is_valid_batch = True
        ts_input = {
            'query_dir': query_dir,
            'document_file': os.path.join(query_dir, infile),
            'base_name': basename,
            'extension': extension,
        }

        if not os.path.exists(vocab_file):
            print('vocabulary file not exists in %s' % batch_path)
            is_valid_batch = False
        else:
            ts_input['vocab_file'] = vocab_file

        print(beta_file)

        if not os.path.exists(beta_file):
            print('beta file not exists in %s' % batch_path)
            is_valid_batch = False
        else:
            ts_input['saved_beta_file'] = beta_file

        # Adds to the queue only when both supporting files exist.
        if is_valid_batch:
            ts_inputs.append(ts_input)

    return ts_inputs

if __name__ == '__main__':

    # --- hard-coded experiment configuration -------------------------
    work_dir = os.getcwd()
    root_dir = '/home/clint/Dropbox/TREC/'
    batch_dir = 'batch'
    query_dir = '/home/clint/Dropbox/TREC/query/1'
    output_prefix = 'ts'
    build_files = True
    max_iter = 50
    burn_in_period = 40
    saved_beta = 'supertweets_beta'

    # Build the ldappm binary in the current working directory
    # (recompile=False: incremental build only).
    if build_files:
        build_all(work_dir, False)

    # Gather every query batch that has both a vocabulary and a saved
    # beta file, then run the Metropolis search on each of them.
    ts_inputs = fetch(root_dir, batch_dir, query_dir, saved_beta)

    for ts in ts_inputs:
        # print() call form keeps this runnable under Python 3 (the
        # original `print a, b, c` statement is a SyntaxError there).
        print('Process is added to the job queue: %s %s %s ...'
              % (ts['document_file'], ts['vocab_file'], ts['saved_beta_file']))
        output_prefix = ts['base_name'] + ts['extension']
        run_Metropolis_search(work_dir, ts['query_dir'], ts['document_file'],
                              ts['vocab_file'], ts['saved_beta_file'],
                              max_iter, burn_in_period, output_prefix)