#!/usr/bin/python

import sys
import psycopg2
import os
import subprocess
import datetime
import time
import argparse

## Command-line interface. NB: parse_args() executes at import time, so this
## module is meant to be run as a script, not imported.
parser = argparse.ArgumentParser(description= """

DESCRIPTION
    Performs the following steps:
    - Read the fastqc table in postgres db to get a list of the files with
      quality metrics produced by FastQC.
    - Read dirs in READ_DIRS to get a list of all the current fastq and bam
      files.
    - For files in READ_DIRS not found in fastqc table, execute FastQC
    - Copy the newly created fastqc files to /tmp/fastqc/ and import them to
      postgres using fastqc_to_pgtable.py 

ARGUMENTS:

USAGE:
    ## Execute on the machine hosting postgres!!
    update_fastqc.py > /Users/berald01/fastqc/update_fastqc.log
    
    ## Only process given fastq files:
    update_fastqc.py -q /path/to/file/fastq.fq.gz /path/to/file/fastq2.fq.gz

DEPENDS-ON:
    - fastqc_to_pgtable.py
    - File ~/.psycopgapss for connection to sblab.
    - get_file_stats2.py
DEPENDS-ON-ME:
    - update_fastqc.sh (a cron job)

TODO:
    - Simpler handling of remote hosts. Just add an option --hostname and
      --repository which will be used to scan for files.
    - Make sure that input files differing only in the extension don't have
      the outputs overwritten! E.g. if
      myfile.fq.gz
      myfile.fq.bam
      The fastqc output will be myfile.fq_fastqc.zip for both!
      See bamqc_batch_lustre.py to handle this.
   
""", formatter_class= argparse.RawTextHelpFormatter)

parser.add_argument('--fastq', '-q',
                   required= False,
                   nargs= '+',
                   default= None,
                   help='''One or more fastq file(s) (including path) for which
to execute fastqc and upload to postgres. If not present, all the missing files will be processed.
Currently these file *must* be on the 'remote' host.
                   ''')

parser.add_argument('--hostname', '-H',
                   required= False,
                   default= '143.65.169.175',
                   help='''Hostname where to connect to for files in --repository or
--fastq.
                   ''')

parser.add_argument('--repository', '-r',
                   required= False,
                   default= '/data01/sblab/users/berald01/repository',
                   help='''Directory to scan recursively for files not already
in postgres. Ignored if --fastq is not None.
                   ''')

args = parser.parse_args()

# --------------------------[ Constant settings ]-------------------------------

## Where to look for fastq and bamfiles. Complete of hostname.
## NB: These dirs will be searched recursively.
## NOTE(review): READ_DIRS appears unused in this file -- the scan below is
## driven by --hostname/--repository instead (see the commented-out loop
## further down); confirm before removing.
READ_DIRS= {'local': [],  ## Local dirs, just put absolute path. E.g. ['/Users/berald01/']
            'remote': ['berald01@10.20.13.11:/data01/sblab/users/berald01/repository/original/fastq'], ## Remote dirs. Use format ['username@hostname:/path/to/dir'] 
            'cluster': [] ## Files in these dirs will be processed using bsub. Format the same as remote.
            }
EXT= ['.fq.gz', '.fastq.gz', '.bam'] ## Extensions to recognize fastq and bam files

LOCALHOST_IP= '10.20.13.11'  ## IP of this host, used as scp target for fastqc output
LOCAL_FASTQC_DIR= '/Users/berald01/django_code/sblabsite/uploads/fastqc/' ## fastqc files will be scp'd to this local dir

NCORES= 4 ## Max number of cores to use (throttles concurrent remote fastqc jobs)
# -----------------------------[ Functions ]------------------------------------

def get_psycopgpass():
    """
    Read file ~/.psycopgpass to get the connection string to pass to
    psycopg2.connect().

    Returns the first non-blank line not starting with '#', stripped of
    surrounding whitespace. Raises IndexError if no such line exists and
    IOError if the file is missing.
    """
    ## `with` guarantees the file handle is closed (the original left it open).
    with open(os.path.join(os.getenv("HOME"), '.psycopgpass')) as fh:
        lines= fh.readlines()
    candidates= [x.strip() for x in lines if x.strip() != '' and not x.strip().startswith('#')]
    return(candidates[0])

def splitlist(L, count):
    """ Split list L in sublists of length count, last sublist of remaining length.
    E.g. splitlist([1,2,3,4,5], 2) -> [[1,2], [3,4], [5]]
    See http://www.velocityreviews.com/forums/t670143-breaking-python-list-into-set-length-list-of-lists.html
    """
    ## range() instead of the python2-only xrange(): identical behaviour here
    ## and keeps the function working under python 3 as well.
    M= []
    for i in range(0, len(L), count):
        M.append(L[i:i+count])
    return(M)

# -----------------------[ Get list of archived files ]------------------------ 
## Connect using the connection string stored in ~/.psycopgpass.
conn= psycopg2.connect(get_psycopgpass())
cur= conn.cursor()
cur.execute("SELECT filename, fsize, mtime FROM fastqc") ## Note we are using name, size and mtime to identify a file
## archived_fastqc: list of (filename, fsize, mtime) tuples already in postgres.
archived_fastqc= cur.fetchall()


# -----------------------------------------------------------------------------
# Get list of remote files
# -----------------------------------------------------------------------------

readfiles= {} ## Will be {'file.fq': ('local', '/path/to/dir'), 'file2.fq': ('remote', 'user@host:/path/to/dir'), 'file3.fq': ('cluster', '/path/to/dir')}
"""
readfiles dict will have a key for each tuple (filename, md5sum) and the value associated is:
readfiles= {
    ('file1.fq', 'md5sum'):
        {'path': '/abs/path/to/dir', 'hostname': 'uk-cri-lcst01', 'ctime': os.path.getctime('file1.fq'), 'mtime': os.path.getmtime('file1.fq')}
}
"""
#read_dirs= READ_DIRS['remote']
#for dir in read_dirs:
# -------------------------------------------------------------------------
# Get all the files in READ_DIRS
# -------------------------------------------------------------------------
host= args.hostname
path= args.repository
cmd= """ssh %s "find %s" """ %(host, path)
p= subprocess.Popen(cmd, shell= True, stdout=subprocess.PIPE)
files= p.stdout.read().strip().split('\n')
files_all= [x.strip() for x in files]

# --------------------------------------------------------------------------
# Get only the files with extension in EXT
# --------------------------------------------------------------------------

if args.fastq is not None:
    ## An explicit list of fastq files was given: process exactly those.
    files= args.fastq
else:
    ## Otherwise keep every scanned file carrying a recognized extension.
    files= [f for f in files_all for ext in EXT if f.endswith(ext)]

# ------------------------------------------------------------------------------
# Get file stats for the selected files.
# ------------------------------------------------------------------------------

files= splitlist(files, 100) ## Split in chunks otherwise the bash shell will probably get clogged!
for fl in files:
    file_str= ' '.join(fl)
    ## Run get_file_stats2.py remotely on every file in this chunk. One
    ## python dict literal per file is expected on stdout.
    cmd= """ssh %s 'source ~/.bash_profile;
            for f in %s
            do
            get_file_stats2.py --datetime --infile $f
            done'
         """ %(host, file_str)
    p= subprocess.Popen(cmd, shell= True, stdout=subprocess.PIPE)
    file_dict= p.stdout.read().strip()
    file_dict= file_dict.split('\n')
    ## NOTE(review): eval() of remote command output is a code-injection risk
    ## on an untrusted host; ast.literal_eval() would be safer.
    file_dict= [eval(x) for x in file_dict]
    ## Index the stats by (filename, fsize, mtime); first occurrence wins.
    for fstats in file_dict:
        if not (fstats['filename'], fstats['fsize'], fstats['mtime']) in readfiles:
            readfiles[(fstats['filename'], fstats['fsize'], fstats['mtime'])]= fstats

# ------------------------------------------------------------------------------
# See which fastqfiles are not already in fastqc
# Use only filename, fsize and mtime.
# ------------------------------------------------------------------------------
## Keep only the files whose (filename, fsize, mtime) key is missing from the
## fastqc table fetched from postgres.
newreadfiles= {}
for key, fstats in readfiles.items():
    if key not in archived_fastqc:
        newreadfiles[key]= fstats
print('Files found: %s' %(len(readfiles),) )
print('Files to execute fastqc: %s' %(len(newreadfiles),) )
## Convert uk-cri-lsrv10 to ip address:
for fstats in newreadfiles.values():
    if fstats['hostname'] == 'uk-cri-lsrv10.crnet.org':
        fstats['hostname']= '143.65.169.175'

# -----------------------------------------------------------------------------
# Execute fastqc for the missing files. Execute also get_file_stats2.py with
# md5sum option
# Look at https://code.google.com/p/bioinformatics-misc/source/browse/trunk/subprocess.snippet.py for how multiple processese are run in parallel
# -----------------------------------------------------------------------------

procs= []           ## All Popen objects started so far (running or finished)
running_procs= 0    ## Count of processes believed to be still running
n= 0                ## Progress counter: number of files dispatched
for fq in newreadfiles:
    """ Execute fastqc remotely """
    n += 1
    ## fastqc names its output by stripping the last extension and appending
    ## '_fastqc.zip'. NOTE(review): inputs differing only in the extension
    ## (myfile.fq.gz vs myfile.fq.bam) collide on the same output name --
    ## see the TODO in the module docstring.
    fastqc_out= os.path.splitext(os.path.join(newreadfiles[fq]['path'], newreadfiles[fq]['filename']))[0] + '_fastqc.zip'
    ## Remote pipeline: run fastqc, scp the zip back to LOCAL_FASTQC_DIR on
    ## this host, then delete the remote copy. 'set -e' aborts the remote
    ## script at the first failing step.
    cmd= """ssh %(hostname)s 'set -e; source ~/.bash_profile
fastqc -q --noextract %(filepath)s
scp %(fastqc_out)s %(local_dir)s
rm %(fastqc_out)s'
""" %{'hostname': newreadfiles[fq]['hostname'],
      'filepath': os.path.join(newreadfiles[fq]['path'], newreadfiles[fq]['filename']),
      'fastqc_out':fastqc_out,
      'local_dir':LOCALHOST_IP + ':' + LOCAL_FASTQC_DIR}
    print('\n' + cmd.strip())
    p= subprocess.Popen(cmd, shell= True)
#    p.wait()
    procs.append(p)
    running_procs += 1
    ## Throttle: while NCORES or more jobs appear to be running, poll every
    ## 5s and recount. poll() refreshes x.returncode as a side effect; the
    ## value bound to xv is otherwise unused.
    while running_procs >= NCORES:
        time.sleep(5)
        running_procs= 0
        for x in procs:
            xv= x.poll()
            if x.returncode is None:
                running_procs += 1

## Do not procede until *all* procs have completed
for x in procs:
   x.wait()

## For each newly processed file: fetch its md5sum from the remote host, then
## unzip the fastqc output locally and import it into postgres via
## fastqc_to_pgtable.py.
for fq in newreadfiles:
    ## Remote path of the zip produced by fastqc for this read file.
    fastqc_out= os.path.splitext(os.path.join(newreadfiles[fq]['path'], newreadfiles[fq]['filename']))[0] + '_fastqc.zip'
    ## Get md5sum: get_file_stats2.py runs remotely and prints a python dict
    ## literal to stdout.
    cmd= "ssh %s 'source ~/.bash_profile; get_file_stats2.py --md5sum --infile %s'" %(newreadfiles[fq]['hostname'], os.path.join(newreadfiles[fq]['path'], newreadfiles[fq]['filename']))
    print(cmd)
    p= subprocess.Popen(cmd, shell= True, stdout= subprocess.PIPE)
    ## NOTE(review): eval() of remote output is a code-injection risk on an
    ## untrusted host; ast.literal_eval() would be safer.
    fstat= eval(p.stdout.read().strip())
    # -------------------------------------------------------------------------
    # Now upload the output from fastqc to postgres.
    # Refer to fastqc_to_pgtable.py for this
    # -------------------------------------------------------------------------
    localfastqc_zip= os.path.join(LOCAL_FASTQC_DIR, os.path.split(fastqc_out)[1])
    localfastqc_unzip= os.path.splitext(localfastqc_zip)[0]
    ## BUG FIX: dropped `md5sum= newreadfiles[fq]['md5sum']`. That value was
    ## never used (the command below takes fstat['md5sum']) and the key is
    ## unlikely to exist at all, since the stats in newreadfiles were gathered
    ## by get_file_stats2.py *without* --md5sum -- so the lookup would raise
    ## KeyError and abort the whole upload loop.
    cmd= 'set -e; unzip -q -o -d %(local_fastqc_dir)s %(localfastqc_zip)s; /usr/local/bin/fastqc_to_pgtable.py --infile %(localfastqc_unzip)s --md5sum %(md5sum)s --fsize %(fsize)s --mtime %(mtime)s; rm %(localfastqc_zip)s' %{'local_fastqc_dir': LOCAL_FASTQC_DIR, 'localfastqc_zip':localfastqc_zip, 'localfastqc_unzip': localfastqc_unzip, 'md5sum': fstat['md5sum'], 'fsize': fstat['fsize'], 'mtime': fstat['mtime']}
    print(cmd)
    p= subprocess.Popen(cmd, shell= True)
    p.wait()
