#!/usr/bin/python

import psycopg2
import subprocess
import sys
import os
import re
import time
import argparse

parser = argparse.ArgumentParser(description= """
DESCRIPTION

    This program queries the view view_fastq_for_library from sblab to get the
    service_id's which don't have an associated fastq file.
    
    This program searches for services in the queue that have completed but the fastq files have not
    been downloaded yet. If a download path for a fastq file can be found, download it,
    convert to sanger encoding and demultiplex as appropriate.

    Output fastqfiles will have the format:
        <library_id>.<slx-id>.<run id>.<s_n_n_sequence.sanger.fq.gz>
    Library id omitted for multiplexed files.

       
EXAMPLE

DEPENDS-ON:
    - ~/psycopgpass & psycopg2
    - getFilesForLibrary.py (see google code)
    - illumina2sanger.py
    - demultiplex_service.py
    
DEPENDS-ON-ME:

TODO:
    - ssh to remote server
    - error checking is minimal
    - Uses bash md5sum 

""", formatter_class= argparse.RawTextHelpFormatter)

## NB: --service is required, so the keyword 'all' must be given explicitly.
## (The original declared `default= 'all'`: dead code under required=True and,
## being a bare string rather than ['all'], it could never match the
## `args.service == ['all']` test made below anyway.)
parser.add_argument('--service', '-s',
                   required= True,
                   nargs= '+',
                   help='''Only process these services. Use keyword 'all'
to process all the slx ids in the database that don't have an associated fastqfile.
                   ''')

parser.add_argument('--viewfiles',
                   action= 'store_true',
                   help='''Only show which files could be downloaded. Don't do
anything else. With this option only getFilesForLibrary.py is executed.
                   ''')

parser.add_argument('--norecode',
                   action= 'store_true',
                   help='''Skip the downloading and the conversion Illumina to Sanger.
In other words, the files corresponding to the service id(s) have been already downloaded
and converted, just proceed to archiving them to database and demultiplex as appropriate.
                   ''')

parser.add_argument('--ncores',
                   type= int,
                   default= 2,
                   help='''Number of cores to use, default 2.
                   ''')

parser.add_argument('--quiet',
                   action= 'store_true',
                   help='''Suppress (some) messages. Enforced if --viewfiles is True
                   ''')

args = parser.parse_args()
# -----------------------------------------------------------------------------

## --viewfiles is a report-only mode: silence the progress messages.
if args.viewfiles:
    args.quiet= True

REMOTE_SERVER= '143.65.169.175' ## Server where fastq files will be downloaded
REMOTE_DIR= '/data01/sblab/users/berald01/repository/original/fastq' ## Dir on the remote server where fastq files will go

def get_psycopgpass():
    """
    Read file ~/.psycopgpass to get the connection string to pass to
    psycopg2.connect().

    The connection string is taken from the first non-blank line that is not
    a '#' comment. The file handle is closed before returning (the original
    implementation leaked it, and also re-imported os redundantly).
    """
    passfile= os.path.join(os.getenv("HOME"), '.psycopgpass')
    with open(passfile) as fin:
        lines= fin.readlines()
    conn_args= [x.strip() for x in lines if x.strip() != '' and not x.strip().startswith('#')][0]
    return(conn_args)

## Open the sblab database connection using the credentials read from
## ~/.psycopgpass; this cursor is used for the service-id query below.
conn= psycopg2.connect(get_psycopgpass())
cur= conn.cursor()

def get_runid(solexapath):
    """ Extract the run id from the Solexa path
    solexapath looks like:
    solexapath= 'uk-cri-lsol01.crnet.org:/solexa02/data/Runs/120419_HWI-ST230_822_D0U6DACXX/full_Data/Intensities/Bustard1.9.0_02-05-2012_solexa/GERALD_02-05-2012_solexa/s_1_1_sequence.txt.gz'    
    return: '120419_HWI-ST230_822_D0U6DACXX'

    Exits the program if no path component matches the run-id pattern.
    """
    ori_path= solexapath
    ## Split the path into its components, root first.
    folders=[]
    while 1:
        solexapath,folder=os.path.split(solexapath)
        if folder!="":
            folders.append(folder)
        else:
            if solexapath!="":
                folders.append(solexapath)
            break
    folders.reverse()
    ## Regex to match runID. Composed by:
    ## <date>_<instrument id>_<run nr>_<flowcell id>
    ## Raw string avoids the invalid escape sequences of the original pattern.
    runre= re.compile(r'^\d{6}_[A-Za-z0-9\-]+_\d{1,}_[\w\-]+$')
    for d in folders:
        if runre.match(d) is not None:
            return(d)
    sys.exit('Cannot find a run identifier from the path:\n%s' %(ori_path))
    
# -----------------------------------------------------------------------------

## Collect the service ids that still lack a fastq file: either every SLX
## service in the database, or only the ones named on the command line.
if args.service == ['all']:
    sql= "select distinct service_id from view_fastq_for_library where fastqfile = 'No fastq file found' and upper(service_id) like 'SLX%';"
    print(cur.mogrify(sql) + '\n')
    cur.execute(sql)
else:
    ## One '%s' placeholder per requested service id.
    placeholders= ', '.join(['%s'] * len(args.service))
    sql= "select distinct service_id from view_fastq_for_library where fastqfile = 'No fastq file found' and upper(service_id) in (%s);" %(placeholders)
    print(cur.mogrify(sql, args.service) + '\n')
    cur.execute(sql, args.service)

waiting_services= cur.fetchall()
cur.close()

def _count_running(procs):
    """Poll every process in procs and return how many have not terminated."""
    running= 0
    for p in procs:
        p.poll()
        if p.returncode is None:
            running += 1
    return(running)

def _throttle(procs, ncores):
    """Block until fewer than ncores processes in procs are still running."""
    while _count_running(procs) >= ncores:
        time.sleep(10)

for slx_id in waiting_services:
    slx_id= slx_id[0]
    ## Get path to fastq. Note: Two rows are returned for paired-end.
    cmd= """ssh %s 'source ~/.bash_profile; getFilesForLibrary.py %s' """ %(REMOTE_SERVER, slx_id)
    if not args.quiet:
        print(cmd + '\n')
    p= subprocess.Popen(cmd, shell= True, stdout=subprocess.PIPE)
    fastq_path= p.stdout.read().strip()
    if args.viewfiles:
        ## Report-only mode: show what would be downloaded and move on.
        print(slx_id + ':\n' + fastq_path)
        continue
    if fastq_path == '':
        ## No download path found for this service: skip it.
        continue
    fastq_path= fastq_path.split('\n')
    sanger_names= []
    procs= []
    for fq in fastq_path:
        # ---------------------------------------------------------------------
        # Download file(s), rename, convert to sanger, tidy up 
        # ---------------------------------------------------------------------
        oriname= os.path.split(fq)[1]
        runid= get_runid(fq)
        newname= slx_id.lower() + '.' + runid + '.' + oriname
        ## Raw string: the original '\.txt\.gz$' pattern used invalid escapes.
        sanger_name= re.sub(r'\.txt\.gz$', '.sanger.fq.gz', newname)
        sanger_names.append(sanger_name)
        cmd= """ssh %(remote_server)s 'set -e; source ~/.bash_profile; cd %(remote_dir)s;
                        scp %(fastqfile)s %(newname)s;
                        illumina2sanger.py %(newname)s | gzip > %(sanger_name)s;
                        rm %(newname)s' """ %{'remote_server': REMOTE_SERVER, 'remote_dir': REMOTE_DIR, 'fastqfile': fq, 'oriname': oriname, 'newname': newname, 'sanger_name': sanger_name}
        if args.norecode is False:
            print(cmd)
            p= subprocess.Popen(cmd, shell= True)
            procs.append(p)
            ## Do not launch more conversions than --ncores allows.
            _throttle(procs, args.ncores)
    ## Do not proceed until *all* download/conversion procs have completed.
    for x in procs:
        x.wait()

    ## BUGFIX: the original reset a variable named `proc` here (typo) while
    ## still appending to and counting `procs`, so the demultiplexing throttle
    ## below also counted the already-finished download processes.
    procs= []
    for sanger_name in sanger_names:
        # ---------------------------------------------------------------------
        # Get md5 of converted file and upload to database 
        # ---------------------------------------------------------------------
        cmd= """ssh %s 'md5sum %s' """ %(REMOTE_SERVER, os.path.join(REMOTE_DIR, sanger_name)) ## NB: This is Linux specific as md5sum is not on Mac
        print(cmd)
        p= subprocess.Popen(cmd, shell= True, stdout=subprocess.PIPE)
        p.wait()
        md5= p.stdout.read().strip()
        md5= md5.split(' ')[0] ## md5sum output is '<digest>  <filename>'
        fastq_line= (sanger_name, 'Sanger', md5, 'File produced by get_fastq_from_service.py', slx_id, None)
        cur= conn.cursor()
        sql= "INSERT INTO fastqfiles VALUES (%s, %s, %s, %s, %s, %s)"
        print(cur.mogrify(sql, fastq_line) + '\n')
        cur.execute(sql, fastq_line)
        conn.commit()
        cur.close()
        # -------------------------------------------------------------------------
        # Demultiplexing: check whether the file is multiplexed.
        # -------------------------------------------------------------------------
        cur= conn.cursor()
        sql= 'select barcode_id, demultiplexed_file from view_fastq_for_library where fastqfile = %s'
        ## BUGFIX: render the query for display while the cursor is still open;
        ## the original called mogrify on an already-closed cursor in the error
        ## branch below, which psycopg2 rejects and which masked the message.
        sql_shown= cur.mogrify(sql, (sanger_name,))
        cur.execute(sql, (sanger_name,))
        demfiles= cur.fetchall()
        cur.close()
        if demfiles == []:
            print(sql_shown)
            sys.exit('No file found in view_fastq_for_library for fastqfile %s' %(sanger_name))
        elif demfiles[0][0] == 'not_multiplexed':
            ## Not multiplexed: just rename to the final fastq file name.
            fastqname= demfiles[0][1]
            cmd= """ssh %s 'set -e; cd %s; mv %s %s' """ %(REMOTE_SERVER, REMOTE_DIR, sanger_name, fastqname)
            p= subprocess.Popen(cmd, shell= True)
            p.wait()
        else:
            ## Multiplexed: split by barcode, throttled to --ncores processes.
            cmd= """demultiplex_service_fastq.py --fastq %s""" %(sanger_name,)
            print(cmd)
            p= subprocess.Popen(cmd, shell= True)
            procs.append(p)
            _throttle(procs, args.ncores)
    ## Do not proceed until *all* demultiplexing procs have completed.
    for x in procs:
        x.wait()
conn.close()
sys.exit()


    
