#!/bin/bash
#SBATCH --job-name=m4
#SBATCH --partition=cpu
#SBATCH -N 1
#SBATCH --ntasks-per-node=8
#SBATCH --mail-type=all
#SBATCH --mail-user=None
#SBATCH --output=/lustre/home/acct-medfzx/medfzx-lkw/project/w5/log/m4_%j.stdout.log
#SBATCH --error=/lustre/home/acct-medfzx/medfzx-lkw/project/w5/log/m4_%j.stderr.log
# NOTE(review): --mail-user=None looks like an unfilled template placeholder —
# set a real address or drop --mail-type=all.

# Targeted-panel QC/alignment pipeline: fastp -> FastQC -> bwa mem ->
# CIGAR-length filtering -> per-sample depth and on-target statistics.
# (DeepVariant calling and ANNOVAR annotation are commented out below.)

# BUGFIX: abort if cd fails instead of silently running every relative path
# (data/, result/, report/) against whatever directory the job started in.
cd /lustre/home/acct-medfzx/medfzx-lkw/project/w5 || exit 1
mkdir -p /lustre/home/acct-medfzx/medfzx-lkw/project/w5/log/

ref=/lustre/home/acct-medfzx/medfzx-lkw/project/CAH/data/refseqMy/hg19/hg19.fa  # hg19 FASTA (bwa index assumed alongside)
Time=TRF-20241210-L-01-2024-12-141040   # run/batch identifier used in data, result and report paths
outputPath=result/${Time}
BED=bed/w5_21W.bed                      # target regions for depth / on-target stats
# Per-sample stats to report: sequencing depth, mapping rate, on-target read
# fraction, mean depth, coverage at >=0.2x of mean depth.
# A1 1.09G
# A2 2.22G
# A3 1.3G
# totalBase=3.79*10^9
# panalX_A1=$(echo "scale=2; ${totalBase} / 120000" | bc)
panalX_A1=-   # "-" placeholder for the depth column (computation above is commented out)
# Thread count is invariant across the loop — compute it once.
threads=$(nproc)

# Process each sample of this run.
# NOTE(review): tag "11R3R3" breaks the <n>F<k>R<k> pattern of the other
# tags — confirm it is not a typo for 11F3R3 (left unchanged here).
for tag in 4F1R1 5F2R2 9F3R3 10F1R1 11R3R3;do
    sample=Sample_JZ24236534-20241210M4-1-${tag}
    name=$(echo "${sample}"|cut -d "_" -f2)   # strip the "Sample_" prefix
    samplePath=data/fastq/${Time}/${sample}

    # 1 read QC / adapter trimming with fastp
    # BUGFIX: ${Name} (capital N) was an undefined variable, so the per-sample
    # report directory was never created and the HTML/JSON reports went to
    # ${outputPath}/fastp//<name>.* — use ${name} consistently.
    mkdir -p "${outputPath}/fastp/${name}"
    fastp -i "${samplePath}/${name}_combined_R1.fastq.gz" -o "${outputPath}/fastp/${name}_fastp_combined_R1.fastq.gz" \
        -I "${samplePath}/${name}_combined_R2.fastq.gz" -O "${outputPath}/fastp/${name}_fastp_combined_R2.fastq.gz" \
        -q 15 \
        -u 40 \
        -e 0 \
        -n 5 \
        -l 50 \
        -p \
        -P 20 \
        -w "${threads}" \
        -g \
        -x \
        -h "${outputPath}/fastp/${name}/${name}.html" -j "${outputPath}/fastp/${name}/${name}.json"

    module load fastqc/0.12.1
    mkdir -p "${outputPath}/FASTQC/${name}"
    # glob stays unquoted on purpose: it matches both trimmed R1/R2 files
    fastqc -q -t "${threads}" "${outputPath}/fastp/${name}"*.fastq.gz -o "${outputPath}/FASTQC/${name}"
    module purge

    # 2 alignment
    source activate biotools
    mkdir -p "${outputPath}/bam/${name}"
    bamSavePath=${outputPath}/bam/${name}
    bwa mem -t "${threads}" "$ref" -R "@RG\tID:${name}\tLB:hg19\tPL:Illumina\tPU:${name}\tSM:${name}" \
        "${outputPath}/fastp/${name}_fastp_combined_R1.fastq.gz" \
        "${outputPath}/fastp/${name}_fastp_combined_R2.fastq.gz" > "${bamSavePath}/${name}.sam"
    sambamba view -t "${threads}" -S -f bam -o "${bamSavePath}/${name}.bam" "${bamSavePath}/${name}.sam"
    sambamba sort -t "${threads}" -o "${bamSavePath}/${name}.sorted.bam" "${bamSavePath}/${name}.bam"
    sambamba index -t "${threads}" "${bamSavePath}/${name}.sorted.bam"
    rm "${bamSavePath}/${name}.sam"

    ## keep mapped reads whose CIGAR M run is > 65 bp
    # NOTE(review): gawk's 3-arg match() captures only the FIRST <n>M run, so
    # reads are filtered on that run alone despite the "sum +=" phrasing —
    # confirm that is the intent for multi-M CIGARs. (Requires gawk.)
    samtools view -@ "${threads}" -F 4 "${bamSavePath}/${name}.sorted.bam" | awk '{sum=0; if($6 ~ /[0-9]+M/) {match($6, /([0-9]+)M/, a); sum += a[1];} if(sum > 65) print $0;}' > "${bamSavePath}/${name}.lengthMT65.sam"
    samtools view -H "${bamSavePath}/${name}.sorted.bam" > "${bamSavePath}/${name}.header.txt"
    cat "${bamSavePath}/${name}.header.txt" "${bamSavePath}/${name}.lengthMT65.sam" > "${bamSavePath}/${name}.lengthMT65.cat.sam"
    samtools view -@ "${threads}" -bS "${bamSavePath}/${name}.lengthMT65.cat.sam" | samtools sort -@ "${threads}" -o "${bamSavePath}/${name}.lengthMT65.cat.bam" -
    samtools index "${bamSavePath}/${name}.lengthMT65.cat.bam"
    rm "${bamSavePath}"/*.sam

    ## keep unmapped reads for possible follow-up analysis
    unmapped_reads=${outputPath}/bam/unmapping/${name}
    mkdir -p "$unmapped_reads"
    samtools view -h -f 4 -b "${bamSavePath}/${name}.sorted.bam" > "${unmapped_reads}/${name}.unmapped_reads.bam"

    # 2.1 per-region depth over the target BED
    sambamba depth region -F "mapping_quality > 0 and not duplicate and not failed_quality_control" -t "${threads}" -L "$BED" "${bamSavePath}/${name}.sorted.bam" -o "${bamSavePath}/${name}.c0.depth.txt"

    # 2.2 alignment statistics
    sambamba flagstat -t "${threads}" "${bamSavePath}/${name}.sorted.bam" > "${bamSavePath}/base_flagstat.txt"
    # pull the "XX.XX" percentage out of flagstat's "... mapped (XX.XX% : ...)" line
    # (UUOC fixed: grep reads the file directly instead of cat | grep)
    mapping_ratio=$(grep "mapped (" "${bamSavePath}/base_flagstat.txt" | awk -F'[()]' '{print $2}'|cut -d '%' -f1)

    total_reads=$(samtools view -c "${bamSavePath}/${name}.sorted.bam")
    region_reads=$(samtools view -c "${bamSavePath}/${name}.sorted.bam" -L "$BED")
    region_ratio=$(echo "scale=2;($region_reads / $total_reads) * 100" | bc -l)

    # mean depth over target regions (column 5 of sambamba depth region output)
    # BUGFIX: guard against an empty depth file so awk does not divide by zero
    # (which would leave averageDepth empty and break the bc line below).
    averageDepth=$(awk '{sum+=$5; n++} END {print (n ? sum/n : 0)}' "${bamSavePath}/${name}.c0.depth.txt")
    threshold=0.2
    averageDepth_threshold=$(echo "scale=2;${averageDepth}*${threshold}"|bc -l)

    # fraction of target regions covered at >= 0.2x of the mean depth
    # BUGFIX: same empty-file guard as above.
    percentage2Coverage=$(awk -v threshold="${averageDepth_threshold}" 'BEGIN{covered=0;total=0} {if($5 >= threshold) covered++} {total++} END {print (total ? covered/total : 0)}' "${bamSavePath}/${name}.c0.depth.txt")

    # TSV report; header labels are user-facing Chinese text (sample name /
    # sequencing depth / mapping rate / on-target rate / mean depth /
    # coverage above 0.2x mean depth) — kept byte-identical.
    echo -e "样本名\t测序深度\t上靶率\t中靶率\t平均深度\t大于0.2深度的覆盖度" >  "${bamSavePath}/output.txt"
    echo -e "${name}\t${panalX_A1}\t${mapping_ratio}\t${region_ratio}%\t${averageDepth}\t${percentage2Coverage}" >>  "${bamSavePath}/output.txt"

    mkdir -p "report/${Time}/${name}"
    cp "${bamSavePath}/${name}.c0.depth.txt" "report/${Time}/${name}/${name}.c0.depth.txt"
    cp "${bamSavePath}/base_flagstat.txt" "report/${Time}/${name}/base_flagstat.txt"
    cp "${bamSavePath}/output.txt" "report/${Time}/${name}/output.txt"

    # 3 variant calling (currently disabled)
    # mkdir -p ${outputPath}/vcf/${name}
    # vcf=${outputPath}/vcf/${name}/${name}.vcf.gz
    # time singularity run ~/singularity/deepvariant.simg \
    # /opt/deepvariant/bin/run_deepvariant \
    # --model_type WES \
    # --ref ${ref} \
    # --reads ${bamSavePath}/${name}.sorted.bam \
    # --output_vcf ${vcf}  \
    # --output_gvcf ${outputPath}/vcf/${name}/${name}.g.vcf.gz \
    # --num_shards $(nproc) \
    # --regions ${BED} \
    # --sample_name ${name} \
    # --make_examples_extra_args="min_mapping_quality=1,keep_legacy_allele_counter_behavior=true,normalize_reads=true" \
    # ######## --sample_name

    # # # 4 annotation ################ refGene,clinvar_20240611,exac03 #################
    # mkdir -p ${outputPath}/vcf/${name}
    # annovarOutPut=${outputPath}/vcf/${name}
    # annovar=/lustre/home/acct-medfzx/medfzx-lkw/software/annovar
    # ${annovar}/table_annovar.pl ${vcf} ${annovar}/humandb -buildver hg19 -out ${annovarOutPut}/${name}_3 -remove -protocol refGene,clinvar_20240611,exac03 -operation g,f,f -nastring . -vcfinput -polish

    conda deactivate
done
