#!/bin/bash
# Preprocessing pipeline for a Parallel WaveGAN vocoder on the Thorsten
# (German) dataset:
#   stage 0: generate durations.txt from MFA alignment results
#   stage 1: extract features
#   stage 2: compute feature statistics (mean/std) on the train split
#   stage 3: normalize train/dev/test using the train statistics
# Requires MAIN_ROOT and BIN_DIR to be set in the environment
# (e.g. sourced from a path.sh) before running.

# Fail fast: abort on errors, unset variables, and mid-pipeline failures,
# so a broken stage cannot silently feed partial data to later stages.
set -euo pipefail

stage=1
stop_stage=100

config_path=conf/default.yaml
DUMP_DIR=/home/processed_data/pwgan_thorsten

DATA_ROOT=/home/processed_data/thorsten
DATASET_ROOT=/home/datasets
if [ "${stage}" -le 0 ] && [ "${stop_stage}" -ge 0 ]; then
    # Stage 0: convert MFA's TextGrid alignment output into a single
    # durations.txt file consumed by the preprocessing step (stage 1).
    echo "Generate durations.txt from MFA results ..."
    # Quote all expansions: MAIN_ROOT comes from the environment and may
    # contain spaces (SC2086).
    python3 "${MAIN_ROOT}/utils/gen_duration_from_textgrid.py" \
        --inputdir="${DATA_ROOT}/corpus_aligned" \
        --output="${DUMP_DIR}/durations.txt" \
        --config="${config_path}"
fi

if [ "${stage}" -le 1 ] && [ "${stop_stage}" -ge 1 ]; then
    # Stage 1: extract features from the raw dataset using the durations
    # produced in stage 0; writes raw metadata under ${DUMP_DIR}.
    echo "Extract features ..."
    # Quote all expansions: BIN_DIR comes from the environment and may
    # contain spaces (SC2086).
    python3 "${BIN_DIR}/../preprocess.py" \
        --rootdir="${DATASET_ROOT}/thorsten-de_v03" \
        --dataset=thorsten \
        --dumpdir="${DUMP_DIR}" \
        --dur-file="${DUMP_DIR}/durations.txt" \
        --config="${config_path}" \
        --cut-sil=True \
        --num-cpu=8
fi

if [ "${stage}" -le 2 ] && [ "${stop_stage}" -ge 2 ]; then
    # Stage 2: compute mean/std statistics of the "feats" field over the
    # train split only (dev/test are normalized with these in stage 3).
    echo "Get features' stats ..."
    # NOTE(review): no --output is passed, so the stats file location is
    # whatever compute_statistics.py defaults to; stage 3 expects it at
    # ${DUMP_DIR}/train/feats_stats.npy — confirm the default matches.
    python3 "${MAIN_ROOT}/utils/compute_statistics.py" \
        --metadata="${DUMP_DIR}/train/raw/metadata.jsonl" \
        --field-name="feats"
fi

if [ "${stage}" -le 3 ] && [ "${stop_stage}" -ge 3 ]; then
    # Stage 3: normalize features per split. dev and test deliberately use
    # the TRAIN split's statistics so all splits share one normalization.
    echo "Normalize ..."

    python3 "${BIN_DIR}/../normalize.py" \
        --metadata="${DUMP_DIR}/train/raw/metadata.jsonl" \
        --dumpdir="${DUMP_DIR}/train/norm" \
        --stats="${DUMP_DIR}/train/feats_stats.npy"

    python3 "${BIN_DIR}/../normalize.py" \
        --metadata="${DUMP_DIR}/dev/raw/metadata.jsonl" \
        --dumpdir="${DUMP_DIR}/dev/norm" \
        --stats="${DUMP_DIR}/train/feats_stats.npy"

    python3 "${BIN_DIR}/../normalize.py" \
        --metadata="${DUMP_DIR}/test/raw/metadata.jsonl" \
        --dumpdir="${DUMP_DIR}/test/norm" \
        --stats="${DUMP_DIR}/train/feats_stats.npy"
fi
