#!/bin/sh
#use -D to select dmenu

# Defaults; some are adjusted by the command-line flags parsed below.
selector='fzf'
viewer='nsxiv'
mode='read'
chapter=1

# Cache layout: one image directory per process ($$) so concurrent runs
# cannot clobber each other's downloads.
cachedir="$HOME/.cache/mkklt"
imgdir="${cachedir}/img/$$"
[ -d "$imgdir" ] || mkdir -p "$imgdir"

# Where -d (download mode) stores finished chapters.
downdir="$HOME/mkklt"

clean_up(){
    # Remove this run's image cache, if it was ever created.
    if [ -d "$imgdir" ]; then
        rm -rf "$imgdir"
    fi
}

# Ensure the per-run image cache is removed on any exit path.
trap 'clean_up' EXIT

# Flag parsing: -D use dmenu, -f fullscreen viewer, -d download mode,
# -- ends option processing.
while :; do
    case "$1" in
        -D) selector='dmenu' ;;
        -f) viewer="$viewer -f" ;;
        -d) mode='download' ;;
        --) shift; break ;;  # was `shift break`: a single bogus command that never broke the loop
        *) break ;;
    esac
    shift
done

menu(){
    # Present "$1" as the prompt on the configured selector, passing stdin
    # (the entries) and stdout (the choice) straight through.
    # An explicit if/else is used instead of `&& … || …`: with the old form,
    # fzf exiting non-zero (user abort) fell through and also ran the
    # dmenu-style branch as `fzf -p "$1"`.
    if [ "$selector" = "fzf" ]; then
        $selector --cycle --prompt="$1"
    else
        $selector -p "$1"
    fi
}

exit_menu(){
    # Show "$1" as a prompt over a single dummy "exit" entry, discard
    # whatever the user picks, then terminate the script.
    printf '%s' 'exit' | menu "$1" > /dev/null
    exit
}
get_query(){
    #some magic leads to fzf being false
    # Prompt for a free-form query with no pre-existing entries: an empty
    # stream (`:`) is piped in so the selector has nothing to match.
    # fzf then exits non-zero ("no match"), so `!` inverts its status —
    # otherwise this &&-chain would fall through and also run the dmenu
    # branch on the right. fzf emits the typed text via --print-query;
    # dmenu simply echoes the entered line.
    [ "$selector" = "fzf" ] && ! :|$selector --prompt="$1" --print-query || :|$selector -p "$1"
}

get_image_link_list(){
    # Fetch the reader page for chapter $1 (defaults to the global
    # $chapter) and extract the page-image URLs: sed splits the HTML so
    # every `<img src="...jpg"` value lands on its own line and drops
    # <meta> lines (which also mention jpg), then grep keeps the jpg links.
    # NOTE(review): `\n` in a sed replacement is a GNU sed extension.
    curl -s "$url${1:-$chapter}" | sed 's/jpg"/jpg\n/g;s/<img src="/\n/g;/<meta/d' | grep 'jpg'
}

download(){
    # Download every image URL in "$@" (newline-separated) into the
    # current directory, with the Referer header the CDN requires.
    # Each file is written as "u<name>" and then renamed into place, so
    # the viewer never opens a half-downloaded page and finished files
    # are overwritten for the shortest possible time.
    # `printf '%s\n'` (not `printf "$@\n"`): URLs must be data, never the
    # printf format string.
    printf '%s\n' "$@" | while IFS= read -r link; do
        [ -n "$link" ] || continue
        bn=$(basename "$link")
        curl -s -H 'Referer: https://mangakakalot.com/' "$link" -o "u${bn}"
        mv -f "u${bn}" "$bn"
    done 2> /dev/null  # intentional: curl/mv noise would corrupt the selector UI
}

start_viewer(){
    # Open all finished pages in the viewer, numerically sorted so they
    # appear in reading order. In-progress downloads ("u"-prefixed temp
    # files from download()) are skipped. Globbing replaces the old
    # `ls | grep` parsing (SC2012); $viewer stays unquoted on purpose so
    # "nsxiv -f" splits into command + flag.
    $viewer $(for page in *jpg*; do
        [ -e "$page" ] || continue
        case $page in u*) continue ;; esac
        printf '%s\n' "$page"
    done | sort -n)
}
view_chapter(){
    # Show $chapter: on first visit, create its cache dir, pre-fill every
    # page with a placeholder image, start the real download in the
    # background, and launch the viewer. On revisits just reopen the cache.
    if [ ! -d "${imgdir}/${chapter}" ]; then
        mkdir -p "${imgdir}/${chapter}"
        cd "${imgdir}/${chapter}" || return
        linklist=$(get_image_link_list)
        # Placeholder copied over each page name so the viewer can open
        # the full set immediately while real pages stream in behind it.
        download 'https://mangakakalot.com/favicon.ico'
        printf '%s\n' "$linklist" | while IFS= read -r link; do
            [ -n "$link" ] && cp 'favicon.ico' "$(basename "$link")"
        done
        download "$linklist" &
        start_viewer
    else
        view_again
    fi
}

view_again(){
    # Reopen the already-cached pages for the current chapter.
    # `|| return` guards against launching the viewer in the wrong
    # directory if the cache dir vanished (SC2164).
    cd "${imgdir}/${chapter}" || return
    start_viewer
}

get_chapter_list(){
    # Emit one chapter number per line: drop blank lines from the raw
    # scraped $chapters data, strip quotes and spaces, sort numerically.
    # `printf '%s\n'` (not `printf "$chapters"`): scraped data must never
    # be used as the printf format string.
    printf '%s\n' "$chapters" | grep -v '^ *$' | tr -d '" ' | sort -n
}

get_chapter(){
    # Ask the user to pick a chapter (prompt overridable via $1) and store
    # it in the global $chapter; an explicit "exit" entry at the top lets
    # them quit the whole script from here.
    # The list is streamed with a command group instead of interpolating
    # it into a printf format string.
    chapter=$({ printf 'exit\n'; get_chapter_list; } | menu "${1:-Select chapter }")
    [ "$chapter" = "exit" ] && exit
}

get_view_chapter(){
    # Chapter-list workflow: prompt for a chapter number (sets the global
    # $chapter), then open that chapter in the viewer.
    get_chapter
    view_chapter
}

relative_chapter(){
    #dollar signs are needed for chapters like 17 and 17.1 
    # Offer next/previous/current navigation relative to $chapter, plus
    # "exit" and a jump back to the full chapter list. The ^...$ anchors
    # above keep "17" from also matching "17.1" in the sed addresses.
    # next: on the line that is exactly $chapter, load the following line
    # (n) and print it.
    next=$(get_chapter_list | sed -n "/^$chapter$/{n;p;}")
    # previous: every line is saved into the hold space (h); on the
    # matching line, recall the previously held line (g) and print it
    # unless we are on the first line (1!p), i.e. there is no previous.
    previous=$(get_chapter_list | sed -n "/^$chapter$/{g;1!p;};h")
    action=$(printf "next chapter ($next)\nprevious chapter ($previous)\ncurrent chapter ($chapter)\nexit\nchapter list" | menu 'What to view ') 
    # Match on the full menu line (including the number in parentheses),
    # so a stale/empty $next or $previous simply falls through unmatched.
    case "$action" in
        "current chapter ($chapter)") view_again ;; 
        'exit') exit ;;
        "next chapter ($next)") chapter="$next" && view_chapter;;
        "previous chapter ($previous)") chapter="$previous" && view_chapter;;
        'chapter list') get_view_chapter ;;
    esac
}

read_chapters(){
    # Interactive reading session: pick an initial chapter, then loop on
    # the next/previous/current menu forever — relative_chapter is the
    # one that terminates the script via its "exit" entry.
    get_view_chapter
    while true; do
        relative_chapter
    done
}

download_chapters(){
    # Batch mode: download an inclusive range of chapters into
    # $downdir/$titlename/<chapter>/, one directory per chapter.
    get_chapter 'Select first chapter '
    first="$chapter"
    get_chapter 'Select last chapter '
    last="$chapter"
    # Keep the sorted list up to $last, then print from $first onward —
    # an inclusive $first..$last slice (empty if picked in reverse order).
    chapterdownlist=$(get_chapter_list | sed "/^${last}$/q" | sed -n "/^${first}$/,\$p")
    [ "$chapterdownlist" = '' ] && exit_menu "Download list was empty, try changing selection order for first and last chapters"
    mkdir -p "$downdir"
    cd "$downdir" || return
    # '%s\n' keeps the scraped list out of the printf format string.
    printf '%s\n' "$chapterdownlist" | while IFS= read -r dchapter; do
        mkdir -p "${downdir}/${titlename}/${dchapter}"
        cd "${downdir}/${titlename}/${dchapter}" || continue
        printf 'Downloading %s\n' "$dchapter"
        download "$(get_image_link_list "$dchapter")"
    done
}


# --- main flow --------------------------------------------------------

# Ask for a search query; the site expects underscores instead of spaces.
searchword=$(get_query 'Enter search query ' | tr ' ' '_')

# The search endpoint returns JSON whose name fields embed highlight
# <span>s; strip them (patterns are escaped because the tags appear
# inside JSON string values).
data=$(curl -s 'https://mangakakalot.com/home_json_search' --data-raw "searchword=$searchword" | sed -e 's/<span class=\\"search_result_title_red\\">//g;s/<\\\/span>//g')

# `printf '%s'` everywhere below: the JSON is data, never a printf format
# string (the old `printf "$data"` mangled backslash escapes and `%`).
titlenames=$(printf '%s' "$data" | jq '.[].name')
[ "$titlenames" = '' ] && exit_menu 'Results not found '
titlename=$(printf 'exit\n%s\n' "$titlenames" | menu 'Select name ')
[ "$titlename" = 'exit' ] && exit
# $titlename is still jq-quoted here, so it compares verbatim in jq.
selection=$(printf '%s' "$data" | jq ".[] | select(.name==$titlename)")
# Sanitize the title for use as a directory name.
titlename=$(printf '%s' "$titlename" | tr -d '"' | tr -c 'A-Za-z0-9' '_')

# Titles hosted on readmanganato use a different chapter URL scheme.
if [ -n "$(printf '%s' "$selection" | jq '.story_link' | grep 'readmanganato')" ]; then
    url="$(printf '%s' "$selection" | jq '.story_link' | tr -d '"')/chapter-"
else
    url="https://mangakakalot.com/chapter/$(printf '%s' "$selection" | jq '.nameunsigned' | tr -d '"')/chapter_"
fi

# Scrape the chapter list from chapter 1's navigation dropdown.
# (was: "{$url}1" — a misplaced brace that only worked via curl's URL globbing)
chapters=$(curl -s "${url}1" | sed -e '/navi-change-chapter/!d;s/<\/option>/\n/g;s/>/>\n/g;s/ </</g' | grep data-c | sort -u | sed -e "s/<option data-c=//g;s/>//g;s/selected//g;s/[']//g" | sort -n)

if [ "$mode" = 'read' ]; then
    read_chapters
else
    download_chapters
fi
