# Scrape the fossilworks taxonomy: recursively collect subtaxa links, then
# fetch per-taxon detail pages and merge the results across crawl rounds.
# NOTE(review): `rm(list = ls())` in a script is an anti-pattern (wipes the
# user's workspace, not a clean session); prefer running via Rscript instead.
rm(list = ls())
library(tidyverse)
library(rvest)
library(progress)
# Output root for this crawl round ("hy2"); one subdirectory per taxon_no.
od_home <- "/Pub/Users/wangyk/Project_wangyk/Codelib_YK/tmp/hy2/"
# All page links scraped below are relative to this CGI endpoint.
always_url <-"http://www.fossilworks.org/cgi-bin/bridge.pl"

# url <- "http://www.fossilworks.org/cgi-bin/bridge.pl?a=taxonInfo&taxon_no=142047"

# Seed URLs, one per line. NOTE(review): "buchong" is presumed to hold
# taxonInfo URLs ending in "taxon_no=<id>" -- the loop below depends on that.
urls <- read_lines("/Pub/Users/wangyk/Project_wangyk/Codelib_YK/tmp/buchong")

# Scrape one fossilworks taxon page and return its "Subtaxa" links.
#
# Returns a named list: names are the link texts (subtaxon names), values
# are character vectors of the <a> attribute values (relative URLs).
# An empty list means the taxon is a leaf (no subtaxa section).
find_subtaxa_html <- function(url) {
    page <- rvest::read_html(url)
    span_nodes <- html_nodes(page, "p span")
    # Keep only the <span> block(s) whose text mentions "Subtaxa".
    subtaxa_block <- span_nodes[str_detect(html_text2(span_nodes), "Subtaxa")]
    link_nodes <- html_nodes(subtaxa_block, "a")
    links <- map(html_attrs(link_nodes), as.character)
    set_names(links, html_text2(link_nodes))
}

# Tick the shared progress bar and fetch the subtaxa of entry `x`.
# NOTE(review): this relies on globals set up by the crawl loop below --
# `pd` (progress bar) and `a` (current frontier, a named list of relative
# links) -- and writes its result to global `b` via `<<-`. Passing these as
# arguments would be cleaner; kept as-is because the loop depends on this
# exact coupling.
foo <- function(x) {
    # message(str_glue("\n\n"))
    # message(str_glue("\n{x}\n"))
    # message(str_glue("\n"))
    pd$tick()
    b <<- find_subtaxa_html(paste0(always_url, a[[x]]))
}

# Wrapper that returns the string "Error" instead of raising when a page
# fails to download or parse; the crawl loop filters on this sentinel.
safe_foo <- possibly(.f = foo, otherwise = "Error")

# Crawl the taxonomy breadth-first: for each seed URL, expand the frontier
# of subtaxa links level by level until every branch bottoms out in a leaf
# taxon (one with no subtaxa). Leaf links are appended to end_link.tsv.
for (url in urls) {
    # Name the output directory after the taxon_no at the end of the URL.
    loc <- str_locate(url, "no=") %>% as.vector()
    od <- str_glue("{od_home}{str_sub(url,loc[2]+1,str_count(url))}")
    dir.create(od)

    # `a` is the current frontier: a named list of relative subtaxa links.
    a <- find_subtaxa_html(url)

    repeat{
        pd <- progress_bar$new(total = length(a), width = 44, format = "[:bar] :percent :eta", clear = F)
        # One result per frontier entry; each element is a named list of
        # child links, an empty list (leaf taxon), or the string "Error"
        # (fetch failed -- see safe_foo above).
        aa <- map(names(a),safe_foo)

        # Length 0 marks a leaf taxon with no further subtaxa.
        Judgment_condition <- map_int(aa, ~ length(.x))

        # NOTE(review): any(which(cond)) happens to work because which()
        # only yields positive indices, but `any(Judgment_condition == 0)`
        # would say the same thing directly.
        if (any(which(Judgment_condition == 0))) {
            # Drop the leaves from this level's results ...
            no_end_index <- which(Judgment_condition != 0)
            aa <- aa[no_end_index]

            # ... and check whether any remaining result is the error sentinel.
            filter_cod <- any(map_lgl(aa, ~ .x[[1]] == "Error"))

            if (filter_cod) {
                error_index <- which(map_lgl(aa, ~ .x[[1]] == "Error"))
                aa <- aa[-error_index]
            }

            # Persist the leaf taxa reached at this level (appended without
            # a header; the consolidation step below reads with header = F).
            end_index <- which(Judgment_condition == 0)
            end_links <- a[end_index]
            tibble(spc = names(end_links), link = as.character(unlist(end_links))) %>%
                write_tsv(file = str_glue("{od}/end_link.tsv"), append = T)

            # Mirror the same filtering on the frontier so `a` and `aa`
            # stay position-aligned. NOTE(review): errored pages are
            # silently dropped here and never retried -- confirm that loss
            # is acceptable.
            a <- a[no_end_index]
            if (filter_cod) {
                a <- a[-error_index]
            }
        }

        if (sum(Judgment_condition) == 0) {
            # Every frontier entry was a leaf: this seed is fully crawled.
            break
        } else {
            # Flatten the children into the next frontier. unlist() joins
            # parent and child names with "."; rewrite that to "__" so the
            # lineage stays readable in the output names.
            names(aa) <- names(a)
            a <- unlist(aa) %>% as.list()
            names(a) <- names(a) %>% str_replace_all("\\.", "__")
        }
    }
}

# Consolidate files: merge the round-1 (hy) and round-2 (hy2) end-link
# tables into a de-duplicated table per taxon under the hy3 tree.
root_dirs <- list.files("/Pub/Users/wangyk/Project_wangyk/Codelib_YK/tmp", full.names = TRUE)

# NOTE(review): root_dirs[4] is assumed to be the hy2 directory -- this
# depends on the alphabetical listing of tmp/; confirm before re-running.
file_dirs_hy2 <- list.files(root_dirs[4], full.names = TRUE, recursive = TRUE)
file_dirs_hy1 <- str_replace(file_dirs_hy2, "hy2", "hy")
file_dirs_hy3 <- str_replace(file_dirs_hy2, "hy2", "hy3")

# Make sure every destination directory exists before writing.
walk(dirname(file_dirs_hy3), ~ dir.create(.x, recursive = TRUE))

# Stack both rounds' headerless link tables, name the columns, drop
# duplicates, and write the merged table into the hy3 tree.
pwalk(
    list(file_dirs_hy1, file_dirs_hy2, file_dirs_hy3),
    function(round1_file, round2_file, out_file) {
        merged <- bind_rows(
            read.delim(round1_file, header = FALSE),
            read.delim(round2_file, header = FALSE)
        ) %>%
            rename(spc = 1, link = 2) %>%
            distinct()
        write_tsv(merged, file = out_file)
    }
)


# Scrape the detail page for one taxon.
#
# `x` is a relative link ("?a=taxonInfo&taxon_no=..."). Returns a one-row
# tibble with the page's display name and its "Full reference", "Age range"
# and "Distribution" paragraphs; a field is NA when that paragraph is
# absent from the page. Ticks the shared progress bar `pd` as a side effect.
make_df_finial <- function(x) {
    pd$tick()  # shared progress bar created by the caller loop below
    # x = '?a=taxonInfo&taxon_no=262924'
    url <- paste0(always_url, x)
    main <- rvest::read_html(url)
    display_name_node <- main %>% html_nodes("span")

    # NOTE(review): the display name is assumed to be the 2nd <span> on the
    # page -- confirm this holds for every taxonInfo layout.
    display_name <- html_text2(display_name_node)[2]

    infor_final <- main %>% html_nodes("p")
    infor_text <- html_text2(infor_final)  # hoisted: convert once, reuse below
    main_chr <- c("Full reference", "Age range", "Distribution")
    infors <- map(main_chr, function(chr) {
        hit <- infor_text[str_detect(infor_text, chr)]
        # First match or NA so every field has length 1. Previously a
        # missing paragraph yielded character(0), tibble() errored on the
        # size mismatch, and the whole taxon was discarded as "Error".
        if (length(hit) == 0) NA_character_ else hit[1]
    })
    names(infors) <- main_chr

    tibble(
        link = x,
        url = url,
        display_name = display_name,
        `Full reference` = infors[["Full reference"]],
        `Age range` = infors[["Age range"]],
        `Distribution` = infors[["Distribution"]]
    )
}

# Wrapper returning the sentinel "Error" instead of raising when a detail
# page fails to download or parse.
safe_make_df_finial <- possibly(.f = make_df_finial, otherwise = "Error")


# Fetch the detail page for every collected leaf link and join the details
# back onto the link table; one output file per merged link table.
for (i in file_dirs_hy3) {
    df <- read_tsv(i)

    pd <- progress_bar$new(total = nrow(df), width = 40, format = "[:bar] :percent :eta")
    # infor_df <- map_df(df[["link"]], safe_make_df_finial)

    infor_df <- map_df(df[["link"]], function(j) {
        y <- safe_make_df_finial(j)
        # Skip failed fetches: the sentinel is a bare character string,
        # and is.character() is the idiomatic test (class(y) == "character"
        # is fragile against multi-class objects).
        if (is.character(y)) {
            tibble()
        } else {
            y
        }
    })
    # Join explicitly on "link" (the only column shared by both tables) to
    # avoid the implicit-join message. NOTE(review): the output keeps the
    # .xls name for compatibility even though the content is TSV.
    inner_join(df, infor_df, by = "link") %>%
        write_tsv(file = str_replace(i, "end_link.tsv", "Full_information2.xls"))
}




# Merge the first-pass (Full_information) and second-pass
# (Full_information2) detail tables into one de-duplicated final table
# per taxon (Full_information_final).
files2 <- list.files("/Pub/Users/wangyk/Project_wangyk/Codelib_YK/tmp/hy3", full.names = TRUE, recursive = TRUE, pattern = "Full_information2")
files1 <- str_replace_all(files2, "Full_information2", "Full_information")
files3 <- str_replace_all(files2, "Full_information2", "Full_information_final")

pwalk(
    list(files1, files2, files3),
    function(pass1_file, pass2_file, final_file) {
        combined <- bind_rows(
            read.delim(pass1_file),
            read.delim(pass2_file)
        ) %>%
            distinct()
        write_tsv(combined, file = final_file)
    }
)

