package com.qen.yanshen

import cn.hutool.core.date.DateUtil
import cn.hutool.core.io.file.FileWriter
import cn.hutool.core.lang.Console
import cn.hutool.core.util.StrUtil
import cn.hutool.poi.excel.ExcelUtil
import it.skrape.core.htmlDocument
import it.skrape.fetcher.HttpFetcher
import it.skrape.fetcher.response
import it.skrape.fetcher.skrape
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.sync.Semaphore
import java.util.Date
import java.util.concurrent.atomic.AtomicInteger

/** Target number of articles to scrape — stop once 22 have been collected. */
const val MaxNum = 22

/**
 * Crawls the board: first pages through the listing collecting article links,
 * dumps the collected metadata to an Excel workbook, then downloads each
 * article body on a pool of worker threads via [detailArticle].
 */
fun main() {
    var page = 1
    var num = 0
    var urlPre = YanShenUrl
    val list: MutableList<Article> = ArrayList()

    // BUG FIX: the original loop read `while (true) returnWhile@ { ... }`. A label
    // placed directly before `{` makes the body parse as a lambda LITERAL, which was
    // created each iteration but never invoked — the loop spun forever doing nothing.
    while (true) {
        var flag = false
        val urlNew = if (page == 1) YanShenUrl else YanShenUrl + "/page/${page}/#board"
        val referer = if (page == 1) YanShenUrl else YanShenUrl + "/page/${page}/"
        skrape(HttpFetcher) {
            request {
                url = urlNew
                headers = initHeader(urlPre)
                cookies = initCookie()
                userAgent = UserAgent
            }
            response {
                htmlDocument {
                    // Class selector: every entry row on the board listing page.
                    ".list-group-item-action" {
                        findAll {
                            forEach entry@{
                                // Target already reached on this page — skip the rest.
                                if (flag) {
                                    return@entry
                                }
                                val url = it.attribute("href")
                                val item = it.children
                                val time = item.get(0).text
                                val title = item.get(1).text
                                list.add(
                                    Article(
                                        url = BaseYanShenUrl + url,
                                        date = time,
                                        title = title,
                                        content = "./$title${DateUtil.format(Date(), "MMdd")}.txt",
                                        html = ""
                                    )
                                )
                                // BUG FIX: the original incremented and tested BEFORE
                                // adding, so only MaxNum - 1 articles were collected.
                                num++
                                if (num >= MaxNum) {
                                    flag = true
                                }
                            }
                        }
                    }
                }
            }
        }
        page++
        if (flag) {
            break
        }
        urlPre = referer
    }
    // BUG FIX: pattern "YYYY" is SimpleDateFormat's week-based year and yields the
    // wrong year around New Year; "yyyy" is the calendar year.
    val writer = ExcelUtil.getWriter(
        ZhihuOutFile + "/${DateUtil.format(Date(), "yyyyMMdd")}/Zhihu-${
            DateUtil.format(
                Date(),
                "yyyyMMdd"
            )
        }.xlsx"
    )
    writer.passCurrentRow()
    writer.write(list, true)
    writer.close()

    /** Worker thread pool. */
    val threads = ArrayList<Thread>()

    /** Released by a worker once all links are consumed; main blocks on it. */
    val newsTargetReachedSemaphore = Semaphore(THREAD_COUNT, THREAD_COUNT)

    /** One permit circulates, serialising access to the shared cursor/state. */
    val linkQueueAvailableSemaphore = Semaphore(Int.MAX_VALUE, Int.MAX_VALUE)

    /** Put the single permit into circulation. */
    linkQueueAvailableSemaphore.release()

    /** Index of the next article in [list] to fetch. */
    val pageNum = AtomicInteger()

    // Guarded by the linkQueueAvailableSemaphore permit.
    var preUrlDetail = YanShenUrl
    // BUG FIX: `0..THREAD_COUNT` is an inclusive range and started one thread too many.
    for (i in 0 until THREAD_COUNT) {
        val thread = Thread {
            while (true) {
                runBlocking {
                    linkQueueAvailableSemaphore.acquire()
                }
                // BUG FIX: claim the next index atomically while holding the permit.
                // The original read pageNum twice (get() then getAndIncrement()),
                // letting two workers fetch the same article, and its bound check
                // against MaxNum allowed list.get() to run past the end of the list.
                val idx = pageNum.getAndIncrement()
                if (idx >= list.size) {
                    // BUG FIX: release the permit before exiting, otherwise the
                    // remaining workers block forever in acquire() and join() hangs.
                    linkQueueAvailableSemaphore.release()
                    newsTargetReachedSemaphore.release()
                    break
                }
                val obj = list.get(idx)
                Console.log("${Thread.currentThread().id}：title:${obj.title}:fetching ${obj.url}")
                try {
                    detailArticle(goUrl = obj.url, refererUrl = preUrlDetail, title = obj.title)
                } catch (e: Exception) {
                    e.printStackTrace()
                }
                preUrlDetail = obj.url
                linkQueueAvailableSemaphore.release()
            }
        }
        thread.start()
        threads.add(thread)
    }
    runBlocking {
        newsTargetReachedSemaphore.acquire()
    }
    threads.forEach { t -> t.join() }
}

/**
 * Fetches a single article page and writes its plain-text content to disk.
 *
 * @param goUrl      absolute URL of the article to download
 * @param refererUrl value passed to [initHeader] for the Referer header
 *                   (the previously visited page)
 * @param title      article title, used to build the output file name
 * @return relative path of the text file the content was written to
 */
fun detailArticle(goUrl: String, refererUrl: String, title: String): String {

    // Raw HTML and flat text of the article body; kept for the (currently
    // commented-out) debug logging below.
    val html = StringBuilder()
    val content = StringBuilder()
    val contentList = mutableListOf<String>()
    skrape(HttpFetcher) {
        request {
            url = goUrl
            headers = initHeader(refererUrl)
            cookies = initCookie()
            userAgent = UserAgent
        }
        response {
            htmlDocument {
                ".markdown-body" {
                    findAll {
                        forEach { t ->
                            html.append(t.html)
                            t.children.forEach { ii ->
                                content.append("${ii.text}\n")
                                contentList.add(ii.text)
                            }
                        }
                    }
                }
            }
        }
    }
//    Console.log("content:$content")
//    Console.log("Html:$html")
//    Console.log(contentList.joinToString("\n"))
    val fileName = "$title${DateUtil.format(Date(), "MMdd")}.txt"
    val writer = FileWriter(ZhihuOutFile + fileName)
    writer.write(StrUtil.join("\n", contentList))
    // BUG FIX: the original returned "./$title.txt", which matched neither the file
    // actually written ("$title<MMdd>.txt") nor the path main() records in
    // Article.content ("./$title<MMdd>.txt").
    return "./$fileName"
}
