package indexer

import com.mongodb.MongoClient
import com.mongodb.ServerAddress
import com.mongodb.client.model.*
import com.mongodb.client.model.Updates.combine
import com.mongodb.client.model.Updates.set
import indexer.processer.DingdianPageProcessor
import indexer.processer.QuanshuwuPageProcessor
import indexer.processer.SilukePageProcessor
import indexer.processer.UrlPageProcessor
import indexer.processer.biquge.BiqugePageProcessor
import indexer.processer.bxwx.BxwxPageProcessor
import indexer.processer.ybdu.YbduPageProcessor
import org.slf4j.LoggerFactory
import us.codecraft.webmagic.Spider
import us.codecraft.webmagic.scheduler.BloomFilterDuplicateRemover
import us.codecraft.webmagic.scheduler.QueueScheduler
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.concurrent.TimeUnit

/**
 * Dispatches crawl jobs: atomically claims "unstarted" site documents from the
 * `book.sites` collection, runs the matching WebMagic spider, and releases the
 * claim when the spider finishes.
 *
 * @param mongoClient shared Mongo connection (also passed to listeners/pipeline)
 * @param threads     number of worker threads per spider
 */
class NovelCrawer(val mongoClient: MongoClient,
                  val threads: Int) {

    val mongoPipeline = MongoPipeline(mongoClient)
    val log = LoggerFactory.getLogger(NovelCrawer::class.java)

    // All known site processors; matched against a site document's "url" field.
    val pps = listOf(
            SilukePageProcessor(),
            DingdianPageProcessor(),
            BiqugePageProcessor(),
            BxwxPageProcessor(),
            YbduPageProcessor(),
            QuanshuwuPageProcessor())

    val database = mongoClient.getDatabase("book")
    val spiders = database.getCollection("spiders")

    /**
     * Polls forever: claims the most recently created "unstarted" site,
     * crawls it, then resets it to "unstarted" so it can be crawled again.
     * Sleeps 20s between iterations. Never returns.
     */
    fun start() {
        val sites = database.getCollection("sites")
        // Loop-invariant options: prefer the newest site and return the
        // document as it looks AFTER the status flip.
        val opt = FindOneAndUpdateOptions()
                .sort(Sorts.descending("created"))
                .returnDocument(ReturnDocument.AFTER)

        while (true) {
            // Atomic claim: flip exactly one "unstarted" doc to "started" so
            // concurrent crawler instances never pick the same site.
            val doc = sites.findOneAndUpdate(
                    Filters.eq("status", "unstarted"),
                    Updates.set("status", "started"),
                    opt)
            if (doc != null) {
                val url = doc.getString("url")
                val pp: UrlPageProcessor? = pps.find { it.getUrl() == url }
                if (pp != null) {
                    val spider = newSpider(pp)
                    try {
                        spiderStatus(spider)
                        spider.run()
                    } catch (e: Exception) {
                        log.error("spider for $url failed", e)
                    } finally {
                        spiderStatus(spider)
                        // Release the claim so the site is eligible again.
                        sites.updateOne(Filters.eq("url", url), Updates.set("status", "unstarted"))
                    }
                } else {
                    // BUG FIX: previously the claimed document was left
                    // "started" forever and silently dropped from the queue.
                    // Mark it "error" so it is excluded from future claims but
                    // remains visible for inspection.
                    log.error("no page processor registered for url: {}", url)
                    sites.updateOne(Filters.eq("url", url), Updates.set("status", "error"))
                }
            } else {
                log.error("no url to crawl!")
            }

            TimeUnit.SECONDS.sleep(20)
        }
    }

    /** Upserts the spider's current status plus a Shanghai-time timestamp into `book.spiders`, keyed by spider uuid. */
    private fun spiderStatus(spider: Spider) {
        spiders.updateOne(
                Filters.eq("_id", spider.uuid),
                combine(
                        set("status", spider.status.name),
                        set("time", LocalDateTime.now(ZoneId.of("Asia/Shanghai"))
                                .format(DateTimeFormatter.ISO_DATE_TIME))),
                UpdateOptions().upsert(true))
    }

    /**
     * Builds a single-site spider for [pp]. The processor's start url doubles
     * as the spider uuid; duplicate urls are suppressed by a Bloom filter
     * sized for 100k entries.
     */
    fun newSpider(pp: UrlPageProcessor): Spider {
        val uuid = pp.getUrl()
        val scheduler = QueueScheduler()
                .setDuplicateRemover(BloomFilterDuplicateRemover(100_000))

        return Spider.create(pp)
                .addUrl(uuid)
                .setUUID(uuid)
                .setScheduler(scheduler)
                .setSpiderListeners(listOf(LoggingListener, StatusListener(mongoClient, uuid)))
                .addPipeline(mongoPipeline)
                .thread(threads)
                .setExitWhenComplete(true)
    }
}

/**
 * Entry point. Configuration via system properties:
 *   -DdbUrl  Mongo address as host[:port]  (default "localhost:7017")
 *   -Dc      spider thread count           (default 8)
 */
fun main(args: Array<String>) {
    val mongoUrl = System.getProperty("dbUrl", "localhost:7017").split(":")
    val mongoHost = mongoUrl[0]
    // BUG FIX: a bare host (no ":port") previously threw
    // IndexOutOfBoundsException at mongoUrl[1], and a non-numeric port threw
    // an unguarded NumberFormatException; fall back to the default port 7017.
    val mongoPort = mongoUrl.getOrNull(1)?.toIntOrNull() ?: 7017
    val threads = System.getProperty("c", "8").toInt()
    // NOTE(review): the "debug" system property was read but never used; dropped.

    val mongoClient = MongoClient(ServerAddress(mongoHost, mongoPort))
    NovelCrawer(mongoClient, threads).start()
}