package test

import cn.edu.hfut.dmic.webcollector.model.{CrawlDatums, Page}
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler
import implicits.Implicits._

import java.io.File
import scala.util.control.NonFatal

/**
 * Downloads KEGG "get" records for IDs listed in per-directory `or.txt`
 * files under [[annoOutDir]] and saves each fetched page as `<id>.txt`
 * under [[koDir]].
 *
 * Created by yz on 13/12/2021
 */
object KoCrawler {

  // Root output directory. NOTE(review): hard-coded Windows path — adjust
  // (or externalize to config/args) when running on another machine.
  val outDir = new File("E:\\snp_database\\test")
  // Each sub-directory here may contain an `or.txt` listing IDs to fetch.
  val annoOutDir = new File(outDir, "anno")
  // Destination directory for the downloaded KEGG records.
  val koDir = new File(outDir, "ko")

  /**
   * Breadth-first crawler that writes every fetched page's HTML to
   * `koDir/<id>.txt`, where `<id>` is the last `:`-separated token of the
   * URL's final path segment (e.g. `.../get/ko:K00001` -> `K00001.txt`).
   *
   * @param crawlPath path of the crawler's persistent state store
   * @param autoParse whether WebCollector should auto-extract links
   */
  class Crawler(crawlPath: String, autoParse: Boolean) extends BreadthCrawler(crawlPath, autoParse) {
    override def visit(page: Page, next: CrawlDatums): Unit = {
      val id = page.url().split('/').last.mySplit(":").last
      try {
        page.html().toFile(new File(koDir, s"$id.txt"))
      } catch {
        // Log and keep crawling: one failed page must not abort the crawl.
        // NonFatal (instead of Exception) still lets OutOfMemoryError,
        // InterruptedException, etc. propagate.
        case NonFatal(e) =>
          println(e)
          println(id)
      }
    }
  }

  /**
   * Entry point: collects the IDs still missing a downloaded `.ko` file,
   * seeds the crawler with one KEGG REST URL per ID, and starts the crawl.
   */
  def main(args: Array[String]): Unit = {
    val crawler = new Crawler("crawler", false)
    crawler.setThreads(10)

    // IDs come from every anno sub-directory that has an or.txt but no
    // downloaded .ko file yet.
    val needGetIds = annoOutDir.myListFiles
      .filter(dir => new File(dir, "or.txt").exists())
      .filterNot(_.myListFiles.exists(_.getName.endsWith(".ko")))
      .flatMap { dir =>
        // NOTE(review): txtLines presumably yields the split fields of each
        // line; element 0 is taken as the ID — confirm against Implicits.
        new File(dir, "or.txt").txtLines.map(_(0))
      }

    println(needGetIds.size)
    println(needGetIds)
    needGetIds.foreach(id => crawler.addSeed(s"http://rest.kegg.jp/get/$id"))
    crawler.start(1)
  }

}
