package test

import cn.edu.hfut.dmic.webcollector.model._
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler
import command.CommandExec
import command.CommandPojo.CommandData

import java.io.{BufferedOutputStream, File, FileOutputStream}
import implicits.Implicits._
import org.apache.commons.net.ftp._
import utils.Utils

import java.net.URL
import scala.collection.JavaConverters._
import sys.process._

/** Created by yz on 19/7/2021
  */
object GbffCrawler {

  // Root working directory holding the input `prokaryotes.csv` and the output folder.
  // NOTE(review): hard-coded Windows path — consider making this configurable.
  val parentDir = new File("E:\\mtd_database\\test")

  // One sub-directory per BioSample id is created under this directory; each holds
  // the downloaded `<id>.gbff.gz` (and, after decompression, a `.gbff`).
  val outDir = new File(parentDir, "my_out")

  /** Breadth-first crawler that visits NCBI FTP listing pages (served over HTTPS)
    * and downloads the `*_genomic.gbff.gz` file for each BioSample that does not
    * already have a usable local copy.
    *
    * @param crawlPath storage path for WebCollector's BerkeleyDB crawl state
    * @param autoParse forwarded to [[BreadthCrawler]]; `main` passes `false`, so
    *                  only explicitly added seeds are visited
    */
  class Crawler(crawlPath: String, autoParse: Boolean) extends BreadthCrawler(crawlPath, autoParse) {
    val startTime = System.currentTimeMillis()

    // Rows of prokaryotes.csv as column-name -> value maps.
    // NOTE(review): `csvLines` / `lineSeqMap` are project implicits — assumed to
    // parse the header row into per-line maps; verify against their definitions.
    val maps = new File(parentDir, "prokaryotes.csv").csvLines.lineSeqMap

    // listing-page URL -> BioSample id, for every row that still needs a download.
    val linkIdMap = maps
      .filter { map =>
        // Keep only rows that have at least one FTP link to fetch from.
        val refLink = map("RefSeq FTP")
        val genLink = map("GenBank FTP")
        refLink.nonBlank || genLink.nonBlank
      }
      .filter { map =>
        // Keep rows that still need (re-)downloading: either no .gbff.gz exists
        // yet, or one exists but could not be decompressed (presumably a corrupt
        // or truncated archive from an earlier run — re-fetch it).
        val id = map("BioSample")
        val dir = new File(outDir, id)
        val gbffFileExist = dir.myListFiles.exists(_.getName.endsWith(".gbff.gz"))
        (!gbffFileExist) || {
          // Only attempt decompression if no extracted .gbff is present already.
          (!dir.myListFiles.exists(_.getName.endsWith(".gbff"))) && {
            val gzFile = dir.myListFiles.find(_.getName.endsWith(".gbff.gz")).get
            val command =
              s"""
               |gunzip -k ${gzFile.unixPath}
               |""".stripMargin
            val commandExec = CommandExec().exec { b =>
              CommandData(dir, command)
            }
            if (!commandExec.isSuccess) {
              println(id, commandExec.errorInfo)
            }
            // gunzip failed -> keep the row so the archive gets downloaded again.
            (!commandExec.isSuccess)
          }
        }
      }
      .map { map =>
        // Prefer the RefSeq link; fall back to GenBank. Rewrite the ftp:// scheme
        // to https:// (NCBI serves the same paths over HTTPS) and add a trailing
        // slash so the crawler fetches the directory listing.
        val refLink = map("RefSeq FTP")
        val genLink = map("GenBank FTP")
        val ftpLink = if (refLink.nonBlank) {
          refLink
        } else genLink
        val link = s"${ftpLink.replaceAll("^ftp", "https")}/"
        (link, map("BioSample"))
      }
      .toSeqMap

    val links = linkIdMap.keyList

    // Seed the crawler with every listing-page URL (autoParse is false, so only
    // these seeds are visited).
    links.foreach { x =>
      this.addSeed(s"${x}")
    }

    // Progress counter shared across crawler threads; mutated only inside
    // `this.synchronized` in `visit`.
    var i = 0

    /** Called by WebCollector for each fetched listing page: finds the
      * `*_genomic.gbff.gz` link and streams it into `outDir/<BioSample>/`.
      * Any failure is logged (stack trace + URL) and swallowed so one bad page
      * does not stop the crawl.
      */
    override def visit(page: Page, next: CrawlDatums): Unit = {
      val url = page.url()
      try {
        val gbffLinks = page.links().asScala.toList.filter { x =>
          x.endsWith("genomic.gbff.gz")
        }
        // A listing normally carries exactly one genomic.gbff.gz; log anomalies.
        if (gbffLinks.size > 1) println(url)
        val id = linkIdMap(url)
        val eachOutDir = new File(outDir, id).createDirectoryWhenNoExist
        // Stream the remote file straight to disk via sys.process (`URL #> File`).
        // `gbffLinks.head` throws on an empty list; the catch below logs the URL.
        (new URL(gbffLinks.head).#>(new File(eachOutDir, s"${id}.gbff.gz"))).!!
        // FIX: `visit` runs concurrently on all crawler threads (setThreads(10));
        // the unguarded `i += 1` was a data race that could lose increments.
        // Guard the counter update and the progress line together.
        this.synchronized {
          i += 1
          println(i, linkIdMap.size, Utils.getTime(startTime))
        }
      } catch {
        case e: Exception =>
          e.printStackTrace()
          println(url)
      }

    }
  }

  /** Entry point: runs the crawler with 10 worker threads for 1 depth level. */
  def main(args: Array[String]): Unit = {

    val crawler = new Crawler("crawl", false)
    crawler.setThreads(10)
    crawler.start(1)
  }

}
