package org.gensokyo.crawler.proxy

import com.typesafe.scalalogging.slf4j.StrictLogging
import org.gensokyo.crawler.util.SingalPageJsonFilePipeline
import us.codecraft.webmagic.processor.PageProcessor
import us.codecraft.webmagic.{Page, Site, Spider}

/** Immutable record for one proxy server scraped from a kuaidaili listing row.
  *
  * Every field defaults to the empty string so a row with missing cells can
  * still be represented without nulls.
  */
case class ProxyInfo(
    ip: String = "",
    port: String = "",
    secureFlag: String = "",
    httpType: String = "",
    area: String = "",
    speed: String = "",
    lastUpdateTime: String = ""
)

/** webmagic [[PageProcessor]] that parses proxy tables from kuaidaili.com.
  *
  * Handles two page families ("/proxylist" and "/free"); both render the
  * proxies as an HTML table whose cells carry a `data-title` attribute, so
  * the row-parsing logic is shared and only the row CSS selector differs.
  * Parsed rows are published under the result key "ProxyInfos".
  */
class KuaiProxyProcessor extends PageProcessor with StrictLogging {

  import scalaj.collection.Imports._

  // Crawl settings: retry up to 5 times, 500 ms politeness delay, 3-minute
  // timeout, and desktop-browser headers so the site serves normal HTML.
  private val site: Site = Site.me
    .setCycleRetryTimes(5)
    .setRetryTimes(5)
    .setSleepTime(500)
    .setTimeOut(3 * 60 * 1000)
    .setUserAgent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36 OPR/42.0.2393.517")
    .addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .addHeader("Accept-Language", "zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3")
  //    .setCharset("GBK")

  def getSite: Site = site

  // NOTE(review): not referenced anywhere in this class; kept for source
  // compatibility in case external code reads it. Candidate for removal.
  val r1 = """[\p{Print}]+ww4\.sinaimg\.cn[\p{Print}]+""".r

  /** Parses every table row matched by `rowSelector` into a [[ProxyInfo]].
    *
    * Each cell's `data-title` attribute is zipped with its text content to
    * build a key→value map; missing keys fall back to the empty string.
    * NOTE(review): "类型" feeds `secureFlag` and "get/post支持" feeds
    * `httpType` — this mirrors the original mapping; confirm the intended
    * field semantics if the model is ever extended.
    */
  private def extractProxyInfos(page: Page, rowSelector: String): List[ProxyInfo] =
    page.getHtml.css(rowSelector).nodes().asScalaMutable.toList.map { row =>
      val keys = row.css("td", "data-title").all().asScalaMutable.toList
      val values = row.css("td", "text").all().asScalaMutable.toList
      val m = keys.zip(values).toMap
      ProxyInfo(m.getOrElse("IP", "")
        , m.getOrElse("PORT", "")
        , m.getOrElse("类型", "")
        , m.getOrElse("get/post支持", "")
        , m.getOrElse("位置", "")
        , m.getOrElse("响应速度", "")
        , m.getOrElse("最后验证时间", "")
      )
    }

  /** Routes the fetched page to the matching parser by URL prefix and stores
    * the parsed rows in the page result under "ProxyInfos".
    */
  def process(page: Page) {
    logger.debug(s"""url=[${page.getUrl.get()}]""")
    if (page.getUrl.get().contains("""http://www.kuaidaili.com/proxylist""")) {
      page.putField("ProxyInfos",
        extractProxyInfos(page, """table.table-bordered.table-striped.table-index tbody tr"""))
    } else if (page.getUrl.get().contains("""http://www.kuaidaili.com/free""")) {
      page.putField("ProxyInfos", extractProxyInfos(page, """table tbody tr"""))
    } else {
      // Unrecognized URL: log and skip rather than fail the crawl.
      logger.error("no handler route to")
    }
  }
}


/** Entry point: crawls kuaidaili proxy listing pages and writes the parsed
  * proxies to `proxy.json` via [[SingalPageJsonFilePipeline]].
  */
object KuaiProxyProcessor extends StrictLogging {

  // Page-index URL lists for each kuaidaili listing section.
  // The original built an oversized range and sliced it
  // ((1 to 10).toList.slice(0, 5) etc.); the direct ranges below are
  // equivalent (pages 1..5 and 1..50 respectively).
  val urls: Array[String] = (1 to 5).map(x => s"""http://www.kuaidaili.com/proxylist/${x}/""").toArray
  val freeIntrUrls: Array[String] = (1 to 50).map(x => s"""http://www.kuaidaili.com/free/intr/${x}/""").toArray
  val freeInhaUrls: Array[String] = (1 to 50).map(x => s"""http://www.kuaidaili.com/free/inha/${x}/""").toArray
  val freeOuthaUrls: Array[String] = (1 to 50).map(x => s"""http://www.kuaidaili.com/free/outha/${x}/""").toArray
  val freeOuttrUrls: Array[String] = (1 to 50).map(x => s"""http://www.kuaidaili.com/free/outtr/${x}/""").toArray

  /** Runs a spider over the free/outtr pages (only that list is crawled here)
    * and pipes each page's results into `proxy.json`.
    */
  def deal: Unit = {
    //    logger.debug(s"urls:${freeOuthaUrls.toList}")
    Spider.create(new KuaiProxyProcessor)
      .addUrl(freeOuttrUrls: _*)
      //      .setScheduler(new FileCacheQueueScheduler("z_cache"))
      .addPipeline(new SingalPageJsonFilePipeline("proxy.json"))
      //      .setDownloader(new CustomHttpClientDownloader)
      .run()
  }

  def main(args: Array[String]): Unit = {
    deal
  }

}
