package org.gensokyo.crawler.luluba

import com.typesafe.scalalogging.slf4j.StrictLogging
import org.gensokyo.crawler.util.SingalPageJsonFilePipeline
import us.codecraft.webmagic.processor.PageProcessor
import us.codecraft.webmagic.{Page, Site, Spider}

/** WebMagic page processor for the luluba crawl.
  *
  * Currently a skeleton: it configures the crawl [[Site]] (retries,
  * throttling, timeout, browser-like headers) and logs each visited URL,
  * but does not yet extract any fields (extraction code is commented out).
  */
class LulubaProcessor extends PageProcessor with StrictLogging {

  // NOTE(review): unused in the visible code — presumably needed by
  // planned extraction logic; confirm before removing.
  import scalaj.collection.Imports._

  // Crawl configuration: 5 retries (both cycle and plain), 500 ms politeness
  // delay between requests, 3-minute timeout, and headers mimicking a
  // desktop Chrome/Opera browser.
  private val site: Site = Site.me
    .setCycleRetryTimes(5)
    .setRetryTimes(5)
    .setSleepTime(500)
    .setTimeOut(3 * 60 * 1000) // milliseconds: 3 minutes
    .setUserAgent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36 OPR/42.0.2393.517")
    .addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .addHeader("Accept-Language", "zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3")
  //    .setCharset("GBK")

  /** Required by [[PageProcessor]]: exposes the crawl configuration. */
  def getSite: Site = site

  // Matches any printable-character run containing the "ww4.sinaimg.cn"
  // image host. NOTE(review): not referenced by process() yet — presumably
  // intended for image-URL extraction; confirm before removing.
  val r1 = """[\p{Print}]+ww4\.sinaimg\.cn[\p{Print}]+""".r

  /** Processes one downloaded page.
    *
    * Only logs the page URL for now; no fields are put into the result
    * (see the commented-out `putField` call).
    *
    * Fixed: replaced deprecated procedure syntax (`def process(...) {`)
    * with an explicit `: Unit =` result type.
    */
  def process(page: Page): Unit = {
    logger.debug(s"""url=[${page.getUrl.get()}]""")
    //    page.putField("ProxyInfos", ll)
  }
}


/** Companion entry point: wires a [[Spider]] to [[LulubaProcessor]] and
  * runs a single crawl from the hard-coded seed URL.
  */
object LulubaProcessor extends StrictLogging {

  /** Builds the spider and runs it synchronously (blocks until the crawl
    * completes). Scheduler, pipeline, and custom downloader hooks are left
    * commented out for future use.
    *
    * Fixed: annotated the public member with an explicit `: Unit` result
    * type instead of relying on inference.
    */
  def deal: Unit = {
    Spider.create(new LulubaProcessor)
      .addUrl("http://www.avshe8.com/?x=347989")
      //      .setScheduler(new FileCacheQueueScheduler("z_cache"))
      //      .addPipeline(new SingalPageJsonFilePipeline("proxy.json"))
      //      .setDownloader(new CustomHttpClientDownloader)
      .run()
  }

  /** JVM entry point: kicks off one crawl run.
    *
    * Fixed: replaced deprecated procedure syntax with `: Unit =`.
    */
  def main(args: Array[String]): Unit = {
    deal
  }

}
