package org.gensokyo.crawler

import com.typesafe.scalalogging.slf4j.StrictLogging
import us.codecraft.webmagic.processor.PageProcessor
import us.codecraft.webmagic.{Page, Site, Spider}

import scalaj.collection.Imports._

/**
 * WebMagic [[PageProcessor]] that extracts image URLs from t66y post pages,
 * filtering out images hosted on `ww4.sinaimg.cn`.
 */
class T66yImageProcessor extends PageProcessor with StrictLogging {

  // Crawl configuration: 5 retries (both cycle and plain), 500 ms politeness
  // delay, 3-minute timeout, and browser-like headers to avoid trivial blocking.
  private val site: Site = Site.me
    .setCycleRetryTimes(5)
    .setRetryTimes(5)
    .setSleepTime(500)
    .setTimeOut(3 * 60 * 1000) // 3 minutes, in milliseconds
    .setUserAgent("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0")
    .addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .addHeader("Accept-Language", "zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3")
  //    .setCharset("GBK")

  def getSite: Site = site

  /** Matches any printable string containing the `ww4.sinaimg.cn` image host (used to exclude such URLs). */
  val r1 = """[\p{Print}]+ww4\.sinaimg\.cn[\p{Print}]+""".r

  /**
   * Extracts `src` URLs of `input[src^=http]` elements inside `.tpc_content.do_not_catch`,
   * drops those hosted on `ww4.sinaimg.cn`, and stores the remainder on the page
   * under the `"images"` key so downstream pipelines can consume them.
   *
   * @param page the fetched page to process
   */
  def process(page: Page): Unit = {
    logger.debug(s"""url=[${page.getUrl.get()}]""")
    // FIX: the filtered list was previously computed and then discarded;
    // bind it and publish it via putField so the extraction has an effect.
    val images = page.getHtml.css(""".tpc_content.do_not_catch input[src^=http]""").all().asScalaMutable.toList.flatMap {
      case r1() => None    // sinaimg-hosted image: skip
      case v    => Some(v) // keep everything else
    }
    page.putField("images", images)
  }
}


/** Entry point: wires a [[Spider]] to [[T66yImageProcessor]] and runs it against a local test page. */
object T66yImageProcessor {

  /**
   * Creates and runs the spider over `http://localhost:8000/1.html`.
   * Blocks until the crawl finishes. Scheduler/pipeline/downloader
   * customizations are kept commented out for quick re-enabling.
   */
  def deal: Unit = {
    Spider.create(new T66yImageProcessor)
      .addUrl("http://localhost:8000/1.html")
      //      .setScheduler(new FileCacheQueueScheduler("z_cache"))
      //      .addPipeline(new ScalaJsonFilePipeline("ProvinceInfo0.json", true))
      //      .setDownloader(new CustomHttpClientDownloader)
      .run()
  }

  /** JVM entry point; delegates to [[deal]]. */
  def main(args: Array[String]): Unit = deal

}
