package com.crawler.service

import java.io.File
import java.lang.reflect.Method
import java.net.{URL, URLClassLoader}
import java.util
import java.util.Date
import java.util.concurrent.TimeUnit

import akka.actor.{Props, Actor, ActorLogging}
import com.crawler.client.Param
import com.crawler.comm.CrawlerConfig
import com.crawler.message.TaskParamMessage
import com.crawler.proxy.{CrawlProxy, ProxyUtils}
import com.crawler.store.LocalStore
import com.crawler.utils.{CrawlerQueue, ZipUtils}
import com.typesafe.config.ConfigFactory
import org.openqa.selenium.WebDriver
import org.openqa.selenium.chrome.{ChromeDriver, ChromeOptions}
import org.openqa.selenium.phantomjs.{PhantomJSDriver, PhantomJSDriverService}
import org.openqa.selenium.remote.{CapabilityType, DesiredCapabilities}

import scala.util.Random

/**
  * Created by tongtao.zhu on 2016/11/28.
  */
/**
  * Actor that crawls task URLs with a proxied Selenium Chrome driver.
  *
  * For each batch of [[TaskParamMessage]]s received it fetches each page,
  * retries failures while rotating proxies, parses successful pages with a
  * parser class loaded reflectively from an external jar, persists the page
  * via the local-store actor, and replies to the sender with the resulting
  * task list (retry tasks, parser-generated tasks, and completed tasks).
  *
  * Created by tongtao.zhu on 2016/11/28.
  */
class CrawlerService extends Actor with ActorLogging {
  override def receive: Receive = {
    // List's element type is erased at runtime, so this pattern cannot
    // actually verify the elements; @unchecked documents and suppresses
    // the compiler's unchecked-match warning.
    case tasks: List[TaskParamMessage] @unchecked =>
      sender() ! crawler(tasks)
  }

  val config = ConfigFactory.load()
  val crawlerConfig = new CrawlerConfig(config)
  // Refreshed from ProxyUtils at the start of every crawler() batch.
  var proxyList: List[CrawlProxy] = null
  // Persistence actor; expected to already be running at /user/localStore.
  val localStoreRef = context.actorSelection("/user/localStore")

  /**
    * Picks a random proxy among those with fewer than 20 recorded errors.
    *
    * @throws IllegalStateException when no usable proxy remains
    */
  def getProxy(): CrawlProxy = {
    val usable = proxyList.filter(_.errorCount < 20)
    if (usable.isEmpty)
      // BUGFIX: previously Random.nextInt(0) threw an opaque
      // IllegalArgumentException once every proxy was exhausted.
      throw new IllegalStateException("no usable proxy: all proxies have errorCount >= 20")
    usable(Random.nextInt(usable.size))
  }

  /**
    * Crawls every task in the batch and returns the resulting tasks:
    * a status-3 retry task for each URL that kept failing, any tasks
    * generated by the page parser, and a status-1 completion task per
    * successfully crawled URL.
    *
    * @param tasks tasks to crawl; a task's URL is `host` or `host?param`
    * @return tasks to feed back into the pipeline
    */
  def crawler(tasks: List[TaskParamMessage]): List[TaskParamMessage] = {
    System.setProperty("phantomjs.binary.path", crawlerConfig.phantomjsPath)
    System.setProperty("webdriver.chrome.driver", crawlerConfig.chromeDriver)
    proxyList = ProxyUtils.proxyList
    var crawlProxy = getProxy()
    var driver = chromeDriver(crawlProxy)
    val taskList = new collection.mutable.ListBuffer[TaskParamMessage]()
    try {
      for (task <- tasks) {
        val url =
          if (task.param == null || task.param.length == 0) task.host
          else "%s?%s".format(task.host, task.param)
        log.info(url)
        var succeeded = true
        try {
          var retry = true
          var attempt = 1
          var errorCount = 0
          while (retry) {
            // URLs shorter than 10 characters cannot be real; skip them.
            retry =
              if (url.length < 10) false
              else crawl(driver, url, task.source, task.`type`, task.data)
            if (retry) {
              // Rotate to a fresh proxy + driver every third failed attempt.
              if (attempt % 3 == 0) {
                driver.quit()
                crawlProxy = getProxy()
                driver = chromeDriver(crawlProxy)
              }
              attempt += 1
              errorCount += 1
              if (errorCount == crawlerConfig.retryCount) {
                log.error(s"price = ${task.data},重试${crawlerConfig.retryCount}次失败，url=${url}")
                // Requeue the task with status 3 (failed after retries).
                taskList.append(TaskParamMessage(task.method, task.data, task.host, task.param, task.header, task.source, task.`type`, 3, task.prior))
                succeeded = false
                // BUGFIX: stop the loop once the retry budget is exhausted;
                // previously only isTrue was cleared and the while loop
                // kept retrying indefinitely.
                retry = false
              }
            }
          }
        } catch {
          case e: Exception =>
            log.error(e, s"crawler error for url=$url")
            // BUGFIX: an exception mid-crawl previously still fell through
            // to parsing/persisting a possibly stale page source.
            succeeded = false
        }
        if (succeeded) {
          // Generate follow-up tasks from the fetched page.
          val generated: Array[Param] = parse(driver.getPageSource, task.source, task.`type`, url)
          if (generated != null && generated.nonEmpty) {
            generated.foreach { p =>
              taskList.append(TaskParamMessage(p.getMethod, p.getData, p.getHost, p.getParam, p.getHeader, p.getSource, p.getType, p.getStatus, p.getPrior))
            }
          }
          // Mark the original task as completed (status 1).
          taskList.append(TaskParamMessage(task.method, task.data, task.host, task.param, task.header, task.source, task.`type`, 1, task.prior))
          localStoreRef ! PersistMessage(driver.getPageSource, url, new Date().getTime.toString, task.source, task.`type`, task.header)
        }
      }
    } finally {
      // quit() ends the session and closes every window, so the extra
      // close() call the original made first was redundant. The finally
      // block also guarantees teardown if parse()/persist throws.
      driver.quit()
    }
    taskList.toList
  }

  /**
    * Reflectively invokes `com.crawler.client.<source>Parser.parser(page, ty, url)`
    * from the configured external jar and returns the generated task params.
    *
    * @return the parser's results as an array, or null when the parser
    *         returned nothing
    */
  def parse(page: String, source: String, ty: String, url: String): Array[Param] = {
    // Jar URL containing the target parser class.
    val classLoaderUrls: Array[URL] = Array[URL](new URL(s"file:///${crawlerConfig.fileJar}"))
    val urlClassLoader: URLClassLoader = new URLClassLoader(classLoaderUrls)
    try {
      val beanClass: Class[_] = urlClassLoader.loadClass(s"com.crawler.client.${source}Parser")
      val beanObj = beanClass.getConstructor().newInstance()
      val method: Method = beanClass.getMethod("parser", classOf[String], classOf[String], classOf[String])
      val param = method.invoke(beanObj, page, ty, url)
      if (param == null) null
      else {
        val list = param.asInstanceOf[util.ArrayList[Param]]
        list.toArray(new Array[Param](list.size()))
      }
    } finally {
      // BUGFIX: the loader (and the jar file handle it holds) was never
      // released; URLClassLoader is Closeable since Java 7.
      urlClassLoader.close()
    }
  }

  /**
    * Reflectively invokes `com.crawler.client.<source>Parser.validate(page, pageType, data)`
    * from the configured external jar.
    *
    * @return the parser's boolean verdict on the page
    */
  def validate(page: String, source: String, pageType: String, data: String): Boolean = {
    // Jar URL containing the target parser class.
    val classLoaderUrls: Array[URL] = Array[URL](new URL(s"file:///${crawlerConfig.fileJar}"))
    val urlClassLoader: URLClassLoader = new URLClassLoader(classLoaderUrls)
    try {
      val beanClass: Class[_] = urlClassLoader.loadClass(s"com.crawler.client.${source}Parser")
      val beanObj = beanClass.getConstructor().newInstance()
      val method: Method = beanClass.getMethod("validate", classOf[String], classOf[String], classOf[String])
      method.invoke(beanObj, page, pageType, data).asInstanceOf[Boolean]
    } finally {
      // BUGFIX: release the loader's file handle (see parse()).
      urlClassLoader.close()
    }
  }

  /**
    * Builds a Chrome WebDriver that routes traffic through the given proxy.
    *
    * Proxy credentials are injected via a Chrome extension zip generated by
    * ZipUtils (Chrome does not accept user/password proxies directly), and
    * image loading is disabled to speed up crawling.
    */
  def chromeDriver(crawlProxy: CrawlProxy): WebDriver = {
    // Package the proxy credentials into an on-the-fly Chrome extension.
    val proxyPath: String = ZipUtils.createZipFile(crawlerConfig.proxyHelperDir,
      crawlProxy.password, crawlProxy.user, crawlProxy.ip, crawlProxy.port.toString)

    val chromeOptions: ChromeOptions = new ChromeOptions
    chromeOptions.addExtensions(new File(proxyPath))
    val preferences: util.Map[String, Int] = new util.HashMap[String, Int]
    // 2 == block: disables image download entirely.
    preferences.put("profile.managed_default_content_settings.images", 2)
    chromeOptions.setExperimentalOption("prefs", preferences)

    val capabilities: DesiredCapabilities = DesiredCapabilities.chrome
    capabilities.setCapability(CapabilityType.SUPPORTS_FINDING_BY_CSS, true)
    capabilities.setCapability(CapabilityType.TAKES_SCREENSHOT, true)
    // BUGFIX: the PhantomJS CLI-args capability previously set here was a
    // copy/paste leftover from createDriver(); ChromeDriver ignores it.
    capabilities.setCapability(ChromeOptions.CAPABILITY, chromeOptions)
    new ChromeDriver(capabilities)
  }

  /**
    * Builds a PhantomJS driver routed through the given proxy.
    * Not used by crawler() (which builds Chrome drivers); presumably kept
    * for callers that prefer headless PhantomJS.
    */
  def createDriver(crawlProxy: CrawlProxy): WebDriver = {
    val capabilities = DesiredCapabilities.phantomjs()

    val cliArgsCap = new util.ArrayList[String]()
    cliArgsCap.add("--web-security=false")
    cliArgsCap.add("--ssl-protocol=any")
    cliArgsCap.add("--webdriver-loglevel=ERROR")
    cliArgsCap.add("--load-images=false")
    cliArgsCap.add("--proxy=%s:%d".format(crawlProxy.ip, crawlProxy.port))
    cliArgsCap.add("--proxy-auth=%s:%s".format(crawlProxy.user, crawlProxy.password))
    cliArgsCap.add("--proxy-type=http")
    capabilities.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, cliArgsCap)
    capabilities.setCapability(CapabilityType.SUPPORTS_FINDING_BY_CSS, true)
    capabilities.setCapability(CapabilityType.TAKES_SCREENSHOT, true)
    val driver = new PhantomJSDriver(capabilities)
    // Generous page-load timeout: proxied fetches are the slow path.
    driver.manage.timeouts.implicitlyWait(30, TimeUnit.SECONDS)
    driver.manage.timeouts.pageLoadTimeout(200, TimeUnit.SECONDS)
    driver.manage.timeouts.setScriptTimeout(30, TimeUnit.SECONDS)
    driver
  }

  /**
    * Fetches `url` with the driver and checks the resulting page.
    *
    * @return true when the fetch should be retried (validation failed, the
    *         page is suspiciously short, or an exception occurred);
    *         false when the page was fetched successfully
    */
  def crawl(driver: WebDriver, url: String, source: String, pageType: String, data: String): Boolean = {
    try {
      driver.get(url)
      val page = driver.getPageSource
      if (!validate(page, source, pageType, data)) {
        true
      } else if (page.length < 1024) {
        // Pages under 1 KiB are treated as error/ban pages.
        log.info("error: pagesource is too short!")
        true
      } else {
        false
      }
    } catch {
      case e: Exception =>
        // BUGFIX: the original logged "exception:" + e.printStackTrace(),
        // which always logs "exception:()" because printStackTrace
        // returns Unit; log the throwable itself instead.
        log.error(e, s"crawl failed for $url")
        true
    }
  }
}

/**
  * Ad-hoc entry point that runs one crawl batch against a single
  * hard-coded Tmall search URL and prints the resulting tasks.
  *
  * NOTE(review): CrawlerService extends akka.actor.Actor; constructing it
  * with `new` outside of Props/actorOf makes Akka throw
  * ActorInitializationException, and the actor's `context`
  * (used for localStoreRef) would be unusable. This main therefore almost
  * certainly fails at startup — it should create an ActorSystem and talk
  * to the actor via actorOf + ask instead. Confirm before relying on it.
  */
object CrawlerService {
  // NOTE(review): direct construction of an Actor subclass — see the
  // scaladoc above; Akka rejects this at runtime.
  val crawlerService = new CrawlerService()

  // Crawls one fixed Tmall listing task (source "TaoBao", type "list")
  // and prints each task the crawler returns.
  def main(args: Array[String]) {
    val tasks = crawlerService.crawler(List(TaskParamMessage("get","","https://list.tmall.com/search_product.htm?q=%CF%B4%B7%A2%CB%AE&prop=20019:3259918&sort=s&style=g&from=sn_1_prop-qp&active=1&spm=a221u.7741822.4522092394.256.ALa8Ki&s=3240","","","TaoBao","list",0,10)))
    for (url <- tasks)
      println(url)
  }
}

/**
  * Message asking the local-store actor to persist one crawled page.
  *
  * @param content     page source as returned by the WebDriver
  * @param url         URL the page was fetched from
  * @param crawlerData crawl time; callers pass epoch millis as a string
  * @param source      crawl source identifier (e.g. "TaoBao")
  * @param typ         page type (e.g. "list")
  * @param header      request header string carried by the task
  */
case class PersistMessage(
    content: String,
    url: String,
    crawlerData: String,
    source: String,
    typ: String,
    header: String)