package com.lheia.jcb

import com.lheia.downloader.MiddlewareDownloader
import com.lheia.process.BaseProcess
import com.lheia.process.SpierGZHPProcess
import com.lheia.proxy.FileProxyProvider
import com.lheia.tool.DownLoadManager
import com.lheia.tool.ToolDateTime
import com.lheia.util.RequestExtraConstants
import grails.gorm.transactions.Transactional
import us.codecraft.webmagic.Request

/***
 * Service for the spider (crawler) management table: paged listing, save,
 * soft-delete, and on-demand crawl execution of configured spider processes.
 */
@Transactional
class SpiderService {
    DownLoadManager downLoadManager
    def resdisScheduler   // Redis-backed webmagic scheduler bean (injected)
    def gormPipeline      // webmagic pipeline bean persisting results via GORM (injected)

    /**
     * Paged listing of spider records, optionally filtered by a keyword
     * matched (LIKE) against the process class name or the start URL.
     *
     * @param params request params: page (1-based), limit, optional queryName
     * @return map with 'data' (list of row maps for the grid) and 'total' (match count)
     */
    def spiderQuery(params) {
        // Default to the first page of 10 rows when pagination params are
        // absent — the original left page/limit unset, which broke the
        // criteria call below.
        int page = 0
        int limit = 10
        if (params.page && params.limit) {
            page = params.int('page') - 1   // UI pages are 1-based; offset is 0-based
            limit = params.int('limit')
        }
        def spiderList = Spider.createCriteria().list(max: limit, offset: page * limit) {
            def queryName = params.queryName
            if (queryName) {
                or {
                    like("spiderProcess", "%" + queryName + "%")
                    like("beginUrl", "%" + queryName + "%")
                }
            }
            eq("ifDel", false)      // hide soft-deleted rows
            order("id", "desc")
        }
        def spiderData = []
        spiderList.each {
            def dataMap = [:]
            dataMap.id = it.id
            dataMap.spiderProcess = it.spiderProcess
            dataMap.beginUrl = it.beginUrl
            dataMap.spiderArea = it.spiderArea
            dataMap.lastRunTime = it.lastRunTime
            dataMap.pageCount = it.pageCount
            spiderData << dataMap
        }
        def resMap = [:]
        resMap.data = spiderData
        resMap.total = spiderList.totalCount
        return resMap
    }

    /**
     * Create a new spider record, or update the existing one when
     * params.spiderId is present. Properties are bound directly from params.
     *
     * @param params form params, including optional spiderId for updates
     * @throws IllegalArgumentException when spiderId refers to no record
     */
    def spiderSave(params) {
        def spider
        if (params.spiderId) {
            spider = Spider.findById(Long.valueOf(params.spiderId))
            if (!spider) {
                // Original NPE'd on a stale/unknown id; fail with a clear message.
                throw new IllegalArgumentException("Spider not found: id=" + params.spiderId)
            }
        } else {
            spider = new Spider()
        }
        spider.properties = params
        spider.save(flush: true, failOnError: true)
    }

    /**
     * Load one spider record as a plain map (for form display/edit).
     *
     * @param spiderId id of the record; null/0 or unknown ids yield an empty map
     * @return map of the record's properties plus 'spiderId' and 'id'
     */
    def getSpiderDataMap(Long spiderId) {
        def dataMap = [:]
        if (spiderId) {
            def spider = Spider.findById(spiderId)
            if (spider) {   // guard against unknown ids (original NPE'd here)
                dataMap.putAll(spider.properties)
                dataMap.spiderId = spider.id
                dataMap.id = spider.id
            }
        }
        return dataMap
    }

    /**
     * Soft-delete a spider record by flagging ifDel; the row is kept and
     * merely hidden from spiderQuery.
     *
     * @param spiderId id of the record to soft-delete; unknown ids are a no-op
     */
    def spiderDel(Long spiderId) {
        def spider = Spider.findById(spiderId)
        if (spider) {   // guard — original NPE'd on an unknown id
            spider.ifDel = true
            spider.save(flush: true, failOnError: true)
        }
    }

    /**
     * Run the crawl configured by the given record: instantiate its process
     * class, seed it with the record's start URL, and run a 5-thread webmagic
     * spider through the proxy-aware downloader, Redis scheduler, and GORM
     * pipeline. Blocks until the crawl finishes, then updates the record's
     * cumulative page count and last-run time.
     *
     * @param spiderId id of the spider record to run
     * @return a message with the elapsed crawl time in milliseconds
     * @throws IllegalArgumentException when spiderId refers to no record
     */
    def spiderRun(Long spiderId) {
        def spiderData = Spider.findById(spiderId)
        if (!spiderData) {
            // Original NPE'd below; fail early with context instead.
            throw new IllegalArgumentException("Spider not found: id=" + spiderId)
        }
        long startMillis = System.currentTimeMillis()
        def downloader = new MiddlewareDownloader()
        // Rotating file-based proxy list; reload enabled, 3 retries per proxy.
        downloader.setProxyProvider(FileProxyProvider.from("./proxyes.dat", true, 3))
        // The process class name is stored in the record and loaded reflectively.
        def process = (BaseProcess) Class.forName(spiderData.spiderProcess).newInstance()
        us.codecraft.webmagic.Spider spider = us.codecraft.webmagic.Spider.create(process)
        def requestList = process.getBeginRequest(spiderData.beginUrl)
        spider.addRequest((Request[]) requestList.toArray())
        spider.setUUID(spiderData.spiderProcess)
        spider.setScheduler(resdisScheduler).addPipeline(gormPipeline).setDownloader(downloader).thread(5).run()
        long interval = System.currentTimeMillis() - startMillis
        // Accumulate the page count across runs; first run just stores it.
        if (spiderData.pageCount) {
            spiderData.pageCount = spiderData.pageCount + spider.getPageCount()
        } else {
            spiderData.pageCount = spider.getPageCount()
        }
        spiderData.lastRunTime = new java.util.Date()   // domain field presumably java.util.Date — kept as-is
        spiderData.save(flush: true, failOnError: true)
        return "抓取用时" + interval + "ms"
    }
}
