package com.xxl.job.executor.crawler.job;

import com.xxl.job.core.log.XxlJobLogger;
import com.xxl.job.executor.XxlJobExecutorApplication;
import com.xxl.job.executor.crawler.XxlCrawler;
import com.xxl.job.executor.crawler.annotation.PageFieldSelect;
import com.xxl.job.executor.crawler.annotation.PageSelect;
import com.xxl.job.executor.crawler.conf.XxlCrawlerConf;

import com.xxl.job.executor.crawler.entity.model.BdasCrawlerProduct;
import com.xxl.job.executor.crawler.mapper.BdasCrawlerProductMapper;
import com.xxl.job.executor.crawler.parser.PageParser;
import com.xxl.job.executor.crawler.proxy.ProxyMaker;
import com.xxl.job.executor.crawler.proxy.strategy.RoundProxyMaker;
import com.xxl.job.executor.crawler.util.HibernateUtil;
import com.xxl.job.executor.crawler.util.JedisUtil;
import com.xxl.job.executor.crawler.util.UUIDUtil;

import lombok.Data;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.stereotype.Service;
import org.springframework.test.context.junit4.SpringRunner;
import redis.clients.jedis.Jedis;

import javax.swing.*;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/*
 * lcx: crawls the listing pages of several souq.com brands and persists
 * every product's name / detail-page URL via BdasCrawlerProductMapper.
 * If a Redis "productUrl" failure queue is non-empty, those URLs are
 * retried instead of the hard-coded brand pages.
 */
@Service
@RunWith(SpringRunner.class)
@SpringBootTest(classes = XxlJobExecutorApplication.class,webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class AllProduct {
    @Autowired
    private BdasCrawlerProductMapper bdasCrawlerProductMapper;

    /**
     * Page model bound by xxl-crawler: each annotated field is extracted
     * from the fetched document with the given CSS selector.
     */
    @PageSelect(cssQuery = "body")
    @Data
    public static class PageVo {
        // product names (one per listing tile)
        @PageFieldSelect(cssQuery = "h6.itemTitle", selectType = XxlCrawlerConf.SelectType.TEXT)
        private List<String> product_name;

        // product detail-page hyperlinks, positionally aligned with product_name
        @PageFieldSelect(cssQuery = "a.itemLink.block", selectType = XxlCrawlerConf.SelectType.ATTR, selectVal = "href")
        private List<String> product_href;

        // end-of-results marker text shown on the last page
        @PageFieldSelect(cssQuery = ".warning > .clearMargin-bottom > li", selectType = XxlCrawlerConf.SelectType.TEXT)
        private String endState;

        @PageFieldSelect(cssQuery = "a.img-link", selectType = XxlCrawlerConf.SelectType.ATTR, selectVal = "data-ean")
        private List<String> eanList1;

        @PageFieldSelect(cssQuery = "div.column-block", selectType = XxlCrawlerConf.SelectType.ATTR, selectVal = "data-ean")
        private List<String> eanList2;

        // brand/category heading of the listing page
        @PageFieldSelect(cssQuery = "li > h1", selectType = XxlCrawlerConf.SelectType.TEXT)
        private String type;
    }

    /**
     * JUnit entry point. A JUnit 4 {@code @Test} method must be public,
     * void and take NO parameters — the original {@code @Test insert(String)}
     * could never run (the runner rejects it at initialization). The
     * annotation therefore lives on this no-arg overload, which delegates
     * to {@link #insert(String)}.
     */
    @Test
    public void insert() {
        insert(null);
    }

    /**
     * Crawls product listing pages and persists the extracted products.
     * Prefers the Redis failure queue when it is non-empty; otherwise
     * enumerates the hard-coded brand listing pages.
     *
     * @param param job parameter — currently unused, kept for caller compatibility
     */
    public void insert(String param) {

        Map<String, String> headerMap = new HashMap<>();
        Map<String, String> cookieMap = new HashMap<>();
        Map<String, String> paramMap = new HashMap<>();

        paramMap.put("type", "1");

        cookieMap.put("BUYER_CITY_SELECTED", "80");

        headerMap.put("Cache-Control", "max-age=0");

        final List<BdasCrawlerProduct> listAll = new ArrayList<>();

        // Fetch the failure queue ONCE; the original called it twice
        // (size check + re-read), which is wasteful and racy.
        List failedUrls = JedisUtil.selectproductUrlByRedis();

        if (!failedUrls.isEmpty()) {

            XxlJobLogger.log("爬取失败队列");
            System.out.println("爬取失败队列");

            final String[] link = new String[failedUrls.size()];
            Jedis jedis = JedisUtil.getJedis();

            // Drain the Redis queue into the URL array. Entries popped may be
            // null if the queue shrank concurrently; the crawler tolerates that.
            // NOTE(review): jedis is never closed/returned here — confirm
            // JedisUtil manages the connection lifecycle.
            for (int i = 0; i < link.length; i++) {
                link[i] = jedis.lpop("productUrl");
            }

            XxlJobLogger.log("初始化任务数量:" + link.length);

            crawlerUrl(link, cookieMap, headerMap, paramMap, bdasCrawlerProductMapper, listAll);
        } else {
            System.out.println("爬取数据库");
            XxlJobLogger.log("爬取数据库");

            List<String> linkUrl = new ArrayList<>();

            // brand -> number of listing pages to enumerate
            addBrandPages(linkUrl, "beats", 7);
            addBrandPages(linkUrl, "bose", 5);
            addBrandPages(linkUrl, "tissot", 26);
            addBrandPages(linkUrl, "diesel", 33);
            addBrandPages(linkUrl, "casio", 51);

            XxlJobLogger.log("初始化任务数量:" + linkUrl.size());

            final String[] link = linkUrl.toArray(new String[0]);

            crawlerUrl(link, cookieMap, headerMap, paramMap, bdasCrawlerProductMapper, listAll);
        }
    }

    /** Appends listing-page URLs for pages 1..pages of the given brand. */
    private static void addBrandPages(List<String> target, String brand, int pages) {
        for (int page = 1; page <= pages; page++) {
            target.add("https://saudi.souq.com/sa-en/" + brand + "/s/?as=1&section=2&page=" + page);
        }
    }

    /**
     * Builds and runs the crawler over the given URLs (blocking), then
     * batch-inserts every collected product.
     *
     * @param link                     listing-page URLs to fetch
     * @param cookieMap                request cookies
     * @param headerMap                request headers
     * @param paramMap                 request parameters
     * @param bdasCrawlerProductMapper persistence mapper
     * @param listAll                  accumulator the parser callback fills; also
     *                                 visible to the caller after this returns
     */
    public static void crawlerUrl(String[] link, Map cookieMap, Map headerMap, Map paramMap,
                                  BdasCrawlerProductMapper bdasCrawlerProductMapper,
                                  final List<BdasCrawlerProduct> listAll) {

        XxlCrawler crawler = new XxlCrawler.Builder()
                .setUrls(link)
                .setThreadCount(4).setPauseMillis(1000)
                .setAllowSpread(false)
                .setCookieMap(cookieMap)
                .setHeaderMap(headerMap)
                .setParamMap(paramMap)
                .setPageParser(
                        new PageParser<PageVo>() {

                            @Override
                            public boolean parse(Document html, Element pageVoElement, PageVo pageVo) {

                                List<BdasCrawlerProduct> pageProducts = new ArrayList<>();
                                pageVoToListSave(pageProducts, pageVo.getProduct_name(), pageVo.getProduct_href());

                                for (BdasCrawlerProduct product : pageProducts) {
                                    product.setProductId(UUIDUtil.getId());
                                    product.setType(pageVo.getType());
                                    // parse() runs on 4 crawler threads (setThreadCount above);
                                    // ArrayList is not thread-safe, so guard the shared accumulator.
                                    synchronized (listAll) {
                                        listAll.add(product);
                                    }
                                }
                                return true;
                            }
                        }
                )
                .build();
        XxlJobLogger.log("start");
        System.out.println("start");
        crawler.start(true); // blocking: returns when all URLs are processed
        XxlJobLogger.log("开启持久化");
        System.out.println("开启持久化");

        // Guard the empty batch: batch-insert mappers typically generate
        // invalid SQL (`VALUES` with no rows) for an empty list.
        if (listAll.isEmpty()) {
            XxlJobLogger.log("持久化成功0条");
        } else {
            int insert = bdasCrawlerProductMapper.insert(listAll);
            XxlJobLogger.log("持久化成功"+ insert +"条");
        }

        XxlJobLogger.log("end-pa");
        XxlJobLogger.log("end");
        if (!crawler.isRunState()) {
            XxlJobLogger.log("-----------------这个类别已结束-------------------------");
            System.out.println("-----------------这个类别已结束-------------------------");
        }
    }

    /**
     * Zips the parallel name/href lists into BdasCrawlerProduct entities
     * appended to {@code re_list}.
     *
     * {@code list_tem[0]} holds product names, {@code list_tem[1]} the
     * positionally aligned hrefs. Unlike the original, a null list (CSS
     * selector matched nothing) or a length mismatch no longer throws
     * NPE / IndexOutOfBoundsException — extra hrefs are ignored, missing
     * hrefs leave the field unset.
     *
     * @param re_list  target list, appended to in place
     * @param list_tem [names, hrefs]
     * @return re_list, for chaining
     */
    private static List pageVoToListSave(List<BdasCrawlerProduct> re_list, List... list_tem) {

        List names = (list_tem.length > 0 && list_tem[0] != null) ? list_tem[0] : new ArrayList();
        List hrefs = (list_tem.length > 1 && list_tem[1] != null) ? list_tem[1] : new ArrayList();

        // Remember where this batch starts so hrefs pair with the products
        // created below even if re_list already had entries.
        int offset = re_list.size();

        for (Object name : names) {
            // Renamed from `BdasCrawlerProduct` — the original local shadowed the class name.
            BdasCrawlerProduct product = new BdasCrawlerProduct();
            product.setProductName(String.valueOf(name));
            re_list.add(product);
        }

        int j = offset;
        for (Object href : hrefs) {
            if (j >= re_list.size()) {
                break; // more hrefs than names on this page — ignore the surplus
            }
            re_list.get(j++).setProductHref(String.valueOf(href));
        }

        return re_list;
    }

}
