package com.cw.spider.parse.jxtv;

import cn.wanghaomiao.seimi.annotation.Crawler;
import cn.wanghaomiao.seimi.http.SeimiHttpType;
import cn.wanghaomiao.seimi.spring.common.CrawlerCache;
import cn.wanghaomiao.seimi.struct.CrawlerModel;
import cn.wanghaomiao.seimi.struct.Request;
import cn.wanghaomiao.seimi.struct.Response;
import com.cw.spider.parse.AbstractCrawler;
import com.cw.spider.queue.Myqueue;
import com.cw.spider.service.NewService;
import org.seimicrawler.xpath.JXDocument;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Region-level crawler for jxtv: reads the navigation bar of a region page,
 * extracts every link ({@code //ul[@class='nav']/li/a/@href}) and enqueues one
 * {@link Request} per link onto the "jx_content_basic" content crawler's queue.
 *
 * @author withqianqian@163.com
 * @create 2020-08-31 15:03
 */
@Crawler(name = "jx_region_basic", httpType = SeimiHttpType.OK_HTTP3, delay = 1,
        httpTimeOut = 3000, queue = Myqueue.class)
public class JxRegionCrawler extends AbstractCrawler {

    /** Name of the downstream content crawler; used both to look up its
     *  CrawlerModel and as the crawler name stamped on each pushed Request. */
    private static final String CONTENT_CRAWLER_NAME = "jx_content_basic";

    @Autowired
    JxContentCrawler jxContentCrawler;

    /**
     * Entry callback for a fetched region page.
     *
     * <p>Selects all nav-bar hrefs from the response document and pushes a
     * Request for each onto the content crawler's queue. A {@code null}
     * response, a missing document, or an unregistered content crawler all
     * result in a silent no-op.
     *
     * @param response the fetched page; may be {@code null}
     */
    @Override
    public void start(Response response) {
        // Guard clauses instead of nesting: nothing to do without a document.
        if (response == null) {
            return;
        }
        try {
            JXDocument doc = response.document();
            if (doc == null) {
                return;
            }
            CrawlerModel model = CrawlerCache.getCrawlerModel(CONTENT_CRAWLER_NAME);
            if (model == null) {
                // Content crawler not registered — avoid the NPE the old code
                // would have thrown (and swallowed) on model.getQueueInstance().
                return;
            }
            List<Object> navs = doc.sel("//ul[@class='nav']/li/a/@href");
            // Iterate the list directly — the intermediate String[] copy in the
            // original added nothing. String::valueOf also tolerates null elements.
            List<String> hrefs = navs.stream().map(String::valueOf).collect(Collectors.toList());
            for (String href : hrefs) {
                Request request = Request.build(href, JxContentCrawler::start);
                request.setCrawlerName(CONTENT_CRAWLER_NAME);
                model.getQueueInstance().push(request);
            }
        } catch (Exception e) {
            // TODO(review): route through an SLF4J logger if AbstractCrawler
            // exposes one — printStackTrace loses context in production logs.
            e.printStackTrace();
        }
    }
}
