package org.jeecg.modules.crawler.controller;

import cn.hutool.core.codec.Base64Encoder;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
import cn.hutool.http.HttpUtil;
import cn.wanghaomiao.xpath.model.JXDocument;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.aspect.annotation.AutoLog;
import org.jeecg.common.system.base.controller.JeecgController;
import org.jeecg.common.system.query.QueryGenerator;
import org.jeecg.modules.crawler.entity.CueCrawlerInfo;
import org.jeecg.modules.crawler.entity.NewCrawlerInfo;
import org.jeecg.modules.crawler.entity.NewCrawlerKeyword;
import org.jeecg.modules.crawler.service.ICueCrawlerInfoService;
import org.jeecg.modules.crawler.service.INewCrawlerInfoService;
import org.jeecg.modules.crawler.service.INewCrawlerKeywordService;
import org.jeecgframework.p3.core.util.HttpUtils;
import org.jsoup.nodes.Element;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import sun.misc.BASE64Decoder;

import javax.net.ssl.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @Description: 爬虫信息
 * @Author: jeecg-boot
 * @Date: 2020-05-02
 * @Version: V1.0
 */
@Slf4j
@Api(tags = "爬虫信息")
@RestController
@RequestMapping("/crawler/newCrawlerInfo")
public class NewCrawlerController extends JeecgController<CueCrawlerInfo, ICueCrawlerInfoService> {
    // NOTE(review): the base-controller generics are bound to CueCrawlerInfo while this
    // controller works with NewCrawlerInfo — confirm the inherited CRUD endpoints are
    // intentionally wired to the "cue" entity/service before changing anything here.

    @Autowired
    private INewCrawlerInfoService newCrawlerInfoService;

    @Autowired
    private INewCrawlerKeywordService newCrawlerKeywordService;

    /**
     * 爬虫信息-分页列表查询：按抓取时间倒序返回一页数据。
     *
     * @param newCrawlerInfo query-by-example entity (currently unused for filtering)
     * @param pageNo         1-based page number, defaults to 1
     * @param pageSize       page size, defaults to 10
     * @param req            raw request, kept for signature compatibility
     * @return one page of crawled items, newest first
     */
    @AutoLog(value = "爬虫信息-分页列表查询")
    @ApiOperation(value = "爬虫信息-分页列表查询", notes = "爬虫信息-分页列表查询")
    @GetMapping(value = "/list")
    public Result<?> queryPageList(NewCrawlerInfo newCrawlerInfo,
                                   @RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
                                   @RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize,
                                   HttpServletRequest req) {
        QueryWrapper<NewCrawlerInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.orderByDesc("time");
        Page<NewCrawlerInfo> page = new Page<>(pageNo, pageSize);
        IPage<NewCrawlerInfo> pageList = newCrawlerInfoService.page(page, queryWrapper);
        return Result.ok(pageList);
    }

    /**
     * 不分页查询：返回按抓取时间倒序的最新 10 条记录。
     *
     * @return the 10 most recent crawled items
     */
    @AutoLog(value = "爬虫信息-分页列表查询")
    @ApiOperation(value = "爬虫信息-分页列表查询", notes = "爬虫信息-分页列表查询")
    @GetMapping(value = "/listNoPage")
    public Result<?> queryPageList() {
        QueryWrapper<NewCrawlerInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.orderByDesc("time");
        queryWrapper.last(" limit 10");
        List<NewCrawlerInfo> pageList = newCrawlerInfoService.list(queryWrapper);
        return Result.ok(pageList);
    }

    /**
     * 设置关键词：用逗号分隔的新关键词集合整体替换关键词表。
     *
     * @param keyword comma-separated keywords; the previous set is dropped entirely
     * @return OK on success
     */
    @GetMapping(value = "/addKeyword")
    public Result<?> addKeyword(@RequestParam(name = "keyword") String keyword) {
        // Full replace: delete every existing keyword, then insert the new batch.
        newCrawlerKeywordService.remove(new QueryWrapper<NewCrawlerKeyword>());
        List<NewCrawlerKeyword> newCrawlerKeywords = new ArrayList<>();
        for (String keywordStr : keyword.split(",")) {
            NewCrawlerKeyword newCrawlerKeyword = new NewCrawlerKeyword();
            newCrawlerKeyword.setKeyword(keywordStr);
            newCrawlerKeywords.add(newCrawlerKeyword);
        }
        newCrawlerKeywordService.saveBatch(newCrawlerKeywords);
        return Result.ok("成功");
    }

    /**
     * 获取当前关键词：以逗号拼接返回已保存的全部关键词。
     *
     * @return map with key "keyword" → comma-joined keywords
     *         (trailing comma preserved for frontend compatibility)
     */
    @GetMapping(value = "/initCrawlerData")
    public Result<?> initCrawlerData() {
        // StringBuilder instead of += concatenation in the loop.
        StringBuilder keywordStr = new StringBuilder();
        for (NewCrawlerKeyword keyword : newCrawlerKeywordService.list()) {
            keywordStr.append(keyword.getKeyword()).append(',');
        }
        Map<String, Object> retMap = new HashMap<>();
        retMap.put("keyword", keywordStr.toString());
        return Result.ok(retMap);
    }

    /**
     * 执行抓取：对每个已保存关键词抓取站内搜索前 5 页结果并入库（按标题去重）。
     *
     * @return always OK; a failure is logged and aborts the remaining crawl
     *         (same best-effort semantics as before, but logged instead of
     *         printed to stderr)
     */
    @GetMapping(value = "/goSearch")
    public Result<?> goSearch() {
        List<NewCrawlerKeyword> keywordList = newCrawlerKeywordService.list();
        try {
            for (NewCrawlerKeyword newCrawlerKeyword : keywordList) {
                for (int pageIndex = 1; pageIndex < 6; pageIndex++) {
                    List<NewCrawlerInfo> crawlerInfos =
                            fetchSearchPage(newCrawlerKeyword.getKeyword(), pageIndex);
                    newCrawlerInfoService.saveOrUpdateBatch(crawlerInfos);
                }
            }
        } catch (Exception e) {
            // Was e.printStackTrace(); route through the class logger so the
            // stack trace lands in the application log.
            log.error("goSearch crawl failed", e);
        }
        return Result.ok(null);
    }

    /**
     * Fetches one page of search hits for {@code keyword} from the SEU site
     * search API and parses every result block.
     *
     * @param keyword   search keyword (injected verbatim into the JSON query)
     * @param pageIndex 1-based result page index
     * @return parsed items for this page, possibly empty
     * @throws Exception on HTTP, JSON or XPath failures (handled by the caller)
     */
    private List<NewCrawlerInfo> fetchSearchPage(String keyword, int pageIndex) throws Exception {
        String queryStr = "[{\"field\":\"pageIndex\",\"value\":" + pageIndex
                + "},{\"field\":\"group\",\"value\":0},{\"field\":\"searchType\",\"value\":\"\"}"
                + ",{\"field\":\"keyword\",\"value\":\"" + keyword + "\"}"
                + ",{\"field\":\"recommend\",\"value\":\"1\"},{\"field\":4,\"value\":\"\"}"
                + ",{\"field\":5,\"value\":\"\"},{\"field\":6,\"value\":\"\"},{\"field\":7,\"value\":\"\"}]";
        // The API expects the JSON query Base64-encoded in the "searchInfo" form field.
        String query = Base64Encoder.encode(queryStr.getBytes(StandardCharsets.UTF_8));

        Map<String, Object> formMap = new HashMap<>();
        formMap.put("searchInfo", query);
        // "tt" is a random cache-buster; "_p" carries the encoded site/page context.
        HttpRequest post = HttpUtil.createPost(
                "https://www.seu.edu.cn/_web/_search/api/searchCon/create.rst?_p=YXM9MiZ0PTI0OTImZD05NTk1JnA9MSZtPVNOJg__&tt=" + Math.random());
        post.header("Referer", "https://www.seu.edu.cn/_web/_search/api/search/new.rst?locale=zh_CN&request_locale=zh_CN&_p=YXM9MiZ0PTI0OTImZD05NTk1JnA9MSZtPVNOJg__");
        post.form(formMap);
        HttpResponse httpResponse = post.execute();
        JSONObject respObj = JSONObject.parseObject(httpResponse.body());
        // "data" holds an HTML fragment: one <div> per search hit.
        String respStr = respObj.getString("data");

        List<NewCrawlerInfo> crawlerInfos = new ArrayList<>();
        JXDocument jxDocument = new JXDocument(respStr);
        for (Object o : jxDocument.sel("//html/body/div")) {
            if (o instanceof Element) {
                NewCrawlerInfo info = parseSearchItem((Element) o);
                if (info != null) {
                    crawlerInfos.add(info);
                }
            }
        }
        return crawlerInfos;
    }

    /**
     * Parses a single search-result {@code <div>} into a NewCrawlerInfo.
     *
     * @param item one result block from the search response
     * @return the parsed item, or {@code null} when the block has no title link
     *         (e.g. a pager/summary div)
     * @throws Exception on XPath evaluation failure
     */
    private NewCrawlerInfo parseSearchItem(Element item) throws Exception {
        // One JXDocument is enough — the original rebuilt the same document three
        // times from the identical outerHtml() string.
        JXDocument doc = new JXDocument(item.outerHtml());

        List<Object> hrefs = doc.sel("//html/body/div/h3/a/@href");
        List<Object> titles = doc.sel("//html/body/div/h3/a");
        if (hrefs.isEmpty() || titles.isEmpty()) {
            // Previously ars.get(0)/titlers.get(0) threw IndexOutOfBoundsException
            // here and aborted the whole crawl via the outer catch; skip instead.
            return null;
        }

        NewCrawlerInfo info = new NewCrawlerInfo();
        info.setLink(hrefs.get(0).toString());
        String title = ((Element) titles.get(0)).html();
        info.setTitle(title);
        // The title doubles as the primary key so saveOrUpdateBatch de-duplicates.
        info.setId(title);

        // Optional metadata <span>s in fixed order: author, publish time, directory, source.
        List<Object> spans = doc.sel("//html/body/div/span");
        if (spans.size() >= 1) {
            info.setAuthor(((Element) spans.get(0)).html());
        }
        if (spans.size() >= 2) {
            info.setTime(((Element) spans.get(1)).html().replaceAll("发布时间:", ""));
        }
        if (spans.size() >= 3) {
            info.setMl(((Element) spans.get(2)).html());
        }
        if (spans.size() >= 4) {
            info.setSource(((Element) spans.get(3)).html());
        }
        return info;
    }

}
