package cn.le.service;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.le.beans.dto.CrawlerDto;
import cn.le.beans.dto.CrawlerHomeSearchDto;
import cn.le.beans.vo.CrawlerHomeVo;
import cn.le.crawler.*;
import cn.le.crawler.ipproxy.IpKuaidailiCrawler;
import cn.le.crawler.ipproxy.IpProxyData;
import cn.le.crawler.ipproxy.IpXiCiDailiCrawler;
import cn.le.crawler.ipproxy.IpXiCiDailiData;
import cn.le.dao.CrawlerHomeMapper;
import cn.le.entity.CrawlerHome;
import cn.le.utils.IpProxyPool;
import cn.le.utils.IpProxyUtil;
import cn.le.utils.SqlUtils;
import com.alibaba.fastjson.JSON;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.Proxy;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * Service for crawling housing listings, managing the IP proxy pool,
 * and exporting listing images as a ZIP download.
 *
 * @Author le
 * @Date 2020/2/22 21:03
 * @Version 0.1
 **/
@Service
public class CrawlerHomeService {

    /** Path of the local file holding proxy entries, one "ip:port@PROTOCOL#..." per line. */
    private static final String IP_FILE_PATH = "D:/JavaCode/ldd-tool/src/main/resources/Ip.txt";

    @Autowired
    private CrawlerHomeMapper crawlerHomeMapper;

    /**
     * Crawls home listings with the crawler selected by the request.
     * Persistence (saveOfUpdate) is currently commented out, so an empty
     * message is returned on success.
     *
     * @param req carries the crawler type to instantiate
     * @return status message (currently always empty)
     * @throws Exception propagated from the underlying crawler
     */
    public String crawler(CrawlerDto req) throws Exception {
        //CrawlerLock.lock(req.getCrawler());
        long start = System.currentTimeMillis();
        // FIX: this crawler/crawlerHomes pair was declared twice in a row,
        // which is a duplicate-local-variable compile error; one copy removed.
        Crawler crawler = CrawlerFactory.getCrawler(req.getCrawler());
        List<CrawlerHome> crawlerHomes = crawler.crawlerHome();
        System.out.println("耗时 -- " + (System.currentTimeMillis() - start));
        String msg = "";//saveOfUpdate(crawlerHomes, req.getCrawler().getText());
        //CrawlerLock.unlock(req.getCrawler());
        return msg;
    }

    /**
     * Loads proxy entries from {@link #IP_FILE_PATH} and replaces the shared
     * proxy pool with them.
     *
     * @return the number of proxies loaded, as a string
     * @throws Exception on I/O failure or a malformed line (missing '@' /
     *                   non-numeric port)
     */
    public String proxy() throws Exception {
        List<IpProxyData> list = new LinkedList<>();
        // FIX: try-with-resources so the reader is closed even when a line is
        // malformed (the original leaked it on any exception before close()).
        // Files.newBufferedReader also reads UTF-8 instead of the platform charset.
        try (BufferedReader br = Files.newBufferedReader(Paths.get(IP_FILE_PATH))) {
            String s;
            while ((s = br.readLine()) != null) {
                // Line format example: 35.221.163.22:80@HTTP#[anonymity]location
                String address = s.substring(0, s.indexOf("@"));
                String[] ipPort = address.split(":");
                IpProxyData data = new IpProxyData();
                data.setIp(ipPort[0]);
                data.setPort(Integer.valueOf(ipPort[1]));
                list.add(data);
            }
        }

        //CrawlerLock.lock(req.getCrawler());
        long start = System.currentTimeMillis();
        //List<IpProxyData> list = IpKuaidailiCrawler.crawler();
        IpProxyPool.replacePool(list);
        System.out.println("耗时 -- " + (System.currentTimeMillis() - start));
        // FIX: msg was declared twice (compile error); the surviving value is
        // the later one — the count of proxies loaded.
        String msg = String.valueOf(list.size());//saveOfUpdate(crawlerHomes, req.getCrawler().getText());
        //CrawlerLock.unlock(req.getCrawler());
        return msg;
    }

    /**
     * Counts listings first seen today (created in [today 00:00, tomorrow 00:00)).
     *
     * @return number of new listings today
     */
    public Integer todayNewHome() {
        String todayStr = DateUtil.today();
        Date tomorrow = DateUtil.tomorrow();
        String tomorrowStr = DateUtil.format(tomorrow, "yyyy-MM-dd");
        return crawlerHomeMapper.todayNewHome(todayStr, tomorrowStr);
    }

    /**
     * Streams all images of a listing to the client as a ZIP attachment named
     * "&lt;title&gt;.zip", with entries "&lt;title&gt;/&lt;index&gt;.&lt;ext&gt;".
     *
     * @param response target HTTP response (headers are set, body is the ZIP)
     * @param id       listing primary key
     */
    public void downloadHomeImg(HttpServletResponse response, Long id) {
        CrawlerHome crawlerHome = crawlerHomeMapper.getById(id);
        String homeImgs = crawlerHome.getHomeImgs();
        List<String> homeImgUrls = JSON.parseArray(homeImgs, String.class);
        try {
            String downloadFilename = crawlerHome.getTitle() + ".zip";
            // URL-encode so a Chinese title survives the Content-Disposition header.
            downloadFilename = URLEncoder.encode(downloadFilename, "UTF-8");
            response.setContentType("application/octet-stream");
            response.setHeader("Content-Disposition", "attachment;filename=" + downloadFilename);
            // FIX: try-with-resources — the original leaked zos (and the
            // per-image stream) whenever any download failed mid-loop.
            try (ZipOutputStream zos = new ZipOutputStream(response.getOutputStream())) {
                for (int i = 0, size = homeImgUrls.size(); i < size; i++) {
                    String imgUrl = homeImgUrls.get(i);
                    URL url = new URL(imgUrl);
                    // File extension taken from the last '.' of the URL.
                    String format = imgUrl.substring(imgUrl.lastIndexOf(".") + 1);
                    String entryName = crawlerHome.getTitle() + "/" + i + "." + format;
                    zos.putNextEntry(new ZipEntry(entryName));
                    try (InputStream fis = url.openConnection().getInputStream()) {
                        byte[] buffer = new byte[1024];
                        int r;
                        while ((r = fis.read(buffer)) != -1) {
                            zos.write(buffer, 0, r);
                        }
                    }
                }
                zos.flush();
            }
        } catch (IOException e) {
            // UnsupportedEncodingException is an IOException, so one catch covers
            // both original handlers. Best-effort behavior preserved: log and return.
            e.printStackTrace();
        }
    }

    /**
     * Pages crawled listings matching the search criteria, converting both
     * price fields from yuan to 万 (divide by 10000, HALF_UP).
     *
     * @param req search criteria plus page number/size
     * @return the current page of view objects (possibly empty)
     */
    public Page<CrawlerHomeVo> page(CrawlerHomeSearchDto req) {
        String param = SqlUtils.getSql(req);
        PageHelper.startPage(req.getPageNum(), req.getPageSize());
        Page<CrawlerHomeVo> page = (Page<CrawlerHomeVo>) crawlerHomeMapper.find(param);
        if (CollectionUtil.isEmpty(page)) {
            return page;
        }
        BigDecimal tenThousand = new BigDecimal(Double.toString(10000));
        for (CrawlerHomeVo vo : page) {
            // FIX: RoundingMode.HALF_UP replaces the deprecated int constant
            // BigDecimal.ROUND_HALF_UP; divide(divisor, RoundingMode) keeps the
            // dividend's scale exactly as the old overload did.
            BigDecimal initialPrice = new BigDecimal(Double.toString(vo.getInitialPrice()));
            vo.setInitialPrice(initialPrice.divide(tenThousand, RoundingMode.HALF_UP).doubleValue());
            BigDecimal consultPrice = new BigDecimal(Double.toString(vo.getConsultPrice()));
            vo.setConsultPrice(consultPrice.divide(tenThousand, RoundingMode.HALF_UP).doubleValue());
        }
        return page;
    }

    /**
     * Inserts listings whose homeId is new for the given source and updates the
     * rest. Inserts are batched; updates are issued one by one.
     *
     * @param list   crawled listings to persist
     * @param source crawler source name used to scope existing homeIds
     * @return summary message with the number of inserted listings
     */
    public String saveOfUpdate(List<CrawlerHome> list, String source) {
        Set<Long> homeIds = crawlerHomeMapper.findHomeIds(source);
        List<CrawlerHome> insert = new ArrayList<>();
        List<CrawlerHome> update = new ArrayList<>();
        list.forEach(data -> {
            if (homeIds.contains(data.getHomeId())) {
                update.add(data);
            } else {
                insert.add(data);
            }
        });
        if (CollectionUtil.isNotEmpty(insert)) {
            crawlerHomeMapper.batchInsert(insert);
        }
        update.forEach(crawlerHomeMapper::update);
        return "新增房源" + insert.size();
    }

}
