package com.qigao.mall.web.craw;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.qigao.mall.commons.utils.FileUtils;
import com.xuxueli.crawler.XxlCrawler;
import com.xuxueli.crawler.conf.XxlCrawlerConf;
import com.xuxueli.crawler.parser.strategy.NonPageParser;
import com.xuxueli.crawler.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;

/**
 * 爬虫示例09：采集非Web页面，如JSON接口等，直接输出响应数据
 *
 * @author xuxueli 2018-10-17
 */
public class XxlCrawlerTest10JSON {
    private static Logger logger = LoggerFactory.getLogger(XxlCrawlerTest10JSON.class);

    /**
     * Value object for one product row scraped from the goods-list JSON.
     *
     * <p>The API delivers prices in cents (fen) as strings; the price getters
     * convert to yuan by dividing by 100 on the way out. Division by 100 is
     * always exact for a decimal value, so no RoundingMode is required.
     */
    public static class ContentData{
        private String productId;      // JSON "product_id"
        private String imgUrl;         // main image URL, JSON "img"
        private String productName;    // display name, JSON "name"
        private String discountPrice;  // raw discount price in cents, as delivered
        private String marketPrice;    // raw market price in cents, as delivered

        public String getProductId() {
            return productId;
        }

        public void setProductId(String productId) {
            this.productId = productId;
        }

        public String getImgUrl() {
            return imgUrl;
        }

        public void setImgUrl(String imgUrl) {
            this.imgUrl = imgUrl;
        }

        public String getProductName() {
            return productName;
        }

        public void setProductName(String productName) {
            this.productName = productName;
        }

        /**
         * @return the discount price converted to yuan, or "" when the raw
         *         value is missing or blank. (Previously a blank string was
         *         passed to the BigDecimal constructor and threw
         *         NumberFormatException.)
         */
        public String getDiscountPrice() {
            if (discountPrice == null || discountPrice.trim().isEmpty()) {
                return "";
            }
            return new BigDecimal(discountPrice).divide(new BigDecimal(100)).toString();
        }

        public void setDiscountPrice(String discountPrice) {
            this.discountPrice = discountPrice;
        }

        /**
         * @return the market price converted to yuan, or "" when the raw
         *         value is missing or blank (same blank guard as
         *         {@link #getDiscountPrice()}).
         */
        public String getMarketPrice() {
            if (marketPrice == null || marketPrice.trim().isEmpty()) {
                return "";
            }
            return new BigDecimal(marketPrice).divide(new BigDecimal(100)).toString();
        }

        public void setMarketPrice(String marketPrice) {
            this.marketPrice = marketPrice;
        }
    }


    /**
     * Entry point: crawls pages 1..N of the shop goods-list JSON API and
     * hands every response body to {@link #processHtml(String)}.
     *
     * @param args optional; {@code args[0]} may override the number of pages
     *             to crawl (defaults to 7, the original hard-coded value)
     */
    public static void main(String[] args) {
        int pages = (args.length > 0) ? Integer.parseInt(args[0]) : 7;
        for (int page = 1; page <= pages; page++) {
            // Build one crawler per page; the URL is a JSON endpoint, so a
            // NonPageParser receives the raw response body instead of a DOM.
            XxlCrawler crawler = new XxlCrawler.Builder()
                    .setUrls("https://haohuo.snssdk.com/shop/goodsList?shop_id=dtxSkDB&page=" + page + "&pageSize=10&b_type_new=0")
                    .setPageParser(new NonPageParser() {
                        @Override
                        public void parse(String url, String pageSource) {
                            processHtml(pageSource);
                        }
                    })
                    .build();

            // start(true) runs synchronously, so pages are fetched in order.
            crawler.start(true);
        }
    }

    /**
     * Parses one goods-list JSON response, downloads each product's main
     * image, and forwards every product to {@link #processSingle(ContentData)}.
     *
     * @param retBody raw JSON body of the goods-list API response
     * @return the products extracted from the {@code data.list} array; empty
     *         when the response carries no such array
     */
    private static List<ContentData> processHtml(String retBody) {
        List<ContentData> contentDatas = new ArrayList<>();
        JSONObject jsonObject = JSONObject.parseObject(retBody);
        JSONObject data = jsonObject.getJSONObject("data");
        // Defensive: an error response has no "data"/"data.list" — the
        // original chained call would NPE here.
        if (data == null || data.getJSONArray("list") == null) {
            logger.warn("goods list response missing data.list: {}", retBody);
            return contentDatas;
        }
        JSONArray jsonArray = data.getJSONArray("list");
        String filePath = "E:\\logs\\image\\";
        for (int i = 0; i < jsonArray.size(); i++) {
            JSONObject item = jsonArray.getJSONObject(i);
            ContentData contentData = new ContentData();
            contentData.setProductId(item.getString("product_id"));
            contentData.setImgUrl(item.getString("img"));
            contentData.setProductName(item.getString("name"));
            contentData.setDiscountPrice(item.getString("discount_price"));
            contentData.setMarketPrice(item.getString("market_price"));
            contentDatas.add(contentData);

            // Download the main product image, named after the product id.
            String fileName = contentData.getProductId() + ".jpg";
            boolean downloaded = FileUtil.downFile(contentData.getImgUrl(),
                    XxlCrawlerConf.TIMEOUT_MILLIS_DEFAULT, filePath, fileName);
            if (!downloaded) {
                // Previously the result was silently discarded.
                logger.warn("image download failed: {} -> {}", contentData.getImgUrl(), fileName);
            }

            processSingle(contentData);
        }
        return contentDatas;
    }


    /**
     * Crawls the product-detail API for one product: appends an item INSERT
     * statement to insertSql.sql, then downloads every detail image and
     * records picture/file INSERTs for each.
     *
     * <p>NOTE(review): the SQL is assembled with String.format. The minimal
     * fix applied here doubles single quotes in the product name so values
     * like "Tom's" no longer break the generated SQL; a real import should
     * use PreparedStatement-style parameter binding instead.
     *
     * @param contentData product parsed from the goods-list response
     */
    private static void processSingle(ContentData contentData) {
        String productId = contentData.getProductId();
        // Use the getter for consistency (the original read the field directly).
        String rawName = contentData.getProductName();
        // Escape embedded single quotes for the SQL string literal.
        String productName = (rawName == null) ? "" : rawName.replace("'", "''");
        String itemSql = "INSERT INTO `db_mall`.`tb_item` (\n" +
                "  `item_no`,\n" +
                "  `item_title`,\n" +
                "  `share_content`,\n" +
                "  `sell_point`,\n" +
                "  `price`,\n" +
                "  `pic_url`,\n" +
                "  `narrow_pic_url`)\n" +
                "VALUES('%s','%s','%s','%s',%s,'%s','%s');\r\n";
        FileUtils.appendFile("E:\\logs\\image\\insertSql.sql",
                String.format(itemSql, productId, productName, productName, productName,
                        contentData.getDiscountPrice(), productId, productId));

        // SECURITY(review): the access token is hard-coded in the URL below;
        // it should be moved into configuration, not committed to source.
        XxlCrawler crawler = new XxlCrawler.Builder()
                .setUrls("https://ec.snssdk.com/product/fxgajaxstaticitem?b_type_new=0&id=" + contentData.getProductId() + "&token=17620701851cef4506d7d793562b9ab6")
                .setPageParser(new NonPageParser() {
                    @Override
                    public void parse(String url, String pageSource) {
                        // Parameterized debug logging instead of dumping the
                        // whole page body to stdout.
                        logger.debug("{}: {}", url, pageSource);
                        JSONObject jsonObject = JSONObject.parseObject(pageSource);
                        JSONObject data = jsonObject.getJSONObject("data");
                        if (data == null) {
                            // Defensive: an error response has no "data".
                            logger.warn("detail response missing data: {}", url);
                            return;
                        }
                        String detailProductId = data.getString("product_id");
                        JSONArray subImageArray = data.getJSONArray("img_list");
                        String filePath = "E:\\logs\\image\\";
                        String itemPicSql = "INSERT INTO `db_mall`.`tb_item_pic` (\n" +
                                "  `item_no`,\n" +
                                "  `pic_no`,\n" +
                                "  `narrow_pic_no`  \n" +
                                ") VALUES('%s','%s','%s');\r\n";
                        String fileSql = "INSERT INTO `db_mall`.`tb_file_info` (\n" +
                                "  `file_no`,\n" +
                                "  `narrow_file_no`,\n" +
                                "  `zip_file_no`,\n" +
                                "  `file_type`,\n" +
                                "  `file_name`,\n" +
                                "  `file_url`\n" +
                                ") \n" +
                                "VALUES('%s','%s','%s','%s','%s','%s');\r\n";
                        for (int i = 0; i < subImageArray.size(); i++) {
                            // One picture row + one file row per detail image.
                            String filePrx = detailProductId + "-" + i;
                            String fileName = filePrx + ".jpg";
                            String imgUrl = subImageArray.getString(i);
                            boolean downloaded = FileUtil.downFile(imgUrl,
                                    XxlCrawlerConf.TIMEOUT_MILLIS_DEFAULT, filePath, fileName);
                            if (!downloaded) {
                                logger.warn("detail image download failed: {} -> {}", imgUrl, fileName);
                            }
                            FileUtils.appendFile("E:\\logs\\image\\insertSql.sql",
                                    String.format(itemPicSql, detailProductId, filePrx, filePrx));
                            FileUtils.appendFile("E:\\logs\\image\\insertSql.sql",
                                    String.format(fileSql, filePrx, filePrx, filePrx, "jpg", fileName, filePath));
                        }
                    }
                })
                .build();

        // Run the detail crawl synchronously before returning.
        crawler.start(true);
    }

}
