package cn.newshasha.hlx.service.impl;

import cn.newshasha.hlx.model.Comments;
import cn.newshasha.hlx.model.Post;
import cn.newshasha.hlx.model.Posts;
import cn.newshasha.hlx.model.PostsJsonRootBean;
import cn.newshasha.hlx.service.ToolsService;
import cn.newshasha.hlx.utils.HlxUtils;
import cn.newshasha.hlx.utils.HttpUtils;
import cn.newshasha.hlx.utils.RespBean;
import com.alibaba.fastjson.JSONObject;
import org.apache.log4j.Logger;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * @author bSu
 * @date 2020/11/28 - 18:42
 */
@Service
public class ToolsServiceImpl implements ToolsService {

    private static final Logger logger = Logger.getLogger(ToolsServiceImpl.class);

    /** Comments per page returned by the huluxia floor API. */
    private static final int PAGE_SIZE = 20;

    private static String getName() {
        return ToolsServiceImpl.class.getName();
    }

    /**
     * Builds the common HTTP headers used for every call to the huluxia floor API.
     * <p>
     * NOTE(review): the original code sent {@code Content-Length: zh-CN,zh;q=0.9} —
     * an Accept-Language value under the wrong header name (copy-paste slip). It is
     * now sent as {@code Accept-Language}; confirm the upstream API accepts this.
     *
     * @return a fresh, mutable header map (callers may add to it)
     */
    private static Map<String, String> baseHeaders() {
        Map<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/x-www-form-urlencoded");
        headers.put("Connection", "close");
        headers.put("Host", "floor.huluxia.com");
        headers.put("Accept-Encoding", "gzip");
        headers.put("User-Agent", "okhttp/3.8.1");
        headers.put("Accept-Language", "zh-CN,zh;q=0.9");
        return headers;
    }

    /**
     * Collects the image URLs of a post: the main post's images under key
     * {@code "main"}, and either every comment floor's images (when {@code floor}
     * is not matched) or only the requested floor's images (when it is).
     *
     * @param postId   huluxia post id
     * @param postAdds share-page URL of the post, scraped when the API reports no
     *                 main images (rich-text posts)
     * @param floor    1-based floor to locate; values &lt; 1 or beyond the last
     *                 floor return only the main images
     * @return ordered map of {@code "main"} / {@code "<n>楼"} to image-URL lists,
     *         or {@code null} on any error (original contract preserved)
     */
    @Override
    public Map<String, Object> getAllImages(String postId, String postAdds, int floor) {
        try {
            Map<String, String> headers = baseHeaders();

            // Current page of the comment listing.
            int pageNum = 1;
            boolean hasMorePages = true;
            // Result map: main-post images plus comment-floor images.
            Map<String, Object> images = new LinkedHashMap<>();
            // Every floor seen while the target floor has NOT been matched.
            Map<String, Object> failImages = new LinkedHashMap<>();
            // Only the matched target floor.
            Map<String, Object> successImages = new LinkedHashMap<>();
            // Total number of comments (floors) on the post; read from page 1.
            int commentCount = 0;
            // 0-based floor counter that runs across pages.
            int floorIndex = 0;
            do {
                String postJsonUrl = "http://floor.huluxia.com/post/detail/ANDROID/2.3?post_id=" + postId
                        + "&page_no=" + pageNum + "&page_size=20&doc=1";
                Connection.Response response = HttpUtils.get(postJsonUrl, headers);
                String body = response.body();
                // parseObject is static — call it on the class, not a throwaway instance.
                PostsJsonRootBean jsonRootBean = JSONObject.parseObject(body, PostsJsonRootBean.class);

                if (pageNum == 1) {
                    // The main post does not change across pages; fetch its images once.
                    List<String> mainImages = jsonRootBean.getPost().getImages();
                    images.put("main", mainImages);
                    if (mainImages.isEmpty()) {
                        // Empty may mean a rich-text post; scrape the share page for <img> tags.
                        // NOTE(review): an API call would be cheaper than scraping — TODO.
                        Document doc = Jsoup.connect(postAdds)
                                .timeout(10000)
                                .post();
                        Elements shareContent = doc.getElementsByClass("share_content_wrapper");
                        Elements imgTags = shareContent.get(0).getElementsByTag("img");
                        List<String> scraped = new ArrayList<>();
                        for (Element element : imgTags) {
                            scraped.add(element.attr("src"));
                        }
                        // A post body carries either rich-text images or plain images, never
                        // both, so overwriting "main" is safe.
                        images.put("main", scraped);
                    }
                    // Only the first page carries the post object, so read the total here.
                    commentCount = jsonRootBean.getPost().getCommentCount();
                    if (commentCount < floor || floor < 0 || commentCount == 0) {
                        // Requested floor is negative, beyond the last floor, or there are
                        // no comments at all — return just the main-post images.
                        return images;
                    }
                }

                List<Comments> comments = jsonRootBean.getComments();
                for (Comments com : comments) {
                    failImages.put((floorIndex + 1) + "楼", com.getImages());
                    if (floor == (floorIndex + 1)) {
                        // Found the requested floor — return only it plus the main images.
                        successImages.put((floorIndex + 1) + "楼", com.getImages());
                        images.putAll(successImages);
                        return images;
                    }
                    floorIndex++;
                }
                if (comments.isEmpty()) {
                    // Defensive: an empty page would have thrown IndexOutOfBoundsException
                    // below in the original code; stop paging instead.
                    hasMorePages = false;
                } else {
                    Comments lastComment = comments.get(comments.size() - 1);
                    // Keep paging while the last floor on this page is below the total.
                    hasMorePages = lastComment.getSeq() < commentCount;
                    if (hasMorePages) {
                        pageNum++;
                    }
                }
            } while (hasMorePages);
            // No specific floor matched — return every floor's images.
            images.putAll(failImages);
            return images;
        } catch (Exception e) {
            logger.error(getName() + ".getAllImages()异常", e);
            // Original contract preserved: callers receive null on failure.
            return null;
        }
    }

    /**
     * Looks up a post's author and, when {@code floor >= 1}, the user who posted
     * that floor.
     *
     * @param postid huluxia post id
     * @param floor  1-based floor whose author should also be resolved; values
     *               below 1 skip the floor lookup
     * @return a success {@link RespBean} with post/author (and floor-user) data,
     *         or an error {@link RespBean} when {@code floor} exceeds the total
     * @throws IOException if the underlying HTTP fetch fails
     */
    @Override
    public Object getPostByAdds(long postid, int floor) throws IOException {
        Map<String, Object> resultMap = new LinkedHashMap<>();
        PostsJsonRootBean postDetails = HlxUtils.getPostDetails(String.valueOf(postid));
        // Post author's id and nickname.
        long userID = postDetails.getPost().getUser().getUserID();
        String nick = postDetails.getPost().getUser().getNick();
        resultMap.put("postId", postid);
        resultMap.put("authorUserId", userID);
        resultMap.put("authorUserNick", nick);

        if (floor < 1) {
            // Floors below 1 are not looked up — return author info only.
            logger.info("查询结束：" + resultMap.toString());
            return RespBean.ok("查询成功", resultMap);
        }
        if (postDetails.getPost().getCommentCount() < floor) {
            // Requested floor is beyond the last comment.
            logger.info("查询结束，输入楼层大于总楼层。");
            return RespBean.error("查询失败，输入楼层大于总楼层。");
        }
        // Each page holds PAGE_SIZE comments, so the page number is
        // ceil(floor / PAGE_SIZE); the integer form below is equivalent to the
        // original float/modulo branching for all floor >= 1.
        int pageNo = (floor + PAGE_SIZE - 1) / PAGE_SIZE;
        logger.info("输入的楼层所在页码为：" + pageNo);
        PostsJsonRootBean page = HlxUtils.getPostDetailsByPage(String.valueOf(postid), String.valueOf(pageNo));
        for (Comments comment : page.getComments()) {
            if (comment.getSeq() == floor) {
                resultMap.put("userId", comment.getUser().getUserID());
                resultMap.put("userNick", comment.getUser().getNick());
                break;
            }
        }
        logger.info("查询结束：" + resultMap.toString());
        return RespBean.ok("查询成功", resultMap);
    }

    /**
     * Ranks today's posts of one board by a heat score of
     * {@code hit + commentCount * 10}, hottest first.
     * <p>
     * NOTE(review): posts with an identical heat score overwrite each other in
     * the score-keyed map — confirm collisions are acceptable before relying on
     * completeness of the ranking.
     *
     * @param catId board (category) id
     * @return a descending {@link TreeMap} of heat score to post
     * @throws IOException if the underlying HTTP fetch fails
     */
    @Override
    public Object getRanking(String catId) throws IOException {
        // Local instance on purpose: SimpleDateFormat is not thread-safe.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Map<String, String> headers = baseHeaders();
        // Paging cursor: createTime of the last post seen; starts at "now".
        long start = System.currentTimeMillis();
        boolean keepPaging = true;
        // Heat score -> post.
        Map<Integer, Object> resultMap = new HashMap<>();
        do {
            String url = "http://floor.huluxia.com/post/list/ANDROID/2.1?platform=2&gkey=410000&app_version=3.5.1.92.1&versioncode=261&market_id=tool_web&_key=&device_code=&start=" + start + "&count=20&cat_id=" + catId + "&tag_id=0&sort_by=1";
            Connection.Response response = HttpUtils.get(url, headers);
            // parseObject is static — call it on the class, not a throwaway instance.
            PostsJsonRootBean jsonRootBean = JSONObject.parseObject(response.body(), PostsJsonRootBean.class);
            List<Posts> posts = jsonRootBean.getPosts();
            if (posts.isEmpty()) {
                // Defensive: an empty page left the original loop spinning forever
                // with an unchanged cursor; stop paging instead.
                keepPaging = false;
            }
            for (int i = 0; i < posts.size(); i++) {
                Posts post = posts.get(i);
                logger.info("标题：" + post.getTitle());
                long createTime = post.getCreateTime();
                if (sdf.format(createTime).equals(sdf.format(new Date()))) {
                    // Same calendar day as now: record it, keyed by heat score.
                    resultMap.put(post.getHit() + post.getCommentCount() * 10, post);
                } else {
                    // First non-today post ends the scan (list is sorted by create time).
                    keepPaging = false;
                    break;
                }
                if (i == posts.size() - 1) {
                    // Whole page was today's posts — continue from the last timestamp.
                    start = post.getCreateTime();
                }
            }
        } while (keepPaging);
        // Descending TreeMap so the hottest post comes first.
        Map<Integer, Object> ranked = new TreeMap<>(Collections.reverseOrder());
        ranked.putAll(resultMap);
        return ranked;
    }

}
