package com.spider.moudle.worker;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.spider.moudle.api.Crawler;
import com.spider.moudle.api.Writer;
import com.spider.moudle.api.realize.AbstractWorker;
import com.spider.moudle.entity.*;
import com.spider.moudle.task.TaskManager;
import com.spider.moudle.util.CommonUtil;
import com.spider.moudle.util.DateUtil;
import com.spider.moudle.util.ErrorLogUtil;
import com.spider.moudle.util.MD5Util;

/**
 * Created by Linbo Hu on 2017/12/18.
 * A standard worker: in a loop, takes one task from the Kafka topic matching
 * its {@link TaskType}, fetches a {@link Crawler} from the pool to crawl the
 * item, derives an output path/file name from the crawled JSON, and writes the
 * serialized result through a pooled {@link Writer} (Kafka or file).
 * The loop runs until the thread is interrupted.
 */
public class StandardWorker extends AbstractWorker {

    /** Per-site configuration (site name, topics, web type). */
    private final Setting setting;

    /** The kind of task this worker consumes (SPEECH or USER). */
    private final TaskType taskType;

    public StandardWorker(Setting setting, TaskType taskType) {
        this.setting = setting;
        this.taskType = taskType;
    }

    /**
     * Resolves the Kafka topic for this worker's task type.
     *
     * @return the speech or user topic from {@link Setting}, or {@code null}
     *         (after printing an error) for an unsupported task type
     */
    private String taskTopic() {
        if (TaskType.SPEECH == taskType) {
            return setting.getSpeech_task();
        } else if (TaskType.USER == taskType) {
            return setting.getUser_task();
        }
        System.err.println("the value of TaskType is unavailable");
        return null;
    }

    @Override
    public void run() {
        // BUGFIX: Thread.interrupted() CLEARS the interrupt flag as a side
        // effect, so an observed interrupt was erased and later checks (or
        // blocking calls inside the loop) no longer saw it. isInterrupted()
        // only reads the flag.
        while (!Thread.currentThread().isInterrupted()) {
            Crawler crawler = null;
            Task task = null;
            try {
                // Take one task from the Kafka topic for our task type.
                // (De-duplicates the former SPEECH/USER if-else triplication.)
                String topic = taskTopic();
                if (topic != null) {
                    task = TaskManager.takeTask(topic, 1000);
                }
                if (task != null) {
                    standardLogRecorder.takeTaskCount(topic);
                    // Spin until a crawler for this site becomes available
                    // (fetchCrawler is non-blocking with a 1000 ms timeout).
                    crawler = crawlerPool.fetchCrawler(setting.getSite(), 1000);
                    while (crawler == null) {
                        crawler = crawlerPool.fetchCrawler(setting.getSite(), 1000);
                    }
                    // Crawl the item; early `continue`s below still release the
                    // crawler via the finally block at the bottom of the loop.
                    JSONObject content = crawler.crawlItem(task);
                    if (CommonUtil.isNull(content)) {
                        continue;
                    }
                    String path = null;
                    String fileName = null;
                    if (TaskType.SPEECH == taskType) {
                        // File name prefix is the site identifier.
                        String filePrefix = setting.getSite();
                        if (!content.containsKey("data")) {
                            continue;
                        }
                        JSONObject data = content.getJSONObject("data");
                        String time = data.getString("publish_time");
                        if (CommonUtil.isNull(time)) {
                            System.out.println(setting.getName() + " publish_time 不存在");
                            continue;
                        }
                        // Naming scheme depends on the kind of website.
                        if (WebType.FORLUM == setting.getWeb_type()) {
                            // Forum naming:
                            // <platform>_<publish timestamp>_<publisher user id>_<post tid>
                            long tsp = DateUtil.date_to_timestamp(DateUtil.str_to_date(DateUtil.yyyy_MM_dd_HH_mm_ss, time)).getTime();
                            String user_id = data.getString("publish_user_id");
                            String tid = data.getString("id");
                            fileName = filePrefix + "_" + tsp + "_" + user_id + "_" + tid;
                            // BUGFIX: dropped no-op .replace("-", "-"); also reuse the
                            // already-validated `time` instead of re-reading the field.
                            String publishDate = DateUtil.formatDate(DateUtil.parseDate(time));
                            String name = setting.getName();
                            path = tsp + "_" + name + "_speeches_" + publishDate;
                        } else if (WebType.NEWS == setting.getWeb_type()) {
                            // News naming:
                            // <platform>_<publish timestamp>_<MD5 of URL without fragment>
                            long tsp = DateUtil.date_to_timestamp(DateUtil.str_to_date(DateUtil.yyyy_MM_dd_HH_mm_ss, time)).getTime();
                            fileName = filePrefix + "_" + tsp + "_" + MD5Util.getHash3(data.getString("url").replaceAll("#.*", ""), "MD5");
                            // BUGFIX: dropped no-op .replace("-", "-").
                            String publishDate = DateUtil.formatDate(DateUtil.parseDate(time));
                            String name = setting.getName();
                            path = tsp + "_" + name + "_speeches_" + publishDate;
                        } else if (WebType.WEIBO == setting.getWeb_type()) {
                            System.out.println("这里是微博的命名方式");
                        } else if (WebType.WEIXIN == setting.getWeb_type()) {
                            System.out.println("这里是微信的命名方式");
                        } else {
                            System.out.println("错误");
                        }
                    } else if (TaskType.USER == taskType) {
                        // taskKey format: <platform name>_<user id>
                        String userKey = task.getTaskKey();
                        int sep = userKey.indexOf('_');
                        // BUGFIX: a key without '_' previously threw
                        // StringIndexOutOfBoundsException from substring(0, -1);
                        // now path/fileName stay null and the error branch
                        // below reports it instead.
                        if (sep >= 0) {
                            // path = platform name, fileName = user id
                            path = userKey.substring(0, sep);
                            fileName = userKey.substring(sep + 1);
                        }
                    } else {
                        System.err.println(taskType + "的任务类型还未编写输出代码");
                    }
                    if (CommonUtil.isNotNull(path) && CommonUtil.isNotNull(fileName)) {
                        WriterType writerType = null;
                        if ("KAFKA".equals(dataWriteWay)) {
                            writerType = WriterType.KAFKA;
                        } else if ("FILE".equals(dataWriteWay)) {
                            writerType = WriterType.FILE;
                        }
                        // BUGFIX: an unrecognized dataWriteWay previously passed
                        // null straight into writerPool.fetch(...).
                        if (writerType == null) {
                            System.err.println("unknown dataWriteWay: " + dataWriteWay);
                            continue;
                        }
                        // The writer can block, so fetch it only when needed
                        // and return it to the pool immediately after use.
                        Writer writer = null;
                        try {
                            writer = writerPool.fetch(writerType);
                            writer.write(path, fileName, JSON.toJSONString(content,
                                    // serialize non-ASCII characters as unicode escapes
                                    SerializerFeature.BrowserCompatible,
                                    // keep null-valued map entries
                                    SerializerFeature.WriteMapNullValue,
                                    // no "$ref":"$.children.0"-style circular references
                                    SerializerFeature.DisableCircularReferenceDetect), taskType);
                            standardLogRecorder.dataCount(topic);
                        } catch (Exception e) {
                            // Consistent with the outer handler: record, don't
                            // just dump a stack trace to stderr.
                            ErrorLogUtil.error(task.toString(), e);
                        } finally {
                            if (writer != null) {
                                writerPool.release(writerType, writer);
                            }
                        }
                    } else {
                        System.err.println(setting.getName() + "的" + taskType + "发生异常" );
                    }
                }
            } catch (Exception e) {
                // Log the failing task; without a task there is nothing useful to record.
                if (null != task) {
                    ErrorLogUtil.error(task.toString(), e);
                }
            } finally {
                // Always return the crawler to the pool for reuse.
                if (crawler != null) {
                    crawlerPool.releaseCrawler(setting.getSite(), crawler);
                }
            }
        }
    }
}
