package com.spider.silence.worker;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.spider.silence.downloader.SinaNewsDownloader;
import com.spider.silence.output.FileWriter;
import com.spider.silence.output.FileWriterPool;
import com.spider.silence.output.MongoWriter;
import com.spider.silence.output.MongoWriterPool;
import com.spider.silence.pool.SinaNewsDownloaderPool;
import com.spider.silence.task.SinaNewsTask;
import com.spider.silence.task.manager.CommonTaskManager;
import com.spider.silence.utils.DateUtils;
import com.spider.silence.utils.MD5Utils;
import org.apache.log4j.Logger;

import java.io.File;
import java.util.Map;

/**
 * Worker thread that consumes Sina news crawl tasks: it borrows a downloader
 * from the pool, crawls one news item, and writes the result (JSON plus any
 * attached files) through a pooled writer. Runs until interrupted.
 *
 * @author Administrator
 * @time 2017/4/12 0012
 */
public class SinaNewsWorker implements Runnable {
    // static: one logger per class, not per worker instance
    private static final Logger logger = Logger.getLogger(SinaNewsWorker.class);

    /** Platform prefix used in output file names. */
    private static final String FILE_PREFIX = "SinaNews";

    /**
     * Main worker loop: fetch a downloader and a "SinaNews" task (each with a
     * 1000 ms timeout), crawl the item, validate it, and persist it. Pooled
     * resources are always returned in {@code finally}. The loop exits when the
     * thread's interrupt flag is set.
     */
    @Override
    public void run() {
        // Both pools are singletons; resolve them once instead of every iteration.
        SinaNewsDownloaderPool downloaderPool = SinaNewsDownloaderPool.getInstance();
        FileWriterPool writerPool = FileWriterPool.getInstance();
        // isInterrupted() (unlike Thread.interrupted()) does NOT clear the flag,
        // so an external interrupt reliably stops the loop.
        while (!Thread.currentThread().isInterrupted()) {
            SinaNewsDownloader downloader = null;
            FileWriter writer = null;
            try {
                downloader = downloaderPool.fetchDownLoader(1000);
                SinaNewsTask task = (SinaNewsTask) CommonTaskManager.takeTask("SinaNews", 1000);
                if (downloader == null || task == null) {
                    continue; // timed out; finally still releases what we did get
                }
                JSONObject content = downloader.crawlItem(task.getForumUrl(), task.getChannel());
                JSONObject data = content == null ? null : content.getJSONObject("data");
                if (data == null || data.isEmpty()) {
                    continue; // nothing crawled for this task
                }
                if (!hasRequiredFields(data)) {
                    logger.warn("解析内容失败:" + data.get("url"));
                    continue;
                }
                writer = writerPool.fetchWriter();
                writeItem(downloader, writer, content, data);
            } catch (Exception e) {
                if (e instanceof InterruptedException) {
                    // Restore the interrupt flag so the while-condition terminates the loop.
                    Thread.currentThread().interrupt();
                }
                logger.error("SinaNews worker iteration failed", e);
            } finally {
                if (downloader != null) {
                    downloaderPool.releaseDownLoader(downloader);
                }
                if (writer != null) {
                    writerPool.releaseWriter(writer);
                }
            }
        }
    }

    /**
     * A crawled item is usable only when publish_time, title, content and id
     * are all present and non-empty. (The original check let {@code null}
     * values through and later NPE'd on {@code url.toString()}.)
     */
    private static boolean hasRequiredFields(JSONObject data) {
        for (String key : new String[] {"publish_time", "title", "content", "id"}) {
            Object value = data.get(key);
            if (value == null || "".equals(value)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Builds the output file name — 平台名称（或者英文）_ 新闻发布的时间戳 _ urlMD5码 —
     * and the dated directory name, then hands the serialized item and any
     * downloaded attachment files to the writer.
     */
    private static void writeItem(SinaNewsDownloader downloader, FileWriter writer,
                                  JSONObject content, JSONObject data) {
        long publishTsp = 0L;
        try {
            publishTsp = DateUtils.dateToTimestamp(
                    DateUtils.parseDateTime(data.getString("publish_time"))).getTime();
        } catch (Exception e) {
            // Keep the 0 default so one bad date does not drop the whole item.
            logger.warn("unparseable publish_time: " + data.getString("publish_time"), e);
        }
        // Strip any '#fragment' before hashing so equivalent URLs map to one file.
        String url = String.valueOf(data.get("url")).replaceAll("#.*", "");
        String urlMd5 = MD5Utils.getHash3(url, "MD5");
        String fileName = FILE_PREFIX + "_" + publishTsp + "_" + urlMd5;
        String publishDate = DateUtils.formatDate(DateUtils.parseDate(data.getString("publish_time")));
        Map<String, File> attachments = downloader.getFiles();
        writer.write("新浪新闻_speeches_" + publishDate, fileName,
                JSON.toJSONString(content, SerializerFeature.BrowserCompatible), attachments);
    }
}
