package net.sppan.blog.job;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.jfinal.log.Log;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.druid.DruidPlugin;
import net.sppan.blog.model.ClickStats;
import net.sppan.blog.model.StatsLog;
import net.sppan.blog.model._MappingKit;
import net.sppan.blog.utils.ShellExec;
import net.sppan.blog.utils.StringTools;
import org.apache.commons.lang.StringUtils;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;

/**
 * Scheduled job that aggregates the hourly click-log files of one calendar day
 * into the {@code sys_click_stats} table and then derives settlement rows in
 * {@code sys_settlement}. Execution state per day is tracked in
 * {@code sys_stats_log} so a failed day can be retried via {@link #fixData(String)}.
 */
public class ClickStatsJob {
    private static final Log log = Log.getLog(ClickStatsJob.class);
    // Thread-safe day formatter (SimpleDateFormat is not thread-safe); the pattern
    // matches both the hourly log-file suffix and the statsDay column value.
    private static final DateTimeFormatter DAY_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");

    /**
     * Starts the periodic statistics task: first run after 1 minute, then every 5 minutes.
     * Each run processes "yesterday" and is a no-op if that day already succeeded.
     */
    public static void monitor() {
        Timer timer = new Timer();
        timer.schedule(new ClickStatsJob().new StatsTask(), 60000, 60000 * 5);
    }

    /**
     * Re-runs the statistics for a specific day, e.g. to repair a failed run.
     *
     * @param data_day the day to (re)aggregate, formatted yyyyMMdd
     */
    public static void fixData(String data_day) {
        new ClickStatsJob().new StatsTask().doStats(data_day);
    }

    class StatsTask extends TimerTask {
        // Absolute path of the click log file, read from log4j.properties.
        String logPath;

        StatsTask() {
            loadConfig();
        }

        @Override
        public void run() {
            // Statistics always run for the previous calendar day.
            String data_day = LocalDate.now().minusDays(1).format(DAY_FORMAT);
            doStats(data_day);
        }

        /** Reads the click-log appender's file location from log4j.properties on the classpath. */
        public void loadConfig() {
            Properties log4j = new Properties();
            try {
                log4j.load(StatsTask.class.getResourceAsStream("/log4j.properties"));
            } catch (IOException e) {
                log.error("log4j.properties load failed", e);
            }
            logPath = log4j.getProperty("log4j.appender.click_log.File");
        }

        /**
         * Aggregates one day's click logs (one file per hour, named click.yyyyMMddHH)
         * into sys_click_stats and generates settlement rows. Idempotent per day:
         * skipped when sys_stats_log already records a successful run; a failed run
         * (status 0) deletes that day's rows and retries from scratch.
         *
         * @param data_day the day to aggregate, formatted yyyyMMdd
         */
        public void doStats(String data_day) {
            try {
                if (StringUtils.isBlank(logPath)) {
                    log.error("点击日志路径获取失败");
                    // Retry config load so a later run can succeed.
                    loadConfig();
                } else {
                    // Note: the '.' is a regex wildcard here, which also matches the literal dot.
                    String fileRegex = "click." + data_day + "[0-9]{2}";
                    File dir = new File(logPath).getParentFile();
                    File[] files = dir.listFiles((thisDir, fileName) -> fileName.matches(fileRegex));
                    if (files == null || files.length == 0) {
                        log.error("统计文件未生成");
                    } else {
                        // Check whether this day's job has already been executed.
                        StatsLog statsLog = StatsLog.dao.findFirst("select * from sys_stats_log where data_day=? and name='ClickStats' order by id desc", data_day);
                        if (statsLog == null || statsLog.getInt("status") == 0) { // not run yet, or previous run failed: (re)execute
                            // Remove any partial rows from a failed attempt before retrying.
                            Db.update("delete from sys_click_stats where statsDay=?", data_day);
                            log.error("开始统计====|     " + data_day + "        |===");
                            if (statsLog == null) {
                                statsLog = new StatsLog();
                            }
                            statsLog.set("name", "ClickStats");
                            statsLog.set("data_day", data_day);
                            statsLog.set("start_time", new Date());
                            statsLog.set("end_time", null);
                            statsLog.set("status", 2); // 2 = running
                            if (statsLog.getId() != null) {
                                statsLog.update();
                            } else {
                                statsLog.save();
                            }
                            // Goes negative when any file fails; decides final status.
                            AtomicInteger flag = new AtomicInteger(0);
                            // Aggregation caches (Caffeine maps are safe for the parallel stream below).
                            Cache<String, Map<String, AtomicLong>> statsCache = Caffeine.newBuilder()
                                    .maximumSize(10_000_000).expireAfterAccess(2, TimeUnit.MINUTES).build();
                            Cache<String, Byte> tidCache = Caffeine.newBuilder()
                                    .maximumSize(10_000_000).expireAfterAccess(2, TimeUnit.MINUTES).build();
                            try {
                                // Iterate over the files (one file per hour).
                                for (File file : files) {
                                    // Line layout (tab separated):
                                    //   1524375691662\tstatus=1\t-Qk90iU1QheULYmTsM03Ug1524373840668\tp1=...&p2=...\thttp://www.baidu.com
                                    // col[1] status: 1 = ok, 0 = error
                                    // col[2] tid (unique click id, used for de-duplication)
                                    // col[3] p-parameters:
                                    //   p1: unique request id        p2: requesting user id
                                    //   p3: requesting user name     p4: parent agent id
                                    //   p5: parent agent name        p6: device hardware id
                                    //   p7: offer id                 p8: price
                                    //   p100: deduction flag, 1 = deducted, 0 = not deducted
                                    // try-with-resources: the line stream holds the file handle open.
                                    try (Stream<String> lines = Files.lines(file.toPath())) {
                                        lines.parallel().map(line -> line.split("\t")).filter(col -> col.length >= 4).forEach(col -> {
                                            Byte tid = tidCache.getIfPresent(col[2]); // de-duplicate by tid
                                            if (tid == null) {
                                                tidCache.put(col[2], (byte) 0x00);
                                                String[] params = col[3].split("&");
                                                Map<String, String> paramMap = new HashMap<>();
                                                for (String param : params) {
                                                    String[] pa = param.split("=");
                                                    if (pa.length == 2) {
                                                        paramMap.put(pa[0], pa[1]);
                                                    }
                                                }
                                                // Columns: statsDay,status,userId,userName,agentId,agentName,deviceId,offerId,price,buckle,totalClick,successClick
                                                if (!(StringTools.isBlank(paramMap.get("p4")) || paramMap.get("p4").equalsIgnoreCase("null") || StringTools.isBlank(paramMap.get("p2")) || paramMap.get("p2").equalsIgnoreCase("null"))) {
                                                    String cacheKey = String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s", paramMap.get("p2"), paramMap.get("p3"), paramMap.get("p4"), paramMap.get("p5"), paramMap.get("p6"), paramMap.get("p7"), paramMap.get("p8"), paramMap.get("p100"));
                                                    Map<String, AtomicLong> statsMap = statsCache.get(cacheKey, s -> {
                                                        Map<String, AtomicLong> map = new ConcurrentHashMap<>();
                                                        map.put("total", new AtomicLong(0));
                                                        map.put("success", new AtomicLong(0));
                                                        return map;
                                                    });
                                                    statsMap.get("total").incrementAndGet();
                                                    // Guard against a malformed status column (no '=').
                                                    String[] status = col[1].split("=");
                                                    if (status.length > 1 && "1".equals(status[1])) {
                                                        statsMap.get("success").incrementAndGet();
                                                    }
                                                }
                                            }
                                        });
                                    } catch (IOException | UncheckedIOException e) {
                                        // Keep processing the remaining hourly files; mark the run failed.
                                        log.error("读取统计文件失败:" + file.getName(), e);
                                        flag.decrementAndGet(); // -1 on any per-file error
                                    }
                                }
                                // Spool the aggregated rows to a temp file, then insert line by line.
                                Path loaddataFile = File.createTempFile("ClickSatus", "loaddata.txt", new File("/tmp")).toPath();
                                try (BufferedWriter writer = Files.newBufferedWriter(loaddataFile, StandardOpenOption.APPEND)) {
                                    ConcurrentMap<String, Map<String, AtomicLong>> dataMap = statsCache.asMap();
                                    for (Map.Entry<String, Map<String, AtomicLong>> entry : dataMap.entrySet()) {
                                        String dataLine = entry.getKey();
                                        Map<String, AtomicLong> statsValue = entry.getValue();
                                        String row = new StringBuilder(data_day).append("\t").append(dataLine).append("\t").append(statsValue.get("total").get()).append("\t").append(statsValue.get("success").get()).toString();
                                        writer.write(row);
                                        writer.newLine();
                                    }
                                }
                                // Parameterized insert: the values originate from request logs and must
                                // never be concatenated into SQL (injection risk).
                                String insertSql = "insert into sys_click_stats (statsDay,userId,userName,agentId,agentName,deviceId,offerId,price,buckle,totalClick,successClick) values (?,?,?,?,?,?,?,?,?,?,?)";
                                final AtomicInteger succ = new AtomicInteger(0);
                                Files.readAllLines(loaddataFile).forEach(line -> {
                                    Object[] paras = line.split("\t");
                                    succ.addAndGet(Db.update(insertSql, paras));
                                });
                                log.error("统计完成，导入统计数据：" + succ.get() + "条");
                                // Remove the temp file now that the data is in the database.
                                Files.deleteIfExists(loaddataFile);
                            } catch (Exception e) {
                                flag.decrementAndGet();
                                log.error("统计异常:" + e.getMessage(), e);
                            } finally {
                                statsCache.cleanUp();
                                tidCache.cleanUp();
                            }
                            if (flag.get() < 0) {
                                statsLog.set("status", 0); // 0 = failed, a later run will retry
                                log.error("统计完成====|     " + data_day + "/" + 0 + "      |===");
                            } else {
                                statsLog.set("status", 1); // 1 = success
                                log.error("统计完成====|     " + data_day + "/" + 1 + "      |===");
                                // Generate settlement records from the non-deducted clicks.
                                Db.update("delete from sys_settlement where cleanDay=?", data_day);
                                Db.update("insert into sys_settlement SELECT 0 id,statsDay,agentId,agentName,sum(price*successClick) amount,0 status,now(),null cleanTime,null reason FROM sys_click_stats where buckle=0 and statsDay=? group by statsDay,agentId,agentName", data_day);
                            }
                            statsLog.set("end_time", new Date());
                            statsLog.update();
                        }
                    }
                }
            } catch (Exception e) {
                log.error("统计任务失败" + e.getMessage(), e);
            }
        }

    }

    /** Standalone entry point: wires up the DB and runs yesterday's statistics once. */
    public static void main(String[] args) {
        // SECURITY: credentials are hard-coded here; they should be externalized
        // to configuration and this password rotated.
        DruidPlugin statsDruidPlugin = new DruidPlugin("jdbc:mysql://101.201.239.101:3306/blog?characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull",
                "workload", "3570043");
        statsDruidPlugin.start();
        ActiveRecordPlugin arp = new ActiveRecordPlugin(statsDruidPlugin);
        _MappingKit.mapping(arp);
        arp.start();
        StatsTask statsTask = new ClickStatsJob().new StatsTask();
        statsTask.run();
    }

}
