package com.lanou.quanttradedata.stock.service;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.lanou.quanttradedata.stock.javabean.StockBasicInfo;
import com.lanou.quanttradedata.stock.javabean.StockDailyInfo;
import com.lanou.quanttradedata.stock.mapper.BiasInfoMapper;
import com.lanou.quanttradedata.stock.mapper.MapReduceDailyResultMapper;
import com.lanou.quanttradedata.stock.mapper.StockBasicInfoMapper;
import com.lanou.quanttradedata.stock.mapper.StockDailyInfoMapper;
import com.lanou.quanttradedata.stock.mapreduce.map.BiasCalculateMap;
import com.lanou.quanttradedata.stock.mapreduce.map.StockDailyInfoAvgMap;
import com.lanou.quanttradedata.stock.mapreduce.reducer.BiasCalculateReducer;
import com.lanou.quanttradedata.stock.mapreduce.reducer.StockDailyInfoAvgReducer;
import com.lanou.quanttradedata.utils.javabean.Const;
import com.lanou.quanttradedata.utils.javabean.RestObject;
import com.lanou.quanttradedata.utils.javabean.TuShareAPI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLIntegrityConstraintViolationException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.*;

@Service
public class StockService {
    @Autowired
    private StockBasicInfoMapper stockBasicInfoMapper;
    @Autowired
    private TuShareAPI tuShareAPI;
    private SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd");
    private SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
    @Autowired
    private StockDailyInfoMapper stockDailyInfoMapper;
    @Autowired
    private MapReduceDailyResultMapper mapReduceDailyResultMapper;
    @Autowired
    private BiasInfoMapper biasInfoMapper;
    /**
     * 根据参数，请求并更新股票列表信息
     * @param param
     */
    public void saveStockBasicInfoFromNet(HashMap<String, String> param) {
        List<String> fields = new ArrayList<>();
        Collections.addAll(fields,
                "ts_code","symbol","name","area","industry","cnspell","market","list_date","is_hs");
        JSONObject jsonObject = tuShareAPI.get(Const.STOCK_BASIC, param, fields);
        System.out.println(jsonObject.toJSONString()+"=========================");
        List<StockBasicInfo> stockBasicInfos = new ArrayList<>();
        //3.1、获取所有的行数据
        JSONArray jsonArray = jsonObject.getJSONObject("data").getJSONArray("items");
        //3.2、遍历每行数据，每行转为一个StockBasicInfo对象
        Date today = new Date();
        String todayStr = sdf1.format(today);
        for (int i = 0; i < jsonArray.size(); i++) {
            JSONArray array = jsonArray.getJSONArray(i);
            //3.3、将StockBasicInfo对象保存到List中
            stockBasicInfos.add(
                    new StockBasicInfo(
                            array.getString(0),
                            array.getString(1),
                            array.getString(2),
                            array.getString(3),
                            array.getString(4),
                            array.getString(5),
                            array.getString(6),
                            array.getString(7),
                            array.getString(8),
                            todayStr
                    )
            );
        }
        //4、进行数据插入或更新操作
        int insertCount=0,updateCount=0;
        //插入或更新
        if(stockBasicInfos.size()>0){
            //有需要插入或更新的数据，开始更新
            for (StockBasicInfo sbi : stockBasicInfos) {
                System.out.println(sbi.toString());
                try {
                    int rows = stockBasicInfoMapper.insertStockBasicInfo(sbi);
                    insertCount+=rows;
                } catch (Exception e) {
                    //1、判断是否为SQLIntegrityConstraintViolationException类型的异常
                    if(e.getCause() instanceof SQLIntegrityConstraintViolationException){
                        //2、如果是该类型，判断是否为重复主键错误
                        String errorMsg = e.getMessage();
                        //System.out.println(errorMsg);
                        if(errorMsg.contains("Duplicate entry") && errorMsg.contains("PRIMARY")){
                            //3、如果是重复主键错误，说明该数据，数据库已经存在了，没必要在Insert，进行该数据的update 即可
                            int rows = stockBasicInfoMapper.updateStockBasicInfo(sbi);
                            updateCount+=rows;
                        }
                    }
                }
            }
        }
        //5、展示结果
        System.out.println("插入成功了"+insertCount+"条数据");
        System.out.println("更新成功了"+updateCount+"条数据");
    }

    /**
     * 根据今天日期，删除不是今天数据的退市数据
     * @param todayStr
     */
    public void delStockBasicInfosByOpDate(String todayStr){
        //更新完毕后，已经退市的数据要从数据库删除掉
        int delCount = stockBasicInfoMapper.delStockBasicInfosByOpDate(todayStr);
        System.out.println("删除成功了"+delCount+"条失效数据");
    }

    /**
     * 更新当日的A股日线行情数据
     */
    public void saveStockDailyInfoFromNet() {
        //1、编辑传递给TuShare平台的参数
        Map<String, String> param = new HashMap<>();
        param.put("trade_date",sdf2.format(new Date()));
//        param.put("trade_date","20230926"); //当天日期
        //因为老师在写代码时，时间没有超过工作日的16:00，当天日线数据还不存在，为了测试的方便，取前面某天的数据来测试
//        param.put("trade_date","20230920");
        //2、向TuShare平台发出请求，获取json数据
        JSONObject jsonObject = tuShareAPI.get(Const.STOCK_DAILY, param, null);
        //3、解析json数据，解析为List集合
        //3.1、获取日线json数据
        JSONArray jsonArray = jsonObject.getJSONObject("data").getJSONArray("items");
        //3.2、创建List集合，遍历日线Json数据，每遍历一行数据，转为一个StockDailyInfo对象，存入List集合
        System.out.println(jsonArray.stream().toArray()+"=======================");
        List<StockDailyInfo> stockDailyInfos = new ArrayList<>();
        for (int i = 0; i < jsonArray.size(); i++) {
            JSONArray array = jsonArray.getJSONArray(i);
            stockDailyInfos.add(
                    new StockDailyInfo(
                            null,
                            array.getString(0),
                            array.getString(1),
                            array.getFloat(2),
                            array.getFloat(3),
                            array.getFloat(4),
                            array.getFloat(5),
                            array.getFloat(6),
                            array.getFloat(7),
                            array.getFloat(8),
                            array.getFloat(9),
                            array.getFloat(10)
                    )
            );
        }
        /*for (StockDailyInfo sdi : stockDailyInfos) {
            System.out.println(sdi);
        }*/
        //4、直接将List集合整体插入数据库
        if(stockDailyInfos.size()>0){
            int rows = 0;
            try {
                rows = stockDailyInfoMapper.insertStockDailyInfos(stockDailyInfos);
            } catch (Exception e) {
                //sql完整性约束异常
                if(e.getCause() instanceof SQLIntegrityConstraintViolationException && e.getMessage().contains("Duplicate entry")){
                    //说明当天数据已经更新过了
                    System.out.println("当天数据已更新，无需重复更新");
                }
            }
            //5、展示结果
            System.out.println("成功向数据库插入了"+rows+"条日线数据");
        }else{
            System.out.println("当天无日线数据 或 当前时间还未更新日线数据");
        }
    }

    /**
     * 分页查询股票列表的基本信息
     * @param pageNum   第几页
     * @param pageSize  一页展示的记录数
     * @return
     */
    public RestObject getStockBasicListByPage(Integer pageNum, Integer pageSize) {
        //1、设置分页查询
        PageHelper.startPage(pageNum,pageSize);
        //2、调用mapper层进行分页查询
        List<StockBasicInfo> stockBasicInfos = stockBasicInfoMapper.getStockBasicList();
        //3、封装分页数据
        PageInfo<StockBasicInfo> pageInfo = new PageInfo<>(stockBasicInfos);
        //4、封装到RestObject中，返回结果
        HashMap<String, Object> map = new HashMap<>();
        map.put("rows",pageInfo.getTotal());
        map.put("list",pageInfo.getList());
        return RestObject.OK(map);
    }

    /**
     * 将每只股票仅30天的日线信息，更新到HDFS的CSV文件上
     */
    public void storeStockDailyInfoToHDFS() {
        //1、查询当前所有的股票信息（获取所有股票的stockcode）
        List<StockBasicInfo> stockBasicList = stockBasicInfoMapper.getStockBasicList();
        //2、创建Hadoop配置信息封装对象
        Configuration configuration = new Configuration();
        FileSystem fs=null;
        try {
            fs = FileSystem.get(configuration);
            //3、删除原来的stock_bk目录，创建新的目录
            Path stock = new Path("/stock");
            fs.delete(stock,true);
            fs.mkdirs(stock);
            //4、遍历判断股票信息的CSV文件是否存在(学生测试，只要最近30条即可，要不需要耗时1小时)
            for (int j = 0; j < stockBasicList.size(); j++) {
                StockBasicInfo stockBasicInfo  = stockBasicList.get(j);
                String tscode = stockBasicInfo.getTscode();
                Path data = new Path("/stock/" + tscode + "/data.csv");
                //5、创建目录和文件，并请求该股票所有日线数据
                HashMap param = new HashMap();
                param.put("ts_code",tscode);
                //学生测试时，一定要加入start_date和end_date ，否则耗时1小时左右，获取近30天数据即可(因为有节假日，所以数据肯定低于30天)
                param.put("start_date",sdf2.format(new Date(System.currentTimeMillis()-1000L*60*60*24*30)));
                param.put("end_date",sdf2.format(new Date()));
                JSONObject jsonObject = tuShareAPI.get(Const.STOCK_DAILY, param, null);
                JSONArray items = null;
                try {
                    items = jsonObject.getJSONObject("data").getJSONArray("items");
                } catch (Exception e) {
                    //如果出现空指针异常，说明TuShare的令牌在该时间段没有权限了，让程序停5秒，再重试本次循环
                    System.out.println("令牌无权限，等5秒再试");
                    Thread.sleep(5000);
                    j--;
                    continue;
                }

                FSDataOutputStream fos = fs.create(data);
                fos.writeUTF("\"ts_code\",\"trade_date\",\"open\",\"high\",\"low\",\"close\",\"pre_close\",\"change\",\"pct_chg\",\"vol\",\"amount\"\n");
                for (int i = 0; i < items.size(); i++) {
                    String tempString = items.getJSONArray(i).toJSONString().replace("[", "").replace("]", "");
                    fos.writeUTF(tempString + "\n");
                }
                fos.flush();
                fos.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (fs != null) {
                try {
                    fs.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * 保证最多只有7天日线数据
     */
    public void maintainStockDailyInfo() {
        //1、查询数据库里有几天日线数据
        int count = stockDailyInfoMapper.countTradeDate();
        //2、如果日线数据到8，则多余一天数据
        if(count>=8){
            System.out.println("多余一天，删除最早的一天日线数据");
            //3、删除多出来的 最早的一天的所有日线数据
            stockDailyInfoMapper.delStockDailyInfosOfEarliestTradeDate();
        }

    }

    /**
     * 近7天日线数据清洗，计算平均值
     */
    public void calculateTheAvgThroughMapReduce() {
        System.out.println("数据库7天日线清洗结果初始化....");
        delAllMRDailyResults();
        //配置hadoop文件系统
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            //必须删除HDFS上昨天计算结果
            fs.delete(new Path("/stockDailyInfoAvg"), true);
            Job job = Job.getInstance(conf);
            job.setJobName("stockDailyInfoAvg");

            job.setJarByClass(StockService.class);
            job.setMapperClass(StockDailyInfoAvgMap.class);
            job.setReducerClass(StockDailyInfoAvgReducer.class);

            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);

            //获取所有的股票基本信息并整理成Hadoop路径
            List<StockBasicInfo> list = stockBasicInfoMapper.getStockBasicList();
            Path[] paths = new Path[list.size()];
            for (int i = 0; i < paths.length; i++) {
                paths[i] = new Path("/stock/" + list.get(i).getTscode() + "/data.csv");
            }
            TextInputFormat.setInputPaths(job, paths);
            //以下代码用于测试，仅仅使用了4只股票进行运算展示。因为如果使用5000多全部股票，一次执行要消耗15分钟
//            TextInputFormat.setInputPaths(job,new Path("/stock/000001.SZ/data.csv"),new Path("/stock/000002.SZ/data.csv"),new Path("/stock/000004.SZ/data.csv"),new Path("/stock/000005.SZ/data.csv"));
            TextOutputFormat.setOutputPath(job,new Path("/stockDailyInfoAvg"));
            System.out.println("7天日线数据清洗....");
            job.waitForCompletion(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * 在保存日线清洗数据之前，先将数据库昨天的运算结果清空
     */
    public void delAllMRDailyResults() {
        mapReduceDailyResultMapper.delAll();
    }

    /**
     * 计算6日乖离率保存数据库并保存HDFS
     */
    public void biasCalculateToHDFS() {
        //1、清空数据库昨日计算的乖离率
        System.out.println("清空数据库昨日计算的乖离率...");
        biasInfoMapper.delAll();
        //2、HDFS运算
        //配置hadoop文件系统
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            //必须删除HDFS上昨天计算结果
            fs.delete(new Path("/biasCalculate"), true);
            Job job = Job.getInstance(conf);
            job.setJobName("biasCalculate");

            job.setJarByClass(StockService.class);
            job.setMapperClass(BiasCalculateMap.class);
            job.setReducerClass(BiasCalculateReducer.class);

            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);

            //获取所有的股票基本信息并整理成Hadoop路径
            List<StockBasicInfo> list = stockBasicInfoMapper.getStockBasicList();
            Path[] paths = new Path[list.size()];
            for (int i = 0; i < paths.length; i++) {
                paths[i] = new Path("hdfs://localhost:9000/stock/" + list.get(i).getTscode() + "/data.csv");
            }
            TextInputFormat.setInputPaths(job, paths);
            //以下代码用于测试，仅仅使用了4只股票进行运算展示。
//            TextInputFormat.setInputPaths(job,new Path("/stock/000001.SZ/data.csv"),new Path("/stock/000002.SZ/data.csv"),new Path("/stock/000004.SZ/data.csv"),new Path("/stock/000005.SZ/data.csv"));
//            TextInputFormat.setInputPaths(job,new Path("/stock/000001.SZ/data.csv"));
            TextOutputFormat.setOutputPath(job,new Path("hdfs://localhost:9000/biasCalculate"));
            System.out.println("6日乖离率清洗....");
            job.waitForCompletion(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
