package com.cloud2022.kafka.producer;
import java.util.Date;

import java.io.FileNotFoundException;
import java.text.SimpleDateFormat;
import java.util.*;

import com.cloud2022.kafka.Stock.FactoryIn;
import com.alibaba.fastjson.JSONObject;
import com.cloud2022.kafka.Stock;
import com.cloud2022.kafka.entity.JsonRootBean;
import com.cloud2022.kafka.entity.Record;
import com.cloud2022.kafka.mapper.RecordMapper;
import org.apache.ibatis.session.RowBounds;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.util.CollectionUtils;
import tk.mybatis.mapper.weekend.Weekend;

import javax.annotation.Resource;
import java.io.File;

/**
 * @author: created by God of SJF
 * @version: v1.0
 * @description: com.cloud2022.kafka.producer
 * @date: 2021/12/3
 */
@Component
public class Producer {

    // Parses operate_time values with the pattern "yyyy-MM-ddHH:mm:ss" — note
    // there is deliberately NO space between date and time; presumably the
    // upstream field is formatted that way. TODO confirm against real data.
    // NOTE(review): SimpleDateFormat is not thread-safe; this is only safe
    // while Spring's scheduler runs this job on a single thread (the default).
    static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss");

    @Resource
    MongoTemplate mongoTemplate; // NOTE(review): unused in this class — confirm before removing

    @Resource
    KafkaTemplate<String, String> kafka;

    @Resource
    RecordMapper recordMapper;

    Integer flag = 0; // NOTE(review): never read or written here — likely dead state

    /**
     * Runs every second: claims the next pending file record, streams its
     * JSON lines, and publishes terminal-in events (_status == "12") to the
     * "stock12" Kafka topic keyed by barcode.
     *
     * Record status values as used here: 0 = pending, 2 = in progress,
     * 1 = done. If any record is still in progress the run is skipped, so at
     * most one file is processed at a time.
     *
     * @throws FileNotFoundException declared for callers; in practice a
     *         missing file is caught by the broad catch below
     */
    @Scheduled(cron = "* * * * * ?")
    public void info() throws FileNotFoundException {
        Map<String, Stock> map = new HashMap<>(); // NOTE(review): unused — confirm before removing

        // Skip the whole run while another record is mid-processing (status 2).
        Weekend<Record> recordWeekend = Weekend.of(Record.class);
        recordWeekend.createCriteria().andEqualTo("status", 2);
        List<Record> records = recordMapper.selectByExampleAndRowBounds(recordWeekend, new RowBounds(0, 1));
        if (!CollectionUtils.isEmpty(records)) {
            return;
        }

        // Claim one pending record (status 0) by flipping it to "in progress".
        Weekend<Record> recordWeekend2 = Weekend.of(Record.class);
        recordWeekend2.createCriteria().andEqualTo("status", 0);
        List<Record> records2 = recordMapper.selectByExampleAndRowBounds(recordWeekend2, new RowBounds(0, 1));
        if (!CollectionUtils.isEmpty(records2)) {
            Record record = records2.get(0);
            record.setStatus(2);
            recordMapper.updateByPrimaryKeySelective(record);

            File file = new File(record.getPath());
            // try-with-resources: the original never closed the Scanner,
            // leaking the underlying file handle on every run.
            try (Scanner scanner = new Scanner(file)) {
                // BUG FIX: the original used hasNext()/next(), which splits on
                // ANY whitespace — a JSON line containing a space (e.g. inside
                // organization_name) was torn into unparseable fragments.
                // Read whole lines instead: one JSON document per line.
                while (scanner.hasNextLine()) {
                    String line = scanner.nextLine();
                    System.out.println(line);
                    JsonRootBean jsonRootBean = JSONObject.parseObject(line, JsonRootBean.class);
                    // Only terminal-in events (_status == "12") are forwarded.
                    if (jsonRootBean.get_status().equals("12")) {
                        String barcode = jsonRootBean.get_barcode_content();
                        Date date = sdf.parse(jsonRootBean.get_operate_time());
                        Stock.TerminalIn in = new Stock.TerminalIn();
                        in.setTerminalInTime(date);
                        in.setTerminalInBillNo(jsonRootBean.get_workid());
                        in.setTerminalCode(jsonRootBean.get_organization_code());
                        in.setTerminalName(jsonRootBean.get_organization_name());
                        in.setOperateName(jsonRootBean.get_operate_name());

                        // Payload contract consumed downstream: "<json>|<fileName>".
                        kafka.send("stock12", barcode, JSONObject.toJSONString(in) + "|" + file.getName());
                    }
                }
            } catch (Exception e) {
                // NOTE(review): failures are only printed, yet the record is
                // still marked done below — a bad file/line is never retried.
                // Confirm this at-most-once behavior is intended.
                e.printStackTrace();
                System.out.println("eeee");
            }

            // Mark the record done regardless of outcome (see note above).
            record.setStatus(1);
            recordMapper.updateByPrimaryKeySelective(record);

        }
    }
}
