package cn.wolfcode.elasticjob.job;

import cn.wolfcode.elasticjob.domain.FileCustom;
import cn.wolfcode.mapper.FileCustomMapper;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Dataflow-type Elastic-Job: fetches pending {@link FileCustom} records in small
 * batches and "backs up" each one, marking it processed afterwards. The framework
 * keeps alternating {@link #fetchData} / {@link #processData} until {@code fetchData}
 * returns an empty list.
 */
@Component
public class FileDataflowJob implements DataflowJob<FileCustom> {

    /** Number of pending records pulled per {@link #fetchData} invocation. */
    private static final int FETCH_BATCH_SIZE = 2;

    /** State value written by {@code changeState} once a record has been backed up. */
    private static final int STATE_BACKED_UP = 1;

    @Autowired
    private FileCustomMapper fileCustomMapper;

    /**
     * Fetches the next batch of records to process.
     *
     * @param shardingContext sharding info supplied by the framework (unused here)
     * @return up to {@value #FETCH_BATCH_SIZE} pending records; an empty list ends the flow
     */
    @Override
    public List<FileCustom> fetchData(ShardingContext shardingContext) {
        List<FileCustom> fileCustoms = fileCustomMapper.fetchData(FETCH_BATCH_SIZE);
        System.out.println("抓取时间:" + new Date() + ",个数" + fileCustoms.size());
        return fileCustoms;
    }

    /**
     * Backs up each record of the fetched batch in order.
     *
     * @param shardingContext sharding info supplied by the framework (unused here)
     * @param data            the batch previously returned by {@link #fetchData}
     */
    @Override
    public void processData(ShardingContext shardingContext, List<FileCustom> data) {
        for (FileCustom fileCustom : data) {
            backUpFile(fileCustom);
        }
    }

    /**
     * Simulates backing up a single file (1-second sleep) and then marks the
     * record as backed up. If the worker thread is interrupted during the
     * simulated work, the interrupt status is restored and the record is NOT
     * marked processed, so it will be picked up again by a later fetch.
     */
    private void backUpFile(FileCustom fileCustom) {
        try {
            // Simulate the backup work; assume the whole operation takes one second.
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the job framework can observe the
            // cancellation, and abort instead of marking a half-done backup complete.
            Thread.currentThread().interrupt();
            return;
        }
        System.out.println("执行文件备份====> id:" + fileCustom.getId() + "   类型：" + fileCustom.getType());
        fileCustomMapper.changeState(fileCustom.getId(), STATE_BACKED_UP);
    }
}