package com.elasticjob.job;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.elasticjob.domian.FileCustom;
import com.elasticjob.mapper.FileCustomMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Dataflow job that migrates data in small batches: the data set is too large
 * to move in one pass, so each execution fetches a handful of not-yet-backed-up
 * records, backs them up, and repeats until nothing is left to fetch.
 */
@Component
// The type parameter <FileCustom> is the row type this job reads from the database.
public class FileDataflowJob implements DataflowJob<FileCustom> {

    /** Number of records fetched (and processed) per execution. */
    private static final int FETCH_SIZE = 2;

    @Autowired
    private FileCustomMapper fileCustomMapper;

    /**
     * Fetches the next batch of records to process.
     * Queries the database for up to {@link #FETCH_SIZE} records whose
     * backedUp flag is still 0 (not yet backed up).
     *
     * @param shardingContext sharding information for this job instance (unused here)
     * @return the records to back up; the job stops re-running when this is empty
     */
    @Override
    public List<FileCustom> fetchData(ShardingContext shardingContext) {
        List<FileCustom> fileCustoms = fileCustomMapper.fetchData(FETCH_SIZE);
        System.out.println("抓取时间:" + LocalDateTime.now() + ",个数" + fileCustoms.size());
        return fileCustoms;
    }

    /**
     * Processes the batch returned by {@link #fetchData} by backing up each record.
     *
     * @param shardingContext sharding information for this job instance (unused here)
     * @param fileCustoms     the batch of records to back up
     */
    @Override
    public void processData(ShardingContext shardingContext, List<FileCustom> fileCustoms) {
        System.out.println("需要备份文件个数:" + fileCustoms.size());
        for (FileCustom fileCustom : fileCustoms) {
            backUpFile(fileCustom);
        }
    }

    /**
     * Backs up a single record (simulated by a one-second sleep), then marks it
     * as backed up (state 1) in the database so it is not fetched again.
     *
     * <p>If the worker thread is interrupted during the simulated backup, the
     * interrupt status is restored and the record is left unmarked so a later
     * run can retry it.
     */
    private void backUpFile(FileCustom fileCustom) {
        try {
            // Simulate the time a real backup would take.
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it, and bail out
            // without marking the record as backed up: the backup did not finish.
            Thread.currentThread().interrupt();
            return;
        }
        System.out.println("执行文件备份====>" + fileCustom);
        fileCustomMapper.changeState(fileCustom.getId(), 1);
    }
}