package com.esjob.task;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Sample elastic-job {@link com.dangdang.ddframe.job.api.dataflow.DataflowJob}
 * that fetches batches of map records and processes them per sharding item.
 *
 * @author LCG
 * @since 2021/3/19 13:05
 */
@Component
public class MyDataFlowTask2 implements DataflowJob<Map<String,Object>> {

    /**
     * Cycle counter shared between {@link #fetchData} and {@link #processData}.
     * AtomicInteger because elastic-job may invoke sharding items on different
     * threads and a plain {@code static int count++} is not an atomic operation.
     */
    private static final AtomicInteger count = new AtomicInteger(1);

    /**
     * Fetches one batch of data for the current sharding item.
     * <p>
     * Every 10th cycle this returns an empty list so the streaming dataflow
     * loop ends for this round — elastic-job stops fetching when the returned
     * list is {@code null} or empty, so an empty list is behaviorally
     * equivalent to the former {@code return null} while avoiding the
     * null-collection anti-pattern.
     *
     * @param shardingContext sharding context of the current execution
     * @return the batch to process, or an empty list to end this round
     */
    @Override
    public List<Map<String, Object>> fetchData(ShardingContext shardingContext) {

        // End the streaming round on every 10th invocation.
        if (count.get() % 10 == 0) {
            count.incrementAndGet();
            return Collections.emptyList();
        }

        List<Map<String, Object>> batch = new ArrayList<>();
        Map<String, Object> record = new HashMap<>();
        record.put("yyy", "564645565");
        batch.add(record);
        return batch;
    }


    /**
     * Processes the batch returned by {@link #fetchData}.
     * Only sharding item 0 prints the fetched records; every other item
     * logs that it is skipping. A short sleep simulates per-batch work.
     *
     * @param shardingContext sharding context of the current execution
     * @param list            batch produced by {@link #fetchData}
     */
    @Override
    public void processData(ShardingContext shardingContext, List<Map<String, Object>> list) {

        int shardingItem = shardingContext.getShardingItem();

        if (shardingItem == 0) {
            for (Map<String, Object> map : list) {
                System.out.println("分片0=======处理数据=====》" + map);
            }
        } else {
            System.out.println("分片1======不=处理数据=====》");
        }

        try {
            // Simulate processing cost for this batch.
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the framework can observe the
            // interruption instead of it being silently swallowed.
            Thread.currentThread().interrupt();
        }

        // Advance the cycle counter even when the sleep was interrupted;
        // previously an interruption skipped the increment entirely.
        count.incrementAndGet();
    }
}
