package me.ywd.job;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Demo {@link DataflowJob} implementation: each sharding item "fetches" a small
 * hard-coded batch of strings (standing in for a database query) and processing
 * simply logs every element.
 *
 * @Author Ye_Wenda
 * @Date 6/29/2017
 */
public class MyDataflowJob implements DataflowJob<String> {

    private static final Logger log = LoggerFactory.getLogger(MyDataflowJob.class);

    /**
     * Fetches the batch of data assigned to this instance's sharding item.
     * In a real job this would query a database filtered by the sharding item.
     *
     * @param context runtime sharding context supplied by the framework
     * @return the data for this sharding item, or an empty list when the item
     *         has no data; never {@code null}. An empty result stops the
     *         current processing round, same as the framework's treatment of
     *         {@code null}, but avoids handing callers a null collection.
     */
    @Override
    public List<String> fetchData(ShardingContext context) {
        switch (context.getShardingItem()) {
            case 0:
                // get data from database by sharding item 0
                List<String> data0 = new ArrayList<String>();
                data0.add("Hello World, Michael!");
                data0.add("Hello World, Neo!");
                return data0;
            case 1:
                // get data from database by sharding item 1
                // (was LinkedList; ArrayList used for consistency with the other items)
                List<String> data1 = new ArrayList<String>();
                data1.add("OK, Windy!");
                data1.add("OK, Myrcella!");
                return data1;
            case 2:
                // get data from database by sharding item 2
                List<String> data2 = new ArrayList<String>();
                data2.add("This shit!");
                data2.add("That shit!");
                return data2;
            default:
                // Unknown sharding item: return an empty list instead of null
                // so callers never receive a null collection.
                return Collections.emptyList();
        }
    }

    /**
     * Processes a batch previously returned by {@link #fetchData(ShardingContext)}
     * by logging each element at INFO level.
     *
     * @param shardingContext runtime sharding context supplied by the framework
     * @param data            the non-empty batch returned by {@code fetchData}
     */
    @Override
    public void processData(ShardingContext shardingContext, List<String> data) {
        // process data
        for (String str : data) {
            log.info(str);
        }
    }
}
