import com.aliyun.odps.data.Record;
import com.aliyun.odps.data.TableInfo;
import com.aliyun.odps.mapred.JobClient;
import com.aliyun.odps.mapred.MapperBase;
import com.aliyun.odps.mapred.ReducerBase;
import com.aliyun.odps.mapred.conf.JobConf;
import com.aliyun.odps.mapred.utils.InputUtils;
import com.aliyun.odps.mapred.utils.OutputUtils;
import com.aliyun.odps.mapred.utils.SchemaUtils;
import com.google.gson.Gson;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;

/**
 * ODPS MapReduce job that groups diagnosis item rows by {@code diagnose_record_id}
 * and emits one row per record id: (diagnose_record_id, JSON document of its items).
 *
 * <p>Input rows are expected to carry the columns: {@code diagnose_record_id} (bigint)
 * and {@code item_id}, {@code item_name}, {@code item_value}, {@code item_unit} (string).
 */
public class dataflow {

    /** Mapper: keys each input row by diagnose_record_id and forwards the item columns. */
    public static class TokenizerMapper extends MapperBase {
        private Record key;
        private Record value;

        @Override
        public void setup(TaskContext context) throws IOException {
            // Reusable output records, repopulated for every input row.
            key = context.createMapOutputKeyRecord();
            value = context.createMapOutputValueRecord();
        }

        @Override
        public void map(long recordNum, Record record, TaskContext context)
                throws IOException {
            key.set("diagnose_record_id", record.getBigint("diagnose_record_id"));
            value.set("item_id", record.getString("item_id"));
            value.set("item_name", record.getString("item_name"));
            value.set("item_value", record.getString("item_value"));
            value.set("item_unit", record.getString("item_unit"));
            context.write(key, value);
        }
    }

    /** Reducer: collects all items of one diagnose_record_id into a single JSON payload. */
    public static class SumReducer extends ReducerBase {
        // Gson is stateless as used here; share one instance per task instead of
        // allocating a new one for every reduce key.
        private static final Gson GSON = new Gson();
        private Record result = null;

        @Override
        public void setup(TaskContext context) throws IOException {
            result = context.createOutputRecord();
        }

        @Override
        public void reduce(Record key, Iterator<Record> values, TaskContext context)
                throws IOException {
            ArrayList<data_flow> dataflows = new ArrayList<>();
            while (values.hasNext()) {
                Record val = values.next();
                data_flow df = new data_flow();
                // Use getString(...) rather than get(...).toString(): the latter
                // throws NullPointerException when a column is NULL.
                df.setId(val.getString("item_id"));
                df.setName(val.getString("item_name"));
                df.setUnit(val.getString("item_unit"));
                df.setValue(val.getString("item_value"));
                dataflows.add(df);
            }

            // Wrap the items in the sys1/syss1 envelope expected by downstream consumers.
            sys1 sys = new sys1();
            sys.setPath("");
            sys.setSys("");
            sys.setDataflows(dataflows);

            ArrayList<sys1> sysList = new ArrayList<>();
            sysList.add(sys);
            syss1 envelope = new syss1();
            envelope.setSyss(sysList);

            String json = GSON.toJson(envelope);

            result.set(0, key.get(0)); // diagnose_record_id
            result.set(1, json);       // aggregated JSON document
            context.write(result);
        }
    }

    /**
     * Configures and submits the job.
     *
     * @param args args[0] = input table name, args[1] = output table name
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            // Message previously said "WordCount" (copy-paste leftover); corrected
            // to name this job so operators see accurate usage help.
            System.err.println("Usage: dataflow <in_table> <out_table>");
            System.exit(2);
        }
        JobConf job = new JobConf();
        job.setMapperClass(TokenizerMapper.class);
        job.setReducerClass(SumReducer.class);
        // Intermediate schema must match the columns written by the mapper above.
        job.setMapOutputKeySchema(SchemaUtils.fromString("diagnose_record_id:bigint"));
        job.setMapOutputValueSchema(
                SchemaUtils.fromString("item_id:string,item_name:string,item_value:string,item_unit:string"));
        InputUtils.addTable(TableInfo.builder().tableName(args[0]).build(), job);
        OutputUtils.addTable(TableInfo.builder().tableName(args[1]).build(), job);
        JobClient.runJob(job);
    }
}
