package com.huangye88.etl.origin;

import java.sql.ResultSet;


import avro.shaded.com.google.common.collect.ImmutableMap;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.io.jdbc.JdbcIO;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
import org.apache.kafka.common.serialization.StringDeserializer;


/**
 * Reads String key/value records from the Kafka topic {@code huangye88} on
 * {@code localhost:9092}, parses each value as a JSON object, and prints its
 * {@code "ID"} field to stdout.
 *
 * <p>NOTE(review): despite the class name ("KafkaToHbaseOrigin") and the old
 * "kafka to txt" comment, this pipeline currently only prints to the console —
 * it writes to neither HBase nor a text file.
 */
public class KafkaToHbaseOrigin {

    /**
     * Attaches the Kafka-read + print transforms to the given pipeline.
     *
     * @param pipeline the Beam pipeline to extend; the caller is expected to run it
     */
    public static void dealWithPipeline(Pipeline pipeline) {

        pipeline.apply(KafkaIO.<String, String>read()
                .withBootstrapServers("localhost:9092") // required: Kafka broker address and port
                .withTopic("huangye88")                 // required: topic to consume
                .withKeyDeserializer(StringDeserializer.class)   // required
                .withValueDeserializer(StringDeserializer.class) // required
                // Start from the beginning of the topic when no committed offset exists.
                .updateConsumerProperties(ImmutableMap.<String, Object>of("auto.offset.reset", "earliest"))
                // Drop Kafka metadata; downstream sees plain KV<String, String>.
                .withoutMetadata())
        // Typed DoFn<KV<String, String>, String> (was raw DoFn<KV, String>),
        // which removes the unchecked (String) cast on the element value.
        .apply("recevieData", ParDo.of(new DoFn<KV<String, String>, String>() {
            private static final long serialVersionUID = 4502730154830948266L;

            @DoFn.ProcessElement
            public void processElement(ProcessContext c) {
                String value = c.element().getValue();
                // Guard against Kafka tombstones (null value): JSON.parseObject(null)
                // would return null and the getString call would NPE, failing the bundle.
                if (value == null) {
                    return;
                }
                JSONObject json = JSON.parseObject(value);
                if (json != null) {
                    // Prints null if the message has no "ID" field.
                    System.out.println(json.getString("ID"));
                }
            }
        }));

    }
}