package com.huawei.bigdata.flink.examples;

import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import sun.misc.BASE64Decoder;
import sun.misc.BASE64Encoder;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Properties;

/**
 * Flink job that reads a CSV-like source file, Base64-encodes the picture file
 * referenced by each record's 4th field, and writes the combined line into a
 * Kafka topic.
 *
 * <p>Expected properties (loaded from the file given via {@code --app.conf}):
 * {@code Kafka.topic}, {@code Kafka.sourceFile}, {@code Kafka.pictureFilePath}.
 */
public class WriteIntoKafka {

    public static void main(String[] args) throws Exception {

        // Load the job configuration (ConfFile.properties) passed as --app.conf <path>.
        ParameterTool paraTool = ParameterTool.fromArgs(args);
        String path = paraTool.get("app.conf");
        Properties properties = new Properties();
        // try-with-resources: the original leaked the FileInputStream and
        // silently continued with an empty config on IOException; fail fast instead.
        try (InputStream in = new FileInputStream(path)) {
            properties.load(in);
        }

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStream<String> messageStream = env.addSource(new SimpleStringGenerator(properties));
        messageStream.addSink(new FlinkKafkaProducer010<>(
                properties.getProperty("Kafka.topic"),
                new SimpleStringSchema(),
                paraTool.getProperties()));
        env.execute();
    }

    /**
     * Source that emits one message per line of {@code Kafka.sourceFile},
     * appending the Base64 encoding of the picture named in the line's 4th
     * comma-separated field.
     */
    public static class SimpleStringGenerator implements SourceFunction<String> {
        private static final long serialVersionUID = 2174904787118597072L;

        private final Properties properties;
        // volatile: cancel() is invoked from a different thread than run().
        private volatile boolean running = true;

        public SimpleStringGenerator(Properties properties) {
            this.properties = properties;
        }

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            // Local reader with try-with-resources replaces the original shared
            // static field + manual finally-close. Explicit UTF-8 instead of the
            // platform default charset.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                    new FileInputStream(properties.getProperty("Kafka.sourceFile")),
                    StandardCharsets.UTF_8))) {
                String line;
                // Honor the cancel flag — the original loop ignored `running`,
                // so cancel() could never stop the source.
                while (running && (line = reader.readLine()) != null) {
                    String[] fieldsArr = line.split(",");
                    String picture = getImageBinary(properties, fieldsArr[3]);
                    if (picture == null) {
                        // Unreadable image: skip the record instead of the
                        // original NullPointerException on picture.length().
                        System.out.println(line + " skipped: picture could not be read");
                        continue;
                    }
                    System.out.println(line + "picture length is " + picture.length());
                    ctx.collect(line + "," + picture);
                }
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }

    /**
     * Reads the image {@code Kafka.pictureFilePath + str}, re-encodes it as JPEG
     * and returns its Base64 representation, or {@code null} if the file cannot
     * be read or is not a supported image format.
     */
    public static String getImageBinary(Properties properties, String str) {
        File f = new File(properties.getProperty("Kafka.pictureFilePath") + str);
        System.out.println("file is :" + properties.getProperty("Kafka.pictureFilePath") + str);
        try {
            BufferedImage bi = ImageIO.read(f);
            if (bi == null) {
                // ImageIO.read returns null (no exception) for unsupported content.
                return null;
            }
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            // NOTE(review): re-encoding as "jpg" regardless of source format may
            // lose quality for non-JPEG inputs (the original carried the same caveat).
            ImageIO.write(bi, "jpg", baos);
            // java.util.Base64 replaces the unsupported sun.misc.BASE64Encoder
            // (removed in JDK 9+). The basic encoder emits no line breaks, so the
            // payload stays on a single line — encodeBuffer() inserted newlines
            // every 76 chars, which would corrupt a line-oriented message.
            return Base64.getEncoder().encodeToString(baos.toByteArray());
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
}
