package com.jxw.cloudpen.web;

import com.alibaba.fastjson.JSONObject;
import com.google.api.gax.rpc.BidiStream;
import com.google.api.gax.rpc.ClientStream;
import com.google.api.gax.rpc.ResponseObserver;
import com.google.api.gax.rpc.StreamController;
import com.google.cloud.speech.v1p1beta1.*;
import com.google.cloud.speech.v2.RecognizerName;
import com.google.cloud.speech.v2.SpeechClient;
import com.google.cloud.speech.v2.StreamingRecognizeRequest;
import com.google.cloud.speech.v2.StreamingRecognizeResponse;
import com.google.protobuf.ByteString;
import com.google.protobuf.Duration;
import com.jxw.cloudpen.web.netty.google.*;
import com.jxw.cloudpen.web.netty.server.NettyServerBootstrap;
import com.jxw.cloudpen.web.netty.websocketserver.TestSocketServerHandler;
import com.jxw.cloudpen.web.netty.websocketserver.TestSocketServerHandlerV2;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

import javax.sound.sampled.*;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * @author liuhui
 * @email liuhui@jxwxxkj.com
 * @date 2022-11-30 20:15:43
 */
@Slf4j
@SpringBootApplication
//@EnableDiscoveryClient(autoRegister=false)
public class CloudPenApplication {
    static final int PORT = Integer.parseInt(System.getProperty("port", "8086"));

    private static TargetDataLine targetDataLine;
    private static volatile BlockingQueue<byte[]> sharedQueue = new LinkedBlockingQueue<byte[]>();
    static BidiStream<StreamingRecognizeRequest, StreamingRecognizeResponse> bidiStream;


    public static void main(String[] args) throws Exception {
        SpringApplication.run(CloudPenApplication.class, args);

//
//        try {
//            NettyServerBootstrap bootstrap=new NettyServerBootstrap(9999);
//        } catch (InterruptedException e) {
//            e.printStackTrace();
//        }


//        SpeechClient speechClient = null;
//        try {
//            speechClient = SpeechClient.create();
//        } catch (IOException e) {
//            e.printStackTrace();
//        }
//        bidiStream =
//                speechClient.streamingRecognizeCallable().call();
//        for (StreamingRecognizeResponse response : bidiStream) {
//            // Do something when a response is received.
//            log.info("response is {}", JSONObject.toJSONString(response));
//        }
        // Creating microphone input buffer thread


        // SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
        // bigEndian: false
//        AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
//        DataLine.Info targetInfo =
//                new DataLine.Info(
//                        TargetDataLine.class,
//                        audioFormat); // Set the system information to read from the microphone audio
        // stream

        // Target data line captures the audio stream the microphone produces.
//        targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
//        targetDataLine.open(audioFormat);


//        MicBuffer micrunnable = new MicBuffer();
//        Thread micThread = new Thread(micrunnable);
//        micThread.start();
//
//        speed speed = new speed();
//        new Thread(speed).start();
//
//        resule resule = new resule();
//        new Thread(resule).start();


//        try {
//            bbb();
//        } catch (InterruptedException e) {
//            e.printStackTrace();
//        }


        //初始化netty 服务端
//            InfiniteStreamRecognizeOptions options = InfiniteStreamRecognizeOptions.fromFlags(args);
//            if (options == null) {
//                // Could not parse.
//                System.out.println("Failed to parse options.");
//                System.exit(1);
//            }

//            try {
//                new InfiniteStreamRecognize6().infiniteStreamingRecognize( "en-US");
////                InfiniteStreamRecognize6.infiniteStreamingRecognize( "en-US");
//            } catch (Exception e) {
//                System.out.println("Exception caught: " + e);
//            }

    }




    public static void bbb() throws InterruptedException {
        EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap();
            b.group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
//             .childHandler(new TestSocketServerInitializer());



                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        protected void initChannel(SocketChannel socketChannel) throws Exception {
                            ChannelPipeline p = socketChannel.pipeline();
                            p.addLast(new HttpServerCodec());
//                    p.addLast(new CommonRpcDecoderHandler());
                            p.addLast(new HttpObjectAggregator(65536));
                            p.addLast(new TestSocketServerHandlerV2());
//                    p.addLast(new BusinHandler());
//                p.addLast(new BusinHandler());
                        }
                    });



            Channel ch = b.bind(PORT).sync().channel();


            ch.closeFuture().sync();
        } finally {
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }

    public static void aaa() throws InterruptedException {
        EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        EventLoopGroup workerGroup = new NioEventLoopGroup();
            ServerBootstrap b = new ServerBootstrap();
            b.group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
//             .childHandler(new TestSocketServerInitializer());
                   .handler(new LoggingHandler(LogLevel.INFO))


                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        protected void initChannel(SocketChannel socketChannel) throws Exception {
                            ChannelPipeline p = socketChannel.pipeline();
                            p.addLast(new HttpServerCodec());
//                    p.addLast(new CommonRpcDecoderHandler());
                            p.addLast(new HttpObjectAggregator(65536));
                            p.addLast(new TestSocketServerHandler());
//                    p.addLast(new BusinHandler());
//                p.addLast(new BusinHandler());
                        }
                    });





            Channel ch = b.bind(PORT).sync().channel();

            ch.closeFuture().sync();
    }

    static class MicBuffer implements Runnable {
        @Override
        public void run() {
            System.out.println("Start speaking...Press Ctrl-C to stop");
            targetDataLine.start();
            byte[] data = new byte[6400];
            while (targetDataLine.isOpen()) {
                try {
                    int numBytesRead = targetDataLine.read(data, 0, data.length);
                    if ((numBytesRead <= 0) && (targetDataLine.isOpen())) {
                        continue;
                    }
                    //组装数据
                    sharedQueue.put(data.clone());
                } catch (InterruptedException e) {
                    System.out.println("Microphone input buffering interrupted : " + e.getMessage());
                }
            }
        }
    }


    static class speed implements Runnable {
        @Override
        public void run() {
            byte[] bytes= new byte[0];
            try {
                bytes = sharedQueue.take();
                if(bytes.length>0){
                    com.google.cloud.speech.v2.StreamingRecognizeRequest request =
                            com.google.cloud.speech.v2.StreamingRecognizeRequest.newBuilder()
                            .setRecognizer(

//                                    RecognizerName.of("[PROJECT]", "[LOCATION]", "[RECOGNIZER]").toString())
//                                    RecognizerName.of("ai-lab-388402", "global", "streaming-recognizer").toString())

                    RecognizerName.of("ai-lab-388402", "global", "us-central1").toString())
                                    .setAudio( ByteString.copyFrom(bytes))
                                    .build();
                    log.info("audio is {}",request.getAudio());
                    bidiStream.send(request);
                    for (StreamingRecognizeResponse response : bidiStream) {
                        // Do something when a response is received.
                        log.info("response is {}", JSONObject.toJSONString(response));
                    }
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }


    static class resule implements Runnable {
        @Override
        public void run() {
//            for (StreamingRecognizeResponse response : bidiStream) {
//                // Do something when a response is received.
//                log.info("response is {}", JSONObject.toJSONString(response));
//            }
        }
    }
}
