/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package cn.ac.iie.di.ban.ycl.data.exchange.runner.proxy;

import cn.ac.iie.di.ban.data.exchange.proxy.commons.protocol.DataExchangeRequest;
import cn.ac.iie.di.ban.data.exchange.proxy.commons.worker.ProxyWorker;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.ProtocolTools;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.ScopeEnum;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.StatusEnum;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.proxy.DataProxyRequest;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.proxy.DataProxyResponse;
import static cn.ac.iie.di.ban.ycl.data.exchange.runner.proxy.HDFSUtil.getHDFS;
import com.alibaba.fastjson.JSONObject;
import io.atomix.AtomixClient;
import io.atomix.catalyst.transport.Address;
import io.atomix.catalyst.transport.netty.NettyTransport;
import io.atomix.group.DistributedGroup;
import io.atomix.group.GroupMember;
import io.atomix.group.election.Election;
import io.atomix.group.election.Term;
import io.atomix.group.messaging.MessageClient;
import io.atomix.group.messaging.MessageProducer;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 *
 * @author root
 */
/**
 * Proxy worker for MEDIA-scope data-exchange requests.
 * <p>
 * Flow: forward the request to the master node via an Atomix message producer,
 * poll the metadata service until the task reports finished, then stream every
 * file under the returned HDFS path into the HTTP response, chunk by chunk,
 * gated by an external flow-control service.
 * <p>
 * NOTE(review): not thread-safe by design review — relies on the servlet
 * container handing each request its own {@code response}; confirm a single
 * handler instance is shared safely by the surrounding framework.
 */
public class MediaExecuteHandler implements ProxyWorker {

    static Logger logger = Logger.getLogger(MediaExecuteHandler.class);
    // Protocol constants for the metadata service ("opration" is the
    // service's actual key spelling — do not "fix" it).
    private final static String SCOPE = "4";
    private final static String SCOPETYPE = "request";
    private final static String OPRATIONTYPE = "search";
    // Metadata task status code meaning "finished" (see pollTaskResult).
    private final static String TASK_DONE_STATUS = "4";
    private List<String> metadataUrls = null;          // metadata service endpoints, tried in order
    private int sleepTime = -1;                        // poll/backoff interval in ms
    private String flowControlUrl = "";                // flow-control service endpoint
    private int byteSize = -1;                         // streaming chunk size in bytes
    private Configuration hdfsConf = null;             // Hadoop client configuration
    // Instance field (was static): a static producer assigned in the
    // constructor would let a second instance clobber the first one's
    // producer for every instance.
    private MessageProducer<DataProxyRequest> producer = null;

    /**
     * Connects to the Atomix cluster, joins {@code groupName}, and opens a
     * message producer to the current group leader (the master node).
     * Blocks until the cluster connection and group join complete.
     *
     * @param metadataUrls   metadata service endpoints
     * @param sleepTime      poll interval in milliseconds
     * @param flowControlUrl flow-control service endpoint
     * @param byteSize       streaming chunk size in bytes
     * @param clusterAddress Atomix cluster member addresses
     * @param groupName      distributed group to join
     * @param producerName   name of the producer channel to the leader
     * @param hdfsConf       Hadoop client configuration
     */
    public MediaExecuteHandler(List<String> metadataUrls, int sleepTime, String flowControlUrl, int byteSize, List<Address> clusterAddress, String groupName, String producerName, Configuration hdfsConf) {
        this.metadataUrls = metadataUrls;
        this.sleepTime = sleepTime;
        this.flowControlUrl = flowControlUrl;
        this.byteSize = byteSize;
        this.hdfsConf = hdfsConf;

        AtomixClient atclient = AtomixClient.builder().withTransport(new NettyTransport()).build();
        ProtocolTools.SerializeAll(atclient.serializer());
        atclient.connect(clusterAddress).join();
        DistributedGroup group = atclient.getGroup(groupName).join();
        // Messages go to the elected leader of the group (the master).
        Election election = group.election();
        Term term = election.term();
        GroupMember leader = term.leader();
        MessageClient messaging = leader.messaging();
        producer = messaging.producer(producerName);
    }

    /**
     * Handles one data-exchange request end to end.
     * <p>
     * For UI-originated requests ({@code from_ui == "0"}) this blocks until the
     * task finishes and streams the result files into {@code response};
     * otherwise it returns the task's {@code req_id} immediately so the caller
     * can fetch the result later.
     *
     * @param request  the incoming request (uid, sql, uniq_key, from_ui)
     * @param response servlet response the result or a JSON error is written to
     */
    @Override
    public void execute(DataExchangeRequest request, HttpServletResponse response) {
        // Request parameters supplied by the API layer.
        String uid = request.getUid();
        String sql = request.getContent();
        String uniq_key = request.getUniq_key();
        String fromui = request.getFrom_ui();
        ScopeEnum scopeEnum = ScopeEnum.MEDIA;
        logger.info(uniq_key + " NEW-request, the uid is " + uid + " ,sql is " + sql + " ,scope is " + scopeEnum.toString() + " ,uniq_key is " + uniq_key);

        HashMap<String, Object> masterMap = new HashMap<>();
        masterMap.put("uid", uid);
        masterMap.put("sql", sql);
        masterMap.put("scope", scopeEnum);
        masterMap.put("uniq_key", uniq_key);
        // Forward the task to the master node and wait for its acknowledgement.
        DataProxyResponse masterResponse = MasterTools.sendToSlave(producer, masterMap);
        if (masterResponse == null) {
            logger.error(uniq_key + " the masterResponse return null");
            writeResponse(response, "the masterResponse return null.", uniq_key);
            return;
        }

        StatusEnum statusEnum = masterResponse.getStatus();
        uniq_key = masterResponse.getUniq_key();
        String req_id = masterResponse.getReq_id();
        String detail = masterResponse.getDetail();
        logger.info(uniq_key + " request Master finished, and the returned req_id is " + req_id + " ,the uniq_key is " + uniq_key);

        if (statusEnum.toString().contains("ERROR")) {
            logger.error(detail);
            writeResponse(response, "{'status':'" + statusEnum.toString() + "','detail':'" + detail + "'}", uniq_key);
            return;
        }
        // Non-UI callers get the req_id back and poll for the result themselves.
        // ("0".equals(fromui) is null-safe; request.getFrom_ui() may be absent.)
        if (!"0".equals(fromui)) {
            writeResponse(response, "{'req_id':'" + req_id + "'}", uniq_key);
            return;
        }
        String taskResult = pollTaskResult(req_id, uniq_key);
        streamResultToResponse(taskResult, uid, uniq_key, response);
    }

    /**
     * Polls the metadata service until the task identified by {@code req_id}
     * reaches the finished status ({@value #TASK_DONE_STATUS}), then returns
     * its result — an HDFS path. Blocks indefinitely (no timeout, matching the
     * original contract); sleeps {@code sleepTime} ms between attempts and
     * keeps retrying on any error.
     *
     * @param req_id   task id assigned by the master
     * @param uniq_key request correlation key, used only for logging
     * @return the "result" field of the finished task (an HDFS path)
     */
    private String pollTaskResult(String req_id, String uniq_key) {
        HashMap<String, String> metadataMap = new HashMap<>();
        metadataMap.put("scope", SCOPETYPE);
        metadataMap.put("opration", OPRATIONTYPE);
        Map<String, Object> conditionMap = new HashMap<>();
        conditionMap.put("type", SCOPE);
        conditionMap.put("req_id", req_id);
        metadataMap.put("condition", new JSONObject(conditionMap).toString());
        metadataMap.put("content", "{\"result\":\"\",\"status\":\"\"}");
        while (true) {
            try {
                String postSendReturnContent = ParserFastJson.requestMetadata(metadataUrls, metadataMap);
                if (ParserFastJson.getReturnStatus(postSendReturnContent)) {
                    List<Map<String, String>> returnContentList = ParserFastJson.getContentByType(postSendReturnContent, "data");
                    if (!returnContentList.isEmpty()) {
                        Map<String, String> taskMap = returnContentList.get(0);
                        if (TASK_DONE_STATUS.equals(taskMap.get("status"))) {
                            String taskResult = taskMap.get("result");
                            logger.info(uniq_key + " request Metadata, and the returned result is " + taskResult);
                            return taskResult;
                        }
                    }
                }
                Thread.sleep(sleepTime);
            } catch (InterruptedException ie) {
                // Restore the interrupt flag so callers up the stack can see it.
                Thread.currentThread().interrupt();
                logger.error(uniq_key + " post Metadta to get taskResult failed, cause " + ie.getMessage(), ie);
            } catch (Exception e) {
                logger.error(uniq_key + " post Metadta to get taskResult failed, cause " + e.getMessage(), e);
            }
        }
    }

    /**
     * Streams every file under the HDFS path {@code taskResult} into the HTTP
     * response. Per-file read failures are logged and skipped; a failure to
     * reach HDFS at all produces a JSON HDFS_ERROR body instead.
     *
     * @param taskResult HDFS path holding the task's result files
     * @param uid        requesting user id, passed to flow control
     * @param uniq_key   request correlation key, used only for logging
     * @param response   servlet response the bytes are written to
     */
    private void streamResultToResponse(String taskResult, String uid, String uniq_key, HttpServletResponse response) {
        // Flow-control request parameters; "size" is filled in per chunk.
        HashMap<String, String> flowMap = new HashMap<>();
        flowMap.put("uid", uid);
        flowMap.put("wait", "true");
        try {
            FileSystem fs = getHDFS(hdfsConf, taskResult);
            // Default to an empty array so a listing failure below cannot
            // leave this null and NPE the for-loop (original bug).
            Path[] hdfsFileList = new Path[0];
            try {
                if (!fs.isFile(new Path(taskResult))) {
                    fs.mkdirs(new Path(taskResult));
                }
                FileStatus[] s = fs.listStatus(new Path(taskResult));
                hdfsFileList = FileUtil.stat2Paths(s);
            } catch (Exception e) {
                logger.error("HDFS:getHDFSPathList " + taskResult + " failed ");
            }
            for (Path filePath : hdfsFileList) {
                FileSystem filefs = FileSystem.get(new URI(filePath.toString()), hdfsConf);
                Path srcPath = new Path(filePath.toString());
                InputStream hdfsFileStream = null;
                try {
                    // Directories are skipped; only plain files are streamed.
                    if (filefs.isFile(srcPath)) {
                        hdfsFileStream = filefs.open(srcPath);
                    }
                } catch (Exception e) {
                    logger.error("HDFS:readFileAsStream " + filePath + " failed ");
                }
                if (hdfsFileStream == null) {
                    continue;
                }
                try {
                    copyWithFlowControl(hdfsFileStream, flowMap, response);
                } catch (Exception ex) {
                    logger.error(uniq_key + " write stream to response.getOutputStream failed, cause " + ex.getMessage(), ex);
                } finally {
                    hdfsFileStream.close();
                }
            }
        } catch (Exception ex) {
            String detail = ex.getMessage();
            logger.error(uniq_key + " read file as stream from hdfs failed, cause " + ex.getMessage(), ex);
            writeResponse(response, "{'status':'HDFS_ERROR','detail':'" + detail + "'}", uniq_key);
        }
    }

    /**
     * Copies {@code in} to the response output stream in {@code byteSize}
     * chunks; each chunk is only written once the flow-control service
     * answers "true" for its size, sleeping {@code sleepTime} ms between
     * denied attempts.
     *
     * @param in       open HDFS input stream (closed by the caller)
     * @param flowMap  flow-control parameters, mutated to carry the chunk size
     * @param response servlet response the bytes are written to
     * @throws Exception on read/write/flow-control failure; propagated so the
     *                   caller can log it with request context
     */
    private void copyWithFlowControl(InputStream in, HashMap<String, String> flowMap, HttpServletResponse response) throws Exception {
        byte[] buffer = new byte[byteSize];
        int len;
        while ((len = in.read(buffer)) != -1) {
            flowMap.put("size", len + "");
            // Block until flow control grants this chunk.
            while (!"true".equals(new JettyClient().postSendReturnContent(flowControlUrl, flowMap))) {
                Thread.sleep(sleepTime);
            }
            response.getOutputStream().write(buffer, 0, len);
        }
    }

    /**
     * Writes {@code body} to the response with HTTP 200. A write failure here
     * means the client connection is already broken, so it is logged instead
     * of silently swallowed (original code had four empty catch blocks).
     *
     * @param response servlet response to write to
     * @param body     response body (JSON or plain text)
     * @param uniq_key request correlation key, used only for logging
     */
    private void writeResponse(HttpServletResponse response, String body, String uniq_key) {
        try {
            response.getWriter().write(body);
            response.setStatus(HttpServletResponse.SC_OK);
        } catch (IOException ex) {
            logger.error(uniq_key + " failed to write response, cause " + ex.getMessage(), ex);
        }
    }
}
