package com.starnet.yarnmonitor.yarn.service.impl;

import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.starnet.yarnmonitor.security.entity.SysRole;
import com.starnet.yarnmonitor.security.entity.SysUserRoleRelation;
import com.starnet.yarnmonitor.security.mapper.SysRoleMapper;
import com.starnet.yarnmonitor.security.mapper.SysUserRoleRelationMapper;
import com.starnet.yarnmonitor.security.utils.UserUtil;
import com.starnet.yarnmonitor.yarn.conf.HDFSConf;
import com.starnet.yarnmonitor.yarn.conf.LinuxConf;
import com.starnet.yarnmonitor.yarn.entity.AppTrackLog;
import com.starnet.yarnmonitor.yarn.handler.ApiException;
import com.starnet.yarnmonitor.yarn.mapper.AppTrackLogMapper;
import com.starnet.yarnmonitor.yarn.service.HDFSService;
import com.starnet.yarnmonitor.yarn.service.SubmissionService;
import lombok.extern.log4j.Log4j;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
@Log4j
@Service
public class SubmissionServiceImpl implements SubmissionService {
    @Autowired
    HDFSService hdfsService;
    @Autowired
    HDFSConf hdfsConf;
    @Autowired
    FileSystem myHDFS;
    @Autowired
    LinuxConf linuxConf;
    @Autowired
    AppTrackLogMapper appTrackLogMapper;
    @Autowired
    SysUserRoleRelationMapper sysUserRoleRelationMapper;
    @Autowired
    SysRoleMapper sysRoleMapper;
    @Autowired
    Environment environment;


    @Override
    public AppTrackLog submitJar(MultipartFile multipartFile, String arguments) throws IOException {
        if (!multipartFile.getOriginalFilename().endsWith(".jar")) {
            throw new ApiException("请上传jar类型的文件");
        }
        //上传文件
        hdfsService.uploadFile(multipartFile, hdfsConf.getAutoJarPath());

        //在linux上创建track日志
        //创建日志
        //截取.jar以前的名字
        String originalFilename = multipartFile.getOriginalFilename();
        int i = originalFilename.indexOf(".");
        //日志文件名
        String logName = originalFilename.substring(0, i) + ".txt";
        boolean exists = myHDFS.exists(new Path("/app_track_log/" + logName));
        if (exists) {
            throw new ApiException("日志已存在");
        }
        FSDataOutputStream fsDataOutputStream = myHDFS.create(new Path("/app_track_log/" + logName));
        //从hdfs上下载到linux
        linuxGetJarFromHDFS(multipartFile.getOriginalFilename(), fsDataOutputStream);
        //执行hadoop jar
        //执行app并输出日志
        CompletableFuture.runAsync(() -> {
            try {
                executeApp(multipartFile.getOriginalFilename(), fsDataOutputStream, arguments);
                fsDataOutputStream.close();
            } catch (IOException e) {
                log.error(e.getStackTrace());
                e.printStackTrace();
            }
        });

        AppTrackLog appTrackLog = new AppTrackLog();
        appTrackLog.setCreatedTime(Timestamp.valueOf(LocalDateTime.now()));
        appTrackLog.setPath("/app_track_log/" + logName);
        appTrackLog.setSubmitterId(UserUtil.getCurrentUserId());
        appTrackLogMapper.insert(appTrackLog);
        return appTrackLog;
    }

    @Override
    public String trackById(Long trackId) throws IOException {
        AppTrackLog appTrackLog = appTrackLogMapper.selectById(trackId);
        if (appTrackLog==null){
            throw new ApiException("不存在此id的");
        }
        //判断是不是超管
        boolean isSuper = UserUtil.isSuper();
        //只有超管可以看别人提交的
        if (appTrackLog.getSubmitterId()!=UserUtil.getCurrentUserId()&&!isSuper){
            throw new ApiException("你不是超管,不能看别人提交的任务");
        }
        FSDataInputStream is = myHDFS.open(new Path(hdfsConf.getDefaultFS() + appTrackLog.getPath()));
        byte[] bytes = is.readAllBytes();
//        byte[] buffer = new byte[1024];
//        String result = "";
//        while ((is.read(buffer))!=-1){
//            String content = new String(buffer,"UTF-8");
//            result +=content;
//        }

        String content = new String(bytes,"UTF-8");
        return content;
    }

    public void executeApp(String filename, OutputStream os, String argument) throws IOException {
        if (argument != null) {
            argument = argument.trim();
        }
        String command = "hadoop jar ./auto_jar/" + filename + " " + argument;
        //创建连接
        Connection conn = new Connection(environment.getProperty("DB_HOST"));
        //连接服务器
        conn.connect();
        //用户登陆
        boolean isconn = conn.authenticateWithPassword(linuxConf.getUser(), linuxConf.getPassword());
        if (!isconn) {
            os.write("连接linux密码错误\r\n".getBytes());
            os.write("\r\n".getBytes());
            os.flush();
            throw new ApiException("连接linux密码错误");
        }
        //创建回话
        Session session = conn.openSession();
        //创建命令
        session.execCommand(command);
        //
        writeTrackLog(session.getStdout(), os);
        writeTrackLog(session.getStderr(), os);
        session.close();
        conn.close();
    }

    public static void writeTrackLog(InputStream is, OutputStream os) {
        try {
            BufferedReader br = new BufferedReader(new InputStreamReader(is));
            String s;
            while ((s = br.readLine()) != null) {
                os.write(s.getBytes(StandardCharsets.UTF_8));
                os.write("\r\n".getBytes());
                os.flush();
            }
            br.close();
            is.close();
        } catch (IOException e) {
            log.error(e.getStackTrace());
            e.printStackTrace();
        }
    }


    public void linuxGetJarFromHDFS(String filename, OutputStream os) throws IOException {
        String command = "hadoop fs -get /upload/auto_jar/" + filename + " ./auto_jar/" + filename;
        //创建连接
        Connection conn = new Connection(environment.getProperty("DB_HOST"));
        //连接服务器
        conn.connect();
        //用户登陆
        boolean isconn = conn.authenticateWithPassword(linuxConf.getUser(), linuxConf.getPassword());
        if (!isconn) {
            os.write(new String("__________________linux 连接异常______________\n").getBytes());
            os.write("\r\n".getBytes());
            os.flush();
            throw new ApiException("linux 连接异常");
        }
        //创建回话
        Session session = conn.openSession();
        //创建命令
        session.execCommand(command);

        writeTrackLog(session.getStdout(), os);
        writeTrackLog(session.getStderr(), os);
        conn.close();
        session.close();
    }
}
