package com.central.kerberosManage.service.impl;

import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.central.common.constant.CommonConstant;
import com.central.common.feign.DataManageService;
import com.central.common.feign.FileService;
import com.central.common.model.DatasourceInfo;
import com.central.common.model.FileInfo;
import com.central.common.model.PageResult;
import com.central.common.model.Result;
import com.central.common.service.impl.SuperServiceImpl;
import com.central.common.utils.OSUtils;
import com.central.kerberosManage.common.CommonUtils;
import com.central.kerberosManage.mapper.HadoopClusterHostMapper;
import com.central.kerberosManage.mapper.KeytabFileMapper;
import com.central.kerberosManage.model.HadoopClusterHost;
import com.central.kerberosManage.model.KeytabFile;
import com.central.kerberosManage.service.IKAMService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.commons.CommonsMultipartFile;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

/**
 * Kerberos account management service: generates kerberos principals and
 * keytab files, provisions matching OS accounts on the Hadoop cluster hosts,
 * and manages the cluster-host inventory.
 *
 * @author Tindy
 * @date 2021/7/16
 */
@Slf4j
@Service
public class KAMServiceImpl extends SuperServiceImpl<KeytabFileMapper, KeytabFile> implements IKAMService {

    @Autowired
    private FileService fileService;
    @Autowired
    private DataManageService dataManageService;
    @Autowired
    private HadoopClusterHostMapper hadoopClusterHostMapper;

    /**
     * Creates (or rotates) the kerberos account for the given project/user.
     * <p>
     * Runs {@code gen_credentials.sh} to create the principal and write a fresh
     * keytab locally, uploads the keytab through {@link FileService} and stores
     * the reference in {@code keytab_file}. On first creation it additionally
     * provisions the same-named OS account on every cluster host, creates the
     * user role in the system hive database and the user's HDFS home directory.
     * If anything fails after the new file quote was taken, that quote is
     * released before rethrowing so no orphan file reference is left behind.
     *
     * @param proName  project name, combined with {@code username} to build the principal
     * @param username user name within the project
     * @throws IOException  if keytab generation, upload or SSH provisioning fails
     * @throws SQLException if hive role creation fails
     */
    @Transactional
    @Override
    public void createOrUpdateKA(String proName, String username) throws IOException, SQLException {
        Long quoteId = null;
        try {
            String kerberosAccount = CommonConstant.KA_PREFIX + proName + "_" + username;
            String userGroup = CommonConstant.KA_PREFIX + proName;
            log.info("create kerberos account : {},usergroup:{}", kerberosAccount, userGroup);
            // gen_credentials.sh creates the principal and writes the keytab file locally.
            String r = OSUtils.exeCmd("sh ./kerberos-manage/conf/gen_credentials.sh ./kerberos-manage/keytabs/" + kerberosAccount + ".keytab " + kerberosAccount + "/" + userGroup);
            log.info("create kerberos account result : {}", r);
            MultipartFile multipartFile = fileToMultipartFile(kerberosAccount + ".keytab", "./kerberos-manage/keytabs/");
            FileInfo fileInfo = fileService.upload(multipartFile);
            quoteId = fileService.quoteFile(fileInfo.getUrl(), "创建用户:" + kerberosAccount + " 的kerberos文件");
            KeytabFile keytabFile = baseMapper.selectOne(new QueryWrapper<KeytabFile>().eq("pro_name", proName).eq("username", username));
            KeytabFile newKeytabFile = new KeytabFile(proName, username, fileInfo.getUrl(), quoteId);
            Long oldQuoteId = 0L;
            if (keytabFile != null) {
                // Rotation: remember the previous file quote so it can be released
                // once the record points at the new keytab.
                oldQuoteId = keytabFile.getQuoteId();
                newKeytabFile.setId(keytabFile.getId());
            } else {
                // First creation: provision an OS account (same name as the keytab)
                // on every CDH cluster host ...
                log.info("create sys account : {}", kerberosAccount);
                createSysAccount(kerberosAccount, userGroup);
                // ... create the user role in the system hive database and grant
                // the user's database to that role ...
                dataManageService.createUserRoleOfHive(proName, username);
                // ... and create the user's home directory in the system HDFS.
                dataManageService.createUserHdfsHomeDir(proName, username);
            }
            saveOrUpdate(newKeytabFile);
            // Release the replaced keytab's quote; skip the 0 placeholder used on
            // first creation, where there is nothing to release.
            if (oldQuoteId != null && oldQuoteId != 0L) {
                fileService.delQuote(oldQuoteId);
            }
        } catch (Exception e) {
            // Undo the quote taken for the new file so it is not orphaned, then rethrow.
            if (quoteId != null) {
                fileService.delQuote(quoteId);
            }
            throw e;
        }

    }

    /**
     * Creates the OS user group and a no-login user on every registered Hadoop
     * cluster host via SSH.
     *
     * @param kerberosAccount OS account name to create (same as the keytab name)
     * @param userGroup       OS group the account is added to
     * @throws IOException if the SSH connection or authentication fails on any host
     */
    private void createSysAccount(String kerberosAccount, String userGroup) throws IOException {
        List<HadoopClusterHost> hadoopClusterHosts = hadoopClusterHostMapper.selectAll();
        for (HadoopClusterHost hadoopClusterHost : hadoopClusterHosts) {
            Connection conn = new Connection(hadoopClusterHost.getHost(), hadoopClusterHost.getPort());
            try {
                conn.connect();
                // Authenticate with the stored (encoded) credentials; the original
                // code ignored the result and carried on unauthenticated.
                String username = hadoopClusterHost.getUsername();
                String password = CommonUtils.decodePassword(hadoopClusterHost.getPassword());
                if (!conn.authenticateWithPassword(username, password)) {
                    throw new IOException("SSH authentication failed for host " + hadoopClusterHost.getHost());
                }
                Session session = conn.openSession();
                try {
                    // Create the user group and the no-login user.
                    session.execCommand("sudo groupadd " + userGroup + "; sudo useradd -M " + kerberosAccount + " -s /sbin/nologin -G " + userGroup);
                    // Drain stdout so we block until the remote command finishes;
                    // closing the connection immediately could kill the command.
                    try (BufferedReader stdout = new BufferedReader(
                            new InputStreamReader(new StreamGobbler(session.getStdout()), StandardCharsets.UTF_8))) {
                        String line;
                        while ((line = stdout.readLine()) != null) {
                            log.info(line);
                        }
                    }
                } finally {
                    session.close();
                }
            } finally {
                // Always release the SSH connection, even when connect/auth fails.
                conn.close();
            }
        }
    }

    /**
     * Registers new cluster hosts, storing their passwords in encoded form.
     *
     * @param hostList hosts to add; each password is encoded before insert
     * @return success result
     */
    @Override
    @Transactional
    public Result addHadoopClusterHost(List<HadoopClusterHost> hostList) {
        for (HadoopClusterHost host : hostList) {
            host.setPassword(CommonUtils.encodePassword(host.getPassword()));
            hadoopClusterHostMapper.insert(host);
        }
        return Result.succeed("操作成功");
    }

    /**
     * Removes cluster hosts by id.
     *
     * @param hostIds ids to delete; an empty or null list is a no-op
     * @return success result
     */
    @Override
    @Transactional
    public Result delHadoopClusterHost(List<Long> hostIds) {
        // deleteBatchIds rejects an empty id collection, so guard first.
        if (hostIds != null && !hostIds.isEmpty()) {
            hadoopClusterHostMapper.deleteBatchIds(hostIds);
        }
        return Result.succeed("操作成功");
    }

    /**
     * Returns a page of registered cluster hosts.
     *
     * @param params query parameters; must contain "page" and "limit"
     * @return paged host list with total count
     */
    @Override
    public PageResult<HadoopClusterHost> findHadoopClusterHosts(Map<String, Object> params) {
        Page<HadoopClusterHost> page = new Page<>(MapUtils.getInteger(params, "page"), MapUtils.getInteger(params, "limit"));
        List<HadoopClusterHost> list = hadoopClusterHostMapper.findList(page, params);
        long total = page.getTotal();
        return PageResult.<HadoopClusterHost>builder().data(list).code(0).count(total).build();
    }

    /**
     * Re-provisions the OS account for every known keytab user on all cluster
     * hosts, e.g. after a new host has been registered.
     *
     * @return success result
     * @throws IOException if SSH provisioning fails on any host
     */
    @Override
    public Result flushClusterSysAccount() throws IOException {
        List<KeytabFile> users = baseMapper.findAll();
        for (KeytabFile user : users) {
            String kerberosAccount = CommonConstant.KA_PREFIX + user.getProName() + "_" + user.getUsername();
            String userGroup = CommonConstant.KA_PREFIX + user.getProName();
            log.info("flush cluster sys account : {}", kerberosAccount);
            createSysAccount(kerberosAccount, userGroup);
        }
        return Result.succeed("操作成功");
    }

    /**
     * Wraps a locally generated keytab file as a {@link MultipartFile} so it can
     * be pushed through the upload API.
     *
     * @param fileName file name of the keytab
     * @param savePath directory the keytab was written to
     * @return the file content wrapped as a multipart file; content is empty if
     *         reading the file failed (the error is logged)
     */
    private MultipartFile fileToMultipartFile(String fileName, String savePath) {
        FileItemFactory factory = new DiskFileItemFactory(16, null);
        String textFieldName = "file";
        FileItem item = factory.createItem(textFieldName, "multipart/form-data; boundary=16f0f6d2c45", true, fileName);
        byte[] buffer = new byte[8192];
        int bytesRead;
        // try-with-resources guarantees both streams are closed even on failure;
        // the original leaked them when read/write threw.
        try (InputStream fis = new FileInputStream(savePath + fileName);
             OutputStream os = item.getOutputStream()) {
            while ((bytesRead = fis.read(buffer, 0, 8192)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
        } catch (IOException e) {
            log.error("failed to wrap keytab file {}{} as multipart file", savePath, fileName, e);
        }
        return new CommonsMultipartFile(item);
    }
}
