package com.by.zhdoctor.web.rest;

import app.cloud.client.axis2.Client;
import app.cloud.client.utils.HdfsOperator;
import app.cloud.common.Target;
import com.by.zhdoctor.config.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;

/**
 * Created by Lawrence on 2017/6/1.
 * 每天执行一次,将日志文件上传到HDFS
 */
@Async
@Component
public class LogUploadTasks {
    private final Logger log = LoggerFactory.getLogger(LogUploadTasks.class);

    private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

    private static  String fileurl;

    public static String getLogfileUrl(){
        Calendar date = Calendar.getInstance ();
        date.add(Calendar.DAY_OF_MONTH,-1);
        fileurl=Constants.LOCALLOGSFILEURL+"/zhserver-"+dateFormat.format(date.getTime())+"-debug.log";
        System.out.println(fileurl);
        return fileurl;
    }

    @Scheduled(fixedRate = 1000 * 60 * 60 * 24)
    public void uploadLogfile() throws IOException {

        Client client=new Client();
        client.userLogin("admin",Constants.PW);
        Target target=client.getUploadLink(Constants.UPLOGSFILEURL);
        System.out.println(target.getMessage());
        String url=target.getUrl();
        String path=getLogfileUrl();
        File file = new File(path);
        if(file.exists()){
            HdfsOperator hdfsOperator=new HdfsOperator();
            hdfsOperator.init(url);
            System.out.println(hdfsOperator.isInited());

            boolean upflag=hdfsOperator.upload(path,url);
            if(upflag){
                log.info("上传日志文件成功");
            }else{
                log.info("上传日志文件失败");
            }
        }else {
            log.info("日志文件未生成,下一周期上传");
        }


    }

}
