package com.captjack.bigdata.hadoop.task;

import com.captjack.bigdata.hadoop.hbase.trace.HBaseTraceDao;
import com.captjack.bigdata.hadoop.hdfs.HadoopDistributedFileSystemDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;

/**
 * @author Capt Jack
 * @date 2018/1/12
 */
@Component
public class TestTask {

    private static final Logger logger = LoggerFactory.getLogger(TestTask.class);

    /**
     * Timestamp pattern used in the task log line. {@link DateTimeFormatter} is
     * immutable and thread-safe, unlike the {@code SimpleDateFormat} instance
     * field it replaces — {@code SimpleDateFormat} must not be shared across
     * scheduler threads.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss:SSS");

    /**
     * HDFS access helper.
     */
    private final HadoopDistributedFileSystemDao hadoopDistributedFileSystemDao;

    /**
     * HBase trace access helper (currently only exercised by the commented-out
     * call in {@link #test()}).
     */
    private final HBaseTraceDao hBaseTraceDao;

    /**
     * HDFS test path to list.
     */
    private static final String HADOOP_TEST_URL = "hdfs://CDH63:8020/tmp/chengchao";

    /**
     * Scheduled smoke test: lists every entry under {@link #HADOOP_TEST_URL}
     * and logs the result. Runs every 10 seconds ({@code fixedRate} counts from
     * the start of the previous invocation).
     */
    @Scheduled(fixedRate = 10_000L)
    public void test() {
        try {
            List<String> entries = hadoopDistributedFileSystemDao.listAll(HADOOP_TEST_URL);
            // Route output through SLF4J instead of System.out so it honors the
            // application's logging configuration.
            entries.forEach(entry -> logger.info("{}", entry));
//            hBaseTraceDao.hello();
            // Parameterized logging avoids string concatenation when the level is disabled.
            logger.info("start task！time = {}", TIMESTAMP_FORMAT.format(LocalDateTime.now()));
        } catch (Exception e) {
            // Non-empty message so the failure is identifiable/searchable in logs.
            logger.error("scheduled HDFS listing task failed", e);
        }
    }

    /**
     * Constructor injection of the HDFS and HBase DAOs.
     *
     * @param hadoopDistributedFileSystemDao HDFS access helper
     * @param hBaseTraceDao                  HBase trace access helper
     */
    @Autowired
    public TestTask(HadoopDistributedFileSystemDao hadoopDistributedFileSystemDao, HBaseTraceDao hBaseTraceDao) {
        this.hadoopDistributedFileSystemDao = hadoopDistributedFileSystemDao;
        this.hBaseTraceDao = hBaseTraceDao;
    }

}
