package com.saic.data.main;

import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.hadoop.hbase.client.Result;

import com.saic.data.dotask.AddDataTask;
import com.saic.data.dotask.UpdateCheckTime;
import com.saic.data.dotask.UploadRecord;
import com.saic.data.job.WriteJob;
import com.saic.data.table.TableAction;
import com.saic.data.table.TableConf;
import com.saic.data.table.TableFile;
import com.saic.data.table.TableUtil;
import com.saic.data.util.FTPUtil;
import com.saic.data.util.FileUtil;
import com.saic.data.util.HDFSUtil;
import com.saic.data.util.HbaseUtil;
import com.saic.data.util.ZookeeperUtil;

import it.sauronsoftware.ftp4j.FTPFile;

/**
 * Logs into the FTP server, checks for file updates, downloads new files to
 * HDFS, and parses CSV files.
 *
 * @author Xinshiyou
 */
public class FTPCheck {

	
	public static final String host = "10.129.11.131";
	public static final String user = "ftproot";
	public static final String pwd = "Saic123!";
	public static int checklimit=9990;
	public static boolean deleteflag = false;

	public static void main(String[] args) throws Exception {

		/** Random to delay */
		int delay = new Random().nextInt(30);
		Thread.sleep(delay);

		/** create entity */
		FTPUtil ftpUtil = new FTPUtil();
		HDFSUtil hdfsUtil = new HDFSUtil();
		

		/*** Check FTP and write to HBASE, then copy file to HDSF if possible ***/
		ftpUtil.login(host, user, pwd);

		ZookeeperUtil zooUtil = new ZookeeperUtil();
		Map<String, String> map = zooUtil.getProperties(zooUtil.rootpath);
		zooUtil.close();
		
		for (String key :map.keySet()){
			//if (key.compareTo("OAZJ") > 0) continue;
			TableConf tConf = new TableConf(key, map.get(key));
			try {
				doAction(ftpUtil, hdfsUtil, tConf);
				ftpUtil.reconnect();
			} catch (Exception e) {
				// TODO: handle exception
				e.printStackTrace();
				ftpUtil.reconnect();
				continue;
			}	
		}
		
		//run read job
		try {
			HbaseCheck.main(args);
			String shour =TableUtil.formatDate(new Date()).substring(8,10);
			if (shour.compareTo("06")>0 && shour.compareTo("09")<=0 ) {
				CheckSum.main(args);
				TableUtil.pm("main", " mail sent ! ");
			}
		} catch (Exception e) {
			// TODO: handle exception
			TableUtil.pm("main", " Error when update data ! "+e.getMessage());
		}finally{
			System.exit(0);
		}
		
	}

	/**
	 * @DESC:列出文件，检查类型，更新业务
	 * @param ftpUtil
	 * @param writeJob
	 * @param hdfsUtil
	 * @param dir
	 * @throws Exception
	 */
	public static void doAction(FTPUtil ftpUtil,HDFSUtil hdfsUtil, TableConf tConf) throws Exception {
		TableUtil.pm("FTPCheck-doAction", "get FTPlist--> tablefile : ");
		List<FTPFile> list = ftpUtil.getNames(true, tConf.getFtpDir());// OA
		int i=0;
		WriteJob writeJob = new WriteJob();
		for (FTPFile f : list) {
			TableUtil.pm("FTPCheck-doAction", "get FTPfile : " +  FileUtil.getName(f.getName())+" ModifiedDate:"+TableUtil.formatDate(f.getModifiedDate()) );
			String type = FileUtil.getType(f.getName());
			TableFile tablefile = new TableFile(f.getName(),tConf,TableUtil.formatDate(f.getModifiedDate()));
			if ("txt".equalsIgnoreCase(type) || "csv".equalsIgnoreCase(type))
				csvTask(writeJob,f, ftpUtil, hdfsUtil, tablefile);
			i++;
			if(i>=checklimit) return;
		}
		writeJob.close();
	}
	
	/**
	 * @DESC:核心业务逻辑/流程
	 * @param writeJob
	 * @param f
	 * @param ftpUtil
	 * @param hdfsUtil
	 * @param dir
	 * @throws Exception
	 */
	public static void csvTask(WriteJob writeJob,FTPFile f, FTPUtil ftpUtil,
			HDFSUtil hdfsUtil, TableFile table) throws Exception {
		
		
		// 查询是否已经存在的文件
		TableAction taAction = TableUtil.getAction(table, f);
		table.settAction(taAction);
		TableUtil.pm("FTPCheck-csvTask","file:"+f.getName()+" --> "+taAction.toString());
		
		if (taAction== TableAction.None) {
			TableUtil.pm("FTPCheck-csvTask","continue: file is updating , check next! ");
			return;
		}
		
		if (taAction == TableAction.UploadRecord){
			TableUtil.pm("FTPCheck-csvTask","Copy: New file --> copy and update");
			Result res = HbaseUtil.QueryByConditionKey(TableUtil.configtab,table.getRowkey());
			writeJob.doWriteJob(f.getName(), new UploadRecord(ftpUtil, hdfsUtil, f, res, table));
		}
		
		if (taAction == TableAction.Wait){
			TableUtil.pm("FTPCheck-csvTask","Update: currFS!=preFS-->Update check time");
			Result res = HbaseUtil.QueryByConditionKey(TableUtil.configtab,table.getRowkey());
			writeJob.doWriteJob(f.getName(), new UpdateCheckTime(TableUtil.configtab, res, f));
		}
		
		if (taAction == TableAction.New){
			TableUtil.pm("FTPCheck-csvTask", "Write: Hbase first: " + f.getName());
			writeJob.doWriteJob(f.getName(),new AddDataTask(TableUtil.configtab,f));
		}
		
		if (taAction == TableAction.DeleteFile){
			if (deleteflag){
			TableUtil.pm("FTPCheck-csvTask", "Delete file: " + f.getName());
			ftpUtil.delete(f);}
		}
		
		
	}
}
