package net.unix8.tool;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.Vector;

import org.apache.commons.net.ftp.FTPFile;

import com.demo.common.model.WaveData;
import com.demo.common.model.WaveStation;
import com.jfinal.kit.PathKit;

import net.unix8.bean.WaveDataIndex;
import net.unix8.bean.WaveDataNoSql;
import net.unix8.bean.WaveStationNoSql;
import net.unix8.constant.ConstantsValue;
import net.unix8.ftp.easyFTP;
import net.unix8.server.ServerHandler;
import net.unix8.share.NetworkShare;

public class FileProcess {
	/**
	 * Cleans up leftover per-host working files at startup.
	 *
	 * Empties the "/sdata/" and "/tmp/" directories under the web root and
	 * ensures the "/pdata/" directory exists. The original code returned
	 * early when a directory listing came back null, which silently skipped
	 * the remaining cleanup steps; each directory is now handled
	 * independently so one failure cannot cancel the others.
	 */
	public static void checkLeaveHostFile(){
		//SQLExec.clear();
		clearDirContents(PathKit.getWebRootPath() + "/sdata/");
		// /mdata/ cleanup was present but commented out in the original;
		// intentionally left disabled.
		clearDirContents(PathKit.getWebRootPath() + "/tmp/");
		// Make sure the pdata directory exists.
		File pdata = new File(PathKit.getWebRootPath() + "/pdata/");
		if (!pdata.exists()) {
			pdata.mkdir();
		}
	}

	/**
	 * Deletes every file and sub-directory directly inside {@code path}.
	 * No-op when the directory cannot be listed (missing dir or I/O error).
	 */
	private static void clearDirContents(String path) {
		File dir = new File(path);
		File[] entries = dir.listFiles();
		if (entries == null) return;
		for (File entry : entries) {
			if (entry.isDirectory()) {
				Tools.deleteDir(entry);
			} else {
				entry.delete();
			}
		}
	}
	
	/**
	 * Sorts the given files in place, in ascending order of file name
	 * (despite the method name, the produced order has always been ascending
	 * — file names are timestamps, so callers get oldest-first processing).
	 *
	 * Fixes two defects of the original:
	 * <ul>
	 *   <li>the comparator never returned 0 for equal names, violating the
	 *       Comparator contract — TimSort may throw
	 *       "Comparison method violates its general contract!";</li>
	 *   <li>callers pass raw {@code File.listFiles()} results, which can be
	 *       null; a null or sub-2-length array is now ignored instead of
	 *       throwing NullPointerException.</li>
	 * </ul>
	 *
	 * @param files array to sort in place; null or length&lt;2 arrays are left untouched
	 */
	public static void sortFilesWtihDescending(File[] files) {
		if (files == null || files.length < 2) return;
		Arrays.sort(files, new Comparator<File>() {
			@Override
			public int compare(File a, File b) {
				return a.getName().compareTo(b.getName());
			}
		});
	}
	

	/**
	 * Time-aligns the raw data files downloaded for one station and merges
	 * them into the station's current aligned file, using the in-memory
	 * caches in NetworkShare (stations / datas) to avoid database lookups
	 * where possible, while keeping the wave_station / wave_data tables in
	 * sync.
	 *
	 * Raw files live under /sdata/&lt;host&gt;/ and aligned output under
	 * /mdata/&lt;host&gt;/. Files are processed oldest-first; a new aligned file
	 * is started whenever there is no usable previous file or the time gap
	 * between files exceeds ConstantsValue.INVALID_TIME_INTERVAL; otherwise
	 * the raw file is merged into the current aligned file via mergeFile.
	 *
	 * @param hostString station IP (trimmed here); names the per-host directories and DB rows
	 * @return false when preconditions fail (raw directory missing, too few files); true otherwise
	 * @throws IOException propagated from file handling
	 */
	public static boolean timeModifyNoSQL(String hostString) throws IOException{
		//System.out.println("\n---------------------开始对时"+hostString+"--------------------------   ");	
		hostString = hostString.trim();
		// Aligned-output directory for this host (/mdata/<host>/), created on demand.
		String mPathString = PathKit.getWebRootPath() + "/mdata/" + hostString+"/";
		File fileDData =new File(mPathString);    
		if  (!fileDData .exists()){fileDData .mkdir();} 
		// Raw-input directory (/sdata/<host>/); nothing to process if it did not exist yet.
		String pathString = PathKit.getWebRootPath() + "/sdata/" + hostString+"/";
		File fileSdata = new File(pathString);    
		if  (!fileSdata .exists()){fileSdata .mkdir(); return false;} 
		// NOTE(review): listFiles() can return null on I/O error, which would NPE below — confirm acceptable.
		File[] tempList = fileSdata.listFiles();
		sortFilesWtihDescending(tempList);
		int len = tempList.length;
		// Too few raw files: wait for more before aligning.
		if(len <= ConstantsValue.LOCALE_MIN_FILES)return false;

		String latestFileName = "";
		boolean is_have_station = true, is_have_fist_data = true;
		int s_ID = 0;
		long sStart = -1, sEnd = -1, usStart = -1, usEnd = -1;

		// Look up the station in the in-memory cache by IP.
		WaveStationNoSql waveStationNoSql = null;
		WaveDataNoSql waveDataNoSql  = null;
		WaveStation waveStation = null;
		Vector<WaveStationNoSql> stations = NetworkShare.stations;
		for (WaveStationNoSql x : stations){
			if( hostString.equals(x.ip)){
				waveStationNoSql = x;
				break;
			}	
		}
		
		// Not found in the memory cache: fall back to the database.
		if((waveStationNoSql == null)){
			waveStation = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);
			if(waveStation == null){
				is_have_station = false;	
				is_have_fist_data = false;
			}else{
				waveStationNoSql = new  WaveStationNoSql();
				waveStationNoSql.id = waveStation.getId();
				waveStationNoSql.ip = waveStation.getSIp();
				waveStationNoSql.file = waveStation.getSLatestFile();
			}
		}
		
		// Station known: resolve its latest aligned file and that file's time range.
		if(is_have_station){
			latestFileName = waveStationNoSql.file; // current latest aligned data file
			if((latestFileName == null) || (latestFileName.trim().length() == 0)){
				is_have_station = false;
			}else{
				s_ID = waveStationNoSql.id;
				
				for (WaveDataNoSql x : NetworkShare.datas){
					if(s_ID == x.sid && latestFileName.equals(x.file)){
						waveDataNoSql = x;
						break;
					}	
				}
				if(waveDataNoSql != null){
					sStart = waveDataNoSql.stimeS;
					sEnd = waveDataNoSql.etimeS;
					usStart = waveDataNoSql.stimeUS;
					usEnd = waveDataNoSql.etimeUS;
					//System.out.println("找到内存中的文件: " +waveDataNoSql);
				}else{
					is_have_station = false;
					is_have_fist_data = false;	
				}
			}
		}	
		boolean realWork = false;
		WaveDataIndex waveDataIndex = null, waveDataIndexExcep = null;;
		// Process raw files oldest-first; the newest file (index len-1) is left for the next round.
		for (int i = 0; i < len - 1; i++) {
			// Weed out abnormal files before attempting alignment.
			File fileData = new File(tempList[i].getPath()); 
			if(fileData.length() < ConstantsValue.FILE_SIZE_ERROR_MIN){
				System.out.println("发现错误文件【1】"+tempList[i].getPath());	
				Tools.deleteFile(tempList[i].getPath());
				continue; 
			}	
			waveDataIndexExcep = readHead(tempList[i].getPath());		
			if(waveDataIndexExcep == null){
				System.out.println("发现错误文件【2】"+tempList[i].getPath());	
				Tools.deleteFile(tempList[i].getPath());	
				continue;
			}
			// Gap between this file's start and the aligned file's end, in seconds.
			long diffTwoFile = waveDataIndexExcep.stimeS - sEnd; 
			if(diffTwoFile < 0){
				// File starts before the aligned file ends: timestamps inconsistent, drop it.
				System.out.println("发现时间不符错误文件【3】"+tempList[i].getPath());	
				if(!Tools.deleteFile(tempList[i].getPath())){};	
				continue;
			}

			if(is_have_fist_data){
				fileData = new File(mPathString + latestFileName); 
				if(fileData.length() > ConstantsValue.FILE_SIZE_MAX ){// aligned file over capacity: force a new aligned file
					System.out.println("文件记录超限");	
					is_have_fist_data = false; 
				}
			}
						
			if((is_have_station == false) 
					|| (is_have_fist_data == false)
					||(diffTwoFile > ConstantsValue.INVALID_TIME_INTERVAL)
					){
				/* Persist the final index of the previous aligned file before starting a new one. */
				if((waveDataIndex != null) && realWork){
					WaveData waveData  = WaveData.dao.findFirst("select * from wave_data where s_ID = ? and d_data_url = ?",s_ID , latestFileName);			
					if(waveData != null){
						waveData.set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
						.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
						.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).update();
					}else{
						// NOTE(review): this insert does not set s_ID, unlike the
						// insert further below — confirm whether that is intended.
						new WaveData().set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
						.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
						.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).save();
					}
				}
								
				// This raw file becomes the start of a new aligned file.
				latestFileName = tempList[i].getName();
				Tools.copyFile(tempList[i].getPath(), mPathString + latestFileName);
				System.out.println("新对时文件产生{noSQL}：["+diffTwoFile+"]   "+mPathString + latestFileName);	
				if(!Tools.deleteFile(tempList[i].getPath())){System.out.println("产生新的对时文件后旧文件删除失败"+tempList[i].getPath());}
				// Sync the station row in the database (insert when missing).
				WaveStation waveStationSql = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);
				if(waveStationSql == null){
					new WaveStation().set("s_name", "NNEW ADD1").set("s_ip", hostString).set("s_is_online", 1).set("s_latest_file", tempList[i].getName()).save();
					waveStationSql = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);
				}else{
					waveStationSql.set("s_latest_file", tempList[i].getName()).update();
				}
				s_ID = waveStationSql.getId();

				// Refresh or create the station entry in the memory cache.
				waveStationNoSql = null;
				for (WaveStationNoSql x : stations){
					if( hostString.equals(x.ip)){
						waveStationNoSql = x;
						break;
					}	
				}	
				if(waveStationNoSql == null ){				
					waveStationNoSql = new WaveStationNoSql();
					waveStationNoSql.ip = hostString;
					waveStationNoSql.id = s_ID; 
					waveStationNoSql.file =  tempList[i].getName();
					stations.add(waveStationNoSql);
					//System.out.println("缓存基站信息:"+stations.size());
				}else{
					waveStationNoSql.file =  tempList[i].getName();
				}

				s_ID = waveStationNoSql.id;
								
				waveDataIndex = null;					
				waveDataIndex = readHead(mPathString + latestFileName);
				if(waveDataIndex == null) {
					System.out.println("发现错误文件【4】"+tempList[i].getPath());
					is_have_fist_data = false; 
					continue;
				}
				
				/* Save the new aligned file's index row (update when it already exists). */
				WaveData waveData = WaveData.dao.findFirst("select * from wave_data where s_ID = ? and d_data_url = ?", s_ID,latestFileName);
				if(waveData == null){
					new WaveData().set("s_ID", s_ID).set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
					.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
					.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).save();
				}else{
					waveData.set("s_ID", s_ID).set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
					.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
					.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).update();
				}
				// Mirror the index row into the memory cache.
				waveDataNoSql = null;
				for (WaveDataNoSql x : NetworkShare.datas){
					if((x.sid == s_ID) && (latestFileName.equals(x.file))){
						waveDataNoSql  = x;
						break;
					}
				}	
				if(waveDataNoSql == null){
					waveDataNoSql = new WaveDataNoSql();
					waveDataNoSql.sid = s_ID;waveDataNoSql.stimeS = waveDataIndex.stimeS;waveDataNoSql.stimeUS = waveDataIndex.stimeUS;
					waveDataNoSql.etimeS = waveDataIndex.etimeS; waveDataNoSql.etimeUS = waveDataIndex.etimeUS;waveDataNoSql.file = latestFileName;
					waveDataNoSql.rate = waveDataIndex.rate;waveDataNoSql.points = waveDataIndex.points;
					NetworkShare.datas.addElement(waveDataNoSql);
				}
				else{
					waveDataNoSql.sid = s_ID;waveDataNoSql.stimeS = waveDataIndex.stimeS;waveDataNoSql.stimeUS = waveDataIndex.stimeUS;
					waveDataNoSql.etimeS = waveDataIndex.etimeS; waveDataNoSql.etimeUS = waveDataIndex.etimeUS;waveDataNoSql.file = latestFileName;
					waveDataNoSql.rate = waveDataIndex.rate;waveDataNoSql.points = waveDataIndex.points;
				}

				sStart = waveDataIndex.stimeS;
				sEnd = waveDataIndex.etimeS;// end time of the aligned file
				usStart = waveDataIndex.stimeUS;
				usEnd = waveDataIndex.etimeUS;
				is_have_station = true;
				is_have_fist_data = true;
				// Reset to null: if the merge branch never runs afterwards,
				// waveDataIndex stays null after the loop and the final DB
				// flush below is skipped.
				waveDataIndex = null;
			}else{
				// Contiguous enough: merge this raw file into the current aligned file.
				waveDataIndex = mergeFile(mPathString + latestFileName, tempList[i].getPath());
				if(waveDataIndex==null) {
					System.out.println("发现错误文件【3】"+tempList[i].getPath());
					is_have_fist_data = false; continue;
				}
				sEnd = waveDataIndex.etimeS;// end time of the aligned file
				usEnd = waveDataIndex.etimeUS;
				sStart = waveDataIndex.stimeS;
				usStart = waveDataIndex.stimeUS;
				if(!Tools.deleteFile(tempList[i].getPath())){}
				// Update (or create) the cached index entry for the merged file.
				waveDataNoSql = null;
				for (WaveDataNoSql x : NetworkShare.datas){
					if((x.sid == s_ID) && (latestFileName.equals(x.file))){
						waveDataNoSql  = x;
						break;
					}
				}
				
				if(null == waveDataNoSql){
					waveDataNoSql = new WaveDataNoSql();
					waveDataNoSql.sid = s_ID;
					waveDataNoSql.stimeS = waveDataIndex.stimeS;
					waveDataNoSql.stimeUS = waveDataIndex.stimeUS;
					waveDataNoSql.etimeS = waveDataIndex.etimeS;
					waveDataNoSql.etimeUS = waveDataIndex.etimeUS;
					waveDataNoSql.file = latestFileName;
					waveDataNoSql.rate = waveDataIndex.rate;
					waveDataNoSql.points = waveDataIndex.points;
					NetworkShare.datas.addElement(waveDataNoSql);

				}else{
					waveDataNoSql.sid = s_ID;
					waveDataNoSql.stimeS = waveDataIndex.stimeS;
					waveDataNoSql.stimeUS = waveDataIndex.stimeUS;
					waveDataNoSql.etimeS = waveDataIndex.etimeS; 
					waveDataNoSql.etimeUS = waveDataIndex.etimeUS;
					waveDataNoSql.file = latestFileName;
					waveDataNoSql.rate = waveDataIndex.rate;
					waveDataNoSql.points = waveDataIndex.points;
				}
				realWork = true;
			}
		}
		// Flush the last merged file's index to the database once, after the loop.
		if((waveDataIndex != null) && realWork){
			WaveData waveData  = WaveData.dao.findFirst("select * from wave_data where s_ID = ? and d_data_url = ?",s_ID , latestFileName);			
			if(waveData != null){
				waveData.set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
				.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
				.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).update();
			}else{
				// NOTE(review): this insert does not set s_ID — confirm intended.
				new WaveData().set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
				.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
				.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).save();
			}
		}
		return true;
	}

	/**
	 * Sorts the FTP directory entries in place, in ascending order of file
	 * name (names are timestamps, so the download loop gets oldest first).
	 *
	 * Fixes the original comparator, which never returned 0 for equal names
	 * and therefore violated the Comparator contract (TimSort may throw
	 * "Comparison method violates its general contract!"). Also guards
	 * against a null or sub-2-length array.
	 *
	 * @param files entries to sort in place; null or length&lt;2 arrays are left untouched
	 */
	public static void sortFilesWtihDescending2(FTPFile[] files) {
		if (files == null || files.length < 2) return;
		Arrays.sort(files, new Comparator<FTPFile>() {
			@Override
			public int compare(FTPFile a, FTPFile b) {
				return a.getName().compareTo(b.getName());
			}
		});
	}
	
	/**
	 * One polling round for a single station: (re)uses or creates a cached
	 * FTP connection, downloads up to {@code nums} pending data files from
	 * /var/ftp/pub/ into the local data/ directory, deletes them from the
	 * server, copies them into sdata/ for processing, fetches the GPS
	 * location file, and finally triggers time alignment via
	 * {@link #timeModifyNoSQL(String)}.
	 *
	 * BUGFIX: the catch block dereferenced {@code ftp} unconditionally; when
	 * {@code ServerHandler.getFtp} returned null and building the replacement
	 * connection threw before assignment, that caused a secondary
	 * NullPointerException. The cleanup is now guarded.
	 *
	 * @param hostString station IP, used both as FTP host and directory name
	 * @param nums       maximum number of data files to download this round
	 * @return false only when the FTP working directory cannot be selected;
	 *         true otherwise (including after a handled exception)
	 */
	public static boolean runPersonelLogic(String hostString, int nums){
		easyFTP ftp = ServerHandler.getFtp(hostString); // cached connection, may be null
		try {
			if ((ftp != null) && (ftp.getFtpClient().isAvailable() && ftp.getFtpClient().isConnected())) {
				// Cached connection is usable; keep it.
			} else {
				System.out.println("该FTP链接不可以使用,新创建ftp链接："+hostString);
				if (ftp != null) ftp.disconnect();
				ftp = new easyFTP();
				// Passive-mode FTP setup.
				ftp.getFtpClient().setRemoteVerificationEnabled(false);
				ftp.getFtpClient().enterLocalPassiveMode();
				ftp.getFtpClient().setControlEncoding("UTF-8");
				ftp.connect(hostString, "ftpuser", "123456");
				ServerHandler.addFtp(hostString, ftp);
			}
			if (!ftp.setWorkingDirectory("/var/ftp/pub/")) {
				// Unusable connection: tear it down and drop it from the cache.
				try {
					ftp.getFtpClient().disconnect();
					ftp.disconnect();
				} catch (IOException e1) {
					e1.printStackTrace();
				}
				ServerHandler.removeFtp(hostString);
				return false;
			}

			// Make sure the three per-host working directories exist.
			String pathString = PathKit.getWebRootPath() + "/data/" + hostString + "/";
			File file = new File(pathString);
			if (!file.exists() && !file.isDirectory()) { file.mkdir(); }

			String sPathString = PathKit.getWebRootPath() + "/sdata/" + hostString + "/";
			File sFile = new File(sPathString);
			if (!sFile.exists() && !sFile.isDirectory()) { sFile.mkdir(); }

			String pPathString = PathKit.getWebRootPath() + "/pdata/" + hostString + "/";
			File pFile = new File(pPathString);
			if (!pFile.exists() && !pFile.isDirectory()) { pFile.mkdir(); }

			// Download into data/ first, delete from the server, then copy
			// into sdata/ for alignment.
			FTPFile[] names = ftp.listFiles();
			sortFilesWtihDescending2(names);

			String name;
			int len = names.length;
			int index = 0;
			// Purge files older than the service start time from the server.
			for (index = 0; index < len; ++index) {
				name = names[index].getName();
				if (name.startsWith("gp")) continue;
				if (NetworkShare.startTime.compareTo(name) > 0) {
					ftp.deleteFile(name);
				} else {
					break;
				}
			}

			// Number of files to download this round, capped at nums.
			len = (len - index > nums) ? nums : (len - index);
			if (len <= ConstantsValue.REMOTE_MIN_FILES) {
				System.out.println("文件太少,跳过不处理："+len);
				return true;
			}
			len = len + index;
			len = len - 1; // leave the newest file on the server (it may still be growing)
			ArrayList<String> als = new ArrayList<String>();
			for (; index < len; ++index) {
				name = names[index].getName();
				if (name.startsWith("gp")) continue;
				ftp.downloadFile(name, pathString + name);
				als.add(name);
			}
			for (String x : als) {
				if (!ftp.deleteFile(x)) {
					System.out.println("删除失败ftp【2】："+x);
				}
				Tools.copyFile(pathString + x, sPathString + x);
			}

			// Fetch the GPS location file when the Source directory is reachable.
			if (ftp.setWorkingDirectory("/var/ftp/Source/")) {
				ftp.downloadFile("gps_location", pPathString + "gps_location");
			}
			Tools.sleep(1000);

			timeModifyNoSQL(hostString); // 2017-03-06: start time alignment
		} catch (Exception e) {
			System.out.println("runPersonelLogic  ftp产生异常："+hostString);
			ServerHandler.hostWorkingMap.remove(hostString);
			if (ftp != null) { // guard: ftp may never have been assigned
				try {
					ftp.getFtpClient().disconnect();
					ftp.disconnect();
				} catch (IOException e1) {
					e1.printStackTrace();
				}
			}
			ServerHandler.removeFtp(hostString);
		}
		return true;
	}

	/**
	 * Database-only variant of {@link #timeModifyNoSQL(String)}: aligns the
	 * raw files in /sdata/&lt;host&gt;/ into the station's latest aligned file
	 * under /mdata/&lt;host&gt;/, reading and writing state exclusively through
	 * the wave_station / wave_data tables (no in-memory cache).
	 *
	 * @param hostString station IP (trimmed here)
	 * @return false when the raw directory was missing or holds too few files; true otherwise
	 * @throws IOException propagated from file handling
	 */
	public static boolean timeModify1(String hostString) throws IOException{
		hostString = hostString.trim();
		/* Maintain the aligned-data directory. */
		String mPathString = PathKit.getWebRootPath() + "/mdata/" + hostString+"/";
		File fileDData =new File(mPathString);    
		if  (!fileDData .exists()/*  && !fileDData .isDirectory()*/){fileDData .mkdir();} 
		/* Maintain the raw-data directory. */
		String pathString = PathKit.getWebRootPath() + "/sdata/" + hostString+"/";
		File fileSdata = new File(pathString);    
		if  (!fileSdata .exists()/*  && !fileSdata .isDirectory()*/){fileSdata .mkdir(); return false;} 
		// Process the files found in the raw-data directory.
		File[] tempList = fileSdata.listFiles();
		if(tempList.length <= ConstantsValue.LOCALE_MIN_FILES)return false;

		String latestFileName = "";
		boolean is_have_station = false, is_have_fist_data = false;
		int s_ID = 0;
		long sStart = -1, sEnd = -1, usStart = -1, usEnd = -1;
		// Look up the station's latest aligned data file by IP.
		WaveStation waveStation = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);

		if((waveStation == null)){
			is_have_station = false;
			// No station row in the database yet.
		}else{
			latestFileName = waveStation.getStr("s_latest_file"); // current latest aligned data file
			if((latestFileName == null) || (latestFileName.trim().length() == 0)){
				is_have_station = false;
			}else{
				is_have_station = true;
				s_ID = waveStation.getId();
				WaveData waveData  = WaveData.dao.findFirst("select * from wave_data where s_ID = ? and d_data_url = ?",s_ID , latestFileName);			
				if(waveData == null){
					is_have_fist_data = false;
				}else{
					is_have_fist_data = true;
					sStart = waveData.getLong("d_start_s");
					sEnd = waveData.get("d_end_s", -1);// end time of the aligned file
					usStart = waveData.get("d_start_us", -1);
					usEnd = waveData.get("d_end_us", -1);
				} 
			}
		}

		for (int i = 0; i < tempList.length; i++) {

			if(is_have_fist_data){
				File fileData = new File(mPathString + latestFileName); 
				if(fileData.length() > ConstantsValue.FILE_SIZE_MAX ){// aligned file over capacity: start a new aligned file
					System.out.println("文件记录超限");	
					is_have_fist_data = false; 
					// 2017-03-20: when the size limit is exceeded, the current
					// raw file becomes the new aligned file.
					//mFileHead(sEnd,usEnd,tempList[i].getPath());
				}
			}
			// Discard files that are too small to be valid.
			File fileData = new File(tempList[i].getPath()); 
			if(fileData.length() < ConstantsValue.FILE_SIZE_ERROR_MIN){
				System.out.println("发现错误文件"+tempList[i].getPath());	
				fileData.delete();
				continue; 
			}
			WaveDataIndex waveDataIndex = readHead(tempList[i].getPath());		
			if(waveDataIndex == null){
				Tools.deleteFile(tempList[i].getPath());
				continue;
			}
			// Gap between this file's start and the aligned file's end, in seconds.
			long diffTwoFile = waveDataIndex.stimeS - sEnd; 
			if(diffTwoFile < 0){
				// Starts before the aligned file ends: inconsistent, drop it.
				if(!Tools.deleteFile(tempList[i].getPath())){throw new IllegalArgumentException("文件删除失败12");}
				continue;
			}
			// NOTE(review): because of the (stimeS != sEnd) test, the merge branch
			// below only runs when the raw file starts exactly at the aligned
			// file's end second — stricter than timeModifyNoSQL; confirm intended.
			if((is_have_station == false) 
					|| (is_have_fist_data == false)
					|| (waveDataIndex.stimeS != sEnd)
					||(waveDataIndex.stimeS - sEnd > ConstantsValue.INVALID_TIME_INTERVAL)
					){
				// Start a new aligned file from this raw file.
				latestFileName = tempList[i].getName();
				Tools.copyFile(tempList[i].getPath(), mPathString + latestFileName);
				if(!Tools.deleteFile(tempList[i].getPath())){throw new IllegalArgumentException("文件删除失败");}

				waveStation = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);
				if(waveStation == null ){ // insert the station row when missing (2017-07-13)
					new WaveStation().set("s_name", "NNEW ADD1").set("s_ip", hostString).set("s_is_online", 1).set("s_latest_file", tempList[i].getName()).save();
					waveStation = WaveStation.dao.findFirst("select * from wave_station where s_ip = ?", hostString);
				}
				s_ID = waveStation.getId();
				// NOTE(review): readHead may return null here, which would NPE below — confirm.
				waveDataIndex = readHead(mPathString + latestFileName);
				waveStation.set("s_ID", s_ID).set("s_latest_file", latestFileName).update();
				if(new WaveData().set("s_ID", s_ID).set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
						.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
						.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).save() == false){
					throw new IllegalArgumentException("写入数据库对时文件失败");
				}
				sStart = waveDataIndex.stimeS;
				sEnd = waveDataIndex.etimeS;// end time of the aligned file
				usStart = waveDataIndex.stimeUS;
				usEnd = waveDataIndex.etimeUS;
				is_have_station = true;
				is_have_fist_data = true;

			}else{
				// Contiguous: merge into the current aligned file and update its DB row.
				// NOTE(review): mergeFile may return null, which would NPE below — confirm.
				waveDataIndex = mergeFile(mPathString + latestFileName, tempList[i].getPath());
				if(!Tools.deleteFile(tempList[i].getPath())){throw new IllegalArgumentException("文件删除失败");}
				sEnd = waveDataIndex.etimeS;// end time of the aligned file
				usEnd = waveDataIndex.etimeUS;
				WaveData waveData  = WaveData.dao.findFirst("select * from wave_data where s_ID = ? and d_data_url = ?",s_ID , latestFileName);			
				if(waveData != null){
					waveData.set("d_start_s", waveDataIndex.stimeS).set("d_start_us", waveDataIndex.stimeUS)
					.set("d_end_s", waveDataIndex.etimeS).set("d_end_us", waveDataIndex.etimeUS).set("d_data_url", latestFileName)
					.set("d_sample_rate", waveDataIndex.rate).set("d_data_nums", waveDataIndex.points).update();
				}
			}
		}
		return true;
	}
	/**
	 * Parses the fixed-size header of a data file and builds a WaveDataIndex.
	 *
	 * Layout (established by the offsets read here): the first 40 bytes hold
	 * colon-separated fields whose 2nd and 3rd entries are the point count
	 * and sample rate; bytes 80..127 hold the start and end timestamps as
	 * "seconds.microseconds" values separated by a colon.
	 *
	 * @param file path of the data file to inspect
	 * @return the parsed index, or null when the header cannot be parsed
	 */
	public static WaveDataIndex readHead(String file){
		try{
			byte[] header = Tools.readFiletoNByteArray(file, 0, ConstantsValue.HEAD_LENGTH);
			String frontPart = new String(header, 0, 40, "ISO-8859-1");
			String backPart = new String(header, 80, 48, "ISO-8859-1");

			String[] frontFields = frontPart.split(":");
			if (frontFields.length < 3) {
				return null;
			}
			long points = Long.parseLong(frontFields[1].trim()); // sample count
			long rate = Long.parseLong(frontFields[2].trim());   // sample rate

			String[] backFields = backPart.split(":");
			String[] startParts = backFields[0].trim().split("\\.");
			long startS = Long.parseLong(startParts[0].trim());
			long startUS = Long.parseLong(startParts[1].trim());
			String[] endParts = backFields[1].split("\\.");
			long endS = Long.parseLong(endParts[0].trim());
			long endUS = Long.parseLong(endParts[1].trim());

			return new WaveDataIndex(startS, endS, startUS, endUS, points, rate, file);
		} catch (Exception e) {
			// Any read or parse failure is reported to the caller as null.
			e.printStackTrace();
			return null;
		}
	}
	/**
	 * Legacy (V1) header parser: the whole header is one colon-separated
	 * record of the form "start_s.start_us:end_s.end_us:&lt;?&gt;:points:rate\n…".
	 *
	 * 2017-03-07 (original note): the file's end time should be derived from
	 * the start time plus the point count rather than trusted from the
	 * header; the commented-out recomputation was removed, the header values
	 * are returned as-is.
	 *
	 * @param file path of the data file to inspect
	 * @return the parsed index, or null on any read/parse failure
	 */
	public static WaveDataIndex readHeadV1(String file){
		try{
			byte[] header = Tools.readFiletoNByteArray(file, 0, ConstantsValue.HEAD_LENGTH);
			String[] fields = new String(header, "ISO-8859-1").split(":");
			if (fields.length < 5) {
				return null;
			}

			String[] start = fields[0].trim().split("\\.");
			long startS = Long.parseLong(start[0].trim());
			long startUS = Long.parseLong(start[1].trim());

			String[] end = fields[1].split("\\.");
			long endS = Long.parseLong(end[0].trim());
			long endUS = Long.parseLong(end[1].trim());

			long points = Long.parseLong(fields[3].trim());
			// The rate field may be followed by a newline and trailing bytes;
			// keep only the first line.
			long rate = Long.parseLong(fields[4].trim().split("\n")[0].trim());

			return new WaveDataIndex(startS, endS, startUS, endUS, points, rate, file);
		} catch (Exception e) {
			// Any read or parse failure is reported to the caller as null.
			e.printStackTrace();
			return null;
		}
	}
	
	/**
	 * Merges the raw data file {@code file2} (fetched from FTP) into the
	 * aligned file {@code file1}: concatenates the three data channels and
	 * rewrites file1's 128-byte header with the new end time and point count.
	 *
	 * Header layout (established by the offsets used here): bytes 0..79 are
	 * colon-separated fields whose 2nd/3rd entries are point count and sample
	 * rate; bytes 80..127 are "startS.startUS:endS.endUS".
	 *
	 * BUGFIX: the original dereferenced the result of {@code readHead(file2)}
	 * without a null check, throwing NullPointerException on a corrupt raw
	 * file; that case now returns null like other failures.
	 *
	 * @param file1 path of the aligned (destination) file
	 * @param file2 path of the raw file to merge in
	 * @return index describing the merged file, or null on I/O/parse failure
	 * @throws IllegalArgumentException when file2 does not start after file1
	 */
	public static WaveDataIndex mergeFile(String file1, String file2){
		try {
			// Parse file1's header.
			byte []headFirst = Tools.readFiletoNByteArray(file1, 0, ConstantsValue.HEAD_LENGTH);
			String headFistStrinigF = new String(headFirst,0,80,"ISO-8859-1");
			String headFistStrinigB = new String(headFirst,80,48,"ISO-8859-1");
			String[] arrsFront = headFistStrinigF.split(":");
			if(arrsFront.length < 3) return null;
			long t1First, t2First, t11First, t22First, pointsFirst, rateFirst;
			pointsFirst = Long.parseLong(arrsFront[1].trim());// point count
			rateFirst = Long.parseLong(arrsFront[2].trim());// sample rate

			String[] arrsBack = headFistStrinigB.split(":");
			String[] arrs1First = arrsBack[0].trim().split("\\.");
			t1First = Long.parseLong(arrs1First[0].trim());
			t2First = Long.parseLong(arrs1First[1].trim());
			arrs1First = arrsBack[1].split("\\.");
			t11First = Long.parseLong(arrs1First[0].trim());
			t22First = Long.parseLong(arrs1First[1].trim());

			// Read file1's three channels (x, y, z), 4 bytes per sample each.
			byte [][]dataFirst = new byte[3][];
			dataFirst[0] = Tools.readFiletoNByteArray(file1, ConstantsValue.HEAD_LENGTH, pointsFirst * 4);
			dataFirst[1] = Tools.readFiletoNByteArray(file1, ConstantsValue.HEAD_LENGTH + pointsFirst * 4, pointsFirst * 4);
			dataFirst[2] = Tools.readFiletoNByteArray(file1, ConstantsValue.HEAD_LENGTH + pointsFirst * 4 * 2, pointsFirst * 4);

			// Parse file2's header; bail out instead of NPE-ing on a corrupt file.
			WaveDataIndex head2 = readHead(file2);
			if(head2 == null){
				return null;
			}
			long t1Seconod = head2.stimeS;
			long t2Seconod = head2.stimeUS;
			long t11Seconod = head2.etimeS;
			long t22Seconod = head2.etimeUS;
			long pointsSeconod = head2.points;
			long rateSeconod = head2.rate;

			// file2 must start strictly after file1 starts.
			if((t1Seconod <= t1First)/*||(rateFirst != rateSeconod)*/) {
				System.out.println("错误1: "+file1+"   " + t1First + "  " + t2First + "  " + t11First + "  "+ t22First+"  "+pointsFirst+" "+rateFirst);
				System.out.println("错误2: "+file2+"   "  + t1Seconod + "  " + t2Seconod + "  " + t11Seconod + "  "+ t22Seconod+"  "+pointsSeconod+" "+rateSeconod);
				throw new IllegalArgumentException("对时时，两个文件采样率不一样或者新文件比对时文件还新");
			}

			// Read file2's three channels.
			byte [][]dataSeconod = new byte[3][];
			dataSeconod[0] = Tools.readFiletoNByteArray(file2, ConstantsValue.HEAD_LENGTH, pointsSeconod * 4);
			dataSeconod[1] = Tools.readFiletoNByteArray(file2, ConstantsValue.HEAD_LENGTH + pointsSeconod * 4, pointsSeconod * 4);
			dataSeconod[2] = Tools.readFiletoNByteArray(file2, ConstantsValue.HEAD_LENGTH + pointsSeconod * 4 * 2, pointsSeconod * 4);

			byte [][] mergeData = Tools.ByteArrayInsert(dataFirst, dataSeconod);// per-channel concatenation

			// Start time is unchanged; end time and point count come from file2.
			t11First = t11Seconod;
			t22First = t22Seconod;
			pointsFirst = pointsFirst + pointsSeconod;

			// Rebuild the header: 80-byte front part + 48-byte back part = 128
			// bytes (matches the read offsets above).
			byte []headFirstNew = new byte[128];
			arrsFront[1] = String.valueOf(pointsFirst);
			arrsBack[1] = String.valueOf(t11First) + "." + String.valueOf(t22First);

			StringBuffer sbFront = new StringBuffer();
			for(int ii = 0; ii < arrsFront.length - 1; ii++){
				sbFront.append(arrsFront[ii]+":");
			}
			sbFront.append(arrsFront[arrsFront.length - 1]);
			headFirst = sbFront.toString().getBytes("ISO-8859-1");
			System.arraycopy(headFirst, 0, headFirstNew, 0, (headFirst.length > 80 ? 80 : headFirst.length));

			StringBuffer sbBack = new StringBuffer();
			for(int ii = 0; ii < arrsBack.length - 1; ii++){
				sbBack.append(arrsBack[ii]+":");
			}
			sbBack.append(arrsBack[arrsBack.length - 1]);
			headFirst = sbBack.toString().getBytes("ISO-8859-1");
			System.arraycopy(headFirst, 0, headFirstNew, 80, (headFirst.length > 48 ? 48 : headFirst.length));

			Tools.writeByteNToFile(file1, 0, ConstantsValue.HEAD_LENGTH, headFirstNew);
			Tools.writeByteNToFile(file1, ConstantsValue.HEAD_LENGTH, pointsFirst * 4, mergeData);
			return new WaveDataIndex(t1First, t11First, t2First, t22First, pointsFirst, rateFirst, file1);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return null;
	}
	
	/**
	 * Legacy one-shot download routine: connects to the station's FTP server,
	 * downloads today's data files (at most 100 per call) into sdata/ with a
	 * copy in data/, deletes them from the server, and then runs time
	 * alignment. On any FTP error the connection is closed and the station is
	 * removed from the working/known host lists.
	 *
	 * @param hostString station IP used as FTP host and directory name
	 */
	public static void runPersonelLogic1(String hostString){
		easyFTP ftp = new easyFTP();

		// Passive-mode FTP setup.
		ftp.getFtpClient().setRemoteVerificationEnabled(false);
		ftp.getFtpClient().enterLocalPassiveMode();
		ftp.getFtpClient().setControlEncoding("UTF-8");

		try {
			ftp.connect(hostString, "ftpuser", "123456");
			if (!ftp.setWorkingDirectory("/var/ftp/pub/")) {
				return;
			}

			String dataDir = PathKit.getWebRootPath() + "/data/" + hostString + "/";
			File dataFile = new File(dataDir);
			if (!dataFile.exists() && !dataFile.isDirectory()) {
				dataFile.mkdir();
			}

			// Raw source-file directory.
			String srcDir = PathKit.getWebRootPath() + "/sdata/" + hostString + "/";
			File srcFile = new File(srcDir);
			if (!srcFile.exists() && !srcFile.isDirectory()) {
				srcFile.mkdir();
			}

			String[] names = ftp.listName();

			String today = new SimpleDateFormat("yyyyMMdd").format(new Date());
			int count = (names.length > 100) ? 100 : names.length;
			// Small-batch download: only 14-character names from today.
			for (int i = 0; i < count; ++i) {
				if (names[i].length() != 14) continue;
				if (names[i].startsWith(today)) {
					ftp.downloadFile(names[i], srcDir + names[i]);
					Tools.copyFile(srcDir + names[i], dataDir + names[i]);
				}
				if (!ftp.deleteFile(names[i])) {
					System.out.println("删除失败ftp："+names[i]);
				}
			}
			// Second pass: retry deleting anything the first pass missed.
			for (int i = 0; i < count; ++i) {
				if (names[i].length() != 14) continue;
				if (!ftp.deleteFile(names[i])) {
					System.out.println("删除失败ftp："+names[i]);
				}
			}
			ftp.disconnect();
			timeModifyNoSQL(hostString); // 2017-03-06: start time alignment
		} catch (Exception e) {
			ftp.disconnect();
			ServerHandler.hostWorkingMap.remove(hostString);
			ServerHandler.hostListALL.remove(hostString); // FTP failed: drop this station
		}
	}

}
