package com.vs.crawl.news;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.StringTokenizer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.edu.hfut.dmic.webcollector.crawler.Crawler;
import cn.edu.hfut.dmic.webcollector.fetcher.Fetcher.FetchQueue;
import cn.edu.hfut.dmic.webcollector.util.FileUtils;
import cn.edu.hfut.dmic.webcollector.util.JDBCHelper;

import com.file.filter.TaskPathFilterLocal;
import com.model.JarExecute;
import com.model.JarExecuteConf;
import com.model.xmlElement.DataColumns;
import com.model.xmlElement.input.Target;
import com.model.xmlElement.output.DataBase;
import com.model.xmlElement.output.OutputFile;
import com.model.xmlElement.output.OutputFileHdfs;
import com.model.xmlElement.output.OutputFileLocal;
import com.sort.FileModifyTimeSort;
import com.tool.DaoTool;

/**
 * *********************************************
 * @author Administrator
 * @FileName WriteCrawler.java
 * @Description File writer for crawled data: flushes the in-memory crawl
 *              buffers to local files, HDFS (via an external jar) and a
 *              relational database, then resets the buffers.
 **********************************************
 */
public class WriteCrawler {
	public static final Logger LOG = LoggerFactory.getLogger(WriteCrawler.class);

	/** Name of the serialized configuration file consumed by the HDFS-upload jar. */
	private static final String JAR_CONF_FILE = "user.bin";

	private Crawler crawler;
	private String jarFilePath;
	private String hadoopHomeDir;
	private DataBase dataBase;

	public String getJarFilePath() {
		return jarFilePath;
	}
	public void setJarFilePath(String jarFilePath) {
		this.jarFilePath = jarFilePath;
	}

	public String getHadoopHomeDir() {
		return hadoopHomeDir;
	}
	public void setHadoopHomeDir(String hadoopHomeDir) {
		this.hadoopHomeDir = hadoopHomeDir;
	}
	/*--------------------------------------------------------------------------------*/
	/**
	 * Creates a writer bound to the given crawler and eagerly initializes the
	 * JDBC template / target table when a database output is configured.
	 *
	 * @param crawler source of the buffered data and of the output configuration
	 */
	public WriteCrawler(Crawler crawler) {
		this.crawler = crawler;
		initJdbcTemplate();
	}
	public Crawler getCrawler() {
		return crawler;
	}

	public void setCrawler(Crawler crawler) {
		this.crawler = crawler;
	}
	/*--------------------------------------------------------------------------------*/

	/**
	 * Initializes the MySQL JDBC template and creates the output table, if a
	 * {@code DataBase} output element is present in the crawler configuration.
	 * Otherwise this is a no-op and {@link #insert()} will skip database writes.
	 */
	public void initJdbcTemplate(){
		this.dataBase = crawler.getDataColumns().getOutput().getDataBase();
		if(dataBase != null){
			// 20 = initial pool size, 100 = max pool size (values inherited from
			// the original configuration; tune via JDBCHelper if needed).
			JDBCHelper.createMysqlTemplate(
					dataBase.getDataBaseName(),
					dataBase.getDataBaseUrl(),
					dataBase.getDataBaseUserName(),
					dataBase.getDataBasePassword(),
					20,
					100);
			DaoTool.createTable(dataBase.getDataBaseName(), dataBase, crawler.getDataColumns());
		}
	}

	/**
	 * Flushes all buffered data to every configured sink, then clears the
	 * in-memory buffers. The whole flush happens while holding the fetch
	 * queue's monitor so that fetch threads blocked on the queue are only
	 * released (via {@code notifyAll}) after the buffers have been reset.
	 * NOTE(review): the {@code isWriteing()} guard and wait/notify protocol
	 * are part of the queue's handshake with the fetcher threads — preserved
	 * exactly as-is.
	 */
	public void writeData(){
		FetchQueue fetchQueue = crawler.getFetcher().getFetchQueue();
		synchronized (fetchQueue) {
			if(!fetchQueue.isWriteing()){
				LOG.info("正在写入文件,队列等待中......");
				// 1) save to local disk
				writeDataLocal();
				// 2) upload to HDFS
				writeDataHdfs();
				// 3) insert into the database
				insert();
				// 4) persist the seed file
				writeSeedsData();
				// 5) clear the in-memory buffers — must happen before waking
				//    the queue so fetchers never re-write stale data
				cleanData();
				LOG.info("完成写入文件,唤醒队列......");
				fetchQueue.notifyAll();
			}
		}
	}

	/**
	 * Appends the buffered crawl data to a local file, honoring the optional
	 * per-file maximum size ({@code outputLocalFileMaxKbs}). When the append
	 * would exceed the limit, the data is routed through
	 * {@code FileUtils.writeFileWithMaxKbs} against the most recently modified
	 * file of this task so it can be split across files.
	 * No-op when the buffer is empty or no local output is configured.
	 * I/O failures are logged and swallowed (best-effort, as before).
	 */
	public void writeDataLocal(){
		try {
			String data = crawler.getData().toString();
			if(!data.isEmpty()){
				OutputFile outputFile = crawler.getDataColumns().getOutput().getOutputFile();
				OutputFileLocal outputfileLocal = outputFile.getOutputFileLocal();
				if(outputfileLocal != null){
					Target target = crawler.getDataColumns().getInput().getTarget();
					String dirPath = outputfileLocal.getOutputLocalFileDir(target.getTaskId(), target.getTaskTime(), target.getName());
					FileUtils.createFolder(dirPath);
					String filePath = outputfileLocal.getOutputLocalFilePath(target.getTaskId(), outputFile.getUser(), target.getTaskTime(), target.getName());
					String fileEncoding = outputfileLocal.getOutputLocalFileEncoding();
					File file = new File(filePath);
					String outputLocalFileMaxKbs = outputfileLocal.getOutputLocalFileMaxKbs();
					if(outputLocalFileMaxKbs == null || outputLocalFileMaxKbs.isEmpty()){
						// No maximum file size configured: plain append.
						if (!file.exists()) {
							file.createNewFile();
						}
						FileUtils.writeFileAppend(file, data, fileEncoding);
					}else{
						// BUGFIX: measure the payload with the configured output
						// encoding (the file is written with it), not the platform
						// default charset; fall back to the default when no
						// encoding is configured to preserve old behavior.
						long dataByteLength = (fileEncoding == null)
								? data.getBytes().length
								: data.getBytes(fileEncoding).length;
						long totalByteLength = file.length() + dataByteLength;
						long maxKbs = Long.parseLong(outputLocalFileMaxKbs);
						if(totalByteLength > maxKbs * 1024){
							// Limit would be exceeded: collect all files this task
							// has produced so far and continue in the newest one.
							File[] taskFiles = new File(dirPath).listFiles(new TaskPathFilterLocal(target.getTaskId(), outputfileLocal.getOutputLocalFileExt(), outputFile.getUser(), target.getTaskTime()));
							File lastFile;
							// BUGFIX: listFiles() returns null on an I/O error —
							// treat that the same as "no files yet" instead of NPE.
							if(taskFiles == null || taskFiles.length == 0){
								file.createNewFile();
								lastFile = file;
							}else{
								// Sort by last-modified time; index 0 is the file
								// most recently written to.
								Arrays.sort(taskFiles, new FileModifyTimeSort());
								lastFile = taskFiles[0];
							}
							FileUtils.writeFileWithMaxKbs(lastFile, data, maxKbs);
						}else{
							// Limit configured but not exceeded: plain append.
							if (!file.exists()) {
								file.createNewFile();
							}
							FileUtils.writeFileAppend(file, data, fileEncoding);
						}
					}
				}
			}
		} catch (IOException e) {
			// Best-effort: log and continue so the other sinks still run.
			LOG.error("Failed to write crawl data to local file", e);
		}
	}

	/**
	 * Serializes a {@code JarExecuteConf} describing the upload (paths, user,
	 * encoding, payload) to {@value #JAR_CONF_FILE} and launches the external
	 * HDFS-upload jar configured via {@link #setJarFilePath(String)}.
	 * No-op when the buffer is empty or no HDFS output is configured.
	 */
	public void writeDataHdfs(){
		if(!crawler.getData().toString().isEmpty()){
			OutputFile outputFile = crawler.getDataColumns().getOutput().getOutputFile();
			OutputFileHdfs outputfileHdfs = outputFile.getOutputFileHdfs();
			if(outputfileHdfs != null){
				Target target = crawler.getDataColumns().getInput().getTarget();
				String path1 = outputfileHdfs.getOutputHdfsFilePath(target.getTaskId(), outputFile.getUser(), target.getTaskTime(), target.getName());
				String path2 = outputfileHdfs.getOutputHdfsFileDir(target.getTaskId(), target.getTaskTime(), target.getName());

				// Build the execution-parameter object for the external jar.
				JarExecuteConf jarExecuteConf = new JarExecuteConf(
						target.getTaskId(), outputfileHdfs.getOutputHdfsFileExt(),
						outputFile.getUser(), target.getTaskTime(),
						target.getName(), outputfileHdfs.getOutputHdfsFileName1(),
						path1, path2,
						outputfileHdfs.getOutputHdfsFileEncoding(), crawler.getData());
				if(hadoopHomeDir != null && !"".equals(hadoopHomeDir)){
					jarExecuteConf.setHadoopHomeDir(hadoopHomeDir);
				}
				// Serialize the parameter object; try-with-resources guarantees
				// the stream is closed even on failure.
				try (ObjectOutputStream output = new ObjectOutputStream(new FileOutputStream(JAR_CONF_FILE))) {
					output.writeObject(jarExecuteConf);
				} catch (IOException e) {
					LOG.error("Failed to serialize jar execution config to " + JAR_CONF_FILE, e);
				}
				// Run the jar that performs the actual HDFS upload.
				new JarExecute().executeJar(this.getJarFilePath());
			}
		}
	}

	/**
	 * Inserts each buffered line (split on CR/LF) as one row into the
	 * configured database table, using a parameterized statement built from
	 * the configured output columns.
	 * No-op when no database is configured or the buffer is empty.
	 */
	public void insert(){
		if(dataBase != null && !crawler.getData().toString().isEmpty()){
			// Build "col1,col2,..." and the matching "?,?,..." placeholder list.
			StringBuilder keys = new StringBuilder(200);
			StringBuilder values = new StringBuilder(200);
			DataColumns dataColumns = crawler.getDataColumns();
			dataColumns.sortDataColumnOutputs();
			for(int i = 0, size = dataColumns.size(); i < size; i++){
				if(i > 0){
					keys.append(",");
					values.append(",");
				}
				keys.append(dataColumns.get(i).getDataColumnOutput().getOutputColumn());
				values.append("?");
			}

			// One insert per buffered line.
			StringTokenizer token = new StringTokenizer(crawler.getData().toString(), "\r\n");
			while(token.hasMoreTokens()){
				String line = token.nextToken();
				DaoTool.insert(dataBase.getDataBaseName(), dataBase, keys.toString(), values.toString(), line);
			}
		}
	}

	/**
	 * Appends the buffered seed data to the task's local seed file, when seed
	 * saving is enabled on the target and a local output is configured.
	 * I/O failures are logged and swallowed (best-effort, as before).
	 */
	public void writeSeedsData(){
		Target target = crawler.getDataColumns().getInput().getTarget();
		String seedsData = crawler.getSeedsData().toString();
		if(!seedsData.isEmpty() && target.isTarget_save_seed2()){
			OutputFile outputFile = crawler.getDataColumns().getOutput().getOutputFile();
			OutputFileLocal outputfileLocal = outputFile.getOutputFileLocal();
			if(outputfileLocal != null){
				FileUtils.createFolder(outputfileLocal.getOutputSeedLocalFileDir(target.getTaskId(), target.getTaskTime(), target.getName()));
				String filePath = outputfileLocal.getOutputLocalSeedsFilePath(target.getTaskId(), outputFile.getUser(), target.getTaskTime(), target.getName());
				String fileEncoding = outputfileLocal.getOutputLocalFileEncoding();
				File file = new File(filePath);
				try {
					if (!file.exists()) {
						file.createNewFile();
					}
					FileUtils.writeFileAppend(file, seedsData, fileEncoding);
				} catch (IOException e) {
					LOG.error("Failed to write seed data to local file", e);
				}
			}
		}
	}

	/**
	 * Resets all in-memory buffers and counters after a successful flush.
	 * Must run before fetcher threads are woken (see {@link #writeData()}).
	 */
	public void cleanData(){
		crawler.setData(new StringBuffer());
		crawler.setSeedsData(new StringBuffer());
		crawler.getTotalSize().set(0L);
		crawler.getTotalBytesLength().set(0L);
	}

}
