package jerry.file.demo;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import jerry.sql.demo.DAO;

public class FileDemo {

	// Writers for the four entity output files; opened in fileReaderTest(String, String).
	BufferedWriter bufferedWriterPeople;
	BufferedWriter bufferedWriterOrg;
	BufferedWriter bufferedWriterPlace;
	BufferedWriter bufferedWriterKeyWords;

	/**
	 * Reads the default GBK-encoded TRS export and splits each record's indexed
	 * fields into four UTF-8 text files (people / org / place / keywords).
	 *
	 * @throws SQLException propagated from record processing (statement preparation
	 *         itself is best-effort, see {@link #initConnection()})
	 * @throws IOException if the input file cannot be read or an output file written
	 */
	public void fileReaderTest() throws SQLException, IOException {
		// Original hard-coded locations kept as defaults for backward compatibility.
		fileReaderTest("E:\\infoallKey.trs", "E:\\data");
	}

	/**
	 * Same parsing as {@link #fileReaderTest()} but with configurable paths.
	 * Records in the TRS file are separated by blank lines and begin with "&lt;REC&gt;".
	 *
	 * @param inputFile path of the GBK-encoded TRS source file
	 * @param outputDir directory receiving people.txt / org.txt / place.txt / keywords.txt
	 * @throws SQLException propagated from record processing
	 * @throws IOException  if reading the input or writing an output file fails
	 */
	public void fileReaderTest(String inputFile, String outputDir) throws SQLException, IOException {
		init();
		// try-with-resources guarantees every stream is flushed and closed, even on
		// error. The original code never closed three of the four writers, silently
		// losing any still-buffered output for org/place/keywords.
		try (BufferedReader reader = new BufferedReader(
						new InputStreamReader(new FileInputStream(inputFile), "gbk"), 81920);
				BufferedWriter people = new BufferedWriter(
						new OutputStreamWriter(new FileOutputStream(outputDir + "\\people.txt"), "utf-8"));
				BufferedWriter org = new BufferedWriter(
						new OutputStreamWriter(new FileOutputStream(outputDir + "\\org.txt"), "utf-8"));
				BufferedWriter place = new BufferedWriter(
						new OutputStreamWriter(new FileOutputStream(outputDir + "\\place.txt"), "utf-8"));
				BufferedWriter keywords = new BufferedWriter(
						new OutputStreamWriter(new FileOutputStream(outputDir + "\\keywords.txt"), "utf-8"))) {
			bufferedWriterPeople = people;
			bufferedWriterOrg = org;
			bufferedWriterPlace = place;
			bufferedWriterKeyWords = keywords;

			String currentString;
			long count = 0;
			List<String> docs = new ArrayList<String>(20);
			while ((currentString = reader.readLine()) != null) {
				if (currentString.length() == 0) {
					// A blank line terminates the current record block; process it.
					analyzeAndInsertString(docs);
					docs.clear();
				} else {
					docs.add(currentString);
				}
				count++;
				if (count % 10000 == 0) {
					System.out.println("读取了：" + count + "行");
				}
			}
			// BUG FIX: the original dropped the final record whenever the file
			// did not end with a blank line.
			if (!docs.isEmpty()) {
				analyzeAndInsertString(docs);
			}
			System.out.println("共读取了：" + count + "行");
		}
	}

	// Number of record blocks handed to analyzeAndInsertString (diagnostic only).
	int blockCount = 0;

	/**
	 * Parses one record block and routes each recognized field's words to the
	 * matching output file. The first line must be "&lt;REC&gt;"; parsing stops at the
	 * abstract field because the indexed fields always precede it in this format.
	 *
	 * @param docs the non-blank lines of one record; null/empty blocks are ignored
	 * @throws RuntimeException if the block header or a field head cannot be parsed
	 * @throws IOException      if writing an extracted word fails
	 */
	private void analyzeAndInsertString(List<String> docs) throws SQLException, IOException {
		blockCount++;
		if (docs == null || docs.isEmpty()) {
			return;
		}
		if (!"<REC>".equals(docs.get(0))) {
			throw new RuntimeException("无法解析文件:" + docs);
		}
		int size = docs.size();
		for (int i = 1; i < size; i++) {
			String line = docs.get(i);
			if (line.charAt(0) != '<') {
				throw new RuntimeException("解析错误:" + line);
			}
			// The head is "<" plus one CJK character, e.g. "<人".
			EType type = getTypeByHead(line.substring(0, 2));
			// BUG FIX: an unrecognized head used to surface later as an opaque
			// NullPointerException; fail fast with a descriptive message instead.
			if (type == null) {
				throw new RuntimeException("解析错误:" + line);
			}
			if (type == EType.Abstract) {
				break;
			}
			String payload = line.substring(getHeadLengthByType(type));
			insertByType(analyzeByType(payload, type), type);
		}
//		if(blockCount % 2000 == 0)	executeBatch();
	}

	// Maps a two-character field head (e.g. "<人") to its field type.
	private HashMap<String, EType> typeWords = new HashMap<String, EType>();

	// Full length of each field's head tag (including the closing '>'),
	// used to strip the tag from the line before splitting the payload.
	private HashMap<EType, Integer> headLength = new HashMap<EType, Integer>(7);

	/** Populates the lookup tables and prepares the (currently unused) database statements. */
	private void init() {
		typeWords.put("<摘", EType.Abstract);
		typeWords.put("<人", EType.PEOPLE);
		typeWords.put("<地", EType.PLACE);
		typeWords.put("<机", EType.ORG);
		typeWords.put("<关", EType.KEYWORDS);
		typeWords.put("<隐", EType.ImplicitIndexing);
		typeWords.put("<新", EType.NewIndexing);

		headLength.put(EType.Abstract, 5);
		headLength.put(EType.PEOPLE, 5);
		headLength.put(EType.PLACE, 5);
		headLength.put(EType.ORG, 6);
		headLength.put(EType.KEYWORDS, 6);
		headLength.put(EType.ImplicitIndexing, 7);
		headLength.put(EType.NewIndexing, 7);

		initConnection();
	}

	// Prepared once in initConnection(); never executed in the current code path
	// (the executeBatch calls are commented out above).
	PreparedStatement insertPeopleStatement = null;
	PreparedStatement insertORGStatement = null;
	PreparedStatement insertPlaceStatement = null;
	PreparedStatement insertKeyWordsStatement = null;

	/**
	 * Prepares the four INSERT statements. A failure here is deliberately
	 * non-fatal: the file splitting works without the database, so we log and
	 * continue with null statements rather than abort.
	 */
	private void initConnection() {
		Connection connection = DAO.getConnection();
		try {
			insertPeopleStatement = connection.prepareStatement("INSERT INTO PEOPLE(NAME)  VALUES(?)");
			insertORGStatement = connection.prepareStatement("INSERT INTO ORG(NAME)  VALUES(?)");
			insertPlaceStatement = connection.prepareStatement("INSERT INTO PLACE(NAME)  VALUES(?)");
			insertKeyWordsStatement = connection.prepareStatement("INSERT INTO KEYWORDS(NAME)  VALUES(?)");
		} catch (SQLException e) {
			e.printStackTrace();
		}
		// NOTE(review): the connection and statements are never closed; tolerable
		// for a one-shot demo but a leak in long-running code.
	}

	int analyzedFileCount = 0;

	/**
	 * Writes each extracted word to the output file matching {@code type}, one
	 * word per line. KEYWORDS and all remaining types share keywords.txt.
	 *
	 * @throws IOException if a write fails — BUG FIX: the original caught and
	 *         only printed these, silently dropping data on write errors
	 */
	private void insertByType(String[] words, EType type) throws SQLException, IOException {
		BufferedWriter writer;
		switch (type) {
		case PEOPLE:
			writer = bufferedWriterPeople;
			break;
		case ORG:
			writer = bufferedWriterOrg;
			break;
		case PLACE:
			writer = bufferedWriterPlace;
			break;
		default:
			writer = bufferedWriterKeyWords;
			break;
		}

		for (String word : words) {
			writer.write(word);
			writer.write("\n");
		}
	}

	/** Looks up the field type for a two-character head; null if unknown. */
	private EType getTypeByHead(String head) {
		return typeWords.get(head);
	}

	/** Returns the full head-tag length to strip for the given field type. */
	private int getHeadLengthByType(EType type) {
		return headLength.get(type).intValue();
	}

	// Separator between individual words inside a field payload.
	// (Renamed from the original misspelled, unused field "sepatater".)
	private String separator = ";";

	/** Splits a field payload into individual words on the separator. */
	private String[] analyzeByType(String payload, EType type) {
		return payload.split(separator);
	}

	/** Demo entry point: runs the split with the default hard-coded paths. */
	public static void main(String[] args) throws SQLException, IOException {
		FileDemo demo = new FileDemo();
		demo.fileReaderTest();
	}

}
