package com.shuting.Analysis_PersonClassifyPaper;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

public class PaperInf {
 
	public static class MyMapper extends TableMapper<Text, Text> {
		
		private static String deep;
		protected void setup(Context context) throws IOException, InterruptedException {

			Configuration conf = context.getConfiguration();			
			String placePath = conf.get("placePath");
			ConfigInf.initPlaceSet(placePath);			
			deep = conf.get("deep");
		}

		public void map(ImmutableBytesWritable row, Result values, Context context)
				throws IOException, InterruptedException {
			
			String rowkey = Bytes.toString(row.get());			
			if (rowkey.contains("->")) {
				
				String qualifier="classify_deep_"+deep;
				String personID2="";
				String classify="";
				String orgMessage = "";
				ArrayList<String> paperSet = new ArrayList<String>();
				
				Cell rawCell[] = values.rawCells();
				for (Cell cell : rawCell) {
					if ("basicInf".equals(Bytes.toString(CellUtil.cloneFamily(cell)))) {
						if ("personID2".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
							personID2 = Bytes.toString(CellUtil.cloneValue(cell));
						}
						else if (qualifier.equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
							classify = Bytes.toString(CellUtil.cloneValue(cell));
						}						
						else if ("orgMessage".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
							orgMessage = Bytes.toString(CellUtil.cloneValue(cell));
						}						
					} 
					else {
						String paperID = Bytes.toString(CellUtil.cloneQualifier(cell));
						paperSet.add(paperID);
					}
				}
				//不存在该字段！！！
				if(orgMessage.isEmpty() || classify.isEmpty()){
					return;
				}			
				
				ArrayList<String> placeSet=new ArrayList<String>();				
				for (String org : orgMessage.split(";")) {
					for(int i=0;i<ConfigInf.placeSet.size();i++){
						String place=ConfigInf.placeSet.get(i);
						if(org.contains(place) && !placeSet.contains(place)){							
							placeSet.add(place);							
						}
					}				
				}
				//基于组织信息过滤人才
				if(placeSet.isEmpty()){
					return;
				}
				
				String personInf=personID2+"->"+classify;
				for(String paperID:paperSet){
					context.write(new Text(paperID),  new Text(personInf));				
				}
			} 
			else {
				String paperID = "";
				//时间信息必须有，期刊信息在统计权威文献时需要
				String journal = "";
				String sortnumber = "";				
				Cell rawCell[] = values.rawCells();
				for (Cell cell : rawCell) {
					if ("isFilter".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
						String isFilter = Bytes.toString(CellUtil.cloneValue(cell));
						if (isFilter.equals("true")){
							return;
						}							
					} 
					//时间戳问题需要考虑到底加不加????????
					else if ("sortnumber".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {									
						sortnumber = Bytes.toString(CellUtil.cloneValue(cell)).toUpperCase();
						Pattern p = Pattern.compile("[A-Z]");
						Matcher m = p.matcher(sortnumber);							
						if(!m.find()){
							return;
						}
						sortnumber=sortnumber.replaceAll(" +", ";");					
					}
					//当代旅游（学术版）
					else if ("publishinghouse".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
						journal = Bytes.toString(CellUtil.cloneValue(cell)).replaceAll("\\s*", "");
						journal=journal.replaceAll("（", "(");
						journal=journal.replaceAll("）", ")");
						if (journal.contains("(")) {
							int position = journal.indexOf("(");
							journal = journal.substring(0, position);							
						}					
					}					
					else if ("paperID".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
						paperID = Bytes.toString(CellUtil.cloneValue(cell));
					}
				}
				if(sortnumber.isEmpty()){
					return;
				}
				if(journal.isEmpty()){
					journal="null";
				}
				
				String paperInf=sortnumber+"<=>"+journal;
				context.write(new Text(paperID), new Text(paperInf));
			}
		}
	}

	// Reducer: joins person records with their papers' info by paperID.
	public static class MyReducer extends Reducer<Text, Text, Text, Text> {

		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {

			String paperID = key.toString();
			ArrayList<String> personSet=new ArrayList<String>();
			String paperInf = "";
			for (Text val : values) {
				String valueStr = val.toString();
				if (valueStr.contains("<=>")) {
					paperInf=valueStr;			
				} 
				else {
					personSet.add(valueStr);				
				}
			}
			if(personSet.isEmpty() || paperInf.isEmpty()){
				return;
			}
			
			for(String person:personSet){				
				String valueStr=paperID+"<=>"+paperInf;
				context.write(new Text(person), new Text(valueStr));			
			}	
		}
	}
	
	// Delete the output HDFS directory if it already exists.
    /**
     * Recursively deletes the given HDFS path if it exists, so a rerun of
     * the job does not fail on a pre-existing output directory.
     *
     * @param hdfsPath fully-qualified HDFS URI of the directory to remove
     * @throws IOException        on filesystem access failure
     * @throws URISyntaxException if {@code hdfsPath} is not a valid URI
     */
    public static void deleteHDFSPath(String hdfsPath) throws IOException, URISyntaxException{

    	// TODO(review): verify whether HBaseConfiguration would also work here
    	// instead of a plain Configuration for deleting the path.
    	FileSystem fs = FileSystem.get(new URI(hdfsPath), new Configuration());
    	Path target = new Path(hdfsPath);
    	if (fs.exists(target)) {
    		fs.delete(target, true);
    	}
    }	

	// Entry point: configures the multi-table scan job and runs it.
	/**
	 * Configures and runs the "paper_inf" MapReduce job: scans the person
	 * table (families "paper" and "basicInf") plus one or more paper tables
	 * (family "inf"), joins them in the reducer, and writes the result as a
	 * SequenceFile to {@code hdfsPath} (deleting it first if present).
	 *
	 * @param PersonTable name of the HBase person table
	 * @param PaperTable  comma-separated names of the HBase paper tables
	 * @param hdfsPath    HDFS output directory (overwritten if it exists)
	 * @param placePath   path to the place whitelist consumed by the mapper
	 * @param deep        classification depth, selects "classify_deep_<deep>"
	 * @throws Exception if job setup fails or the job does not complete successfully
	 */
	public void start(String PersonTable, String PaperTable, String hdfsPath, String placePath,String deep) throws Exception {

		Configuration hbaseConf = HBaseConfiguration.create();
		// Pass mapper parameters through the job configuration.
		hbaseConf.set("deep", deep);
		hbaseConf.set("placePath", placePath);
		Job job = Job.getInstance(hbaseConf, "paper_inf");
		job.setJarByClass(PaperInf.class);
		job.setNumReduceTasks(10);

		List<Scan> scans = new ArrayList<Scan>();

		// Scan of the person table: papers authored + basic info.
		Scan scan1 = new Scan();
		scan1.setCaching(500);
		scan1.setCacheBlocks(false);
		scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(PersonTable));
		scan1.addFamily(Bytes.toBytes("paper"));
		scan1.addFamily(Bytes.toBytes("basicInf"));
		scans.add(scan1);

		// One scan per paper table, reading only the "inf" family.
		for (String paper : PaperTable.split(",")) {

			Scan scan2 = new Scan();
			scan2.setCaching(500);
			scan2.setCacheBlocks(false);
			scan2.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(paper));
			scan2.addFamily(Bytes.toBytes("inf"));
			scans.add(scan2);
		}


		TableMapReduceUtil.initTableMapperJob(scans, MyMapper.class, Text.class, Text.class, job);

		job.setReducerClass(MyReducer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		FileOutputFormat.setOutputPath(job, new Path(hdfsPath));

		// Remove stale output so FileOutputFormat does not reject the path.
		deleteHDFSPath(hdfsPath);
		// Fail loudly instead of silently ignoring an unsuccessful job.
		if (!job.waitForCompletion(true)) {
			throw new IOException("MapReduce job 'paper_inf' failed");
		}
	}

}
