package mygroup.test.hadooptest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

public class MyMap extends Mapper<Object,Text,Text,IntWritable>{
	
	private static final IntWritable one =  new IntWritable(1);  
    
    private Text word = new Text();  
    
    String regex1="[\u4e00-\u9fa5]+";// 中文
    String regex2="[a-zA-Z0-9]+"; // 字母，数字

    protected void map(Object key, Text value, Context context)  
            throws IOException, InterruptedException {  
    	 String line = value.toString(); 
         boolean isZH = isContainZH(regex1,value.toString());
         if(isZH){
         	ArrayList<String> list =ikanalysis(value.toString());
         	Iterator<String> it = list.iterator();
			while (it.hasNext()) {
				context.write(new Text((String) it.next()), one);
			}

         }
         
     }  

     /**
      * 传入字符串，判断是否含有中文，如果有返回true 否则返回false
     * @param regex 中文正则表达式
     * @param str 传入字符串
     * @return 返回结果 boolean
     */
    private boolean isContainZH(String regex,String  str){
         Matcher matcher = Pattern.compile(regex).matcher(str);
         if(matcher.find()){
            System.out.println(matcher.group(0));
            return true;
         }
     	return false;
     }
     
     /**
     * 传入字符串，判断是否含有字母或者数字，如果有返回true 否则返回false
     * @param regex 中文正则表达式
     * @param str 传入字符串
     * @return 返回结果 boolean
     */
    private boolean isContainOther(String regex,String  str){
    	 Matcher matcher = Pattern.compile(regex).matcher(str);
         if(matcher.find()){
            System.out.println(matcher.group(0));
            return true;
         }
     	return false;
     }
     
     /**
      *  传入字符串，进行中文分词 ， 返回字符数组
     * @param s 字符串
     * @return 返回结果
     */
	private ArrayList<String> ikanalysis(String s) {
		ArrayList<String> list= new ArrayList<String>();
		try {
			byte[] bt = s.getBytes();
			InputStream ip = new ByteArrayInputStream(bt);
			Reader reader = new InputStreamReader(ip);
			IKSegmenter iks = new IKSegmenter(reader, true);
			Lexeme t;
			while ((t = iks.next()) != null) {
				String text = t.getLexemeText();
				if(!isContainOther(regex2,text)){
					list.add(text);
				}
			}
//			sb.delete(sb.length() - 1, sb.length());

		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
//		System.out.println(sb.toString());
		return list;

	}

}
