package com.sqk.dxpro.dxclear;

import java.io.IOException;
import java.util.Locale;
import java.util.regex.Pattern;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import com.sqk.dxpro.utils.TMatcher;
import com.sqk.dxpro.utils.TProperties;

/**
 * Mapper that cleans raw log records: it drops records with the wrong column
 * count, an empty/scheme-only URL, or a URL matching the "fileclear"
 * blacklist pattern, then emits {@code id <sep> domain <sep> url} keyed by
 * {@link NullWritable}.
 */
public class DxClearMapper extends Mapper<LongWritable, Text, NullWritable, Text> {

	/** 0-based index of the URL column in the split input record. */
	private static final int URL_FIELD = 14;
	/** 0-based index of the id column copied as the first output column. */
	private static final int ID_FIELD = 1;

	// Configuration cached once per task in setup() instead of being
	// re-read from TProperties for every record in map().
	private String fieldRegex;      // "filesplit": input column separator regex
	private String outSeparator;    // "outfilesplit": output column separator
	private int expectedLength;     // "filelength": required column count
	private Pattern clearPattern;   // "fileclear": URLs matching this are discarded

	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		fieldRegex = TProperties.getValue("filesplit");
		outSeparator = TProperties.getValue("outfilesplit");
		expectedLength = Integer.parseInt(TProperties.getValue("filelength"));
		// Compile once: String.matches() would recompile this regex per record.
		clearPattern = Pattern.compile(TProperties.getValue("fileclear"));
	}

	/**
	 * Filters one input record and, if it survives all checks, writes
	 * {@code id <sep> TMatcher.getDomain(host) <sep> url}.
	 *
	 * @param key     byte offset of the line (unused)
	 * @param value   one raw input record
	 * @param context Hadoop context receiving the cleaned record
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {

		String[] fields = value.toString().split(fieldRegex);
		if (fields.length != expectedLength) {
			return; // malformed record: wrong column count
		}

		String url = fields[URL_FIELD];
		// Locale.ROOT: locale-independent lowering (avoids e.g. Turkish-I surprises).
		String lowerUrl = url.toLowerCase(Locale.ROOT);
		if (url.isEmpty()
				|| "http://".equals(url)
				|| "https://".equals(url)
				|| clearPattern.matcher(lowerUrl).matches()) {
			return; // empty, scheme-only, or blacklisted URL
		}

		// Normalize: ensure a scheme so the host is always the third "/"-part.
		// Case-insensitive check so "HTTP://x" is not double-prefixed.
		if (!lowerUrl.startsWith("http://") && !lowerUrl.startsWith("https://")) {
			url = "http://" + url;
		}
		String[] parts = url.split("/", -1);
		if (parts.length < 3) {
			return; // defensive: no host component (original would have thrown AIOOBE)
		}
		String domain = parts[2];
		int colon = domain.indexOf(':');
		if (colon >= 0) {
			domain = domain.substring(0, colon); // strip port
		}

		String out = fields[ID_FIELD] + outSeparator
				+ TMatcher.getDomain(domain) + outSeparator + url;
		context.write(NullWritable.get(), new Text(out));
	}
}

