package job.wuba;

import java.io.IOException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import tools.CatchFileTools;
import tools.CheckFileTools;
import tools.Dict;
import tools.MyDateUtil;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.bj58.analysis.tools.CommonAnalysisToolsDriver;
import com.bj58.analysis.tools.LogToolsTag;
import com.bj58.analysis.tools.etl.bean.ETLTrackBean;
import com.bj58.analysis.track.format.TrackSplitUtils;
import com.bj58.analysis.track.format.TrackInputFormat;

public class WubaNetflowJob {

	public static class M1 extends Mapper<LongWritable, Text, Text, Text> {
		TrackSplitUtils trackSplit = TrackSplitUtils.getInstance();
		// Job parameters are constant for the lifetime of a task attempt, so they
		// are read once in setup() instead of from the Configuration on every map() call.
		private String plat;
		private String runDate;
		// Reused output holders — avoids allocating two Text objects per emitted pair.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		/**
		 * Caches the job parameters and loads the dictionary files shipped via
		 * DistributedCache into the shared {@code Dict} holder. The symlink names
		 * here must match the ones registered by {@code main()}.
		 */
		protected void setup(Context context) throws IOException, InterruptedException {
			Configuration conf = context.getConfiguration();
			plat = conf.get("plat");
			runDate = conf.get("runDate");
			Path[] paths = DistributedCache.getLocalCacheFiles(conf);
			Dict.marketSpmList = CatchFileTools.getListByCache(paths, "pcm_source_spm_list", "utf-8");
			Dict.msourceLMMap = CatchFileTools.getMapByCache(paths, "m_source_lm_dict", "utf-8", "\t");
			Dict.msourceQDMap = CatchFileTools.getMapByCache(paths, "m_source_qd_dict", "utf-8", "\t");
			// NOTE(review): pcsourceLMList is loaded from "m_source_lm_dict" although
			// every other pc* dict uses a pc-specific file — looks like a possible
			// copy/paste slip; confirm the intended cache name before changing it.
			Dict.pcsourceLMList = CatchFileTools.getListByCache(paths, "m_source_lm_dict", "utf-8");
			Dict.pcsourceList = CatchFileTools.getListByCache(paths, "pc_source_dict", "utf-8");
			Dict.cityIdToNamePathMap = CatchFileTools.getMapByCache(paths, "cmc_displocal_namepath", "utf-8", "\t");
			Dict.cateIdToNamePathMap = CatchFileTools.getMapByCache(paths, "cmc_dispcategory_namepath", "utf-8", "\t");
			// Synthetic entries: "0" is the all-cities / all-categories roll-up,
			// -101/-102 are custom category combinations.
			Dict.cityIdToNamePathMap.put("0", "all\tall\tall\tall");
			Dict.cateIdToNamePathMap.put("-101", "自定义组合租房\tall\tall\tall\tall");
			Dict.cateIdToNamePathMap.put("-102", "自定义组合商业房产\tall\tall\tall\tall");
			Dict.cateIdToNamePathMap.put("0", "all\tall\tall\tall\tall");
			Dict.businessMap = CatchFileTools.getCityCateIdMapByCache(paths, "dict_disp_cate", "utf-8", "\t", "bussiness");
		}

		/**
		 * Splits one visit record and emits every (key, value) metric pair
		 * produced by {@code WubaNetflowKeyEntry.uvKeyMap} for the current
		 * platform and run date.
		 */
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			List<List<String>> listLines = trackSplit.getSplitVisit(value);
			try {
				Map<String, String> keyMap = WubaNetflowKeyEntry.uvKeyMap(listLines, runDate, plat);
				for (Map.Entry<String, String> entry : keyMap.entrySet()) {
					outKey.set(entry.getKey());
					outValue.set(entry.getValue());
					context.write(outKey, outValue);
				}
			} catch (Exception e) {
				// Best-effort: a malformed visit record is skipped rather than
				// failing the whole task. Kept as stderr output to preserve the
				// existing behavior; a Hadoop counter would make drops visible.
				e.printStackTrace();
			}
		}
	}
	public static class R1 extends Reducer<Text, Text, Text, Text>
	{
		/**
		 * Sums per-key traffic metrics and derives averages.
		 *
		 * <p>Each incoming value holds two tab-separated metric groups joined by a
		 * {@code '\001'} separator: group 0 = overall metrics, group 1 = "first"
		 * (landing) metrics. Field order within a group:
		 * pv, uv, new_uv, visit, new_visit, jump_in, jump_out, timeOnSite.
		 *
		 * <p>Output value (per group, groups joined by a tab):
		 * pv, uv, new_uv, visit, new_visit, avgPage, avgTime, jump_in, jump_out, out_rate.
		 */
		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException
		{
			// Primitive accumulators: the original used boxed Integer/Long, which
			// autoboxes on every += in the hot loop.
			int pv = 0, uv = 0, newUv = 0, visit = 0, newVisit = 0, jumpIn = 0, jumpOut = 0;
			long timeOnSite = 0L;
			int firstPv = 0, firstUv = 0, firstNewUv = 0, firstVisit = 0, firstNewVisit = 0, firstJumpIn = 0, firstJumpOut = 0;
			long firstTimeOnSite = 0L;

			for (Text text : values) {
				// Split the record once (the original re-split the same string twice).
				String[] groups = text.toString().split("\001");
				String[] all = groups[0].split("\t");
				String[] first = groups[1].split("\t");

				pv         += Integer.parseInt(all[0]);
				uv         += Integer.parseInt(all[1]);
				newUv      += Integer.parseInt(all[2]);
				visit      += Integer.parseInt(all[3]);
				newVisit   += Integer.parseInt(all[4]);
				jumpIn     += Integer.parseInt(all[5]);
				jumpOut    += Integer.parseInt(all[6]);
				timeOnSite += Long.parseLong(all[7]);

				firstPv         += Integer.parseInt(first[0]);
				firstUv         += Integer.parseInt(first[1]);
				firstNewUv      += Integer.parseInt(first[2]);
				firstVisit      += Integer.parseInt(first[3]);
				firstNewVisit   += Integer.parseInt(first[4]);
				firstJumpIn     += Integer.parseInt(first[5]);
				firstJumpOut    += Integer.parseInt(first[6]);
				firstTimeOnSite += Long.parseLong(first[7]);
			}

			DecimalFormat df = new DecimalFormat("0.00");
			// Derived ratios default to "0.00" when the denominator (visit) is zero.
			String avgTime = "0.00", avgPage = "0.00", outRate = "0.00";
			if (visit != 0) {
				avgTime = df.format((double) timeOnSite / visit);
				avgPage = df.format((double) pv / visit);
				outRate = df.format((double) jumpOut / visit);
			}
			String firstAvgTime = "0.00", firstAvgPage = "0.00", firstOutRate = "0.00";
			if (firstVisit != 0) {
				firstAvgTime = df.format((double) firstTimeOnSite / firstVisit);
				firstAvgPage = df.format((double) firstPv / firstVisit);
				firstOutRate = df.format((double) firstJumpOut / firstVisit);
			}

			String a = pv + "\t" + uv + "\t" + newUv + "\t" + visit + "\t" + newVisit + "\t" + avgPage + "\t" + avgTime + "\t" + jumpIn + "\t" + jumpOut + "\t" + outRate;
			String b = firstPv + "\t" + firstUv + "\t" + firstNewUv + "\t" + firstVisit + "\t" + firstNewVisit + "\t" + firstAvgPage + "\t" + firstAvgTime + "\t" + firstJumpIn + "\t" + firstJumpOut + "\t" + firstOutRate;

			context.write(key, new Text(a + "\t" + b));
		}
	}

	/**
	 * Driver entry point. Usage:
	 * <pre>
	 *   WubaNetflowJob &lt;startDate yyyyMMdd&gt; &lt;endDate yyyyMMdd&gt; &lt;plats&gt;
	 *   WubaNetflowJob &lt;plats&gt;                      (runs for yesterday only)
	 * </pre>
	 * where {@code plats} is a comma-separated platform list, e.g. {@code pc,m}.
	 * Submits one MR job per (platform, date) combination and exits non-zero on
	 * the first failure.
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException
	{
		Configuration conf = new Configuration();
		// To disable compressed file output, uncomment:
		// conf.setBoolean("mapreduce.output.fileoutputformat.compress", false);
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (otherArgs.length == 0) {
			// The original code threw ArrayIndexOutOfBoundsException here; fail
			// with a usage message instead.
			System.err.println("Usage: WubaNetflowJob [<startDate yyyyMMdd> <endDate yyyyMMdd>] <plats e.g. pc,m>");
			System.exit(2);
		}

		SimpleDateFormat smdf = new SimpleDateFormat("yyyyMMdd");
		String yestoday = smdf.format(new Date().getTime() - 24L * 60 * 60 * 1000);
		String startDate;
		String endDate;
		String[] platArr;
		if (otherArgs.length > 2) {
			startDate = otherArgs[0];
			endDate = otherArgs[1];
			platArr = otherArgs[2].split(",");
		} else {
			// Date range omitted: process yesterday only.
			startDate = yestoday;
			endDate = yestoday;
			platArr = otherArgs[0].split(",");
		}

		int returnCode = 126; // stays 126 if no (plat, date) combination ever runs
		for (String plat : platArr) {
			for (String runDate : MyDateUtil.getDateList(startDate, endDate, 0)) {
				conf.set("runDate", runDate);
				conf.set("plat", plat);
				Job job = new Job(conf, "WubaNetflowJob");
				job.setInputFormatClass(TrackInputFormat.class);
				job.setJarByClass(WubaNetflowJob.class);
				job.setMapperClass(M1.class);
				job.setReducerClass(R1.class);
				job.setNumReduceTasks(300);
				job.setOutputKeyClass(Text.class);
				job.setOutputValueClass(Text.class);
				job.setMapOutputKeyClass(Text.class);
				job.setMapOutputValueClass(Text.class);

				String outPath = "/home/hdp_lbg_ecdata_dw/resultdata/caods/netflow/track.58.com/" + plat + "/" + runDate;
				List<String> inputList = new ArrayList<String>();
				if ("pc".equals(plat)) {
					inputList.add("/home/hdp_58_common/resultdata/all_site_user_action_etl/pc.track.58.com/" + runDate);
				} else if ("m".equals(plat)) {
					// The mobile run aggregates both the m-site and weixin traffic.
					inputList.add("/home/hdp_58_common/resultdata/all_site_user_action_etl/m.track.58.com/" + runDate);
					inputList.add("/home/hdp_58_common/resultdata/all_site_user_action_etl/weixin.track.58.com/" + runDate);
				}
				// Remove any previous output so the job can be rerun idempotently.
				FileSystem.get(conf).delete(new Path(outPath), true);
				FileOutputFormat.setOutputPath(job, new Path(outPath));

				// Ship dictionary files to every task via DistributedCache; the
				// symlink names must match those read in M1.setup().
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/warehouse/hdp_lbg_ecdata_dw_defaultdb/dict_source_type_pc", "pc_source_dict");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/warehouse/hdp_lbg_ecdata_dw_defaultdb/dict_m_source_qd", "m_source_qd_dict");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/warehouse/hdp_lbg_ecdata_dw_defaultdb/dict_m_source_lm", "m_source_lm_dict");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/warehouse/hdp_lbg_ecdata_dw_defaultdb/dict_m_market_spm", "pcm_source_spm_list");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/warehouse/hdp_lbg_ecdata_dw_defaultdb/dict_disp_cate", "dict_disp_cate");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/resultdata/caods/netflow/dict/cmc_dispcategory_namepath/" + yestoday, "cmc_dispcategory_namepath");
				job = CatchFileTools.addCache(job, conf, "/home/hdp_lbg_ecdata_dw/resultdata/caods/netflow/dict/cmc_displocal_namepath/" + yestoday, "cmc_displocal_namepath");

				for (String ifPath : inputList) {
					System.out.println("###########输入路径#############" + ifPath);
					// Block until the upstream ETL output is complete before submitting.
					CheckFileTools.myJobWait(ifPath, 100, 300000, 30);
					FileInputFormat.addInputPath(job, new Path(ifPath));
				}
				returnCode = job.waitForCompletion(true) ? 0 : 1;
				if (returnCode != 0) {
					// Abort immediately on the first failed (plat, date) run.
					System.exit(returnCode);
				}
				System.out.println("**********************	end	" + plat + "---" + runDate + "+*********************");
			}
		}
		System.exit(returnCode);
	}
}
