package org.gthelper.imageProcess;

import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URI;
//import java.util.UUID;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.gthelper.database.FileInfo;
import org.gthelper.database.FileStatus;
import org.gthelper.error.ErrorCode;
import org.gthelper.hdfs.HdfsMA;
import org.gthelper.hdpWritable.HdpRenderedImageWritable;
import org.gthelper.order.OrderInfo;
import org.gthelper.server.SysInfo;

/**
 * Applies a mean (averaging) filter to an image stored on HDFS.
 * Contains nested {@code Mapper} and {@code Reducer} classes so the
 * filtering runs inside a Hadoop MapReduce job.
 * @author pig
 */
public class MeanFilter extends ImageProcess {
	
	
	public static class MeanMap extends Mapper<Text, HdpRenderedImageWritable, Text, HdpRenderedImageWritable>
    {
        //private Text word = new Text();
        //private HdpRasterWritable wr=new HdpRasterWritable();
        public void map(Text key, HdpRenderedImageWritable value, Context context)
        {
            //wr.set(value.name, value.maxX, value.maxY, value.minX, value.minY, value.wr);
            //word.set(key);
        	Configuration conf = context.getConfiguration();
        	int residule = Integer.parseInt(conf.get("residule"));
        	String path = conf.get("savepath");
        	String newName = conf.get("newname");
        	HdpRenderedImageWritable res = MeanFilterTreater.image_MeanFilter(value, residule);
        	res.name=new Text(newName);
        	try {
        		mkdirs(conf,path);
				saveSeqFile(conf,path+"/"+newName+"_"+res.number_x+"_"+res.number_y+".seq",res.name,res);
			} catch (IOException e) {
				// TODO 自动生成的 catch 块
				e.printStackTrace();
			}
        	
//            try {
//            	
//            	
//				context.write(key,res);
//			} 
//            catch (IOException | InterruptedException e)
//            {
//                e.printStackTrace();
//            }
        }
        
        
        private void saveSeqFile(Configuration conf,String path,Text key,HdpRenderedImageWritable val) throws IOException
    	{

    		FileSystem fs = FileSystem.get(URI.create(path), conf);
    		Path hdfsPath = new Path(path);
    		@SuppressWarnings("deprecation")
    		SequenceFile.Writer writer = SequenceFile.createWriter(fs,conf,hdfsPath,Text.class,HdpRenderedImageWritable.class);
    		writer.append(key, val);
//    		writer.hflush();
    		IOUtils.closeStream(writer);
    	}
        
        private boolean mkdirs(Configuration conf,String path) throws IOException
    	{
    		FileSystem fs = FileSystem.get(URI.create(path), conf);
    		Path hdfsPath = new Path(path);
    		return fs.mkdirs(hdfsPath);
    	}
        
    }
     
    public static class MeanReduce extends Reducer<Text, HdpRenderedImageWritable, Text, HdpRenderedImageWritable>
    {
//    	/** 
//         * 设置多个文件输出 
//         * */  
//    	private MultipleOutputs<Text,HdpRenderedImageWritable> mos;  
//          
//        @Override  
//        protected void setup(Context context)
//        		throws IOException, InterruptedException {
//        	mos=new MultipleOutputs<Text,HdpRenderedImageWritable>(context);//初始化mos  
//        } 
        
        public void reduce(Text key, Iterable<HdpRenderedImageWritable> values, Context context) 
        		throws IOException, InterruptedException
        {
        	
//            for (HdpRenderedImageWritable val : values)
//            {
////            	HdpRenderedImageWritable tmp = new HdpRenderedImageWritable();
////            	tmp.set(val.name, val.number_x, val.number_y, val.position_x, val.position_y, val.sri);
//            	mos.write("seq", key, val, val.name.toString()+"_"+val.number_x+"_"+val.number_y+".seq");
////              context.write(key, val);
//            }
        }
        
        
    }
    
    

	/**
	 * Runs the mean-filter MapReduce job over the given user's file, then
	 * registers the result file in the database and acknowledges the client.
	 *
	 * @param orderinfo order context; a long {@code 1} is written to its
	 *                  {@code mainOutput} stream on success
	 * @param userid    owner of the input file on HDFS
	 * @param filename  name of the input file on HDFS
	 * @return an {@link ErrorCode} constant describing the outcome
	 */
	@Override
	public int doProcess(OrderInfo orderinfo, String userid, String filename) {
		// Create a unique name for the result file.
		String out = getOutputName(userid, filename);
		if (out == null) return ErrorCode.FROMHDFS;

		FileInfo fileinfo;
		try {
			fileinfo = FileInfo.getfileInfobyuserIDandfilename(userid, filename);
		} catch (Exception e1) {
			e1.printStackTrace();
			return ErrorCode.DATABASEERR;
		}
		if (fileinfo == null) return ErrorCode.FILENOTEXIST;

		// Mark the source file busy while the job runs.
		fileinfo.setstatus(FileStatus.processing);
		fileinfo.updatefilelist();

		// Configure the job; MeanMap reads the three custom keys below.
		Configuration conf = new Configuration();
		conf.set("residule", String.valueOf(SysInfo.residule));
		conf.set("fs.default.name", SysInfo.hdpFsDefault);   // NOTE(review): deprecated key (fs.defaultFS) — kept for cluster compatibility
		conf.set("mapred.job.tracker", SysInfo.hdpTracker);  // NOTE(review): deprecated MRv1 key — kept for cluster compatibility
		conf.set("savepath", HdfsMA.getUrl() + "/" + userid + "/" + out);
		conf.set("newname", out);

		Job job;
		try {
			job = Job.getInstance(conf);
		} catch (IOException e) {
			e.printStackTrace();
			return releaseAndFail(fileinfo);
		}

		job.setJarByClass(MeanFilter.class);
		job.setMapperClass(MeanMap.class);
		job.setReducerClass(MeanReduce.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(HdpRenderedImageWritable.class);

		try {
			FileInputFormat.addInputPath(job, new Path(HdfsMA.getUrl() + "/" + userid + "/" + filename));
			FileOutputFormat.setOutputPath(job, new Path(HdfsMA.getUrl() + "/" + userid + "/" + out));
		} catch (IllegalArgumentException | IOException e) {
			e.printStackTrace();
			return releaseAndFail(fileinfo);
		}

		try {
			// Previously the boolean result was ignored, so a failed job was
			// still registered as a successful output file.
			if (!job.waitForCompletion(false)) {
				return releaseAndFail(fileinfo);
			}
		} catch (ClassNotFoundException | IOException | InterruptedException e) {
			if (e instanceof InterruptedException) {
				Thread.currentThread().interrupt(); // preserve the interrupt flag
			}
			e.printStackTrace();
			return releaseAndFail(fileinfo);
		}

		fileinfo.setstatus(FileStatus.normal);
		fileinfo.updatefilelist();

		// Register the result file in the database.
		FileInfo.insertnewfileinfo(userid, out, FileStatus.creating);
		FileInfo outfileinfo;
		try {
			outfileinfo = FileInfo.getfileInfobyuserIDandfilename(userid, out);
		} catch (Exception e1) {
			e1.printStackTrace();
			return ErrorCode.DATABASEERR;
		}
		// Was an unchecked NPE before when the insert/lookup failed.
		if (outfileinfo == null) return ErrorCode.DATABASEERR;

		// The filtered image inherits the source file's geo metadata.
		outfileinfo.setCRS_decode(fileinfo.getCRS_decode());
		outfileinfo.setenv_max_x(fileinfo.getenv_max_x());
		outfileinfo.setenv_max_y(fileinfo.getenv_max_y());
		outfileinfo.setenv_min_x(fileinfo.getenv_min_x());
		outfileinfo.setenv_min_y(fileinfo.getenv_min_y());
		outfileinfo.setpixelsize_x(fileinfo.getpixelsize_x());
		outfileinfo.setpixelsize_y(fileinfo.getpixelsize_y());
		outfileinfo.setresidule(fileinfo.getresidule());
		outfileinfo.setsplit_size(fileinfo.getsplit_size());
		outfileinfo.setsplit_num_x(fileinfo.getsplit_num_x());
		outfileinfo.setsplit_num_y(fileinfo.getsplit_num_y());
		outfileinfo.setstatus(FileStatus.normal);
		outfileinfo.updatefilelist();

		// Acknowledge success to the client.
		DataOutputStream dos = new DataOutputStream(orderinfo.mainOutput);
		try {
			dos.writeLong(1);
			dos.flush(); // make sure the acknowledgement actually leaves the buffer
		} catch (IOException e) {
			e.printStackTrace();
		}

		return ErrorCode.NORMAL;
	}

	/**
	 * Restores the source file's status to {@code normal} after a failed job
	 * and returns the HDFS error code. Factored out of the four identical
	 * failure paths above.
	 */
	private static int releaseAndFail(FileInfo fileinfo) {
		fileinfo.setstatus(FileStatus.normal);
		fileinfo.updatefilelist();
		return ErrorCode.FROMHDFS;
	}



	/**
	 * Builds a result-file name that does not yet exist in the user's HDFS
	 * directory by appending "-meanfilter" (plus as many "1"s as needed).
	 *
	 * @return the free name, or {@code null} if HDFS could not be queried
	 */
	private String getOutputName(String userid, String filename) {
		HdfsMA ma = new HdfsMA();
		StringBuilder postfix = new StringBuilder("-meanfilter");
		try {
			// Grow the suffix until the candidate path is unused.
			while (ma.isExist("/" + userid + "/" + filename + postfix)) {
				postfix.append('1');
			}
		} catch (IOException e) {
			e.printStackTrace();
			return null; // caller maps null to ErrorCode.FROMHDFS
		}
		return filename + postfix;
	}

}
