package org.gthelper.handler;


//import java.awt.Point;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.Raster;
//import java.awt.image.Raster;
//import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.util.UUID;

import javax.imageio.ImageIO;
import javax.media.jai.remote.SerializableRenderedImage;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.gthelper.data.DataGeter;
import org.gthelper.database.FileInfo;
import org.gthelper.database.FileStatus;
import org.gthelper.error.ErrorCode;
import org.gthelper.hdfs.HdfsMA;
import org.gthelper.hdpWritable.HdpRenderedImageWritable;
import org.gthelper.imageProcess.PreviewGeter;
import org.gthelper.order.OrderInfo;
import org.gthelper.server.SysInfo;

/**
 * Generates scaled preview images for stored files.
 * @author pig
 * Contains inner classes extending Mapper and Reducer so the image
 * processing can run inside a Hadoop MapReduce job.
 */
public class PreView extends Handler
{

	/**
	 * Mapper that shrinks each incoming image tile to the configured preview
	 * size and writes the resulting PNG directly to HDFS. Nothing is emitted
	 * through the MapReduce framework itself; see {@link PreviewReduce},
	 * which is an intentional no-op.
	 */
	public static class PreviewMap extends Mapper<Text, HdpRenderedImageWritable, Text, HdpRenderedImageWritable>
	{
		@Override
		public void map(Text key, HdpRenderedImageWritable value, Context context)
		{
			// Job parameters supplied by PreView.handle() via the Configuration.
			Configuration conf = context.getConfiguration();
			int splitsize = Integer.parseInt(conf.get("splitsize"), 10);
			int previewsize = Integer.parseInt(conf.get("previewsize"), 10);
			String resname = conf.get("resname");
			String respath = conf.get("respath");

			// Target dimensions: scale the tile by previewsize/splitsize.
			double scale = (double) previewsize / (double) splitsize;
			int reH = (int) (value.sri.getHeight() * scale);
			int reW = (int) (value.sri.getWidth() * scale);

			// Wrap the serialized image's raster in a BufferedImage so
			// PreviewGeter can transform it.
			Raster rst = value.sri.getData();
			WritableRaster sourceRaster =
					Raster.createWritableRaster(rst.getSampleModel(), rst.getDataBuffer(), null);
			ColorModel sourceColorModel = value.sri.getColorModel();
			BufferedImage sourceImage =
					new BufferedImage(sourceColorModel, sourceRaster, false, null);

			// Build the scaled preview image.
			PreviewGeter pg = new PreviewGeter();
			SerializableRenderedImage preview =
					new SerializableRenderedImage(pg.transformtopng(sourceImage, reW, reH), true);

			// Write the preview straight to HDFS. Bug fix: the previous code
			// wrote the unscaled source image and discarded the computed
			// preview; the scaled image is what "previewsize" requests.
			// try-with-resources closes the stream even if the write fails.
			try (FSDataOutputStream fdos = getOutputStream(conf, respath + resname)) {
				ImageIO.write(preview, "png", fdos);
				fdos.flush();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

		/**
		 * Opens (creating or overwriting) an HDFS file for writing.
		 *
		 * @param conf Hadoop configuration used to resolve the FileSystem
		 * @param path fully-qualified HDFS path of the file to create
		 * @return an open output stream; the caller must close it
		 * @throws IOException if the FileSystem is unreachable or creation fails
		 */
		private FSDataOutputStream getOutputStream(Configuration conf, String path) throws IOException
		{
			FileSystem fs = FileSystem.get(URI.create(path), conf);
			return fs.create(new Path(path));
		}
	}

	/**
	 * Intentional no-op reducer: the mapper persists its result to HDFS
	 * itself, so there is nothing left to aggregate here.
	 */
	public static class PreviewReduce extends Reducer<Text, HdpRenderedImageWritable, Text, HdpRenderedImageWritable>
	{
		@Override
		public void reduce(Text key, Iterable<HdpRenderedImageWritable> values, Context context)
				throws IOException, InterruptedException
		{
			// Deliberately empty; see class comment.
		}
	}

	/**
	 * Handles a preview request: reads the user id and file name from the
	 * order stream, runs a MapReduce job that renders the preview PNG on
	 * HDFS, streams the PNG back to the client, and removes the temporary
	 * files afterwards.
	 *
	 * @param orderinfo request context carrying the order input stream and
	 *                  the client-facing output stream
	 * @return an {@link ErrorCode} constant describing the outcome
	 */
	@Override
	public int handle(OrderInfo orderinfo) {
		// --- read user id and file name from the order stream ---
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		DataGeter dg = new DataGeter();
		int errcode = dg.getData(orderinfo.orderStream, baos);
		if (errcode != ErrorCode.NORMAL) return errcode;
		byte[] userid = baos.toByteArray();
		baos.reset();

		errcode = dg.getData(orderinfo.orderStream, baos);
		if (errcode != ErrorCode.NORMAL) return errcode;
		byte[] filename = baos.toByteArray();
		baos.reset();

		String usrid;
		String fileName;
		try {
			usrid = new String(userid, "UTF-8");
			fileName = new String(filename, "UTF-8");
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
			return ErrorCode.UNKNOWN;
		}

		// Request name layout: <base[_more]>_<x>_<y>_<level>, i.e. at least
		// four '_'-separated fields. Guard against short names, which would
		// otherwise index tmp1 out of bounds below.
		String[] tmp1 = fileName.split("_");
		if (tmp1.length < 4) return ErrorCode.WRONGNAME;
		int count = tmp1.length - 4;
		// Reassemble the base file name (it may itself contain '_').
		StringBuilder baseName = new StringBuilder(tmp1[0]);
		for (int i = 0; i < count; ++i) {
			baseName.append('_').append(tmp1[i + 1]);
		}
		fileName = baseName.toString();

		// Reject path separators so the name cannot escape the user's directory.
		if (fileName.indexOf('/') > -1) return ErrorCode.WRONGNAME;
		if (fileName.indexOf('\\') > -1) return ErrorCode.WRONGNAME;

		FileInfo fileinfo;
		try {
			fileinfo = FileInfo.getfileInfobyuserIDandfilename(usrid, fileName);
		} catch (Exception e) {
			e.printStackTrace();
			return ErrorCode.DATABASEERR;
		}
		if (fileinfo.getstatus() < 0) return ErrorCode.FILENOTEXIST;
		else if (fileinfo.getstatus() == FileStatus.deleting) return ErrorCode.DELETING;
		fileinfo.setstatus(FileStatus.processing);
		fileinfo.updatefilelist();

		String srcfilename = "/" + fileName + "_" + tmp1[count + 1] + "_" + tmp1[count + 2];
		String resultfilename = "/" + UUID.randomUUID().toString();
		String srcpath = "/" + usrid + "/" + fileName;
		String resultpath = "/" + usrid + "/" + fileName + "_" + tmp1[count + 3];
		HdfsMA ma = new HdfsMA();
		try {
			ma.mkdirs(resultpath);
			if (!ma.isExist(srcpath + srcfilename + ".seq")) {
				// Bug fix: restore the status before bailing out, otherwise
				// the file stays stuck in "processing".
				restoreStatus(fileinfo);
				return ErrorCode.FILENOTEXIST;
			}
			// Pick a result name that does not collide with an existing file.
			// Bug fix: the regenerated name must NOT already contain ".png" —
			// ".png" is appended everywhere the name is used, so the old code
			// produced "<uuid>.png.png" and checked/downloaded the wrong file.
			while (ma.isExist(resultpath + resultfilename + ".png")) {
				resultfilename = "/" + UUID.randomUUID().toString();
			}
		} catch (IOException e) {
			e.printStackTrace();
			restoreStatus(fileinfo);
			return ErrorCode.FROMHDFS;
		}

		// --- configure and run the MapReduce job ---
		int level;
		try {
			level = Integer.parseInt(tmp1[count + 3], 10);
		} catch (NumberFormatException e) {
			// Non-numeric level field in the request name.
			restoreStatus(fileinfo);
			return ErrorCode.WRONGNAME;
		}
		int size = getPreviewSize(level);
		Configuration conf = new Configuration();
		conf.set("fs.default.name", SysInfo.hdpFsDefault);
		conf.set("mapred.job.tracker", SysInfo.hdpTracker);
		conf.set("splitsize", String.valueOf(fileinfo.getsplit_size()));
		conf.set("previewsize", String.valueOf(size));
		conf.set("resname", resultfilename + ".png");
		conf.set("respath", HdfsMA.getUrl() + resultpath);

		Job job;
		try {
			job = Job.getInstance(conf);
		} catch (IOException e) {
			e.printStackTrace();
			restoreStatus(fileinfo);
			return ErrorCode.FROMHDFS;
		}

		job.setJarByClass(PreView.class);

		job.setMapperClass(PreviewMap.class);
		job.setReducerClass(PreviewReduce.class);

		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		job.setInputFormatClass(SequenceFileInputFormat.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(HdpRenderedImageWritable.class);

		try {
			FileInputFormat.addInputPath(job, new Path(HdfsMA.getUrl() + srcpath + srcfilename + ".seq"));
			FileOutputFormat.setOutputPath(job, new Path(HdfsMA.getUrl() + resultpath + resultfilename));
		} catch (IllegalArgumentException | IOException e) {
			e.printStackTrace();
			restoreStatus(fileinfo);
			return ErrorCode.FROMHDFS;
		}

		try {
			// Bug fix: check the job's success flag; the old code ignored it
			// and tried to download a PNG that a failed job never produced.
			if (!job.waitForCompletion(false)) {
				restoreStatus(fileinfo);
				return ErrorCode.FROMHDFS;
			}
		} catch (ClassNotFoundException | IOException | InterruptedException e) {
			e.printStackTrace();
			restoreStatus(fileinfo);
			return ErrorCode.FROMHDFS;
		}

		restoreStatus(fileinfo);

		// --- stream the PNG back to the client, then clean up temp files ---
		DataOutputStream dos = new DataOutputStream(orderinfo.mainOutput);
		try {
			dos.writeLong(ma.getLength(resultpath + resultfilename + ".png"));
			ma.downloadFromHDFS(resultpath + resultfilename + ".png", dos, 819200);
			dos.close();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// Remove the job output directory and the generated PNG.
			try {
				ma.delete(resultpath + resultfilename);
				ma.delete(resultpath + resultfilename + ".png");
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

		return ErrorCode.NORMAL;
	}

	/**
	 * Resets a file back to {@link FileStatus#normal} and persists the change;
	 * called after processing finishes or on any failure path.
	 */
	private static void restoreStatus(FileInfo fileinfo) {
		fileinfo.setstatus(FileStatus.normal);
		fileinfo.updatefilelist();
	}

	/**
	 * Maps a preview level (0-4) to the preview's edge length in pixels.
	 * Unknown levels fall back to the smallest size.
	 *
	 * @param level requested preview level
	 * @return preview size in pixels
	 */
	private int getPreviewSize(int level)
	{
		switch (level)
		{
		case 1:
			return 350;
		case 2:
			return 500;
		case 3:
			return 750;
		case 4:
			return 1000;
		case 0:
		default:
			return 200;
		}
	}

}