package com.cennavi.opers;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadBase.InvalidContentTypeException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;

import com.cennavi.dbs.ZooKeeperConfigs;
import com.mercator.HttpClientUtil;

public class SandboxExtract extends HttpServlet {

	private static final long serialVersionUID = -1909422936250456023L;

	// 上传配置
	private static final int MEMORY_THRESHOLD = 1024 * 1024 * 3; // 3MB
	private static final int MAX_FILE_SIZE = 1024 * 1024 * 40; // 40MB
	private static final int MAX_REQUEST_SIZE = 1024 * 1024 * 50; // 50MB

	public SandboxExtract() {
		super();
	}

	/**
	 * Destruction of the servlet. <br>
	 */
	public void destroy() {
		super.destroy(); // Just puts "destroy" string in log
		// Put your code here
	}

	/**
	 * The doGet method of the servlet. <br>
	 * 
	 * This method is called when a form has its tag value method equals to get.
	 * 
	 * @param request
	 *            the request send by the client to the server
	 * @param response
	 *            the response send by the server to the client
	 * @throws ServletException
	 *             if an error occurred
	 * @throws IOException
	 *             if an error occurred
	 */
	public void doGet(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {

		doPost(request, response);
	}

	/**
	 * The doPost method of the servlet. <br>
	 * 
	 * This method is called when a form has its tag value method equals to
	 * post.
	 * 
	 * @param request
	 *            the request send by the client to the server
	 * @param response
	 *            the response send by the server to the client
	 * @throws ServletException
	 *             if an error occurred
	 * @throws IOException
	 *             if an error occurred
	 */
	public void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {

		response.setHeader("Access-Control-Allow-Origin", "*");
		response.setHeader("Access-Control-Allow-Methods",
				"POST, GET, OPTIONS, DELETE,PUT");

		PrintWriter out = response.getWriter();

		try {

			StringBuilder sb = new StringBuilder();

//			if (!ServletFileUpload.isMultipartContent(request)) {
//				// 如果不是则停止
//				PrintWriter writer = response.getWriter();
//				writer.println("Error: 表单必须包含 enctype=multipart/form-data");
//				writer.flush();
//				return;
//			}

			// 配置上传参数
			DiskFileItemFactory factory = new DiskFileItemFactory();
			// 设置内存临界值 - 超过后将产生临时文件并存储于临时目录中
			factory.setSizeThreshold(MEMORY_THRESHOLD);
			// 设置临时存储目录
			factory.setRepository(new File(System.getProperty("java.io.tmpdir")));

			ServletFileUpload upload = new ServletFileUpload(factory);

			// 设置最大文件上传值
			upload.setFileSizeMax(MAX_FILE_SIZE);

			// 设置最大请求值 (包含文件和表单数据)
			upload.setSizeMax(MAX_REQUEST_SIZE);

			// 中文处理
			upload.setHeaderEncoding("UTF-8");

			// 构造临时路径来存储上传的文件
			// 这个路径相对当前应用的目录

//			String uploadPath = request.getContextPath() + File.separator
//					+ UPLOAD_DIRECTORY;
//
//			System.out.println(uploadPath);

			// 如果目录不存在则创建
//			File uploadDir = new File(uploadPath);
//			if (!uploadDir.exists()) {
//				uploadDir.mkdir();
//			}
			
			
			
			SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
			String taskId = "task_" + sdf.format(new Date());
			
			boolean isWkt2 = false;
			JSONObject json = null;
			
			List<FileItem> formItems = null;
			try {
				formItems = upload.parseRequest(request);
				
				 if (formItems != null && formItems.size() > 0) {
		                // 迭代表单数据
		                for (FileItem item : formItems) {
		                    // 处理不在表单中的字段
		                    if (!item.isFormField()) {
		                        
		                    	isWkt2 = true;
		                    	
		                        byte[] bs = new byte[1024];
		                        
		                        InputStream in = item.getInputStream();
		                        
		                        int len = 0;
		                        
		                        StringBuilder sb2 = new StringBuilder();
		                        
		                        while((len = in.read(bs))!=-1){
		                        	sb2.append((new String(bs,0,len,"UTF-8")));
		                        }
		                        
		                        String url = "http://172.16.10.142:8080/log-collector/log";
		                        
		                        String jsonStr = "json={\"table\":\"upload_wkts\",\"content\":{\"task_id\":\""+taskId+"\",\"wkt\":\""+sb2.toString()+"\"}}";
		                        
		                        HttpClientUtil.sendPost(url, jsonStr);
		                        
		                        
		                        
		                    }else{
		                    	
		                    	json = JSONObject.fromObject(new String(item.get(),"UTF-8"));
		                    	
		                    }
		                }
		            }else{
		            	throw new Exception("没有传输参数");
		            }
				
			} catch (InvalidContentTypeException e) {
				String jsonStr = request
						.getParameter("request");
				
				jsonStr = new String(jsonStr.getBytes("ISO-8859-1"),"UTF-8");
				
				json = JSONObject.fromObject(jsonStr);
			}
			 
			
			
			
			
           
            
            

			String zooms = json.getString("zooms");
			
			StringBuilder sb2 = new StringBuilder();
			
			String task_name= json.getString("task_name").replaceAll(" ", "");
			
			if (task_name.length()==0) task_name ="blank";
			
			
			String[] splitZooms = zooms.split(",");
			
			int start = Integer.parseInt(splitZooms[0]);
			
			int end = Integer.parseInt(splitZooms[1]);
			
			sb2.append(start);
			
			for(int i=start+1;i<=end;i++){
				sb2.append(",");
				sb2.append(String.valueOf(i));
			}

			String sourceName = json.getString("source_name");

			String path = json.getString("path");

			String version = json.getString("version");

			String areaIds = "no";

			if (json.containsKey("area_ids")
					&& json.getJSONArray("area_ids").size() > 0) {
				areaIds = "";
				
				
				JSONArray areas = json.getJSONArray("area_ids");
				
				if (areas.size()>1){
				
					for (int i = 0; i < areas.size() - 1; i++) {
						areaIds += areas.getString(i) + ",";
					}

					areaIds += areas.getString(areas.size()-1);
				
				}else{
					areaIds += areas.getString(areas.size()-1);
				}
				
				
			}

			String conditions = "no";

			if (json.containsKey("conditions") && !"".equals(json.getString("conditions")))
				conditions = json.getString("conditions");

			String zooUrls = ZooKeeperConfigs.props.getProperty("zoo_urls");

			String names = json.getString("names");
			
			String mainTab = json.getString("main_tab_name");
            
			
			if (sourceName.split(",").length >1){
				sb = new StringBuilder(
						"spark-submit --master yarn --num-executors "
								+ 100
								+ " --executor-memory 16500m --class com.cennavi.engine.define_pbf_2 --name lunch --jars /var/lib/hadoop-hdfs/lilei/tools.jar,/var/lib/hadoop-hdfs/lilei/spark-csv_2.11-1.3.2.jar,/var/lib/hadoop-hdfs/lilei/commons-csv-1.1.jar /var/lib/hadoop-hdfs/lilei/engine-a.jar ");

				sb.append(sb2.toString() + " ");
				sb.append(names + " ");
				sb.append(sourceName + " ");
				sb.append(path + " ");
				sb.append(areaIds + " ");
				sb.append(taskId + " ");
				sb.append(version + " ");
				sb.append(zooUrls + " ");
				sb.append(task_name + " ");

				String wkt1 = "no";
				String wkt2 = "no";
				
				if (isWkt2)
					wkt2 = "yes";
				
				if (json.containsKey("WKT1") && json.getString("WKT1").length()>0){
					wkt1 = json.getString("WKT1");
					wkt2 = "no";
				}
				else
					wkt1 = "no";

				
//				sb.append(wkt1 + " ");
				sb.append(wkt2 + " ");
				
				String action = "no";
				if (json.containsKey("relation") && json.getString("relation").length()>0)
					action = json.getString("relation");
				
				sb.append(action + " ");
				
				String spatial_tabs = "no";
				
				if (json.containsKey("geom_tabs") && json.getString("geom_tabs").length()>0)
					spatial_tabs = json.getString("geom_tabs");
				sb.append(spatial_tabs + " ");
				sb.append(mainTab + " ");
				sb.append(wkt1+"^"+conditions);
				
				System.out.println(sb.toString());
			}else {
				if (isWkt2|| json.containsKey("WKT1") || json.containsKey("relation")){
					sb = new StringBuilder(
							"spark-submit --master yarn --num-executors "
									+ 100
									+ " --executor-memory 16500m --class com.cennavi.engine.define_pbf_2 --name lunch --jars /var/lib/hadoop-hdfs/lilei/tools.jar,/var/lib/hadoop-hdfs/lilei/spark-csv_2.11-1.3.2.jar,/var/lib/hadoop-hdfs/lilei/commons-csv-1.1.jar /var/lib/hadoop-hdfs/lilei/engine-a.jar ");

					sb.append(sb2.toString() + " ");
					sb.append(names + " ");
					sb.append(sourceName + " ");
					sb.append(path + " ");
					sb.append(areaIds + " ");
					sb.append(taskId + " ");
					sb.append(version + " ");
					sb.append(zooUrls + " ");
					sb.append(task_name + " ");

					String wkt1 = "no";
					String wkt2 = "no";
					
					if (isWkt2)
						wkt2 = "yes";
					
					if (json.containsKey("WKT1") && json.getString("WKT1").length()>0){
						wkt1 = json.getString("WKT1");
						wkt2 = "no";
					}
					else
						wkt1 = "no";

					
//					sb.append(wkt1 + " ");
					sb.append(wkt2 + " ");
					
					String action = "no";
					if (json.containsKey("relation") && json.getString("relation").length()>0)
						action = json.getString("relation");
					
					sb.append(action + " ");
					
					String spatial_tabs = "no";
					
					if (json.containsKey("geom_tabs") && json.getString("geom_tabs").length()>0)
						spatial_tabs = json.getString("geom_tabs");
					sb.append(spatial_tabs + " ");
					sb.append(mainTab + " ");
					sb.append(wkt1+"^"+conditions);
					
					System.out.println(sb.toString());
				}else{
					if (names.split(",").length>1){
						sb = new StringBuilder(
								"spark-submit --master yarn --num-executors "
										+ 100
										+ " --executor-memory 16500m --class com.cennavi.engine.define_pbf --name lunch --jars /var/lib/hadoop-hdfs/lilei/tools.jar,/var/lib/hadoop-hdfs/lilei/spark-csv_2.11-1.3.2.jar,/var/lib/hadoop-hdfs/lilei/commons-csv-1.1.jar /var/lib/hadoop-hdfs/lilei/engine-a.jar ");

						sb.append(sb2.toString() + " ");
						sb.append(names + " ");
						sb.append(sourceName + " ");
						sb.append(path + " ");
						sb.append(areaIds + " ");
						sb.append(taskId + " ");
						sb.append(version + " ");
						sb.append(zooUrls + " ");
						sb.append(task_name + " ");
						sb.append(conditions);
						
						System.out.println(sb.toString());
					}else{
					
					sb = new StringBuilder(
							"spark-submit --master yarn --num-executors "
									+ 100
									+ " --executor-memory 16500m --class com.cennavi.engine.csv2pbf --name lunch --jars /var/lib/hadoop-hdfs/lilei/tools.jar,/var/lib/hadoop-hdfs/lilei/spark-csv_2.11-1.3.2.jar,/var/lib/hadoop-hdfs/lilei/commons-csv-1.1.jar /var/lib/hadoop-hdfs/lilei/engine-a.jar ");

					sb.append(sourceName + " ");
					sb.append(path + " ");
					sb.append(zooUrls + " ");
					sb.append(sb2.toString() + " ");
					
					sb.append(areaIds + " ");

					sb.append(taskId + " ");

					sb.append(version + " ");
					sb.append(task_name + " ");
					sb.append(conditions);
					
					}
				}
			}
			
			
			

			final String cmd = sb.toString();

			scala.io.Source.fromURL(
					"http://10.20.20.11:8080/maven-web/opers?param="
							+ URLEncoder.encode(cmd, "UTF-8"), "UTF-8");

			out.println("{\"error_code\":0,\"error_msg\":\"\",\"app_id\":\""
					+ taskId + "\"}");

		} catch (Exception e) {

			e.printStackTrace();

			out.println("{\"error_code\":-1,\"error_msg\":\"" + e.getMessage()
					+ "\"}");
		} finally {
			out.flush();

			out.close();
		}

	}

	/**
	 * Initialization of the servlet. <br>
	 * 
	 * @throws ServletException
	 *             if an error occurs
	 */
	public void init() throws ServletException {
		// Put your code here
	}

}
