package com.ehualu.impda.dahua;
/**
 * Dahua vehicle-capture (pass-through) data importer: polls the Dahua REST
 * API, writes records to HDFS and forwards them to Kafka.
 */

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.ehualu.impda.hadoop.KafkaProperties;
import com.ehualu.impda.kafkasecurity.LoginUtilKafkaHuawei;
import com.ehualu.impda.security.LoginUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutionException;


public class Hdfsdahua2 {
	private final static Log LOG = LogFactory.getLog(Hdfsdahua2.class.getName());

	/** Shared producer that forwards each polled batch to Kafka. */
	public static KafkaProducer<String, String> producer;

	public static String topic;

	public static Boolean isAsync;

	// Kafka producer configuration keys, resolved through KafkaProperties.
	public static String bootstrapServers = "bootstrap.servers";

	public static String clientId = "client.id";

	public static String keySerializer = "key.serializer";

	public static String valueSerializer = "value.serializer";

	public static String securityProtocol = "security.protocol";

	public static String saslKerberosServiceName = "sasl.kerberos.service.name";

	public static String kerberosDomainName = "kerberos.domain.name";

	public static int messageNumToSend = 100;

	/**
	 * Keytab file name of the machine-machine account requested by the user.
	 */
	private static final String USER_KEYTAB_FILE = "user.keytab";

	/**
	 * Name of the machine-machine account requested by the user.
	 */
	private static final String USER_PRINCIPAL = "super_rj";
	private static final String STORAGE_POLICY_HOT = "HOT";
	private static String PATH_TO_HDFS_SITE_XML = Hdfsdahua2.class.getClassLoader().getResource("hdfs-site.xml")
			.getPath();
	private static String PATH_TO_CORE_SITE_XML = Hdfsdahua2.class.getClassLoader().getResource("core-site.xml")
			.getPath();

	private static Configuration conf = null;

	private static String PRNCIPAL_NAME = "super_rj";
	private static String PATH_TO_KEYTAB = Hdfsdahua2.class.getClassLoader().getResource("user.keytab").getPath();
	private static String PATH_TO_KRB5_CONF = Hdfsdahua2.class.getClassLoader().getResource("krb5.conf").getPath();

	//private static String PATH_TO_SMALL_SITE_XML = HdfsExample.class.getClassLoader().getResource("smallfs-site.xml")
			//.getPath();

	private FileSystem fSystem; /* HDFS file system */
	private String DEST_PATH;
	private String FILE_NAME;

	/**
	 * Create an HDFS helper bound to one target file.
	 *
	 * @param path     destination directory on HDFS
	 * @param fileName file name inside that directory
	 * @throws IOException if the FileSystem cannot be obtained
	 */
	public Hdfsdahua2(String path, String fileName) throws IOException {
		this.DEST_PATH = path;
		this.FILE_NAME = fileName;
		instanceBuild();
	}

	/**
	 * HDFS operator instance: exercises the full mkdir/write/append/read/
	 * delete/rmdir round trip against DEST_PATH.
	 *
	 * @throws IOException on any HDFS failure
	 */
	public void test() throws IOException {
		// create directory
		mkdir();

		// write file
		write();

		// append file
		append();

		// read file
		read();

		// delete file
		delete();

		// delete directory
		rmdir();
	}

	/**
	 * Build the HDFS FileSystem instance.
	 *
	 * A FileSystem object is normally unique per JVM and thread-safe, so it can
	 * be reused and does not have to be closed immediately. If a thread needs
	 * its own dedicated instance, set
	 * conf.setBoolean("fs.hdfs.impl.disable.cache", true) first so a new
	 * connection is created instead of the cached one — and manage the resource
	 * carefully to avoid a leak.
	 */
	private void instanceBuild() throws IOException {
		fSystem = FileSystem.get(conf);
	}

	/**
	 * delete directory
	 *
	 * @throws IOException
	 */
	private void rmdir() throws IOException {
		Path destPath = new Path(DEST_PATH);
		if (!deletePath(destPath)) {
			LOG.error("failed to delete destPath " + DEST_PATH);
			return;
		}

		LOG.info("success to delete path " + DEST_PATH);

	}

	/**
	 * create directory
	 *
	 * @throws IOException
	 */
	private void mkdir() throws IOException {
		Path destPath = new Path(DEST_PATH);
		if (!createPath(destPath)) {
			LOG.error("failed to create destPath " + DEST_PATH);
			return;
		}

		LOG.info("success to create path " + DEST_PATH);
	}


	/**
	 * create file, write a fixed sample record (UTF-8)
	 *
	 * @throws IOException
	 */
	private void write() throws IOException {
		final String content = "这是一条记录 啊哈哈 hphm001";
		FSDataOutputStream out = null;
		try {
			out = fSystem.create(new Path(DEST_PATH + File.separator + FILE_NAME));
			// Explicit UTF-8: the content contains Chinese characters, so the
			// platform-default charset must not be relied on.
			out.write(content.getBytes(StandardCharsets.UTF_8));
			out.hsync();
			LOG.info("success to write.");
		} finally {
			// make sure the stream is closed finally.
			IOUtils.closeStream(out);
		}
	}


	/**
	 * Create the target file and write the given payload (UTF-8).
	 *
	 * @param js payload to write
	 * @throws IOException on HDFS failure
	 */
	private void writejs(String js) throws IOException {
		FSDataOutputStream out = null;
		try {
			out = fSystem.create(new Path(DEST_PATH + File.separator + FILE_NAME));
			out.write(js.getBytes(StandardCharsets.UTF_8));
			out.hsync();
			LOG.info("success to write.");
		} finally {
			// make sure the stream is closed finally.
			IOUtils.closeStream(out);
		}
	}


	/**
	 * Write an additional chunk to a caller-managed stream.
	 *
	 * NOTE(review): the original implementation ignored the supplied stream,
	 * opened a brand-new one with create() (truncating the file on every call)
	 * and never closed it, leaking a stream per invocation. Writing to the
	 * supplied stream matches the method's apparent intent — repeated writes on
	 * one open stream whose lifecycle the caller owns.
	 *
	 * @param js  payload to write
	 * @param out open output stream owned (and eventually closed) by the caller
	 * @throws IOException on HDFS failure
	 */
	private void writeMore(String js, FSDataOutputStream out) throws IOException {
		out.write(js.getBytes(StandardCharsets.UTF_8));
		out.hsync();
		LOG.info("success to write.");
	}

	/**
	 * append file content
	 *
	 * @throws IOException
	 */
	private void append() throws IOException {
		final String content = "I append this content.";
		FSDataOutputStream out = null;
		try {
			out = fSystem.append(new Path(DEST_PATH + File.separator + FILE_NAME));
			out.write(content.getBytes(StandardCharsets.UTF_8));
			out.hsync();
			LOG.info("success to append.");
		} finally {
			// make sure the stream is closed finally.
			IOUtils.closeStream(out);
		}
	}

	/**
	 * read file and log its full content
	 *
	 * @throws IOException
	 */
	private void read() throws IOException {
		String strPath = DEST_PATH + File.separator + FILE_NAME;
		Path path = new Path(strPath);
		FSDataInputStream in = null;
		BufferedReader reader = null;
		// StringBuilder: single-threaded use, no need for StringBuffer's locking.
		StringBuilder strBuilder = new StringBuilder();

		try {
			in = fSystem.open(path);
			// Decode with the same charset the writers use.
			reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
			String sTempOneLine;

			while ((sTempOneLine = reader.readLine()) != null) {
				strBuilder.append(sTempOneLine);
			}

			LOG.info("result is : " + strBuilder.toString());
			LOG.info("success to read.");

		} finally {
			// make sure the streams are closed finally.
			IOUtils.closeStream(reader);
			IOUtils.closeStream(in);
		}
	}

	/**
	 * delete file
	 *
	 * @throws IOException
	 */
	private void delete() throws IOException {
		Path beDeletedPath = new Path(DEST_PATH + File.separator + FILE_NAME);
		if (fSystem.delete(beDeletedPath, true)) {
			LOG.info("success to delete the file " + DEST_PATH + File.separator + FILE_NAME);
		} else {
			LOG.warn("failed to delete the file " + DEST_PATH + File.separator + FILE_NAME);
		}
	}

	/**
	 * create file path (no-op if it already exists)
	 *
	 * @param filePath directory to create
	 * @return always true (mkdirs result is intentionally not checked here)
	 * @throws IOException
	 */
	private boolean createPath(final Path filePath) throws IOException {
		if (!fSystem.exists(filePath)) {
			fSystem.mkdirs(filePath);
		}
		return true;
	}

	/**
	 * delete file path
	 *
	 * @param filePath path to delete recursively
	 * @return false if the path does not exist, otherwise the delete result
	 * @throws IOException
	 */
	private boolean deletePath(final Path filePath) throws IOException {
		if (!fSystem.exists(filePath)) {
			return false;
		}
		return fSystem.delete(filePath, true);
	}

	/**
	 * Kafka/ZooKeeper Kerberos preparation: point the JVM at the krb5.conf and
	 * keytab under src/main/resources and build the JAAS file.
	 *
	 * @throws IOException if the security files cannot be set up
	 */
	public static void securityPrepare() throws IOException {
		String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
		String krbFile = filePath + "krb5.conf";
		String userKeyTableFile = filePath + USER_KEYTAB_FILE;

		// Escape backslashes so Windows paths survive the JAAS file template.
		userKeyTableFile = userKeyTableFile.replace("\\", "\\\\");
		krbFile = krbFile.replace("\\", "\\\\");

		LoginUtilKafkaHuawei.setKrb5Config(krbFile);
		LoginUtilKafkaHuawei.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
		LoginUtilKafkaHuawei.setJaasFile(USER_PRINCIPAL, userKeyTableFile);
	}

	/**
	 * Entry point.
	 *
	 * Arguments: [0] device IP (required), [1] authorization code (required),
	 * [2] Kafka topic / district name (required), [3] start time (optional,
	 * defaults to now - 3s), [4] end time (optional, defaults to now),
	 * [5] port (optional, defaults to 8088).
	 */
	public static void main(String[] args) throws Exception {
		// Initialization and Kerberos authentication.
		confLoad();
		authentication();
		securityPrepare();
		String ip = args[0];      // device IP, required
		String sqm = args[1];     // authorization code, required
		String topic = args[2];   // topic (city/district name), required
		String startTime = null;  // optional start time
		String endTime = null;    // optional end time
		String dk = null;         // optional port
		// Optional positional arguments. The original per-length for-loops only
		// re-assigned the same values on every iteration; plain length checks
		// are equivalent and also cover argument counts above six.
		if (args.length > 3) {
			startTime = args[3];
		}
		if (args.length > 4) {
			endTime = args[4];
		}
		if (args.length > 5) {
			dk = args[5];
		}
		List<String> list1 = new ArrayList<String>();
		List<String> list3 = new ArrayList<String>();
		Properties props = new Properties();
		KafkaProperties kafkaProc = KafkaProperties.getInstance();
		// Broker address list
		props.put(bootstrapServers, kafkaProc.getValues(bootstrapServers, "37.158.97.137:21005,37.158.97.136:21005"));
		// Client ID
		props.put(clientId, kafkaProc.getValues(clientId, "DemoProducer"));
		// Key serializer.
		// NOTE(review): the default is IntegerSerializer while the producer is
		// typed <String, String>; harmless today because records are sent
		// without a key, but confirm before ever setting one.
		props.put(keySerializer,
				kafkaProc.getValues(keySerializer, "org.apache.kafka.common.serialization.IntegerSerializer"));
		// Value serializer
		props.put(valueSerializer,
				kafkaProc.getValues(valueSerializer, "org.apache.kafka.common.serialization.StringSerializer"));
		// Protocol type: SASL_PLAINTEXT or PLAINTEXT
		props.put(securityProtocol, kafkaProc.getValues(securityProtocol, "SASL_PLAINTEXT"));
		// Service name
		props.put(saslKerberosServiceName, "kafka");
		// Domain name
		props.put(kerberosDomainName, kafkaProc.getValues(kerberosDomainName, "hadoop.hadoop.com"));
		producer = new KafkaProducer<String, String>(props);
		PutHdfs(list1, list3, producer, topic, sqm, ip, startTime, endTime, dk);
	}

	/**
	 * Main polling loop: query the Dahua API for the [startTime, endTime]
	 * window, drop records already seen in the previous poll, append the batch
	 * to HDFS and publish it to Kafka. Runs forever.
	 *
	 * @param list1     working list for the current batch
	 * @param list3     records seen in the previous poll (dedup window)
	 * @param producer  Kafka producer to publish with
	 * @param topic     Kafka topic
	 * @param sqm       authorization code (currently unused in the request)
	 * @param ip        device IP
	 * @param startTime query window start, or null for now - 3s
	 * @param endTime   query window end, or null for now
	 * @param dk        port, or null for 8088
	 */
	private static void PutHdfs(List<String> list1, List<String> list3, KafkaProducer<String, String> producer, String topic, String sqm, String ip, String startTime, String endTime, String dk) throws IOException, ExecutionException, InterruptedException, ParseException {
		while (true) {
			Date dt = new Date();
			SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
			SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd");
			// Default window: the last 3 seconds up to "now".
			if (startTime == null || startTime.equals("") || endTime == null || endTime.equals("")) {
				long timeStamp = System.currentTimeMillis();
				startTime = sdf.format(new Date(timeStamp - 3000));
				endTime = sdf.format(new Date(timeStamp));
			}
			System.out.println(startTime);
			System.out.println(endTime);
			if (dk == null || dk.equals("")) {
				dk = "8088";
			}
			// One file per day per topic.
			String path = "/user/super_rj/hive/textfile/" + topic + "_passinfo/" + sdf1.format(dt);
			Hdfsdahua2 hdfs_examples = new Hdfsdahua2(path, "gcxx");

			// HTTP GET against the Dahua picture-record search API.
			String ISUrl = "http://" + ip + ":" + dk + "/dahuaIS/rest/picrecord/search";
			List<NameValuePair> params = new ArrayList<NameValuePair>();
			String q = "{\"startDate\": \"" + startTime + "\", \n" +
					"\t\"endDate\":\"" + endTime + "\"}";
			params.add(new BasicNameValuePair("q", q));
			String str = EntityUtils.toString(new UrlEncodedFormEntity(params));
			HttpGet httpMethod = new HttpGet(ISUrl + "?" + str);
			RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(60000).setConnectTimeout(60000)
					.setSocketTimeout(60000).build();
			httpMethod.setConfig(requestConfig);
			httpMethod.addHeader("Accept", "application/json;charset=UTF-8");
			httpMethod.addHeader("Content-Type", "application/json;charset=UTF-8");
			httpMethod.addHeader("authorization", "YHLGS");

			// try-with-resources: the original never closed client/response, so
			// this endless loop leaked a connection per iteration.
			String result;
			try (CloseableHttpClient client = HttpClients.custom().build();
					CloseableHttpResponse response = client.execute(httpMethod)) {
				HttpEntity entity = response.getEntity();
				result = EntityUtils.toString(entity, "UTF-8");
			}

			JSONObject json = JSON.parseObject(result);
			String data = json.getString("data");
			// parseObject is static; the original called it through the instance.
			JSONObject json2 = JSON.parseObject(data);
			String rows = json2.getString("rows");
			JSONArray jsonArr = JSONArray.parseArray(rows);
			list1 = GetList(jsonArr);

			// Drop records already delivered in the previous poll. The original
			// used list3.retainAll(list1) plus a manual index-based removal,
			// which (a) skipped deduplication entirely whenever retainAll
			// returned false (list3 fully contained in list1) and (b) skipped
			// elements after each index-based remove. removeAll does the
			// intended set subtraction unconditionally.
			list1.removeAll(list3);

			// Concatenate the batch. The original prepended each line
			// (s = line + s), so the backward loop preserves that output order.
			StringBuilder batch = new StringBuilder();
			for (int i = list1.size() - 1; i >= 0; i--) {
				batch.append(list1.get(i));
			}
			String s = batch.toString();

			hdfs_examples.writeHDFS(s);
			// Publish the same batch to Kafka; .get() waits for the send ack.
			ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, s);
			producer.send(record).get();

			// Reset for the next iteration: recompute the window, remember this
			// batch as the new dedup reference.
			startTime = null;
			endTime = null;
			list3.clear();
			list3.addAll(list1);
			list1.clear();
		}
	}

	/**
	 * Flatten a page of pass-through records into newline-terminated CSV lines.
	 *
	 * @param jsonArr the "rows" array returned by the Dahua search API
	 * @return one formatted line per record, fields in a fixed order
	 */
	private static List<String> GetList(JSONArray jsonArr) {
		List<String> list1 = new ArrayList<String>();
		for (int i = 0; i < jsonArr.size(); i++) {
			// Use the element directly — the original did a redundant
			// toString()/parseObject() round trip per record.
			JSONObject jsStr = jsonArr.getJSONObject(i);
			String id = jsStr.getString("id") + ",";
			String pos = jsStr.getString("devChnId") + ",";
			String posname = jsStr.getString("devChnName") + ",";
			String passtime = jsStr.getString("capDate") + ",";
			String track = jsStr.getString("carWayCode") + ",";
			String platenumber = jsStr.getString("carNum") + ",";
			String platecolor = jsStr.getString("carNumColor") + ",";
			String dirname = jsStr.getString("carDirect") + ",";
			String dir = jsStr.getString("dir") + ",";
			String vehicletype = jsStr.getString("carType") + ",";
			String vehiclecolor = jsStr.getString("carColor") + ",";
			String platetype = jsStr.getString("platetype") + ",";
			String speed = jsStr.getString("carSpeed") + ",";
			String image02 = jsStr.getString("carNumPic") + ",";
			// Check the RAW value before appending the record terminator. The
			// original appended "\n" first, so the null/empty branch could
			// never fire and a missing URL came out as the literal "null".
			String image01 = jsStr.getString("carImgUrl");
			if (image01 == null || image01.equals("")) {
				image01 = "";
			}
			image01 = image01 + "\n";
			list1.add(id + pos + posname + passtime + track + platenumber + platecolor + dirname
					+ dir + vehicletype + vehiclecolor + platetype + speed + image02 + image01);
		}
		return list1;
	}

	/**
	 * Write a batch to DEST_PATH/FILE_NAME on HDFS, appending when the file
	 * already exists (UTF-8).
	 *
	 * @param js formatted record lines
	 * @throws IOException on HDFS failure
	 */
	private void writeHDFS(String js) throws IOException {
		FSDataOutputStream out = null;
		Path path = new Path(DEST_PATH + File.separator + FILE_NAME);
		try {
			if (fSystem.exists(path)) {
				System.out.println("文件已经存在");
				out = fSystem.append(path);
				out.write(js.getBytes(StandardCharsets.UTF_8));
				out.hsync();
				LOG.info("append to write.");
			} else {
				System.out.println("文件不存在");
				out = fSystem.create(path);
				out.write(js.getBytes(StandardCharsets.UTF_8));
				out.hsync();
				LOG.info("success to write.");
			}

		} finally {
			// make sure the stream is closed finally.
			IOUtils.closeStream(out);
		}
	}

	/**
	 * 
	 * Add configuration file if the application run on the linux ,then need
	 * make the path of the core-site.xml and hdfs-site.xml to in the linux
	 * client file
	 * 
	 */
	private static void confLoad() throws IOException {
		System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
		conf = new Configuration();
		// conf file
		conf.addResource(new Path(PATH_TO_HDFS_SITE_XML));
		conf.addResource(new Path(PATH_TO_CORE_SITE_XML));
		// conf.addResource(new Path(PATH_TO_SMALL_SITE_XML));
	}

	/**
	 * kerberos security authentication if the application running on Linux,need
	 * the path of the krb5.conf and keytab to edit to absolute path in Linux.
	 * make the keytab and principal in example to current user's keytab and
	 * username
	 * 
	 */
	private static void authentication() throws IOException {
		// security mode
		if ("kerberos".equalsIgnoreCase(conf.get("hadoop.security.authentication"))) {
			System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
			LoginUtil.login(PRNCIPAL_NAME, PATH_TO_KEYTAB, PATH_TO_KRB5_CONF, conf);
		}
	}

}

