package com.hadoopSpark.application.test;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.PKCS8EncodedKeySpec;

import javax.net.ssl.SSLContext;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContexts;
import org.apache.http.util.EntityUtils;
import org.bouncycastle.openssl.PEMKeyPair;
import org.bouncycastle.openssl.PEMParser;

import com.aliyun.configuration.ConfigurationTool;
import com.certificate.tool.SetTrustStore;
import com.common.constant.Constant;

/*
 * Creates a Hadoop application on Aliyun (Alibaba Cloud) Container Service.
 * This demo tests:
 * 1. Passing parameters into the compose-YAML template string.
 * 2. Creating a cluster of 3 nodes (one master, two slaves).
 * 3. TODO: work out how to size the cluster from a user-supplied node count.
 */
public class CreateHadoopAppDemo001 {

	/**
	 * Entry point: builds a 3-node (master + 2 slaves) compose template and
	 * POSTs it as JSON to the Aliyun projects endpoint over mutual-TLS HTTPS.
	 *
	 * Side effects: network I/O against the configured Aliyun URL; results
	 * and status are printed to stdout. Any failure is printed to stderr.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {

		try {
			// SSL socket factory backed by the project trust store (client certificate).
			SSLConnectionSocketFactory sslsf = SetTrustStore.getSSLConnection();

			// try-with-resources: the client was previously never closed (connection leak).
			try (CloseableHttpClient httpClient =
					HttpClients.custom().setSSLSocketFactory(sslsf).build()) {

				HttpPost httpPost = new HttpPost(
						ConfigurationTool.getProperties(Constant.ALIYUN_PROJECTS_URL));
				httpPost.addHeader("Content-type", "application/json;charset=UTF-8");

				// Compose template for one master and two slave Hadoop/Spark containers.
				// This exact form has been verified to be accepted by the API
				// (note: \r\n line separators and two-space YAML indentation are
				// embedded in the JSON string value).
				String composeYamlMode1 =
						"\"master:\\r\\n  image: 'registry.aliyuncs.com/registry_yxkj/hadoop_spark:1.0'\\r\\n  hostname: 'master-hadoop'\\r\\n  ports:\\r\\n    - 80\\r\\n    - 22\\r\\n    - 9000\\r\\n  environment:\\r\\n    - isForSpark=yes\\r\\n    - isForHive=yes\\r\\n  volumes:\\r\\n    - '/dataValue2/master-Hadoop:/usr/local'\\r\\n  restart:  always\\r\\n"
						+ "slave1:\\r\\n  image: 'registry.aliyuncs.com/registry_yxkj/hadoop_spark:1.0'\\r\\n  hostname: 'slave1-hadoop'\\r\\n  ports:\\r\\n    - 80\\r\\n    - 22\\r\\n    - 9000\\r\\n  environment:\\r\\n    - isForSpark=yes\\r\\n    - isForHive=yes\\r\\n  volumes:\\r\\n    - '/dataValue2/slave1-Hadoop:/usr/local'\\r\\n  restart:  always\\r\\n"
						+ "slave2:\\r\\n  image: 'registry.aliyuncs.com/registry_yxkj/hadoop_spark:1.0'\\r\\n  hostname: 'slave2-hadoop'\\r\\n  ports:\\r\\n    - 80\\r\\n    - 22\\r\\n    - 9000\\r\\n  environment:\\r\\n    - isForSpark=yes\\r\\n    - isForHive=yes\\r\\n  volumes:\\r\\n    - '/dataValue2/slave2-Hadoop:/usr/local'\\r\\n  restart:  always\"";

				// JSON request body: application name, description, template, version.
				String requestEntity =
						"{\"name\":\"hadoop8\","
						+ "\"description\":\"This is a test application\","
						+ "\"template\":" + composeYamlMode1 + ","
						+ "\"version\":\"1.0\"}";

				httpPost.setEntity(new StringEntity(requestEntity, StandardCharsets.UTF_8));

				// Send the request to the server.
				HttpResponse httpResponse = httpClient.execute(httpPost);

				int statusCode = httpResponse.getStatusLine().getStatusCode();

				// The server returns a JSON body in both the success and failure cases,
				// so read and print it once instead of duplicating per branch.
				String strResultEntity = EntityUtils.toString(httpResponse.getEntity());
				System.out.println(strResultEntity);

				if (statusCode == 201) {
					System.out.println("SB--------------------> OK");
				} else {
					System.out.println(statusCode);
					System.out.println("SB--------------------> 错啦");
				}
			}

		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
		
		
		
