package com.hadoopSpark.application.test;

import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

import com.aliyun.configuration.ConfigurationTool;
import com.certificate.tool.SetTrustStore;
import com.common.constant.Constant;

/* 2016.9.6
 * 删除一个已经停止的hadoop应用，好像是，必须是停止哦
 */
/* 2016.9.6
 * Deletes a Hadoop application through the Aliyun REST API.
 * Apparently the application must already be stopped before it can be deleted.
 */
public class DeleteApp {

	public static void main(String[] args) {
		// Guard against missing argument — the original threw
		// ArrayIndexOutOfBoundsException when invoked with no args.
		if (args.length < 1) {
			System.out.println("Usage: DeleteApp <applicationName>");
			return;
		}
		delApplicaiton(args[0]);
	}

	/**
	 * Sends an HTTPS DELETE request for the named application.
	 *
	 * <p>The target URL is the configured Aliyun projects URL with the
	 * application name appended. TLS trust is taken from the project
	 * trust store via {@code SetTrustStore.getSSLConnection()}.
	 *
	 * <p>NOTE: the method name keeps the original (misspelled) form
	 * {@code delApplicaiton} for backward compatibility with callers.
	 *
	 * @param deleAppName name of the application to delete (appended verbatim
	 *                    to the projects URL)
	 * @return {@code "200"} when the server answered HTTP 200, otherwise
	 *         {@code "400"} (also used when any exception occurred)
	 */
	public static String delApplicaiton(String deleAppName) {
		String result = "400";
		try {
			SSLConnectionSocketFactory sslsf = SetTrustStore.getSSLConnection();
			// try-with-resources: the original leaked the client — CloseableHttpClient
			// implements Closeable and must be closed to release its connection pool.
			try (CloseableHttpClient httpClient =
					HttpClients.custom().setSSLSocketFactory(sslsf).build()) {

				HttpDelete httpDelete = new HttpDelete(
						ConfigurationTool.getProperties(Constant.ALIYUN_PROJECTS_URL) + deleAppName);

				// Execute the DELETE against the server.
				HttpResponse httpResponse = httpClient.execute(httpDelete);

				if (httpResponse.getStatusLine().getStatusCode() == 200) {
					result = "200";
					System.out.println("200");
				} else {
					System.out.println("400");
				}
			}
		} catch (Exception e) {
			// Best-effort tool: report the failure and fall through to "400".
			e.printStackTrace();
		}

		return result;
	}
}
		
	
		
		
		
		
		
	
	
	
	
	
