package com.flute.icrawler.app.processor.fetch;

import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.Locale;
import java.util.Random;
import java.util.jar.JarFile;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.NoHttpResponseException;
import org.apache.http.ParseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CookieStore;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.client.params.HttpClientParams;
import org.apache.http.client.protocol.ClientContext;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.impl.client.BasicCookieStore;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.flute.icrawler.app.entity.CrawlResult;
import com.flute.icrawler.app.processor.AbstractProcessor;
import com.flute.icrawler.app.service.CookieService;
import com.flute.icrawler.app.service.CrawlService;
import com.flute.icrawler.app.util.ParameterKey;
import com.flute.icrawler.app.util.TextUtil;
import com.flute.icrawler.app.util.XStreamUtil;
import com.flute.icrawler.framework.autoadjust.FetchAdjuster.FetchStatus;
import com.flute.icrawler.framework.framework.CrawlUrl;
import com.flute.icrawler.framework.framework.container.AdditionalUrlInformation;
import com.flute.icrawler.framework.processor.result.AddUrlProcessorResult;
import com.flute.icrawler.framework.processor.result.FailProcessorResult;
import com.flute.icrawler.framework.processor.result.NotificationProcessorResult;
import com.flute.icrawler.framework.processor.result.ResultParameter;
import com.flute.icrawler.framework.processor.result.RetryProcessorResult;
import com.flute.icrawler.framework.processor.result.SuccessProcessorResult;

public class FetchHttp extends AbstractProcessor {

	// "Connection" general header: tells the server whether the socket stays
	// open after the response, e.g. "Connection: close" or
	// "Connection: keep-alive".

	private static final Header HEADER_CONNECTION_CLOSE = new BasicHeader(
			"Connection", "close");

	// Currently unused; kept so persistent connections can be re-enabled
	// (see the commented-out addHeader call in configureHttpGet).
	private static final Header HEADER_CONNECTION_ALIVE = new BasicHeader(
			"Connection", "Keep-Alive");

	private static final String REFERER = "Referer";

	// Fixed Referer header attached to every outgoing request.
	private static final Header HEADER_REFER_GOOGLE = new BasicHeader(REFERER,
			"http://www.google.com");
	/**
	 * Maximum page size: 2MB. Larger (or unknown, -1) lengths are only
	 * logged in handleOK; the body is still downloaded in full.
	 */
	private static long DEFAULT_MAX_LENGTH_BYTES = 2 * 1024 * 1024L;
	/**
	 * Default character encoding to use for pages that do not specify.
	 * NOTE(review): not referenced anywhere in this class - confirm it is
	 * still needed.
	 */
	private static String DEFAULT_CONTENT_CHARSET = "GBK";

	private static final String HEAD_NAME_SET_COOKIE = "Set-Cookie";
	private static final String HEAD_NAME_COOKIE = "Cookie";
	// private static final String RANGE = "Range";
	// private static final String RANGE_PREFIX = "bytes=0-";
	private static final String HTTP_SCHEME = "http";
	private static final String HTTPS_SCHEME = "https";

	private static final Logger LOGGER = LoggerFactory
			.getLogger(FetchHttp.class);

	// Re-obtained on every process() call; transient so serialization skips it.
	private transient HttpClient httpClient = null;

	// The public mutable fields below appear to be configuration knobs
	// populated externally via reflection/XStream (main() dumps them with
	// getFields() and round-trips through XStreamUtil) - TODO confirm.

	// Cookie strategy: 0 = no cookie, 1 = CookieService lookup, any other
	// value = random "bid" cookie (see configureHttpGet).
	public int intelligentCookieType = 0;

	// "1.0" forces HTTP/1.0; anything else (including empty) means HTTP/1.1.
	public String httpVersion = "";

	// Request content charset; defaults to "UTF-8" when empty (getHttpParams).
	public String contentCharset = "";

	// User-Agent header; defaults to an IE8-on-Win7 UA string when empty.
	public String userAgent = "";

	// Connect timeout, milliseconds.
	public int connectionTimeout = 1000;

	// Socket (read) timeout, milliseconds.
	public int soTimeout = 1000;

	// Whether HttpClient follows redirects itself; 301/302 are handled
	// manually in executeHttp when false.
	public boolean isRedirecting = false;

	// Connection-pool cap passed to ConnManagerParams.
	public int maxTotalConnections = 100;

	/**
	 * Fetches a single crawl URL over HTTP and translates transport errors
	 * into processor results: timeouts and resets are retried (up to 3
	 * attempts), everything else is registered as a failure.
	 *
	 * @param crawlUrl the URL to fetch; processor results are registered on it
	 */
	@Override
	public void process(CrawlUrl crawlUrl) {

		LOGGER.debug("HttpFetchAlgo:{}", crawlUrl.getUrl());
		CrawlResult result = (CrawlResult) crawlUrl.getResultParameter(
				ParameterKey.RESULT_CRAWL).getValue();

		try {
			// Resolve the host against the local DNS cache; may rewrite the
			// URL host to an IP inside the CrawlResult.
			checkDNS(crawlUrl);

			httpClient = CrawlService.getInstance().getThreadSafeHttpClient(
					getHttpParams());

			executeHttp(httpClient, crawlUrl);

		} catch (SocketTimeoutException e) {

			LOGGER.error("HttpFetchAlgo:{}", "SocketTimeoutException->"
					+ crawlUrl.getUrl());
			result.setFetchStatus(FetchStatus.TimeOut);
			registerRetryUrlHandle(e.getLocalizedMessage(), crawlUrl);
			crawlUrl.registerProcessorResult(new NotificationProcessorResult());
		} catch (ConnectTimeoutException e) {

			LOGGER.error("HttpFetchAlgo:{}", "ConnectTimeoutException->"
					+ crawlUrl.getUrl());
			result.setFetchStatus(FetchStatus.TimeOut);
			registerRetryUrlHandle(e.getLocalizedMessage(), crawlUrl);
			crawlUrl.registerProcessorResult(new NotificationProcessorResult());
		} catch (SocketException e) {
			// Null-safe comparison: getLocalizedMessage() may return null,
			// which previously triggered an NPE inside this catch block.
			String message = e.getLocalizedMessage();
			if ("Connection reset".equalsIgnoreCase(message)) {
				registerRetryUrlHandle(message, crawlUrl);
				result.setFetchStatus(FetchStatus.TimeOut);
				crawlUrl
						.registerProcessorResult(new NotificationProcessorResult());
				LOGGER.error("HttpFetchAlgo:{}", "Connection reset->"
						+ crawlUrl.getUrl());
			} else {
				// Previously swallowed silently, leaving the URL with no
				// registered result at all; record an explicit failure.
				crawlUrl.registerProcessorResult(new FailProcessorResult());
				LOGGER.error("HttpFetchAlgo:", e);
				CrawlService.getInstance().logFailResult(
						"SocketException=" + crawlUrl.getUrl());
			}
		} catch (UnknownHostException e) {
			registerRetryUrlHandle(e.getLocalizedMessage(), crawlUrl);
			result.setFetchStatus(FetchStatus.UnknowHostException);
			crawlUrl.registerProcessorResult(new NotificationProcessorResult());

			LOGGER.error("HttpFetchAlgo:{}", "UnknownHostException->"
					+ crawlUrl.getUrl());
		} catch (NoHttpResponseException e) {
			crawlUrl.registerProcessorResult(new FailProcessorResult());
			LOGGER.error("HttpFetchAlgo:{}", "NoHttpResponseException->"
					+ crawlUrl.getUrl());
			CrawlService.getInstance().logFailResult(
					"NoHttpResponseException=" + crawlUrl.getUrl());
		} catch (IOException e) {

			crawlUrl.registerProcessorResult(new FailProcessorResult());
			LOGGER.error("HttpFetchAlgo:", e);
			CrawlService.getInstance().logFailResult(
					"IOException=" + crawlUrl.getUrl());
		} catch (ArrayIndexOutOfBoundsException e) {

			crawlUrl.registerProcessorResult(new FailProcessorResult());

			CrawlService.getInstance().logFailResult(
					"ArrayIndexOutOfBoundsException=" + crawlUrl.getUrl());
			LOGGER.error("HttpFetchAlgo:{}", e);
		} catch (Exception e) {
			// Last-resort guard so one bad URL never kills the worker.
			crawlUrl.registerProcessorResult(new FailProcessorResult());
			LOGGER.error("HttpFetchAlgo:{}", e);
			CrawlService.getInstance().logFailResult(
					e.getLocalizedMessage() + crawlUrl.getUrl());
		}

	}

	/**
	 * Executes the HTTP GET for the crawl URL, logs the outcome and dispatches
	 * on the response status code (200 / 206 / 301-302 / 403 / 404 / 5xx).
	 * Connection cleanup now always runs, even on early exits.
	 *
	 * @param httpClient client used to execute the request
	 * @param crawlUrl   URL being crawled; receives processor results
	 * @throws ClientProtocolException on HTTP protocol errors
	 * @throws IOException             on transport errors (handled by process())
	 */
	private void executeHttp(HttpClient httpClient, CrawlUrl crawlUrl)
			throws ClientProtocolException, IOException {
		CrawlResult crawlResult = (CrawlResult) crawlUrl.getResultParameter(
				ParameterKey.RESULT_CRAWL).getValue();
		// GET
		HttpGet method = configureHttpGet(crawlResult.getUrl());

		if (null == method) {
			// The request could not even be constructed; register an explicit
			// failure instead of silently dropping the URL (previously nothing
			// was registered on this path).
			crawlUrl.registerProcessorResult(new FailProcessorResult());
			CrawlService.getInstance().logFailResult(
					"HttpMethod is null=" + crawlUrl.getUrl());
			return;
		}

		// Request-local cookie store bound to a local context, so cookies
		// never leak between URLs sharing the thread-safe client.
		CookieStore cookieStore = new BasicCookieStore();
		HttpContext localContext = new BasicHttpContext();
		localContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);

		try {
			long s = System.currentTimeMillis();
			HttpResponse response = httpClient.execute(method, localContext);
			long fetchTime = System.currentTimeMillis() - s;
			crawlResult.setFetchTime(fetchTime);

			HttpEntity httpEntity = response.getEntity();
			if (null == httpEntity) {
				crawlUrl.registerProcessorResult(new FailProcessorResult());
				CrawlService.getInstance().logFailResult(
						"httpEntity is null=" + crawlUrl.getUrl());
				return;
			}
			int iStatusCode = response.getStatusLine().getStatusCode();

			// configureHttpGet always adds a Cookie header, but guard the
			// dereference anyway (previously an unchecked NPE hazard).
			Header localCookie = method.getFirstHeader(HEAD_NAME_COOKIE);
			StringBuilder builder = new StringBuilder();
			builder.append("ReasonPhrase:").append(iStatusCode).append(
					"|localCookie:").append(
					localCookie == null ? "" : localCookie.getValue())
					.append("->").append(crawlUrl.getUrl())
					.append(" fetchTime=").append(fetchTime).append("ms");
			LOGGER.info(builder.toString());

			// Publish the intermediate result and remember the response code.
			addResultParameter(ParameterKey.RESULT_CRAWL, crawlResult, crawlUrl);
			crawlResult.setFetchCode(iStatusCode);

			int status = crawlResult.getFetchCode();

			switch (status) {
			case HttpStatus.SC_OK:
				handleOK(response, crawlUrl);
				break;

			case HttpStatus.SC_PARTIAL_CONTENT:
				// 206: partial body, still treated as a success.
				handle206(response, crawlUrl);
				break;

			case HttpStatus.SC_UNAUTHORIZED:
				// 401 is not 'success'; intentionally unhandled for now.
				// handle401(response, crawlUrl);
				break;
			case HttpStatus.SC_FORBIDDEN:
				// 403: possibly banned for crawling too fast.
				handle403(httpClient, crawlUrl);
				break;

			case HttpStatus.SC_MOVED_PERMANENTLY:
			case HttpStatus.SC_MOVED_TEMPORARILY:
				// 301/302: queue the redirect target as a new URL.
				registerNewUrlHandle(response, crawlUrl);
				break;

			case HttpStatus.SC_NOT_FOUND:
			case HttpStatus.SC_INTERNAL_SERVER_ERROR:
			case HttpStatus.SC_NOT_IMPLEMENTED:
			case HttpStatus.SC_BAD_GATEWAY:
			case HttpStatus.SC_SERVICE_UNAVAILABLE:
				// 404 plus 5xx server-side errors: permanent failure.
				registerFailHandle(response, crawlUrl);
				CrawlService.getInstance().logFailResult(
						status + "=" + crawlUrl.getUrl());
				break;

			default:
				crawlUrl.registerProcessorResult(new FailProcessorResult());
				CrawlService.getInstance().logFailResult(
						"unknow status" + crawlUrl.getUrl());
				break;
			}

			// Release the underlying stream; a no-op when a handler already
			// drained the entity via EntityUtils.toByteArray.
			httpEntity.consumeContent();
		} finally {
			// Previously skipped on the early returns above, leaking the
			// connection; always clean up now.
			httpClient.getConnectionManager().closeExpiredConnections();
			method.abort();
		}
	}

	/**
	 * Handles 403 Forbidden: access denied, which also happens when the
	 * crawler is temporarily banned for requesting too fast. Marks the fetch
	 * rejected and registers failure plus notification results.
	 *
	 * @param httpclient client that executed the request (unused here)
	 * @param crawlUrl   URL being crawled
	 * @throws IOException declared for interface stability
	 */
	private void handle403(HttpClient httpclient, CrawlUrl crawlUrl)
			throws IOException {
		LOGGER.info("HttpFetchAlgo:{}", "HttpFetchAlgo:handle403-"
				+ crawlUrl.getUrl());

		ResultParameter parameter = crawlUrl
				.getResultParameter(ParameterKey.RESULT_CRAWL);
		CrawlResult crawlResult = (CrawlResult) parameter.getValue();
		crawlResult.setFetchStatus(FetchStatus.Reject);

		CrawlService.getInstance().logResult(crawlResult.getHostName(),
				"403=" + crawlUrl.getUrl());
		crawlUrl.registerProcessorResult(new FailProcessorResult());
		crawlUrl.registerProcessorResult(new NotificationProcessorResult());
	}

	/**
	 * Handles 206 Partial Content: still counted as a successful fetch, with
	 * the (partial) body stored on the CrawlResult. A read timeout queues a
	 * retry; other read errors are logged.
	 *
	 * @param response HTTP response carrying the partial entity
	 * @param crawlUrl URL being crawled; receives a retry result on timeout
	 */
	private void handle206(HttpResponse response, CrawlUrl crawlUrl) {
		CrawlResult result = (CrawlResult) crawlUrl.getResultParameter(
				ParameterKey.RESULT_CRAWL).getValue();

		try {
			byte[] body = EntityUtils.toByteArray(response.getEntity());
			result.setContent(body);
			// Mark Success only after the body was actually read; the
			// original set it up front, leaving status=Success with no
			// content when the read failed.
			result.setFetchStatus(FetchStatus.Success);
		} catch (SocketTimeoutException e) {
			// Timed out mid-body: worth another attempt.
			LOGGER.error("HttpFetchAlgo:handle206 read timeout", e);
			crawlUrl.registerProcessorResult(new RetryProcessorResult());
		} catch (ParseException e) {
			// Use the class logger instead of printStackTrace().
			LOGGER.error("HttpFetchAlgo:handle206", e);
		} catch (IOException e) {
			LOGGER.error("HttpFetchAlgo:handle206", e);
		}

	}

	/**
	 * Handles a 200 OK response: records the fetch as successful, captures
	 * content type, body bytes and character set on the CrawlResult, then
	 * republishes the result parameter.
	 *
	 * @param response the HTTP response; entity is non-null (caller checked)
	 * @param crawlUrl URL being crawled; receives processor results
	 * @throws ParseException if the entity cannot be parsed
	 * @throws IOException    if reading the entity body fails
	 */
	private void handleOK(HttpResponse response, CrawlUrl crawlUrl)
			throws ParseException, IOException {

		CrawlResult result = (CrawlResult) crawlUrl.getResultParameter(
				ParameterKey.RESULT_CRAWL).getValue();
		result.setFetchStatus(FetchStatus.Success);
		crawlUrl.registerProcessorResult(new NotificationProcessorResult());

		HttpEntity httpEntity = response.getEntity();

		String charSet = "";

		long contentLength = httpEntity.getContentLength();
		LOGGER.debug("HttpFetchAlgo:{}", crawlUrl.getUrl() + ":"
				+ "contentLength = " + contentLength);

		// Over the 2MB cap, or length unknown (-1): only logged here; the
		// body is still downloaded in full below.
		if (contentLength > DEFAULT_MAX_LENGTH_BYTES || -1 == contentLength) {
			LOGGER.debug("{}", crawlUrl.getUrl() + ":" + "contentLength = "
					+ contentLength + ";DEFAULT_MAX_LENGTH_BYTES="
					+ DEFAULT_MAX_LENGTH_BYTES);
		}

		// First Set-Cookie header the server sent back (diagnostic only;
		// the registration call below is disabled).
		Header cookieHeader = response.getFirstHeader(HEAD_NAME_SET_COOKIE);

		String responseCookie = "";
		if (cookieHeader != null) {
			responseCookie = cookieHeader.getValue();
		}
		// CookieService.registerCookie(crawlUrl.getUrl(), responseCookie);

		LOGGER.debug("HttpFetchAlgo:{}", "response-cookie:" + responseCookie
				+ "(flush cookie)->" + crawlUrl.getUrl());

		// Body bytes, filled in below.
		byte[] contentBytes = null;

		// Content type from the response header, falling back to the first
		// HTML content type when the server omitted it.
		Header header = httpEntity.getContentType();
		if (null == header) {
			result.setContentType(ParameterKey.CONTENTTYPE_HTML[0]);
		} else {
			result.setContentType(header.getValue());
		}

		// Charset from the Content-Type header; may be null if unspecified.
		charSet = EntityUtils.getContentCharSet(httpEntity);

		contentBytes = EntityUtils.toByteArray(httpEntity);

		// Store the raw body.
		result.setContent(contentBytes);

		// For HTML with no declared charset, sniff it from the body.
		if (isExpectedType(result.getContentType(),
				ParameterKey.CONTENTTYPE_HTML)
				&& null == charSet) {
			// NOTE(review): new String(contentBytes) decodes with the
			// platform default charset, which may garble non-ASCII meta tags
			// before TextUtil.findCharset sees them - confirm intended.

			charSet = TextUtil.findCharset(new String(contentBytes));
			result.setCharSet(charSet);

		} else {
			// Non-HTML (or charset already known): empty/absent charset
			// means the content is treated as binary.
			if (null == charSet || "".equalsIgnoreCase(charSet)) {
				result.setCharSet(ParameterKey.CHARSET_TYPE_BINARY);
			} else {
				result.setCharSet(charSet);
			}

		}

		// NOTE(review): this branch appears unreachable - contentBytes was
		// already dereferenced above and EntityUtils.toByteArray does not
		// return null - TODO confirm and remove.
		if (null == contentBytes) {
			SuccessProcessorResult result2 = new SuccessProcessorResult();
			result2.setNeedeInterrupt(true);
			crawlUrl.registerProcessorResult(result2);
			return;
		}

		addResultParameter(ParameterKey.RESULT_CRAWL, result, crawlUrl);

	}

	/**
	 * Registers a permanent failure for the URL: marks the fetch invalid and
	 * queues both a failure and a notification result.
	 *
	 * @param response the failed HTTP response (unused here)
	 * @param crawlUrl URL being crawled
	 */
	private void registerFailHandle(HttpResponse response, CrawlUrl crawlUrl) {
		ResultParameter parameter = crawlUrl
				.getResultParameter(ParameterKey.RESULT_CRAWL);
		CrawlResult crawlResult = (CrawlResult) parameter.getValue();
		crawlResult.setFetchStatus(FetchStatus.Invalid);
		crawlUrl.registerProcessorResult(new FailProcessorResult());
		crawlUrl.registerProcessorResult(new NotificationProcessorResult());
	}

	/**
	 * Handles a 301/302 redirect: queues every Location and Content-Location
	 * header value as a new crawl URL, then registers an interrupting success
	 * so the remaining processors are skipped for this URL.
	 *
	 * @param response HTTP redirect response
	 * @param crawlUrl URL being crawled
	 */
	private void registerNewUrlHandle(HttpResponse response, CrawlUrl crawlUrl) {
		for (Header location : response.getHeaders("Location")) {
			addHeaderLink(crawlUrl, location);
		}

		for (Header contentLocation : response.getHeaders("Content-Location")) {
			addHeaderLink(crawlUrl, contentLocation);
		}

		SuccessProcessorResult success = new SuccessProcessorResult();
		success.setNeedeInterrupt(true);
		crawlUrl.registerProcessorResult(success);
	}

	/**
	 * Queues a retry for the URL, giving up after three attempts and
	 * registering a failure instead.
	 *
	 * @param strreason reason for the retry (also used as the retry message)
	 * @param crawlUrl  URL being crawled
	 */
	private void registerRetryUrlHandle(String strreason, CrawlUrl crawlUrl) {
		if (crawlUrl.getRetryCount() > 2) {
			// Retry budget exhausted: record the failure.
			String strMsg = "Too many retry = " + crawlUrl.getUrl();
			LOGGER.info(strMsg);
			CrawlService.getInstance().logResult("retry", strMsg);
			crawlUrl.registerProcessorResult(new FailProcessorResult());
		} else {
			crawlUrl.registerProcessorResult(new RetryProcessorResult(
					strreason, strreason));
		}
	}

	/**
	 * Resolves the URL's host against the crawler's DNS cache and, when an IP
	 * is cached, rewrites the host to that IP in the CrawlResult URL.
	 * Registers a FailProcessorResult for malformed URLs or unsupported
	 * schemes.
	 *
	 * NOTE(review): this method is void, so process() cannot see a failure
	 * registered here and continues on to executeHttp() anyway - confirm
	 * whether that is intended.
	 *
	 * @param crawlUrl URL being crawled
	 */
	private void checkDNS(CrawlUrl crawlUrl) {

		CrawlResult result = (CrawlResult) crawlUrl.getResultParameter(
				ParameterKey.RESULT_CRAWL).getValue();
		// Host name parsed from the URL.
		String dnsName = "";
		// Cached IP address, if any.
		String strIP = "";
		// URL scheme ("http" / "https").
		String scheme = "";

		String curiString = crawlUrl.getUrl();
		try {
			URL url = new URL(curiString);
			dnsName = url.getHost();
			scheme = url.getProtocol();
		} catch (MalformedURLException e1) {
			// Unparseable URL: register the failure and bail out.
			LOGGER.error("HttpFetchAlgo:{}", e1.getLocalizedMessage());
			crawlUrl.registerProcessorResult(new FailProcessorResult());
			CrawlService.getInstance().logFailResult(
					"MalformedURLException=" + curiString);

			return;
		}
		if (!canFetch(scheme)) {
			// Cannot fetch this, due to protocol, retries, or other problems
			LOGGER.warn("HttpFetchAlgo:{}", "HttpFetchAlgo:can't Fetch");
			crawlUrl.registerProcessorResult(new FailProcessorResult());
			CrawlService.getInstance().logFailResult(
					"scheme err=" + crawlUrl.getUrl());
			return;
		}
		result.setHostName(dnsName);

		strIP = CrawlService.getInstance().getIP(dnsName);

		// No cached DNS record: leave the URL untouched so the DNS resolver
		// can pick it up later.
		if (null == strIP || "".equals(strIP)) {

			LOGGER.debug("HttpFetchAlgo:{}", "no dns");

		} else {// a cached record exists: swap the host name for its IP.
			// NOTE(review): replace() substitutes EVERY occurrence of the
			// host string in the URL, so a path/query that happens to contain
			// the host text is rewritten too - confirm acceptable.
			curiString = curiString.replace(dnsName, strIP);
		}
		result.setUrl(curiString);
	}

	/**
	 * Returns whether this processor can fetch the given URL scheme; only
	 * plain http and https are supported.
	 *
	 * @param scheme URL scheme as reported by URL.getProtocol() (may be null)
	 * @return true for http/https, false otherwise
	 */
	private boolean canFetch(String scheme) {
		// Exact, case-insensitive match. The previous contains() check also
		// accepted any scheme merely embedding "http" (e.g. "shttp") and
		// threw an NPE for a null scheme; equalsIgnoreCase handles both.
		return HTTP_SCHEME.equalsIgnoreCase(scheme)
				|| HTTPS_SCHEME.equalsIgnoreCase(scheme);
	}

	/**
	 * Builds the HttpGet for the given URL: Connection/Referer headers, a
	 * Cookie header chosen by intelligentCookieType, and a browser-compatible
	 * cookie policy.
	 *
	 * @param curiString target URL
	 * @return the configured request, or null when the URL is unusable
	 */
	private HttpGet configureHttpGet(String curiString) {
		HttpGet httpGet;

		try {
			httpGet = new HttpGet(curiString);
		} catch (IllegalArgumentException exception) {
			LOGGER.error("HttpFetchAlgo:{},{}", exception, curiString);
			return null;
		} catch (NullPointerException e) {
			LOGGER.error("HttpFetchAlgo:{},{}", e, curiString);
			return null;
		}

		// A Range header (bytes=0-<max>) is deliberately NOT set: it caused
		// 206 partial retransmits; revisit as a future optimization.

		httpGet.addHeader(HEADER_CONNECTION_CLOSE);
		httpGet.addHeader(HEADER_REFER_GOOGLE);

		// Cookie selection: 0 = none, 1 = stored cookie from the cookie
		// service, anything else = random "bid" token (Douban-specific
		// anti-ban measure). Policy options: NETSCAPE / RFC_2109 / RFC_2965.
		String strCookie;
		switch (intelligentCookieType) {
		case 0:
			strCookie = "";
			break;
		case 1:
			strCookie = CookieService.getCookie(curiString);
			break;
		default:
			strCookie = "bid=" + "\"" + randomChar() + "\"";
			break;
		}

		httpGet.addHeader(HEAD_NAME_COOKIE, strCookie);

		// httpGet.addHeader(HEADER_CONNECTION_ALIVE);
		httpGet.getParams().setParameter(ClientPNames.COOKIE_POLICY,
				CookiePolicy.BROWSER_COMPATIBILITY);

		return httpGet;
	}

	// private static String randomString() {
	// StringBuffer buffer = new StringBuffer();
	// Random r = new Random();
	// int i = 0;
	// int c;
	// while (i < 10) {
	// c = r.nextInt(122);
	// if ((64 < c && c < 90) || 96 < c) {
	// buffer.append((char) c);
	// i++;
	// } else if (0 <= c && c < 10) {
	// buffer.append(c);
	// i++;
	// }
	// }
	//
	// return buffer.toString();
	//
	// }

	// private long getMaxLength() {
	// // TODO Auto-generated method stub
	// return OLD_DEFAULT_MAX_LENGTH_BYTES;
	// }

	/**
	 * Queues the URL carried by a redirect header (Location or
	 * Content-Location) as a new crawl URL inheriting this URL's additional
	 * information.
	 *
	 * @param curi current crawl URL, receives the AddUrlProcessorResult
	 * @param loc  redirect header; ignored when null
	 */
	private void addHeaderLink(CrawlUrl curi, Header loc) {
		if (loc == null) {
			// If null, return without adding anything.
			return;
		}
		// TODO: consider possibility of multiple headers
		AddUrlProcessorResult addUrlProcessorResult = new AddUrlProcessorResult();
		// BUG FIX: the redirect target is the header VALUE
		// ("Location: <url>"); loc.getName() always yielded the literal
		// header name, so every queued redirect URL was "Location".
		CrawlUrl newUrl = new CrawlUrl(curi.getJob(), loc.getValue());
		AdditionalUrlInformation info = curi.getAdditionalUrlInformation()
				.newInstance();
		newUrl.setAdditionalUrlInformation(info);
		addUrlProcessorResult.addUrl(newUrl);

		curi.registerProcessorResult(addUrlProcessorResult);

	}

	/**
	 * Attaches a named intermediate result to the crawl URL.
	 *
	 * @param parameterName   key the result is registered under
	 * @param parameterObject result payload (must be serializable)
	 * @param crawlUrl        URL the parameter is attached to
	 */
	private void addResultParameter(String parameterName,
			Serializable parameterObject, CrawlUrl crawlUrl) {
		ResultParameter parameter = new ResultParameter();
		parameter.setName(parameterName);
		parameter.setValue(parameterObject);
		crawlUrl.registerResultParameter(parameter);
	}

	/**
	 * Generates a 10-character token of mixed letters and digits, used as a
	 * fake "bid" cookie value (see configureHttpGet). Rejection-samples ints
	 * in [0, 121]:
	 *   65..89  -> 'A'..'Y'  (note: 'Z' is never produced)
	 *   97..121 -> 'a'..'y'  (note: 'z' is never produced)
	 *   0..9    -> appended as a decimal digit
	 * The skewed ranges are preserved as-is to keep the original output
	 * distribution - TODO confirm whether 'Z'/'z' were meant to be included.
	 *
	 * @return a 10-character pseudo-random token (not cryptographically secure)
	 */
	private static String randomChar() {
		// StringBuilder: no synchronization needed for a method-local buffer.
		StringBuilder buffer = new StringBuilder(10);
		Random r = new Random();
		int appended = 0;
		while (appended < 10) {
			int c = r.nextInt(122);
			if ((64 < c && c < 90) || 96 < c) {
				buffer.append((char) c);
				appended++;
			} else if (c < 10) {
				// nextInt never returns a negative, so "0 <= c" was redundant.
				buffer.append(c);
				appended++;
			}
		}

		return buffer.toString();
	}

	/**
	 * Returns true when the content type starts with any of the given
	 * prefixes (case-insensitive on the content type).
	 *
	 * @param contentType     MIME type reported by the server; may be null
	 * @param expectedPrefixs candidate prefixes, e.g. ParameterKey.CONTENTTYPE_HTML
	 * @return true if any prefix matches
	 */
	private boolean isExpectedType(String contentType, String[] expectedPrefixs) {
		for (String expectedPrefix : expectedPrefixs) {
			if (isExpectedType(contentType, expectedPrefix)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns true when the (non-null) content type starts with the given
	 * prefix, comparing case-insensitively on the content type.
	 *
	 * @param contentType    MIME type reported by the server; may be null
	 * @param expectedPrefix lowercase prefix to match against
	 * @return true when the prefix matches
	 */
	private boolean isExpectedType(String contentType, String expectedPrefix) {
		// Locale.ROOT keeps the lowering locale-independent; the default
		// locale can break ASCII matching (e.g. Turkish dotless i).
		return contentType != null
				&& contentType.toLowerCase(Locale.ROOT).startsWith(expectedPrefix);
	}

	/**
	 * Builds the HttpParams used to create the thread-safe client: protocol
	 * version, content charset, user agent, timeouts, redirect policy and
	 * connection-pool size. Side effect: back-fills the contentCharset and
	 * userAgent fields with defaults when they were left empty.
	 *
	 * @return fully configured parameter set
	 */
	private HttpParams getHttpParams() {
		HttpParams params = new BasicHttpParams();

		// HTTP/1.0 only when explicitly configured; anything else (including
		// null/empty) means HTTP/1.1. Equivalent to the original chain of
		// null/empty checks, but null-safe in a single expression.
		if ("1.0".equalsIgnoreCase(httpVersion)) {
			HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_0);
		} else {
			HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1);
		}

		if (null == contentCharset || "".equals(contentCharset)) {
			contentCharset = "UTF-8";
		}

		if (null == userAgent || "".equals(userAgent)) {
			// Default to an IE8-on-Windows-7 UA captured from a real browser.
			// The segments intentionally join without spaces (e.g.
			// "...Trident/4.0;SLCC2..."), exactly as the original
			// StringBuffer concatenation produced.
			userAgent = "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0;"
					+ "SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729;"
					+ "Media Center PC 6.0; CIBA)";
		}

		HttpProtocolParams.setContentCharset(params, contentCharset);
		HttpProtocolParams.setUserAgent(params, userAgent);
		HttpConnectionParams.setConnectionTimeout(params, connectionTimeout);
		HttpConnectionParams.setSoTimeout(params, soTimeout);

		HttpClientParams.setRedirecting(params, isRedirecting);
		ConnManagerParams.setMaxTotalConnections(params, maxTotalConnections);

		return params;

	}

	/**
	 * Ad-hoc debug harness: dumps the public (reflection/XStream-visible)
	 * fields, round-trips this processor through XStreamUtil, and probes a
	 * jar entry. Not part of the crawl pipeline.
	 */
	public static void main(String[] args) {
		Field[] fields = FetchHttp.class.getFields();
		for (int i = 0; i < fields.length; i++) {
			System.out.println(fields[i].getName() + " " + fields[i].getType());
		}

		// NOTE(review): "f://1.xml" looks like a typo for "f:/1.xml" - kept
		// as-is since this is debug-only code; confirm before relying on it.
		XStreamUtil.buildXMLFile(new FetchHttp(), "f://1.xml");

		XStreamUtil.newObject("f://1.xml");

		try {
			JarFile jarFile = new JarFile("f://crawler.jar");
			jarFile.getEntry("com.cpkf.yyjd.crawler.app.processor.extractor");
		} catch (IOException e) {
			// Swallowed deliberately: failure only matters when debugging
			// interactively.
			e.printStackTrace();
		}

	}

}
