package com.data;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import android.util.Log;

import com.bean.JobBean;
import com.constant.Constant;
import com.net.ClientError;
import com.net.ClientException;
import com.net.RequestParameter;
/**
 * Fetches job-posting news from the Sichuan University career center.
 *
 * Request parameter
 * <start></start>: record offset to start from; each page holds 22 records by default.
 * On the first load: start = ""
 *
 * @author lenovo
 *
 */
public final class CDJobData extends BaseData {

	private final static String TAG = "cd_job_data";
	/** Listing page of the SCU career-center news feed (type_id=4). */
	private final static String SERVER = "http://222.18.15.135/jiuye/news.php?type_id=4";
	/** Base URL used to turn each item's relative href into an absolute link. */
	private final static String SERVER_DETAIL = "http://222.18.15.135/jiuye/";
	/** True once a request without a "start" offset (i.e. the first page) has been made. */
	private boolean bFirst = false;
	
	/**
	 * Total number of records reported by the server.
	 */
	private int mTotal;
	/**
	 * Current page number.
	 */
	private int mCurPage;
	/**
	 * Number of records the server returns per page.
	 */
	private int mPageCount;
	/**
	 * Parsed job entries; only the title, date and url fields of JobBean are used.
	 */
	private ArrayList<JobBean> mJobList;
	
	/**
	 * Fetches one page of job listings and parses it.
	 *
	 * @param parameter request parameters; "start" is the record offset of the
	 *                  first item on the page (null or "" requests the first page)
	 * @throws ClientException if the HTTP request does not return 200
	 */
	@Override
	public void startParse(RequestParameter parameter) throws Exception,
			ClientException {
		
		Log.i(TAG, "开始获取川大就业信息");
		
		final String start = parameter.getValue("start");
		final String url;
		
		// BUG FIX: the original tested "start == null && start.equals(\"\")",
		// which throws NullPointerException whenever start is null, and then
		// unconditionally rebuilt url with "&start=" + start, producing
		// "...&start=null" and making the first-page branch dead code.
		if (start == null || start.isEmpty()) {
			url = SERVER;
			// 标记第一次加载 (mark first page load)
			bFirst = true;
		} else {
			url = SERVER + "&start=" + start;
		}
	
		final String stream = openConnection(url);
		if (stream == null)
			return;
		
		readStream(stream);
		
	} // startParse

	/**
	 * Issues an HTTP GET for the given URL and returns the page source
	 * decoded via getWebSource with the gb2312 charset.
	 *
	 * @param urls absolute URL of the listing page
	 * @return decoded page source
	 * @throws ClientException if the response code is not HTTP 200
	 */
	private String openConnection(final String urls) throws IOException, ClientException {
		
		URL url = new URL(urls);
		HttpURLConnection connection = (HttpURLConnection) url.openConnection();
		// NOTE: the original called setDoOutput(true), which silently switches
		// HttpURLConnection to POST; this is a read-only GET, so it was removed.
		connection.setConnectTimeout(Constant.SET_CONNECTION_TIMEOUT);
		connection.setReadTimeout(Constant.SET_READ_TIMEOUT);
		try {
			connection.connect();
			
			if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
				Log.e(TAG, "获取数据失败, 错误代码：" + connection.getResponseCode());
				throw new ClientException(ClientError.ERROR_CONNECTION);
			}
			
			Log.i(TAG, "获取川大就业信息成功");
			
			// The page is served in the legacy Chinese gb2312 encoding.
			return getWebSource(connection.getInputStream(), "gb2312");
		} finally {
			// Always release the connection, even when an exception is thrown.
			connection.disconnect();
		}
		
	} // openConnection
	
	/**
	 * Parses the HTML listing page: extracts the paging summary
	 * (current page / per-page count / total count) and the job rows.
	 *
	 * @param stream full HTML source of the listing page
	 */
	private void readStream(final String stream) {
		Log.i(TAG, "开始解析就业信息数据");
		
		Document document 	= 	Jsoup.parse(stream);
		
		Elements tables 	= 	document.select("table");
		
		/**
		 * The paging summary has the following layout:
		 * <页次1/40 每页22条共868条>
		 * i.e. "page 1/40, 22 per page, 868 in total".
		 */
		// Table index 6 holds the paging summary — position-dependent on the
		// site's current markup; breaks if the page layout changes.
		Element table 		= 	tables.get(6);
		Elements totals 	= 	table.getElementsByAttributeValue("valign", "middle");
		Element total		=	totals.get(0).child(0);
		String text			= 	total.ownText();
		
		// Current page number: "页次1/40" -> take the part before '/'.
		String[] array 		= 	text.split(" ");
		String curPage 		= 	array[0].split("/")[0].replaceAll("页次", "");
		mCurPage			=	Integer.parseInt(curPage);
		
		// Records per page: "每页22条共868条" -> number between 每页 and 条共.
		curPage				=	array[1].split("条共")[0].replaceAll("每页", "");
		mPageCount			=	Integer.parseInt(curPage);
		
		// Total record count: the number after 条共, stripped of the trailing 条.
		curPage				=	array[1].split("条共")[1].replaceAll("条", "");
		mTotal				=	Integer.parseInt(curPage);
		
		Log.i(TAG, "获取总条数数据成功");
		
		// Table index 7 holds the actual job rows (same layout caveat as above).
		table				=	tables.get(7);
		
		if (table == null)
			return;
		Elements trs		=	table.select("tr");
		if (trs == null)
			return;
		
		mJobList			=	new ArrayList<JobBean>();
		// Step by 2: every other <tr> is a separator/spacer row in this markup
		// — presumably; TODO confirm against the live page.
		for (int i = 0, len = trs.size(); i < len - 1; i += 2)
		{
			Element tr		=	trs.get(i);
			Element td		=	tr.child(1);
			
			JobBean	bean 	=	new JobBean();
			// href on the page is relative; prepend the site base.
			bean.url		=	SERVER_DETAIL + td.child(0).attr("href");
			
			text			=	td.child(0).attr("title");
			text			=	text.replaceAll("到我校招聘", "");
			
			bean.title		=	text;
			// NOTE(review): bean.date is never set here although the field
			// javadoc above says it is used — confirm against callers.
			mJobList.add(bean);
		}
		
		Log.i(TAG, "就业数据解析完成");
		
	} // readStream
	
	/** @return total number of records reported by the server */
	public int getTotal() {
		return this.mTotal;
	}
	
	/** @return current page number */
	public int getCurPage() {
		return this.mCurPage;
	}
	
	/** @return number of records per page */
	public int getPageCount() {
		return this.mPageCount;
	}
	
	/** @return parsed job entries, or null if readStream has not run successfully */
	public ArrayList<JobBean> getJobList() {
		return this.mJobList;
	}
	
	/** Debug helper: dumps the parsed paging info and job list to stdout. */
	public void printData() {
		System.out.println("total:" + mTotal);
		System.out.println("pageCount:" + mPageCount);
		System.out.println("curPage:" + mCurPage);
		System.out.println(mJobList.size());
		for (int i = 0; i < mJobList.size(); i++)
		{
			JobBean bean = mJobList.get(i);
			System.out.println(bean.title);
			System.out.println(bean.url);
		}
	}
	
}
