package com.spark.actions.fin.gld;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.session.SqlSession;

import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException;
import com.spark.actions.sys.CompanyAction;
import com.spark.actions.sys.DefaultAction;
import com.spark.core.Message;
import com.spark.core.SparkContext;
import com.spark.core.SparkException;
import com.spark.dao.SessionFactory;
import com.spark.tools.Logger;
import com.spark.util.DateFormatHelper;
import com.spark.util.IDGenerator;
import com.spark.util.InitializerHelper;

/**
 * Action handling general-ledger (GLD) journal vouchers: paged querying,
 * save/update with validation, posting to GLD_BALANCES, and voucher
 * navigation (load / previous / next).
 */
@SuppressWarnings("unchecked")
public class JournalAction extends DefaultAction {
	
	/** Separator used to build the composite GLD_BALANCES business key. */
	protected static final String DEFAULT_SPLITTER = "_";
	
	
	/** SQL — MyBatis statement ids in the gld_journal mapper **/
	protected static final String SQL_JOURNAL_HEAD_INSERT = "gld_journal.insert";
	// NOTE(review): "UPADTE" is a typo; kept because the constant is protected and may be referenced by subclasses
	protected static final String SQL_JOURNAL_HEAD_UPADTE = "gld_journal.update";
	protected static final String SQL_JOURNAL_HEAD_CODE = "gld_journal.updateJournalCode";
	protected static final String SQL_JOURNAL_TYPE_NUM = "gld_journal.selectTypeNum";
	protected static final String SQL_JOURNAL_LINES_INSERT = "gld_journal.insertLine";
	protected static final String SQL_JOURNAL_LINES_DELETE = "gld_journal.deleteLines";
	protected static final String SQL_JOURNAL_SELECT_HEAD = "gld_journal.selectJournal";
	protected static final String SQL_JOURNAL_SELECT_LINES = "gld_journal.selectJournalLines";
	protected static final String SQL_JOURNAL_SELECT_LINES_BY_HEADERS = "gld_journal.selectJournalLinesByHeaders";
	protected static final String SQL_JOURNAL_HEAD_MAX = "gld_journal.selectMaxId";
	protected static final String SQL_JOURNAL_HEAD_PRE = "gld_journal.selectPreJournalHead";
	protected static final String SQL_JOURNAL_HEAD_NEXT = "gld_journal.selectNextJournalHead";
	
	/** Posting statements **/
	protected static final String SQL_JOURNAL_POST_SELECT_HEADS = "gld_journal.selectPostJournalHeads";
	protected static final String SQL_JOURNAL_POST_SELECT_LINES = "gld_journal.selectPostJournalLines";
	protected static final String SQL_JOURNAL_POST_SELECT_LINES_GROUP = "gld_journal.selectPostJournalLinesGroup";
	protected static final String SQL_JOURNAL_POST_UPDATE_BALANCES = "gld_journal.updateBalances";
	
	
	/** CONFLICT — duplicate journal type number detection (unique key on JOURNAL_TYPE_NUM) **/
	protected static final String ERROR_TYPE_NUM_CONFLICT = "JOURNAL_TYPE_NUM_CONFLICT";
	protected static final String CONFLICT_FIELD_TYPE_NUM_ = "JOURNAL_TYPE_NUM";
	
	
	/** ERROR — codes passed to Logger.log when an operation fails **/
	protected static final String ERROR_CODE_JOURNAL_CREATE = "JOURNAL_ACTION_CREATE";
	protected static final String ERROR_CODE_JOURNAL_UPDATE = "JOURNAL_ACTION_UPDATE";
	protected static final String ERROR_CODE_JOURNAL_POST = "JOURNAL_ACTION_POST";
	protected static final String ERROR_CODE_JOURNAL_LOAD = "JOURNAL_ACTION_LOAD";
	protected static final String ERROR_CODE_JOURNAL_LOAD_GET = "JOURNAL_ACTION_LOAD_GET";
	protected static final String ERROR_CODE_JOURNAL_LOAD_PRE = "JOURNAL_ACTION_LOAD_PRE";
	protected static final String ERROR_CODE_JOURNAL_LOAD_NEXT = "JOURNAL_ACTION_LOAD_NEXT";
	
	
	/** HEAD — journal header column names **/
	public static final String FIELD_HEAD_JOURNAL_HEADER_ID = "JOURNAL_HEADER_ID";
	public static final String FIELD_HEAD_JOURNAL_DATE = "JOURNAL_DATE";
	public static final String FIELD_HEAD_JOURNAL_CODE = "JOURNAL_CODE";
	public static final String FIELD_HEAD_JOURNAL_TYPE = "JOURNAL_TYPE";
	public static final String FIELD_HEAD_JOURNAL_TYPE_NUMBER = "JOURNAL_TYPE_NUM";
	public static final String FIELD_HEAD_PERIOD_NUM = "PERIOD_NUM";
	public static final String FIELD_HEAD_PERIOD_YEAR = "PERIOD_YEAR";
	public static final String FIELD_HEAD_SOURCE_CODE = "SOURCE_CODE";
	public static final String FIELD_HEAD_CATEGORY_CODE = "CATEGORY_CODE";
	public static final String FIELD_HEAD_CURRENCY_CODE = "CURRENCY_CODE";
	public static final String FIELD_HEAD_STATUS = "STATUS";
	public static final String FIELD_HEAD_TOTAL_AMOUNT_DR = "TOTAL_AMOUNT_DR";
	public static final String FIELD_HEAD_TOTAL_AMOUNT_CR = "TOTAL_AMOUNT_CR";
	public static final String FIELD_HEAD_TOTAL_AMOUNT_FUC_DR = "TOTAL_AMOUNT_FUC_DR";
	public static final String FIELD_HEAD_TOTAL_AMOUNT_FUC_CR = "TOTAL_AMOUNT_FUC_CR";
	public static final String FIELD_HEAD_SIGNED_BY = "SIGNED_BY";
	public static final String FIELD_HEAD_SIGNED_BY_NAME = "SIGNED_BY_NAME";
	public static final String FIELD_HEAD_SIGNED_DATE = "SIGNED_DATE";
	public static final String FIELD_HEAD_APPROVED_BY = "APPROVED_BY";
	public static final String FIELD_HEAD_APPROVED_BY_NAME = "APPROVED_BY_NAME";
	public static final String FIELD_HEAD_APPROVED_DATE = "APPROVED_DATE";
	
	/** LINE — journal line column names **/
	public static final String FIELD_LINE_JOURNAL_HEADER_ID = "JOURNAL_HEADER_ID";
	public static final String FIELD_LINE_ACCOUNT_ID = "ACCOUNT_ID";
	public static final String FIELD_LINE_AMOUNT_DR = "AMOUNT_DR";
	public static final String FIELD_LINE_AMOUNT_CR = "AMOUNT_CR";
	public static final String FIELD_LINE_AMOUNT_FUC_DR = "AMOUNT_FUC_DR";
	public static final String FIELD_LINE_AMOUNT_FUC_CR = "AMOUNT_FUC_CR";
	public static final String FIELD_LINE_DESCRIPTION = "DESCRIPTION";
	public static final String FIELD_LINE_RESPONSIBILITY_CENTER_ID = "RESPONSIBILITY_CENTER_ID";
	
	/** BALANCES — GLD_BALANCES aggregation columns (period / quarter / year buckets) **/
	public static final String FIELD_BALANCES_PERIOD_ENTERED_DR = "PERIOD_ENTERED_DR";
	public static final String FIELD_BALANCES_PERIOD_FUNCTIONAL_DR = "PERIOD_FUNCTIONAL_DR";
	public static final String FIELD_BALANCES_PERIOD_ENTERED_CR = "PERIOD_ENTERED_CR";
	public static final String FIELD_BALANCES_PERIOD_FUNCTIONAL_CR = "PERIOD_FUNCTIONAL_CR";
	
	public static final String FIELD_BALANCES_QUARTER_ENTERED_DR = "QUARTER_ENTERED_DR";
	public static final String FIELD_BALANCES_QUARTER_FUNCTIONAL_DR = "QUARTER_FUNCTIONAL_DR";
	public static final String FIELD_BALANCES_QUARTER_ENTERED_CR = "QUARTER_ENTERED_CR";
	public static final String FIELD_BALANCES_QUARTER_FUNCTIONAL_CR = "QUARTER_FUNCTIONAL_CR";
	
	public static final String FIELD_BALANCES_YEAR_ENTERED_DR = "YEAR_ENTERED_DR";
	public static final String FIELD_BALANCES_YEAR_FUNCTIONAL_DR = "YEAR_FUNCTIONAL_DR";
	public static final String FIELD_BALANCES_YEAR_ENTERED_CR = "YEAR_ENTERED_CR";
	public static final String FIELD_BALANCES_YEAR_FUNCTIONAL_CR = "YEAR_FUNCTIONAL_CR";
	
	/** DEFAULT VALUE **/
	public static final String DEFAULT_SOURCE_CODE = "GLD";
	public static final String DEFAULT_CURRENCY_CODE = "CNY";
	public static final String DEFAULT_JOURNAL_TYPE = "J";
	
	/** Default journal statuses (U = unsigned, S = signed, C = approved) **/
	public static final String DEFAULT_STATUS_UNSIGNED = "U";
	public static final String DEFAULT_STATUS_SIGNED = "S";
	public static final String DEFAULT_STATUS_APPROVED = "C";
	
	
	protected static final String DEFAULT_ORDER_FIELD = FIELD_HEAD_JOURNAL_DATE;
	
	
	/** Posting succeeded **/
	protected static final int STATUS_SUCCESS = 1;
	/** Accounting period is not the currently open period **/
	protected static final int STATUS_ERROR_PERIOD_NOT_OPEN = -1;
	/** Journal has no lines **/
	protected static final int STATUS_ERROR_EMPTY_LINE = -2;
	/** Debits and credits do not balance **/
	protected static final int STATUS_ERROR_DR_NOT_EQUAL_CR = -3;
	/** Line totals do not match the head totals **/
	protected static final int STATUS_ERROR_HEAD_NOT_EQUAL_LINE = -4;
	
	/** Map key under which a head's line list is attached. */
	protected static final String JOURNAL_LINES = "JOURNAL_LINES";
	/**
	 * Journal query: selects a page of journal headers via the given statement
	 * and attaches each header's lines under the "lines" key.
	 *
	 * Improvement: lines are grouped by header id once (O(H+L)) instead of
	 * re-scanning the whole line list for every header (O(H*L)).
	 *
	 * @param context current request context (supplies paging/ordering params)
	 * @param statement MyBatis statement id used to select the headers
	 * @param errorCode error code logged when the query fails
	 * @throws SparkException wrapping any underlying failure (logged first)
	 */
	protected void selectJournals(SparkContext context,String statement,String errorCode) throws SparkException{
		Message message = new Message(true);
		Map params = context.getParameterMap();
		parsePageSizeAndPageNum(context, params);
		String orderField = getOrderField(params);
		if(orderField == null ) orderField = DEFAULT_ORDER_FIELD;
		// "H." is the header table alias used by the mapper's ORDER BY clause
		params.put(ORDER_SQL, "H."+orderField + " " + getOrderType(params,DEFAULT_ORDER_TYPE_DESC));	
		SqlSession sqlSession = SessionFactory.getSession();
		try {
			selectCount(sqlSession,statement,params,message);
			List headers = sqlSession.selectList(statement, params);
			if(headers!=null && headers.size()>0){
				Map m = new HashMap();
				m.put(LIST, headers);
				List lines = sqlSession.selectList(SQL_JOURNAL_SELECT_LINES_BY_HEADERS,m);
				// Bucket the lines by their header id in a single pass.
				Map<Integer,List> linesByHeader = new HashMap<Integer,List>();
				if(lines != null){
					Iterator lit = lines.iterator();
					while(lit.hasNext()){
						Map line = (Map)lit.next();
						Integer lineHeaderId = (Integer)line.get(FIELD_LINE_JOURNAL_HEADER_ID);
						List group = linesByHeader.get(lineHeaderId);
						if(group == null){
							group = new ArrayList();
							linesByHeader.put(lineHeaderId, group);
						}
						group.add(line);
					}
				}
				// Attach each header's bucket (empty list when it has no lines).
				Iterator hit = headers.iterator();
				while(hit.hasNext()){
					Map header = (Map)hit.next();
					Integer headerId = (Integer)header.get(FIELD_HEAD_JOURNAL_HEADER_ID);
					List headerLines = linesByHeader.get(headerId);
					if(headerLines == null) headerLines = new ArrayList();
					header.put("lines", headerLines);
				}
				message.setResult(headers);
			}
			writeMessage(context, message);
		}catch(Exception e){
			Integer errorId = Logger.log(errorCode, context, e);
			throw new SparkException(errorId);
		}finally {
			SessionFactory.closeSession(sqlSession);
		}		
	}
	
	
	
	
	
	
	
	/**
	 * Saves a journal voucher: inserts a new one when the head carries no
	 * JOURNAL_HEADER_ID, otherwise updates the existing one. All real work is
	 * delegated to {@link #doExecute}.
	 *
	 * @param context current request context carrying the head/lines payload
	 * @throws SparkException wrapping any failure (logged with create/update code)
	 */
	public void saveJournal(SparkContext context) throws SparkException {
		Map requestData = getRequestData(context);
		List list = (List)requestData.get(PARAMETER);
		if(list == null || list.isEmpty()) return;
		Map head = (Map)list.get(0);
		// A missing header id marks a brand-new voucher.
		boolean isNew = head.get(FIELD_HEAD_JOURNAL_HEADER_ID) == null;
		String statement = isNew ? SQL_JOURNAL_HEAD_INSERT : SQL_JOURNAL_HEAD_UPADTE;
		String action = isNew ? ACTION_INSERT : ACTION_UPDATE;
		String errorCode = isNew ? ERROR_CODE_JOURNAL_CREATE : ERROR_CODE_JOURNAL_UPDATE;
		try{
			doExecute(context, statement, action);
		}catch(Exception e){
			throw new SparkException(Logger.log(errorCode, context, e));
		}
	}
	
	
	
	
	/**
	 * -------------
	 * Journal posting entry point
	 * -------------
	 * TODO: posting must not run concurrently (no two posting transactions at once)
	 * TODO: validatePostJournal and the outer loop iterate the lines twice; measure the cost
	 * TODO: measure the efficiency of the SQL IN clauses
	 * TODO: accounts carry a fixed currency; aggregation ignores currency for now
	 *
	 * Outline:
	 * (1) verify each journal's period is the currently open period
	 * (2) verify debits equal credits and line totals match the head totals
	 * (3) aggregate lines by period / responsibility center / account
	 *     (TODO: how to treat disabled accounts?)
	 * (4) include every ancestor account from the chart of accounts (merged with (3))
	 * (5)-(6) synchronise the aggregates into gld_balances, keyed by
	 *     period - responsibility center - account (update existing rows, insert new ones)
	 * (7) update the journal status to P
	 *
	 * @param context current request context carrying the heads to post
	 * @throws SparkException wrapping any failure
	 */
	public void postJournal(SparkContext context) throws SparkException {
		Message message = new Message(true);
		try{
			Map requestData = getRequestData(context);
			List plist = (List)requestData.get(PARAMETER);
			// Re-read the authoritative heads/lines from the database before posting.
			List heads = retrievalPostJournalHeadLines(plist);
			if(heads != null) doPostJournal(context, heads);
			// Echo the original request payload back to the client.
			message.setResult(plist);
			writeMessage(context, message);
		}catch(Exception e){
			throw new SparkException(Logger.log(ERROR_CODE_JOURNAL_POST, context, e));
		}
	}
	
	
	/**
	 * Re-reads the journal heads and lines for the given request heads from the
	 * database and attaches each head's lines under JOURNAL_LINES.
	 *
	 * Fixes: the SqlSession was never closed (one leaked connection per call) —
	 * it is now released in a finally block; line matching is done via a map
	 * (O(H+L)) instead of a nested scan (O(H*L)).
	 *
	 * @param list request heads carrying JOURNAL_HEADER_ID values (may be null)
	 * @return the re-read heads with lines attached, or null when nothing was read
	 */
	private List retrievalPostJournalHeadLines(List list){
		if(list == null) return null;
		// Build the comma-separated id list consumed by the mapper's IN clause.
		// NOTE(review): ids are Integers from the request payload; the mapper
		// presumably interpolates this string — confirm it cannot receive raw text.
		StringBuilder sb = new StringBuilder();
		Iterator it = list.iterator();
		while(it.hasNext()){
			Map head = (Map)it.next();
			sb.append(head.get(FIELD_HEAD_JOURNAL_HEADER_ID));
			if(it.hasNext()) sb.append(",");
		}
		List postHeads;
		List postLines;
		SqlSession sqlSession = SessionFactory.getSession();
		try{
			postHeads = sqlSession.selectList(SQL_JOURNAL_POST_SELECT_HEADS,sb.toString());
			postLines = sqlSession.selectList(SQL_JOURNAL_POST_SELECT_LINES,sb.toString());
		}finally{
			SessionFactory.closeSession(sqlSession);
		}
		if(postHeads == null || postLines == null) return null;
		// Index each head's (freshly created) line list by its header id.
		Map<Integer,List> linesByHead = new HashMap<Integer,List>();
		Iterator hit = postHeads.iterator();
		while(hit.hasNext()){
			Map head = (Map)hit.next();
			List lines = (List)head.get(JOURNAL_LINES);
			if(lines == null){
				lines = new ArrayList();
				head.put(JOURNAL_LINES,lines);
			}
			linesByHead.put((Integer)head.get(FIELD_HEAD_JOURNAL_HEADER_ID), lines);
		}
		// Distribute the lines onto their heads in a single pass.
		Iterator lit = postLines.iterator();
		while(lit.hasNext()){
			Map line = (Map)lit.next();
			List lines = linesByHead.get((Integer)line.get(FIELD_HEAD_JOURNAL_HEADER_ID));
			if(lines != null) lines.add(line);
		}
		return postHeads;
	}
	
	
	/**
	 * Posts the validated journals: aggregates every line's amounts — for the
	 * line's account AND each of its ancestor accounts — per
	 * (company, set-of-books, responsibility center, account, currency, year, period)
	 * and writes the aggregates to GLD_BALANCES in a single transaction.
	 *
	 * Fix: the read-only SqlSession used for the group query was never closed
	 * (the variable was silently overwritten by the transactional session),
	 * leaking a connection per call; it is now released in a finally block.
	 * The 12-put aggregation blocks were also extracted into helpers.
	 *
	 * @param context current request context
	 * @param list journal heads (with JOURNAL_LINES attached) to post
	 */
	private void doPostJournal(SparkContext context, List list) {
		if(list == null) return;
		// Keep only the heads that pass posting validation.
		List postList = new ArrayList();
		Iterator it = list.iterator();
		while(it.hasNext()){
			Map head = (Map)it.next();
			if(STATUS_SUCCESS == validatePostJournal(context,head)) postList.add(head);
		}
		if(postList.isEmpty()) return;
		
		// Comma-separated head ids for the mapper's IN clause.
		StringBuilder ids = new StringBuilder();
		Iterator ptit = postList.iterator();
		while(ptit.hasNext()){
			Map head = (Map)ptit.next();
			ids.append(head.get(FIELD_HEAD_JOURNAL_HEADER_ID));
			if(ptit.hasNext()) ids.append(",");
		}
		
		// Resolve the ancestor-account chain for every distinct account in the lines.
		Map parentList = new HashMap();
		SqlSession readSession = SessionFactory.getSession();
		try{
			List groupList = readSession.selectList(SQL_JOURNAL_POST_SELECT_LINES_GROUP, ids.toString());
			if(groupList != null) {
				//TODO: default chart of accounts should come from the current set of books
				List allAccounts = InitializerHelper.retrievalAccounts(1);
				Iterator git = groupList.iterator();
				while(git.hasNext()){
					Map account = (Map)git.next();
					Integer accountId = (Integer)account.get(FIELD_LINE_ACCOUNT_ID);
					if(parentList.get(accountId) == null){
						parentList.put(accountId, retrievalParentAccounts(allAccounts,accountId));
					}
				}
			}
		}finally{
			SessionFactory.closeSession(readSession);
		}
		
		// Aggregate every line (and its ancestor accounts) into balance rows keyed by
		// <COMPANY_ID>_<SET_OF_BOOKS_ID>_<RESPONSIBILITY_CENTER_ID>_<ACCOUNT_ID>_<CURRENCY_CODE>_<PERIOD_YEAR>_<PERIOD_NUM>
		Map postMaps = new HashMap();
		StringBuilder bk = new StringBuilder();
		Iterator sit = postList.iterator();
		while(sit.hasNext()){
			Map head = (Map)sit.next();
			Integer companyId = (Integer)head.get(CompanyAction.FIELD_COMPANY_ID);
			Integer sob = 1;//TODO: read the current set-of-books id from the logged-in account
			String currencyCode = (String)head.get(FIELD_HEAD_CURRENCY_CODE);
			Integer periodYear = (Integer)head.get(FIELD_HEAD_PERIOD_YEAR);
			Integer periodNum = (Integer)head.get(FIELD_HEAD_PERIOD_NUM);
			List lines = (List)head.get(JOURNAL_LINES);
			Iterator lit = lines.iterator();
			while(lit.hasNext()){
				Map line = (Map)lit.next();
				Integer accountId = (Integer)line.get(FIELD_LINE_ACCOUNT_ID);
				Integer resId = (Integer)line.get(FIELD_LINE_RESPONSIBILITY_CENTER_ID);
				Float dr = (Float)line.get(FIELD_LINE_AMOUNT_DR);
				Float cr = (Float)line.get(FIELD_LINE_AMOUNT_CR);
				Float fuc_dr = (Float)line.get(FIELD_LINE_AMOUNT_FUC_DR);						
				Float fuc_cr = (Float)line.get(FIELD_LINE_AMOUNT_FUC_CR);
				
				//TODO: assumes all accounts belong to the same chart of accounts
				List parentAccounts = (List)parentList.get(accountId);
				Iterator pit = parentAccounts.iterator();
				while(pit.hasNext()){
					Map act = (Map)pit.next();
					Integer actId = (Integer)act.get(FIELD_LINE_ACCOUNT_ID);
					bk.delete(0, bk.length());
					bk.append(companyId).append(DEFAULT_SPLITTER).append(sob).append(DEFAULT_SPLITTER).append(resId).append(DEFAULT_SPLITTER).append(actId);
					bk.append(DEFAULT_SPLITTER).append(currencyCode).append(DEFAULT_SPLITTER).append(periodYear).append(DEFAULT_SPLITTER).append(periodNum);
					String key = bk.toString();
					Map actMap = (Map)postMaps.get(key);
					if(actMap == null){
						postMaps.put(key, newBalanceRow(companyId, sob, resId, actId, currencyCode, periodYear, periodNum, dr, cr, fuc_dr, fuc_cr));
					}else {
						addBalanceAmounts(actMap, dr, cr, fuc_dr, fuc_cr);
					}
				}
			}
		}
		
		// Flush every aggregated balance row in one transaction.
		Collection col = postMaps.values();
		SqlSession txSession = SessionFactory.getSession(false);
		try{
			Map map = new HashMap();
			map.put(LIST, col);
			txSession.insert(SQL_JOURNAL_POST_UPDATE_BALANCES, map);
			//TODO: update the journal status to P
			txSession.commit();
		}finally{
			SessionFactory.closeSession(txSession);
		}
	}
	
	/** Builds a fresh GLD_BALANCES row seeded with the line's amounts in the period, quarter and year buckets. */
	private Map newBalanceRow(Integer companyId, Integer sob, Integer resId, Integer actId,
			String currencyCode, Integer periodYear, Integer periodNum,
			Float dr, Float cr, Float fuc_dr, Float fuc_cr){
		Map map = new HashMap();
		map.put(CompanyAction.FIELD_COMPANY_ID, companyId);
		map.put(CompanyAction.FIELD_SET_OF_BOOKS_ID, sob);
		map.put(FIELD_LINE_RESPONSIBILITY_CENTER_ID, resId);
		map.put(FIELD_LINE_ACCOUNT_ID, actId);
		map.put(FIELD_HEAD_CURRENCY_CODE, currencyCode);//TODO: currency should be tied one-to-one to the account
		map.put(FIELD_HEAD_PERIOD_YEAR, periodYear);
		map.put(FIELD_HEAD_PERIOD_NUM, periodNum);
		map.put(FIELD_BALANCES_PERIOD_ENTERED_DR, getFloat(dr));
		map.put(FIELD_BALANCES_PERIOD_FUNCTIONAL_DR, getFloat(fuc_dr));
		map.put(FIELD_BALANCES_PERIOD_ENTERED_CR, getFloat(cr));
		map.put(FIELD_BALANCES_PERIOD_FUNCTIONAL_CR, getFloat(fuc_cr));
		map.put(FIELD_BALANCES_QUARTER_ENTERED_DR, getFloat(dr));
		map.put(FIELD_BALANCES_QUARTER_FUNCTIONAL_DR, getFloat(fuc_dr));
		map.put(FIELD_BALANCES_QUARTER_ENTERED_CR, getFloat(cr));
		map.put(FIELD_BALANCES_QUARTER_FUNCTIONAL_CR, getFloat(fuc_cr));
		map.put(FIELD_BALANCES_YEAR_ENTERED_DR, getFloat(dr));
		map.put(FIELD_BALANCES_YEAR_FUNCTIONAL_DR, getFloat(fuc_dr));
		map.put(FIELD_BALANCES_YEAR_ENTERED_CR, getFloat(cr));
		map.put(FIELD_BALANCES_YEAR_FUNCTIONAL_CR, getFloat(fuc_cr));	
		return map;
	}
	
	/** Adds the line's amounts onto an existing balance row's period, quarter and year buckets. */
	private void addBalanceAmounts(Map actMap, Float dr, Float cr, Float fuc_dr, Float fuc_cr){
		actMap.put(FIELD_BALANCES_PERIOD_ENTERED_DR, getFloat(actMap.get(FIELD_BALANCES_PERIOD_ENTERED_DR)) + getFloat(dr));
		actMap.put(FIELD_BALANCES_PERIOD_FUNCTIONAL_DR, getFloat(actMap.get(FIELD_BALANCES_PERIOD_FUNCTIONAL_DR)) + getFloat(fuc_dr));
		actMap.put(FIELD_BALANCES_PERIOD_ENTERED_CR, getFloat(actMap.get(FIELD_BALANCES_PERIOD_ENTERED_CR)) + getFloat(cr));
		actMap.put(FIELD_BALANCES_PERIOD_FUNCTIONAL_CR, getFloat(actMap.get(FIELD_BALANCES_PERIOD_FUNCTIONAL_CR)) + getFloat(fuc_cr));
		actMap.put(FIELD_BALANCES_QUARTER_ENTERED_DR, getFloat(actMap.get(FIELD_BALANCES_QUARTER_ENTERED_DR)) + getFloat(dr));
		actMap.put(FIELD_BALANCES_QUARTER_FUNCTIONAL_DR, getFloat(actMap.get(FIELD_BALANCES_QUARTER_FUNCTIONAL_DR)) + getFloat(fuc_dr));
		actMap.put(FIELD_BALANCES_QUARTER_ENTERED_CR, getFloat(actMap.get(FIELD_BALANCES_QUARTER_ENTERED_CR)) + getFloat(cr));
		actMap.put(FIELD_BALANCES_QUARTER_FUNCTIONAL_CR, getFloat(actMap.get(FIELD_BALANCES_QUARTER_FUNCTIONAL_CR)) + getFloat(fuc_cr));
		actMap.put(FIELD_BALANCES_YEAR_ENTERED_DR, getFloat(actMap.get(FIELD_BALANCES_YEAR_ENTERED_DR)) + getFloat(dr));
		actMap.put(FIELD_BALANCES_YEAR_FUNCTIONAL_DR, getFloat(actMap.get(FIELD_BALANCES_YEAR_FUNCTIONAL_DR)) + getFloat(fuc_dr));
		actMap.put(FIELD_BALANCES_YEAR_ENTERED_CR, getFloat(actMap.get(FIELD_BALANCES_YEAR_ENTERED_CR)) + getFloat(cr));
		actMap.put(FIELD_BALANCES_YEAR_FUNCTIONAL_CR, getFloat(actMap.get(FIELD_BALANCES_YEAR_FUNCTIONAL_CR)) + getFloat(fuc_cr));	
	}
	
	/**
	 * Collects the ancestor chain of the given account from the full account
	 * list; the account itself is appended when the chain terminates at a
	 * code with no parent segment (see recursiveRetrievalParentAccount).
	 * TODO: skip ancestors whose status is disabled
	 * TODO: does not yet check that the accounts share one chart of accounts
	 *
	 * @param allAccounts every known account row
	 * @param accountId id of the account whose ancestors are wanted
	 * @return the collected rows (empty when the account id is unknown)
	 */
	private List retrievalParentAccounts(List allAccounts, Integer accountId){
		List result = new ArrayList();
		Map account = null;
		String accountCode = null;
		// Locate the account row and remember its dotted code.
		for(Iterator it = allAccounts.iterator(); it.hasNext();){
			Map candidate = (Map)it.next();
			Integer actId = (Integer)candidate.get(AccountsAction.FIELD_ACCOUNT_ID);
			if(accountId.equals(actId)){
				account = candidate;
				accountCode = (String)candidate.get(AccountsAction.FIELD_ACCOUNT_CODE);
				break;
			}
		}
		if(accountCode != null){
			recursiveRetrievalParentAccount(allAccounts,accountCode,result,account);
		}
		return result;
	}
	
	/**
	 * Walks up the dotted account-code hierarchy: for code "1.2.3" it finds
	 * every account whose code is "1.2", adds it, and recurses from there;
	 * once a code without a dot is reached the original account itself is
	 * appended to the result list.
	 */
	private void recursiveRetrievalParentAccount(List allAccounts, String accountCode, List rs, Map account){
		int dot = accountCode.lastIndexOf(".");
		if(dot == -1){
			// Top of the hierarchy: include the starting account itself.
			if(account != null) rs.add(account);
			return;
		}
		String parentCode = accountCode.substring(0, dot);
		for(Iterator it = allAccounts.iterator(); it.hasNext();){
			Map candidate = (Map)it.next();
			String code = (String)candidate.get(AccountsAction.FIELD_ACCOUNT_CODE);
			if(parentCode.equals(code)){
				rs.add(candidate);
				recursiveRetrievalParentAccount(allAccounts,parentCode,rs,account);
			}
		}
	}
	
	
	/**
	 * -------------
	 * Posting validation for one journal head
	 * -------------
	 *
	 * Checks, in order: (1) the head's period is the currently open period,
	 * (2) total debit equals total credit, (3) the journal has at least one
	 * line, (4) the summed line amounts equal the head totals.
	 *
	 * Fixes: the DR/CR comparison previously skipped the check when
	 * TOTAL_AMOUNT_DR was null and then NPE'd in step (4); step (4) truncated
	 * the head totals with intValue(), passing e.g. head 10.5 vs lines 10.0;
	 * an empty (non-null) line list is now treated as "no lines".
	 *
	 * @param context current request context
	 * @param head journal head with JOURNAL_LINES attached
	 * @return STATUS_SUCCESS or one of the STATUS_ERROR_* codes
	 */
	private int validatePostJournal(SparkContext context, Map head) {
		
		/** (0)TODO: verify the journal is already in approved status **/
		
		/** (1) the head's period must be the currently open period **/
		Integer periodYear = (Integer)head.get(FIELD_HEAD_PERIOD_YEAR);
		Integer periodNum = (Integer)head.get(FIELD_HEAD_PERIOD_NUM);
		//TODO: read the open period from the current company's system variables
		if(getPeriodYear(context).intValue() != periodYear.intValue() || getPeriodNum(context).intValue() != periodNum.intValue()) {
			return STATUS_ERROR_PERIOD_NOT_OPEN;
		}
		
		/** (2) total debit must equal total credit (null counts as 0) **/
		Float totalAmountDR = (Float)head.get(FIELD_HEAD_TOTAL_AMOUNT_DR);
		Float totalAmountCR = (Float)head.get(FIELD_HEAD_TOTAL_AMOUNT_CR);
		float headDR = totalAmountDR == null ? 0 : totalAmountDR.floatValue();
		float headCR = totalAmountCR == null ? 0 : totalAmountCR.floatValue();
		if(headDR != headCR) {
			return STATUS_ERROR_DR_NOT_EQUAL_CR;
		}
		
		/** (3) the journal must have at least one line **/
		List lines = (List)head.get(JOURNAL_LINES);
		if(lines == null || lines.isEmpty()) return STATUS_ERROR_EMPTY_LINE;
		
		/** (4) the summed line amounts must equal the head totals (no intValue() truncation) **/
		float lineTotalDR = 0, lineTotalCR = 0;
		Iterator it = lines.iterator();
		while(it.hasNext()){
			Map line = (Map)it.next();
			lineTotalDR += getFloat((Float)line.get(FIELD_LINE_AMOUNT_DR));
			lineTotalCR += getFloat((Float)line.get(FIELD_LINE_AMOUNT_CR));
		}
		if(lineTotalDR != headDR || lineTotalCR != headCR){
			return STATUS_ERROR_HEAD_NOT_EQUAL_LINE;
		}
		
		return STATUS_SUCCESS;
	}
	
	/**
	 * -------------
	 * Save/update validation for a journal head
	 * -------------
	 *
	 * (1) the journal date must be present
	 * (2) the line debits and credits must balance
	 *
	 * As a side effect each line's DR/CR is copied into the functional-currency
	 * columns and the computed totals are written back onto the head.
	 *
	 * TODO: validate that the journal date lies inside the open period
	 *
	 * @param head head data (with JOURNAL_LINES)
	 * @param message response message populated with a warning on failure
	 * @return true when the head passes validation
	 */
	private boolean validate(Map head, Message message){
		
		float total_dr = 0;
		float total_cr = 0;
		List lines = (List)head.get("JOURNAL_LINES");
		if(lines != null){
			for(Iterator it = lines.iterator(); it.hasNext();){
				Map line = (Map)it.next();
				Double dr = transferDouble(line.get(FIELD_LINE_AMOUNT_DR));
				Double cr = transferDouble(line.get(FIELD_LINE_AMOUNT_CR));
				// Functional-currency amounts default to the entered amounts.
				line.put(FIELD_LINE_AMOUNT_FUC_DR, dr);
				line.put(FIELD_LINE_AMOUNT_FUC_CR, cr);
				if(dr != null) total_dr += dr.floatValue();
				if(cr != null) total_cr += cr.floatValue();
			}
		}
		// Dialog dimensions used by the client for any warning shown below.
		message.setWidth(300);
		message.setHeiht(150);
		if(total_dr != total_cr){
			message.setWarning("凭证分录借贷不平!");
			return false;
		}
		head.put(FIELD_HEAD_TOTAL_AMOUNT_DR, total_dr);
		head.put(FIELD_HEAD_TOTAL_AMOUNT_CR, total_cr);
		head.put(FIELD_HEAD_TOTAL_AMOUNT_FUC_DR, total_dr);
		head.put(FIELD_HEAD_TOTAL_AMOUNT_FUC_CR, total_cr);
		
		return isNotEmpty(head.get(FIELD_HEAD_JOURNAL_DATE), message,"凭证日期不能为空!");
	}
	
	
	/**
	 * Shared insert/update execution for a journal head and its lines.
	 *
	 * Insert: a temporary unique code is generated, the head and lines are
	 * inserted, then the final code (year + 2-digit period + 5-digit head id)
	 * replaces it. Update: the old lines are deleted and re-inserted. A unique
	 * key on JOURNAL_TYPE_NUM detects concurrent saves grabbing the same type
	 * number; the conflict is reported to the client with the next free number.
	 *
	 * Fixes: the type-number lookup previously ran before the try/finally and
	 * leaked the session on failure; a stray System.out.println debug line was
	 * removed; the line re-insert on update now calls insert() to match the
	 * insert statement it executes; the line loops were extracted to a helper.
	 *
	 * @param context current request context carrying the head/lines payload
	 * @param statement MyBatis statement id for the head (insert or update)
	 * @param action ACTION_INSERT or ACTION_UPDATE
	 * @throws Exception on parse or persistence failure (callers log and wrap)
	 */
	protected void doExecute(SparkContext context, String statement, String action) throws Exception {
		Map requestData = getRequestData(context);
		List list = (List)requestData.get(PARAMETER);
		Message message = new Message(true);
		if(list!=null && list.size() > 0){
			Map head = (Map)list.get(0);
			if(validate(head,message)){
				head.put(FIELD_HEAD_JOURNAL_DATE, DateFormatHelper.getDateFormat().parse((String)head.get(FIELD_HEAD_JOURNAL_DATE)));
				List lines = (List)head.get("JOURNAL_LINES");
				
				if(ACTION_INSERT.equals(action)) {
					// Temporary unique code; replaced with the period-based code after insert.
					head.put(FIELD_HEAD_JOURNAL_CODE, IDGenerator.getInstance().generate());
				}
				head.put(FIELD_HEAD_PERIOD_NUM, getPeriodNum(context));
				head.put(FIELD_HEAD_PERIOD_YEAR, getPeriodYear(context));
				head.put(FIELD_HEAD_SOURCE_CODE, DEFAULT_SOURCE_CODE);//TODO: derive from the journal source
				head.put(FIELD_HEAD_CURRENCY_CODE, DEFAULT_CURRENCY_CODE);//TODO: currency?
				head.put(FIELD_HEAD_STATUS, DEFAULT_STATUS_UNSIGNED);
				// Totals (incl. functional currency) were already set by validate().
				if(head.get(FIELD_HEAD_JOURNAL_TYPE)== null){
					head.put(FIELD_HEAD_JOURNAL_TYPE, DEFAULT_JOURNAL_TYPE);
				}
				setCompanyId(head, context);
				createWho(head, context);
				
				SqlSession sqlSession = SessionFactory.getSession(false);
				try {
					// Next sequence number for this journal type, unless the client supplied one.
					if(head.get(FIELD_HEAD_JOURNAL_TYPE_NUMBER)== null){
						head.put(FIELD_HEAD_JOURNAL_TYPE_NUMBER, (Integer) sqlSession.selectOne(SQL_JOURNAL_TYPE_NUM, head));
					}
					doAction(sqlSession, action, statement,head);
					Integer headId = (Integer)head.get(FIELD_HEAD_JOURNAL_HEADER_ID);
					if(ACTION_INSERT.equals(action)) {
						insertJournalLines(sqlSession, lines, headId, true);
						// Final voucher code: <year><2-digit period><5-digit head id>.
						String code = getPeriodYear(context).toString() + padding(getPeriodNum(context).toString(),2) + padding(headId.toString(), 5);
						Map p = new HashMap();
						p.put(FIELD_HEAD_JOURNAL_CODE, code);
						p.put(FIELD_HEAD_JOURNAL_HEADER_ID, headId);
						sqlSession.update(SQL_JOURNAL_HEAD_CODE,p);
					}else if(ACTION_UPDATE.equals(action)) {
						// Replace the lines wholesale: delete then re-insert.
						sqlSession.delete(SQL_JOURNAL_LINES_DELETE,head);
						insertJournalLines(sqlSession, lines, headId, false);
					}
					sqlSession.commit();
					Integer num = (Integer) sqlSession.selectOne(SQL_JOURNAL_TYPE_NUM, head);
					message.setContent(num);
					message.setResult(list);
				} catch(PersistenceException e){
					if(e.getCause() instanceof MySQLIntegrityConstraintViolationException){
						String msg = e.getCause().getMessage();
						if(msg !=null && msg.contains(CONFLICT_FIELD_TYPE_NUM_)) {
							// Concurrent save grabbed the same type number: tell the client the next free one.
							Integer num = (Integer) sqlSession.selectOne(SQL_JOURNAL_TYPE_NUM, head);
							message.setSuccess(false);
							message.setCode(ERROR_TYPE_NUM_CONFLICT);
							message.setContent(num);
						}else{
							throw e;
						}
					}else {
						throw e;
					}
				} finally {
					SessionFactory.closeSession(sqlSession);
				}
			}
		}
		writeMessage(context, message);
	}
	
	/** Inserts the journal lines for the given head id; on insert the default responsibility center is applied. */
	private void insertJournalLines(SqlSession sqlSession, List lines, Integer headId, boolean applyDefaultCenter){
		if(lines == null) return;
		Iterator it = lines.iterator();
		while(it.hasNext()){
			Map line = (Map)it.next();
			line.put(FIELD_LINE_JOURNAL_HEADER_ID, headId);
			if(applyDefaultCenter){
				//TODO: default responsibility center
				line.put(FIELD_LINE_RESPONSIBILITY_CENTER_ID, 1);
			}
			sqlSession.insert(SQL_JOURNAL_LINES_INSERT,line);
		}
	}
	
	
	/**
	 * Loads one journal (head + lines) into the view model for page rendering.
	 * The line list is padded with empty rows up to 8 entries so the voucher
	 * form always shows a fixed number of rows.
	 *
	 * @param context current request context; "id" request parameter selects the journal
	 * @throws SparkException wrapping any non-parse failure
	 */
	public void loadJournal(SparkContext context) throws SparkException {
		Map<String, Object> model = context.getModel();
		SqlSession sqlSession = SessionFactory.getSession();
		try {
			String id = context.getRequest().getParameter("id");
			if(id == null) return;
			Integer headId = Integer.valueOf(id);
			Map head = (Map)sqlSession.selectOne(SQL_JOURNAL_SELECT_HEAD, headId);
			if(head != null){
				model.put("journal_head", head);
			}
			List lines = (List)sqlSession.selectList(SQL_JOURNAL_SELECT_LINES, headId);
			if(lines != null) {
				// Pad the voucher form to 8 rows with empty placeholders.
				while(lines.size() < 8){
					lines.add(new HashMap());
				}
				model.put("journal_lines", lines);
			}
		} catch (NumberFormatException e){
			// Non-numeric id: render nothing.
			return;
		} catch (Exception e){
			throw new SparkException(Logger.log(ERROR_CODE_JOURNAL_LOAD, context, e));
		} finally {
			SessionFactory.closeSession(sqlSession);
		}
	}
	
	/**
	 * Loads the journal immediately preceding the current one (previous-voucher navigation).
	 * Delegates to loadJournalByDirection with the "previous head" statement.
	 * @param context current request context; its "id" parameter anchors the navigation
	 * @throws SparkException when the underlying lookup fails
	 */
	public void loadPreJournal(SparkContext context) throws SparkException{
		loadJournalByDirection(context,SQL_JOURNAL_HEAD_PRE,ERROR_CODE_JOURNAL_LOAD_PRE);
	}
	
	/**
	 * Loads the journal immediately following the current one (next-voucher navigation).
	 * Delegates to loadJournalByDirection with the "next head" statement.
	 * @param context current request context; its "id" parameter anchors the navigation
	 * @throws SparkException when the underlying lookup fails
	 */
	public void loadNextJournal(SparkContext context) throws SparkException{
		loadJournalByDirection(context,SQL_JOURNAL_HEAD_NEXT,ERROR_CODE_JOURNAL_LOAD_NEXT);
	}
	
	
	

	/**
	 * Returns one journal (head with its lines attached under JOURNAL_LINES)
	 * as a message payload, looked up by the "id" request parameter within
	 * the caller's company.
	 *
	 * @param context current request context
	 * @throws SparkException wrapping any non-parse failure
	 */
	public void getJournal(SparkContext context) throws SparkException{
		Message message = new Message(true);
		HttpServletRequest request = context.getRequest();
		SqlSession sqlSession = SessionFactory.getSession();
		try{
			String sid = request.getParameter("id");
			if(sid != null){
				Map p = new HashMap();
				p.put(FIELD_HEAD_JOURNAL_HEADER_ID, Integer.valueOf(sid));
				// Scope the lookup to the caller's company.
				setCompanyId(p,context);
				Map head = (Map)sqlSession.selectOne(SQL_JOURNAL_SELECT_HEAD, p);
				if(head != null){
					Integer headId = (Integer)head.get(FIELD_HEAD_JOURNAL_HEADER_ID);
					List lines = (List)sqlSession.selectList(SQL_JOURNAL_SELECT_LINES, headId);
					head.put("JOURNAL_LINES", lines);
					List result = new ArrayList();
					result.add(head);
					message.setResult(result);
				}
			}
			writeMessage(context, message);
		} catch (NumberFormatException e){
			// Non-numeric id: answer nothing.
			return;
		} catch (Exception e){
			throw new SparkException(Logger.log(ERROR_CODE_JOURNAL_LOAD_GET, context, e));
		} finally {
			SessionFactory.closeSession(sqlSession);
		}
	}
	

	/**
	 * Shared loader for previous/next voucher navigation. When no "id"
	 * parameter is present the company's newest journal is returned as the
	 * starting point; otherwise the given directional statement resolves the
	 * neighbouring journal of the anchor id within the caller's company.
	 *
	 * @param context current request context
	 * @param sql directional head-lookup statement (previous or next)
	 * @param errorCode error code logged on failure
	 * @throws SparkException wrapping any non-parse failure
	 */
	private void loadJournalByDirection(SparkContext context, String sql,String errorCode) throws SparkException{
		Message message = new Message(true);
		HttpServletRequest request = context.getRequest();
		SqlSession sqlSession = SessionFactory.getSession();
		try{
			String sid = request.getParameter("id");
			if(sid == null || sid.trim().isEmpty()){
				// No anchor: start navigation from the company's newest journal.
				Integer id = (Integer)sqlSession.selectOne(SQL_JOURNAL_HEAD_MAX, getCompanyId(context));
				if(id != null){
					Map head = (Map)sqlSession.selectOne(SQL_JOURNAL_SELECT_HEAD, id);
					List lines = (List)sqlSession.selectList(SQL_JOURNAL_SELECT_LINES, id);
					head.put("JOURNAL_LINES", lines);
					List result = new ArrayList();
					result.add(head);
					message.setResult(result);
				}
			}else{
				Map p = new HashMap();
				p.put(FIELD_HEAD_JOURNAL_HEADER_ID, Integer.valueOf(sid));
				setCompanyId(p,context);
				Map head = (Map)sqlSession.selectOne(sql, p);
				if(head != null){
					Integer neighbourId = (Integer)head.get(FIELD_HEAD_JOURNAL_HEADER_ID);
					List lines = (List)sqlSession.selectList(SQL_JOURNAL_SELECT_LINES, neighbourId);
					head.put("JOURNAL_LINES", lines);
					List result = new ArrayList();
					result.add(head);
					message.setResult(result);
				}
			}
			writeMessage(context, message);
		} catch (NumberFormatException e){
			// Non-numeric id: answer nothing.
			return;
		} catch (Exception e){
			throw new SparkException(Logger.log(errorCode, context, e));
		} finally {
			SessionFactory.closeSession(sqlSession);
		}
	}
}
