package cn.edu.nju.ws.sview.reasoning;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashSet;

import cn.edu.nju.ws.sview.cache.DomainCache;
import cn.edu.nju.ws.sview.cache.RangeCache;
import cn.edu.nju.ws.sview.cache.SubclassCache;
import cn.edu.nju.ws.sview.cache.SubpropertyCache;
import cn.edu.nju.ws.sview.cache.URIIDCache;
import cn.edu.nju.ws.sview.database.DBConnectionFactory;
import cn.edu.nju.ws.sview.rdf.BlankNode;
import cn.edu.nju.ws.sview.rdf.PlainLiteral;
import cn.edu.nju.ws.sview.rdf.RDFDocument;
import cn.edu.nju.ws.sview.rdf.Resource;
import cn.edu.nju.ws.sview.rdf.ResourceFactory;
import cn.edu.nju.ws.sview.rdf.TypedLiteral;
import cn.edu.nju.ws.sview.rdf.URIResource;
import cn.edu.nju.ws.sview.reasoning.provenance.Provenance;
import cn.edu.nju.ws.sview.reasoning.provenance.ProvenanceFactory;
import cn.edu.nju.ws.sview.util.URIUtil;
import cn.edu.nju.ws.sview.views.CustomDataMember;
import cn.edu.nju.ws.sview.views.CustomDataMember.Base;
import cn.edu.nju.ws.sview.views.DataMember;
import cn.edu.nju.ws.sview.views.Group;

import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;

/**
 * rule and facts extraction factory
 * 
 * @author Gong Saisai
 * 
 */
public class Repository {

//	static Logger logger = Logger.getLogger(Repository.class.getName());
	private static Repository INSTANCE = null;

	/** Private constructor: instances are obtained via {@link #getInstance()}. */
	private Repository() {
		/* Nothing */
	}

	/**
	 * Returns the lazily created singleton instance of this repository.
	 * The method is synchronized, so initialization is thread-safe.
	 *
	 * @return the shared {@code Repository} instance
	 */
	public static synchronized Repository getInstance() {
		if (INSTANCE != null) {
			return INSTANCE;
		}
		INSTANCE = new Repository();
		return INSTANCE;
	}

	/**
	 * Extracts all rules whose head matches the given predicate.
	 * For the head predicate:
	 * if it is a user-defined predicate, the rules are derived from the
	 * datamember table; otherwise they are derived from ontology axioms.
	 *
	 * @param headPredicate
	 *            the head specified
	 * @return all the relevant rules
	 */
	public HashSet<Rule> getRulesByHead(Predicate headPredicate) {
		HashSet<Rule> rules = new HashSet<Rule>();

		Resource resource = headPredicate.getResource();
		if (!(resource instanceof URIResource)) {
			// Blank nodes and literals never act as rule heads.
			return rules;
		}

		URIResource uriResource = (URIResource) resource;
		String uri = uriResource.getURI();

		if (uri.startsWith(ResourceFactory.INTERNAL_URI_PREFIX)) {
			/*
			 * User-defined rules come in two kinds.
			 * First, custom datamember rules, whose URI has the form
			 * INTERNAL_URI_PREFIX + "dm/group/isInner/classID/isCopy/name".
			 * Second, custom facet rules, TODO.
			 */
			if (uri.startsWith(ResourceFactory.CUSTOM_DATAMEMBER_URI_PREFIX)) {
				rules.addAll(getCustomDatamemberRules(uri));
			}
		} else {
			// Ontology predicates: unary heads come from class axioms,
			// binary heads from property axioms.
			int arity = headPredicate.getArity();
			if (arity == 1) {
				rules.addAll(generateRulesFromRDFSSubClassOf(uriResource));
				rules.addAll(generateRulesFromRDFSDomain(uriResource));
				rules.addAll(generateRulesFromRDFSRange(uriResource));
			} else if (arity == 2) {
				rules.addAll(generateRulesFromRDFSSubPropertyOf(uriResource));
			}
		}

		return rules;
	}	
	
	/**
	 * Builds the rules for the custom datamember identified by {@code uri}.
	 * <p>
	 * The URI must have the form
	 * {@code INTERNAL_URI_PREFIX + "dm/group/isInner/classID/isCopy/name"}.
	 * Depending on the datamember's make method:
	 * <ul>
	 * <li>{@code MAKE_METHOD_MERGE}: one rule per base datamember; a base that
	 * is itself a chain is expanded recursively into its underlying bodies;</li>
	 * <li>{@code MAKE_METHOD_CHAIN}: a single rule whose body chains the bases
	 * as head(x,y) :- b1(x,z1), b2(z1,z2), ..., bn(z_{n-1},y).</li>
	 * </ul>
	 * Bases whose underlying datamember no longer exists produce no rule and
	 * their dm_from rows are deleted.
	 *
	 * @param uri internal URI of the custom datamember
	 * @return the rules derived from the datamember definition
	 */
	private ArrayList<Rule> getCustomDatamemberRules(String uri) {
		ArrayList<Rule> rules = new ArrayList<Rule>();

		ArrayList<String> infos = URIUtil.parseCustomDatamemberURI(uri);
		String group = infos.get(0);
		boolean isInner = Boolean.parseBoolean(infos.get(1));
		int classID = Integer.parseInt(infos.get(2));
		boolean isCopy = Boolean.parseBoolean(infos.get(3));
		String name = infos.get(4);

		// Resolve the datamember id, its make method and its arity.
		int dmID = CustomDataMember.getDMIdByNameGroupClassIDIsCopy(name,
				Group.getGroupIdByName(group), isInner, classID, isCopy);
		int mkmethod = CustomDataMember.getMakeMethodByDMId(dmID);
		int arity = CustomDataMember.getArityByDMId(dmID);

		// The base datamembers this datamember is built from.
		ArrayList<Base> bases = CustomDataMember.getBasesByDMId(dmID);
		ArrayList<Term> terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		terms.add(new Variable("y"));
		URIResource uRes = ResourceFactory.getInstance().createURIResource(uri);
		Atom<Term> headAtom = null;
		if (arity == 2) {
			headAtom = new Atom<Term>(new Predicate(uRes, 2), terms);
		} else {
			// Datamember produced by make_method_connect:
			// head(x, y0, ..., y_{arity-2}).
			ArrayList<Term> termsIsConnect = new ArrayList<Term>();
			termsIsConnect.add(new Variable("x"));
			for (int i = 0; i < arity - 1; i++) {
				termsIsConnect.add(new Variable("y" + i));
			}
			headAtom = new Atom<Term>(new Predicate(uRes, arity), termsIsConnect);
		}

		// Build the rule bodies according to the make method.
		if (mkmethod == DataMember.MAKE_METHOD_MERGE) {
			for (Base base : bases) {
				int rowID = base.getRowID();
				Predicate pred = this.createPredicateFromBase(base);
				if (pred != null) {
					int dmtype = base.getDMType();
					int dmid = base.getID();

					// Is this base an intermediate predicate produced by a chain?
					boolean ismid = false;
					if (dmtype == DataMember.CUSTOM_DATAMEMBER) {
						int makeMethod = CustomDataMember.getMakeMethodByDMId(dmid);
						if (makeMethod == DataMember.MAKE_METHOD_CHAIN) {
							ismid = true;
						}
					}

					if (!ismid) {
						// Plain base: head(x,y) :- base(x,y),
						// or base(y,x) when the base is marked inverse.
						ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
						if (!base.isInverse()) {
							bodyAtoms.add(new Atom<Term>(pred,
									new ArrayList<Term>(terms)));
						} else {
							ArrayList<Term> inverse_terms = new ArrayList<Term>();
							inverse_terms.add(new Variable("y"));
							inverse_terms.add(new Variable("x"));
							bodyAtoms.add(new Atom<Term>(pred,
									new ArrayList<Term>(inverse_terms)));
						}

						Rule rule = new Rule(new Atom<Term>(
								headAtom.getPredicate(), headAtom.getTerms()),
								bodyAtoms, ProvenanceFactory.getInstance()
										.createProvFromDataMembers());
						ArrayList<Integer> rowIDs = new ArrayList<Integer>();
						rowIDs.add(rowID);
						rule.setRowIDs(rowIDs);
						rules.add(rule);
					} else {
						// Chain base: expand it into the rules of the
						// chained datamember and splice their bodies in.
						CustomDataMember cdm = CustomDataMember.getCustomDataMemberById(dmid);
						if (cdm != null) {
							URIResource pred_chain = ResourceFactory
									.getInstance()
									.createCustomDatamemberURIResource(
											cdm.getGroup(), cdm.getName(),
											cdm.isInner(), cdm.getClassID(),
											cdm.isCopy());
							String chain_uri = pred_chain.getURI();
							ArrayList<Rule> chainRules = this
									.getCustomDatamemberRules(chain_uri);
							for (Rule chain_rule : chainRules) {
								Rule rule = new Rule(new Atom<Term>(
										headAtom.getPredicate(),
										headAtom.getTerms()),
										chain_rule.getBody(), ProvenanceFactory
												.getInstance()
												.createProvFromDataMembers());
								ArrayList<Integer> rowIDs = new ArrayList<Integer>();
								rowIDs.add(rowID);
								rowIDs.addAll(chain_rule.getRowIDs());
								rule.setRowIDs(rowIDs);
								rules.add(rule);
							}
						} else {
							// The chained datamember no longer exists.
							deleteInvalidRuleRows(rowID);
						}
					}
				} else {
					// pred == null: the rule body is no longer valid,
					// delete the corresponding dm_from row.
					deleteInvalidRuleRows(rowID);
				}
			}
		} else if (mkmethod == DataMember.MAKE_METHOD_CHAIN) {
			// Intermediate rule: a merge datamember that references this chain
			// expands it into the underlying elements built here.

			// Tracks whether some base datamember has become invalid.
			boolean valid = true;
			ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
			int length = bases.size();
			ArrayList<Integer> rowIDs = new ArrayList<Integer>();

			for (int i = 0; i < bases.size(); i++) {
				Base base = bases.get(i);
				int rowID = base.getRowID();
				rowIDs.add(rowID);
				Predicate pred0 = this.createPredicateFromBase(base);
				if (pred0 == null) {
					valid = false;
					break;
				}
				ArrayList<Term> terms0 = new ArrayList<Term>();
				// Chain variables: (x,z1) (z1,z2) ... (z_{length-1},y).
				Variable v1 = null;
				Variable v2 = null;
				if (i == 0) {
					v1 = new Variable("x");
					if (length == 2) {
						v2 = new Variable("z");
					} else if (length > 2) {
						v2 = new Variable("z1");
					} else {
						throw new IllegalArgumentException("The args of the rule has errors");
					}
				} else if (i == bases.size() - 1) {
					v2 = new Variable("y");
					if (length == 2) {
						v1 = new Variable("z");
					} else if (length > 2) {
						v1 = new Variable("z" + (length - 1));
					} else {
						throw new IllegalArgumentException("The args of the rule has errors");
					}
				} else {
					if (length > 2) {
						v1 = new Variable("z" + i);
						v2 = new Variable("z" + (i + 1));
					} else {
						throw new IllegalArgumentException("The args of the rule has errors");
					}
				}
				if (!base.isInverse()) {
					terms0.add(v1);
					terms0.add(v2);
				} else {
					terms0.add(v2);
					terms0.add(v1);
				}
				bodyAtoms.add(new Atom<Term>(pred0, new ArrayList<Term>(terms0)));
			}

			if (!valid) {
				// A datamember used in the rule body has been deleted: remove
				// the corresponding dm_from rows and emit no (truncated) rule.
				// FIX: was base.getID(), which passed the datamember id where
				// deleteInvalidRuleRows expects the dm_from row id; also the
				// incomplete rule used to be added anyway.
				for (Base base : bases) {
					this.deleteInvalidRuleRows(base.getRowID());
				}
			} else {
				Rule rule = new Rule(new Atom<Term>(headAtom.getPredicate(),
						headAtom.getTerms()), bodyAtoms,
						ProvenanceFactory.getInstance().createProvFromDataMembers());
				rule.setRowIDs(rowIDs);
				rules.add(rule);
			}
		}

		return rules;
	}
	
	/**
	 * Deletes invalid rule rows from the dm_from table.
	 * While editing a datamember, other custom datamembers it refers to may
	 * already have been deleted; the rows that referenced them are removed here.
	 *
	 * @param rowID the id of the row in the dm_from table
	 */
	private void deleteInvalidRuleRows(int rowID) {
		Connection con = DBConnectionFactory.getConnection();
		PreparedStatement pstmt = null;
		try {
			// FIX: parameterized statement instead of string concatenation.
			pstmt = con.prepareStatement("delete from dm_from where id=?");
			pstmt.setInt(1, rowID);
			pstmt.executeUpdate();
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// FIX: close the statement and connection even when the update throws.
			try {
				if (pstmt != null) {
					pstmt.close();
				}
				con.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
	}
	
	/**
	 * Creates the binary predicate corresponding to a datamember base.
	 *
	 * @param base the base, either a URI datamember or a custom datamember
	 * @return the newly created predicate, or {@code null} when the underlying
	 *         datamember can no longer be resolved
	 */
	private Predicate createPredicateFromBase(Base base) {
		Predicate pred = null;
		int dmType = base.getDMType();
		int dm_id = base.getID();

		URIResource uriResource = null;
		if (dmType == DataMember.URI_DATAMEMBER) {
			try {
				// FIX: Integer.valueOf instead of the deprecated new Integer(...).
				String uri = URIIDCache.getInstance().getURI(Integer.valueOf(dm_id));
				uriResource = ResourceFactory.getInstance().createURIResource(uri);
			} catch (Throwable e) {
				e.printStackTrace();
			}
		} else if (dmType == DataMember.CUSTOM_DATAMEMBER) {
			CustomDataMember csdm = CustomDataMember.getCustomDataMemberById(dm_id);
			if (csdm != null) {
				String mgroup = csdm.getGroup();
				String mname = csdm.getName();
				boolean misInner = csdm.isInner();
				int mclassID = csdm.getClassID();
				boolean misCopy = csdm.isCopy();
				uriResource = ResourceFactory.getInstance()
						.createCustomDatamemberURIResource(mgroup, mname, misInner, mclassID, misCopy);
			}
		}

		/* The datamember arity must be 2. */
		if (uriResource != null) {
			pred = new Predicate(uriResource, 2);
		}
		return pred;
	}

	/**
	 * Generates rules from rdfs:subClassOf axioms.
	 * For every axiom "XXX rdfs:subClassOf superClass" the rule
	 * "superClass(x) :- XXX(x)" is produced.
	 *
	 * @param superClass
	 *            the ontology class acting as the super class
	 * @return all generated rules
	 */
	private HashSet<Rule> generateRulesFromRDFSSubClassOf(URIResource superClass
			) {
		HashSet<Rule> result = new HashSet<Rule>();

		String uri = superClass.getURI();
		int uriID = 0;
		try {
			uriID = URIIDCache.getInstance().getURIID(uri, true);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Get the ids of all subclasses of the class.
		HashSet<Integer> subclasses = new HashSet<Integer>();
		try {
			subclasses = SubclassCache.getInstance().getSubClass(uriID);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Generate one rule per subclass; the shared head is superClass(x).
		ArrayList<Term> terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		Atom<Term> headAtom = new Atom<Term>(new Predicate(superClass, 1), terms);
		for (Integer subclassID : subclasses) {
			String subclassURI = null;
			try {
				subclassURI = URIIDCache.getInstance().getURI(subclassID);
			} catch (Throwable e) {
				e.printStackTrace();
			}
			if (subclassURI == null) {
				// FIX: skip ids whose URI could not be resolved instead of
				// building a resource from a null URI.
				continue;
			}
			URIResource subclassResource = ResourceFactory.getInstance().createURIResource(subclassURI);
			Predicate subclassPredicate = new Predicate(subclassResource, 1);
			ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
			bodyAtoms.add(new Atom<Term>(subclassPredicate, new ArrayList<Term>(terms)));

			Rule rule = new Rule(new Atom<Term>(headAtom.getPredicate(), headAtom.getTerms()),
					bodyAtoms, ProvenanceFactory.getInstance().createProvFromSchemaReasoning());
			result.add(rule);
		}

		return result;
	}

	/**
	 * Generates rules from rdfs:subPropertyOf axioms.
	 * For every axiom "XXX rdfs:subPropertyOf superProperty" the rule
	 * "superProperty(x,y) :- XXX(x,y)" is produced.
	 *
	 * @param superProperty
	 *            the ontology property acting as the super property
	 * @return all generated rules
	 */
	private HashSet<Rule> generateRulesFromRDFSSubPropertyOf(
			URIResource superProperty) {
		HashSet<Rule> result = new HashSet<Rule>();

		String uri = superProperty.getURI();
		int uriID = 0;
		try {
			uriID = URIIDCache.getInstance().getURIID(uri, true);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Get the ids of all subproperties of the property.
		HashSet<Integer> subproperties = new HashSet<Integer>();
		try {
			subproperties = SubpropertyCache.getInstance().getSubProperty(uriID);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Generate one rule per subproperty; the shared head is superProperty(x,y).
		ArrayList<Term> terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		terms.add(new Variable("y"));
		Atom<Term> headAtom = new Atom<Term>(new Predicate(superProperty, 2), terms);
		for (Integer subpropertyID : subproperties) {
			String subpropertyURI = null;
			try {
				subpropertyURI = URIIDCache.getInstance().getURI(subpropertyID);
			} catch (Throwable e) {
				e.printStackTrace();
			}
			if (subpropertyURI == null) {
				// FIX: skip ids whose URI could not be resolved instead of
				// building a resource from a null URI.
				continue;
			}
			URIResource subpropertyResource = ResourceFactory.getInstance().createURIResource(subpropertyURI);
			Predicate subpropertyPredicate = new Predicate(subpropertyResource, 2);
			ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
			bodyAtoms.add(new Atom<Term>(subpropertyPredicate, new ArrayList<Term>(terms)));

			Rule rule = new Rule(new Atom<Term>(headAtom.getPredicate(), headAtom.getTerms()),
					bodyAtoms, ProvenanceFactory.getInstance().createProvFromSchemaReasoning());
			result.add(rule);
		}

		return result;
	}

	/**
	 * Generates rules from rdfs:domain axioms.
	 * For every axiom "XXX rdfs:domain classResource" the rule
	 * "classResource(x) :- XXX(x,y)" is produced.
	 *
	 * @param classResource
	 *            the ontology class acting as the domain
	 * @return all generated rules
	 */
	private HashSet<Rule> generateRulesFromRDFSDomain(URIResource classResource
		) {
		HashSet<Rule> result = new HashSet<Rule>();

		String uri = classResource.getURI();
		int uriID = 0;
		try {
			uriID = URIIDCache.getInstance().getURIID(uri, true);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Get the ids of all properties whose domain is this class.
		HashSet<Integer> properties = new HashSet<Integer>();
		try {
			properties = DomainCache.getInstance().getDomain(uriID);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Head: classResource(x) — the subject of the property atom.
		ArrayList<Term> terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		Atom<Term> headAtom = new Atom<Term>(new Predicate(classResource, 1), terms);

		// Body terms for each property atom: (x, y).
		terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		terms.add(new Variable("y"));
		for (Integer propertyID : properties) {
			String propertyURI = null;
			try {
				propertyURI = URIIDCache.getInstance().getURI(propertyID);
			} catch (Throwable e) {
				e.printStackTrace();
			}
			if (propertyURI == null) {
				// FIX: skip ids whose URI could not be resolved instead of
				// building a resource from a null URI.
				continue;
			}
			URIResource propertyResource = ResourceFactory.getInstance().createURIResource(propertyURI);
			Predicate propertyPredicate = new Predicate(propertyResource, 2);
			ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
			bodyAtoms.add(new Atom<Term>(propertyPredicate, new ArrayList<Term>(terms)));

			Rule rule = new Rule(new Atom<Term>(headAtom.getPredicate(), headAtom.getTerms()),
					bodyAtoms, ProvenanceFactory.getInstance().createProvFromSchemaReasoning());
			result.add(rule);
		}

		return result;
	}

	/**
	 * Generates rules from rdfs:range axioms.
	 * For every axiom "XXX rdfs:range classResource" the rule
	 * "classResource(y) :- XXX(x,y)" is produced.
	 *
	 * @param classResource
	 *            the ontology class acting as the range
	 * @return all generated rules
	 */
	private HashSet<Rule> generateRulesFromRDFSRange(URIResource classResource
			) {
		HashSet<Rule> result = new HashSet<Rule>();

		String uri = classResource.getURI();
		int uriID = 0;
		try {
			uriID = URIIDCache.getInstance().getURIID(uri, true);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Get the ids of all properties whose range is this class.
		HashSet<Integer> properties = new HashSet<Integer>();
		try {
			properties = RangeCache.getInstance().getRange(uriID);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		// Head: classResource(y) — the OBJECT of the property atom.
		// FIX: the head used Variable("x"), which produced the domain rule
		// classResource(x):-XXX(x,y) instead of the documented range rule
		// classResource(y):-XXX(x,y).
		ArrayList<Term> terms = new ArrayList<Term>();
		terms.add(new Variable("y"));
		Atom<Term> headAtom = new Atom<Term>(new Predicate(classResource, 1), terms);

		// Body terms for each property atom: (x, y).
		terms = new ArrayList<Term>();
		terms.add(new Variable("x"));
		terms.add(new Variable("y"));
		for (Integer propertyID : properties) {
			String propertyURI = null;
			try {
				propertyURI = URIIDCache.getInstance().getURI(propertyID);
			} catch (Throwable e) {
				e.printStackTrace();
			}
			if (propertyURI == null) {
				// FIX: skip ids whose URI could not be resolved instead of
				// building a resource from a null URI.
				continue;
			}
			URIResource propertyResource = ResourceFactory.getInstance().createURIResource(propertyURI);
			Predicate propertyPredicate = new Predicate(propertyResource, 2);
			ArrayList<Atom<Term>> bodyAtoms = new ArrayList<Atom<Term>>();
			bodyAtoms.add(new Atom<Term>(propertyPredicate, new ArrayList<Term>(terms)));

			Rule rule = new Rule(new Atom<Term>(headAtom.getPredicate(), headAtom.getTerms()),
					bodyAtoms, ProvenanceFactory.getInstance().createProvFromSchemaReasoning());
			result.add(rule);
		}

		return result;
	}

	/**
	 * Retrieves all ground atoms matching the given atom.
	 * User-defined (internal) predicates are not matched here — query results
	 * are not cached, so only ontology classes and properties yield ground
	 * atoms. Note that some terms of the query atom may already be constants.
	 *
	 * @param queryAtom
	 *            the atom to match
	 * @return all matching ground atoms
	 */
	public HashSet<GroundAtom> getGroundAtomsByAtom(Atom<Term> queryAtom
			) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();
		Predicate predicate = queryAtom.getPredicate();
		Resource resource = predicate.getResource();
		ArrayList<Term> terms = queryAtom.getTerms();

		// Internal (user-defined) predicates have no stored ground atoms.
		boolean internal = resource instanceof URIResource
				&& ((URIResource) resource).getURI().startsWith(ResourceFactory.INTERNAL_URI_PREFIX);

		if (!internal) {
			int arity = predicate.getArity();
			if (arity == 1) {
				hs.addAll(generateGroundAtomsFromOntologyClass(resource, terms.get(0)));
			} else if (arity == 2) {
				hs.addAll(generateGroundAtomsFromOntologyProperty(resource, terms.get(0), terms.get(1)));
			}
		}
		return hs;
	}
	
	/**
	 * Looks up the URIResource corresponding to a URI id.
	 *
	 * @param id
	 *            the id of a URI
	 * @return the URIResource for the id, or {@code null} if the lookup failed
	 */
	private URIResource getURIResourceById(int id) {
		URIResource resource = null;

		try {
			// FIX: Integer.valueOf instead of the deprecated new Integer(...).
			String uri = URIIDCache.getInstance().getURI(Integer.valueOf(id));
			resource = ResourceFactory.getInstance().createURIResource(uri);
		} catch (Throwable e) {
			e.printStackTrace();
		}

		return resource;
	}
	
	/**
	 * Looks up the id corresponding to a URIResource.
	 *
	 * @param resource
	 *            a URI resource
	 * @return the uriId of the resource, or 0 if it does not exist
	 */
	private int getIdByURIResource(URIResource resource) {
		int uriId = 0;
		try {
			uriId = URIIDCache.getInstance().getURIID(resource.getURI(), false);
		} catch (Throwable e) {
			e.printStackTrace();
		}
		return uriId;
	}	

	/**
	 * Converts a resource into its string identifier in the quadruple table.
	 * Encoding: "u"+uriId for URIs, "b"+docId+":"+nodeId for blank nodes,
	 * "l"+lexicalForm for both plain and typed literals.
	 *
	 * @param resource
	 *            the resource to convert into an id
	 * @return the id of the resource in the quadruple table
	 */
	public String getIDByResource(Resource resource) {
		String id = null;

		if (resource instanceof URIResource) {
			String uri = ((URIResource) resource).getURI();
			try {
				id = "u" + URIIDCache.getInstance().getURIID(uri, true);
			} catch (Throwable e) {
				e.printStackTrace();
			}
		} else if (resource instanceof BlankNode) {
			BlankNode bnode = (BlankNode) resource;
			int docId = 0;
			try {
				docId = URIIDCache.getInstance().getURIID(bnode.getRDFDocument().getURI().getURI(), true);
			} catch (Throwable e) {
				e.printStackTrace();
			}
			int NodeId = bnode.getBlankNodeID();
			id = "b" + docId + ":" + NodeId;
		} else if (resource instanceof PlainLiteral) {
			id = "l" + ((PlainLiteral) resource).getLexicalForm();
		} else if (resource instanceof TypedLiteral) {
			id = "l" + ((TypedLiteral) resource).getLexicalForm();
		}

		return id;
	}
	
	/**
	 * Converts a quadruple-table id back into the corresponding resource.
	 * Id encoding: 'u'&lt;uriId&gt; for URIs, 'b'&lt;docId&gt;:&lt;nodeId&gt;
	 * for blank nodes, 'l'&lt;lexicalForm&gt; for literals (plain when
	 * {@code datatypeid} is null, typed otherwise).
	 *
	 * @param id
	 *            the id in the quadruple table
	 * @param datatypeid
	 *            if the id is a typed literal, the id of its datatype
	 * @param lang
	 *            if the id is a plain literal, its language; use "" (never
	 *            NULL) when there is no language
	 * @return the resource for the id, or {@code null} for an unknown prefix
	 */
	public Resource getResourceById(String id, String datatypeid, String lang) {
		Resource resource = null;
		// Guard against callers passing null instead of "".
		if (lang == null) lang = "";
		if (id.charAt(0) == 'u') { // uri
			int uriId = Integer.parseInt(id.substring(1));
			resource = getURIResourceById(uriId);

		} else if (id.charAt(0) == 'b') { // blankNode
			// FIX: split the id once instead of re-splitting the same string twice.
			String[] parts = id.substring(1).split(":");
			int docId = Integer.parseInt(parts[0]);
			int NodeId = Integer.parseInt(parts[1]);

			URIResource docURI = getURIResourceById(docId);
			RDFDocument doc = ResourceFactory.getInstance().
								createRDFDocument(docURI.getURI());

			resource = ResourceFactory.getInstance().
								createBlankNode(NodeId, doc);

		} else if (id.charAt(0) == 'l') { // literal

			if (datatypeid == null) { // plain literal
				resource = ResourceFactory.getInstance().
								createPlainLiteral(id.substring(1), lang);

			} else { // type literal
				int typeId = Integer.parseInt(datatypeid);
				URIResource typeURI = getURIResourceById(typeId);
				resource = ResourceFactory.getInstance().
							createTypedLiteral(id.substring(1), typeURI.getURI());
			}
		}
		return resource;
	}
	
	/**
	 * Matches ground atoms for an ontology class against the quadruple table.
	 * Looks up rdf:type triples whose object is the class; the query term may
	 * be a variable (enumerate all instances) or a constant (membership check).
	 *
	 * @param classResource
	 *            the ontology class
	 * @param queryTerm
	 *            the term to match (Variable or Constant)
	 * @return all matching ground atoms
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyClass(
			Resource classResource, Term queryTerm
			) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();
		String p = getIDByResource(ResourceFactory.getInstance().
									createURIResource(RDF.type.getURI()));
		String o = getIDByResource(classResource);

		Predicate predicate = new Predicate(classResource, 1);

		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = DBConnectionFactory.getConnection();

			if (queryTerm instanceof Variable) {
				// Enumerate every subject typed with this class.
				pstmt = con.prepareStatement(
						"select s,doc_id " +
						"from quadruple " +
						"where p = ? and o = ?");
				pstmt.setString(1, p);
				pstmt.setString(2, o);

				rs = pstmt.executeQuery();
				while (rs.next()) {
					String s = rs.getString("s");
					int doc_id = rs.getInt("doc_id");
					RDFDocument rdfDocument = null;
					try {
						rdfDocument = ResourceFactory.getInstance().createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
					} catch (Throwable e) {
						e.printStackTrace();
					}
					Provenance prov = ProvenanceFactory.getInstance().createProvFromRDFDocument(rdfDocument);
					Resource subjectResource = getResourceById(s, null, "");

					ArrayList<Constant> constants = new ArrayList<Constant>();
					constants.add(new Constant(subjectResource));

					hs.add(new GroundAtom(predicate, constants, prov));
				}

			} else if (queryTerm instanceof Constant) {
				// Membership check for one concrete subject.
				pstmt = con.prepareStatement(
						"select doc_id from quadruple where p = ? and o = ? and s = ?");

				Resource subjectResource = ((Constant) queryTerm).getResource();
				String s = getIDByResource(subjectResource);
				pstmt.setString(1, p);
				pstmt.setString(2, o);
				pstmt.setString(3, s);
				rs = pstmt.executeQuery();
				if (rs.next()) {
					int doc_id = rs.getInt("doc_id");
					RDFDocument rdfDocument = null;
					try {
						rdfDocument = ResourceFactory.getInstance().createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
					} catch (Throwable e) {
						e.printStackTrace();
					}
					Provenance prov = ProvenanceFactory.getInstance().createProvFromRDFDocument(rdfDocument);
					ArrayList<Constant> constants = new ArrayList<Constant>();
					constants.add(new Constant(subjectResource));

					hs.add(new GroundAtom(predicate, constants, prov));
				}
			}

		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// FIX: close JDBC resources even when the query throws; the old
			// code also NPE'd on pstmt.close() when queryTerm was neither a
			// Variable nor a Constant (pstmt stayed null).
			try {
				if (rs != null) {
					rs.close();
				}
				if (pstmt != null) {
					pstmt.close();
				}
				if (con != null) {
					con.close();
				}
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}

		return hs;
	}	
	
	/**
	 * Matches ground atoms for an ontology property against the quadruple
	 * table, dispatching on whether each query term is a variable or a
	 * constant.
	 *
	 * @param propertyResource
	 *            the ontology property
	 * @param queryTerm1
	 *            the first term to match
	 * @param queryTerm2
	 *            the second term to match
	 * @return all matching ground atoms
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyProperty(
			Resource propertyResource, Term queryTerm1, Term queryTerm2) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();

		boolean firstIsVariable = queryTerm1 instanceof Variable;
		boolean firstIsConstant = queryTerm1 instanceof Constant;
		boolean secondIsVariable = queryTerm2 instanceof Variable;
		boolean secondIsConstant = queryTerm2 instanceof Constant;

		if (firstIsVariable && secondIsVariable) {
			hs.addAll(generateGroundAtomsFromOntologyPropertyVariableVariable(
					propertyResource, (Variable) queryTerm1, (Variable) queryTerm2));
		} else if (firstIsVariable && secondIsConstant) {
			hs.addAll(generateGroundAtomsFromOntologyPropertyVariableConstant(
					propertyResource, (Variable) queryTerm1, (Constant) queryTerm2));
		} else if (firstIsConstant && secondIsVariable) {
			hs.addAll(generateGroundAtomsFromOntologyPropertyConstantVariable(
					propertyResource, (Constant) queryTerm1, (Variable) queryTerm2));
		} else if (firstIsConstant && secondIsConstant) {
			hs.addAll(generateGroundAtomsFromOntologyPropertyConstantConstant(
					propertyResource, (Constant) queryTerm1, (Constant) queryTerm2));
		}

		return hs;
	}
	
	/**
	 * Matches ground atoms for property(?x, ?y): enumerates every quadruple
	 * with the given property as its predicate.
	 *
	 * @param propertyResource the ontology property
	 * @param queryTerm1 the (variable) subject term
	 * @param queryTerm2 the (variable) object term
	 * @return all matching ground atoms
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyPropertyVariableVariable(
			Resource propertyResource, Variable queryTerm1, Variable queryTerm2) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();

		String p = getIDByResource(propertyResource);

		Predicate predicate = new Predicate(propertyResource, 2);

		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = DBConnectionFactory.getConnection();
			pstmt = con.prepareStatement(
					"select s, o, datatypeid, lang,doc_id " +
					"from quadruple " +
					"where p = ?");

			pstmt.setString(1, p);

			rs = pstmt.executeQuery();
			while (rs.next()) {
				String s = rs.getString("s");
				String o = rs.getString("o");
				String datatypeidString = rs.getString("datatypeid");
				String lang = rs.getString("lang");
				int doc_id = rs.getInt("doc_id");
				RDFDocument rdfDocument = null;
				try {
					rdfDocument = ResourceFactory.getInstance().createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
				} catch (Throwable e) {
					e.printStackTrace();
				}
				Provenance prov = ProvenanceFactory.getInstance().createProvFromRDFDocument(rdfDocument);
				ArrayList<Constant> constants = new ArrayList<Constant>();
				constants.add(new Constant(getResourceById(s, null, "")));
				constants.add(new Constant(getResourceById(o, datatypeidString, lang)));

				hs.add(new GroundAtom(predicate, constants, prov));
			}

		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// FIX: close JDBC resources even when the query throws
			// (the old code leaked rs/pstmt/con on SQLException).
			try {
				if (rs != null) {
					rs.close();
				}
				if (pstmt != null) {
					pstmt.close();
				}
				if (con != null) {
					con.close();
				}
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}

		return hs;
	}
	
	
	/**
	 * Matches ground atoms for property(?x, constant): enumerates every
	 * subject related to the given constant object via the property. Literal
	 * objects are additionally constrained by language (plain) or datatype
	 * (typed).
	 *
	 * @param propertyResource the ontology property
	 * @param queryTerm1 the (variable) subject term
	 * @param queryTerm2 the (constant) object term
	 * @return all matching ground atoms
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyPropertyVariableConstant(
			Resource propertyResource, Variable queryTerm1, Constant queryTerm2) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();

		String p = getIDByResource(propertyResource);

		Predicate predicate = new Predicate(propertyResource, 2);

		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = DBConnectionFactory.getConnection();

			Resource objectResource = queryTerm2.getResource();
			String o = getIDByResource(objectResource);

			if (objectResource instanceof PlainLiteral) {
				pstmt = con.prepareStatement(
						"select s,doc_id " +
						"from quadruple " +
						"where p = ? and o = ? " +
						" and lang = ?");
				String lang = ((PlainLiteral) objectResource).getLanguageTag();
				pstmt.setString(3, lang);
			} else if (objectResource instanceof TypedLiteral) {
				pstmt = con.prepareStatement(
						"select s,doc_id " +
						"from quadruple " +
						"where p = ? and o = ? and datatypeid = ?");
				int datatypeid = 0;
				try {
					datatypeid = URIIDCache.getInstance().getURIID((((TypedLiteral) objectResource).getDatatypeURI().getURI()), false);
				} catch (Throwable e) {
					e.printStackTrace();
				}
				pstmt.setInt(3, datatypeid);

			} else { // URI or BlankNode
				pstmt = con.prepareStatement(
						"select s,doc_id " +
						"from quadruple " +
						"where p = ? and o = ?");
			}
			pstmt.setString(1, p);
			pstmt.setString(2, o);

			rs = pstmt.executeQuery();
			while (rs.next()) {
				String s = rs.getString("s");
				int doc_id = rs.getInt("doc_id");
				RDFDocument rdfDocument = null;
				try {
					rdfDocument = ResourceFactory.getInstance().createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
				} catch (Throwable e) {
					e.printStackTrace();
				}
				Provenance prov = ProvenanceFactory.getInstance().createProvFromRDFDocument(rdfDocument);
				ArrayList<Constant> constants = new ArrayList<Constant>();
				constants.add(new Constant(getResourceById(s, null, "")));
				constants.add(new Constant(objectResource));

				hs.add(new GroundAtom(predicate, constants, prov));
			}
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// FIX: close JDBC resources even when the query throws
			// (the old code leaked rs/pstmt/con on SQLException).
			try {
				if (rs != null) {
					rs.close();
				}
				if (pstmt != null) {
					pstmt.close();
				}
				if (con != null) {
					con.close();
				}
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}

		return hs;
	}
	
	
	/**
	 * Extracts ground atoms for the query pattern (constant, property, ?var):
	 * fetches every quadruple whose predicate is {@code propertyResource} and
	 * whose subject is the resource bound by {@code queryTerm1}, and turns each
	 * row into a binary ground atom (subject, object) whose provenance is the
	 * quadruple's source document.
	 *
	 * @param propertyResource the property (predicate) resource being queried
	 * @param queryTerm1       the bound subject term
	 * @param queryTerm2       the unbound object variable (unused beyond the
	 *                         pattern signature)
	 * @return the set of matching ground atoms; empty when nothing matches or
	 *         when a SQL error occurs (the error is printed, not propagated)
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyPropertyConstantVariable(
			Resource propertyResource, Constant queryTerm1, Variable queryTerm2) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();

		String p = getIDByResource(propertyResource);
		Predicate predicate = new Predicate(propertyResource, 2);

		Resource subjectResource = queryTerm1.getResource();
		String s = getIDByResource(subjectResource);

		// try-with-resources guarantees the connection, statement and result
		// set are closed even when an exception escapes mid-iteration; the
		// previous version leaked all three on any failure.
		try (Connection con = DBConnectionFactory.getConnection();
				PreparedStatement pstmt = con.prepareStatement(
						"select doc_id,o, datatypeid, lang " +
						"from quadruple " +
						"where p = ? and s = ?")) {
			pstmt.setString(1, p);
			pstmt.setString(2, s);

			try (ResultSet rs = pstmt.executeQuery()) {
				while (rs.next()) {
					String o = rs.getString("o");
					String datatypeidString = rs.getString("datatypeid");
					String lang = rs.getString("lang");
					int doc_id = rs.getInt("doc_id");

					// Best-effort provenance lookup: a failure to resolve the
					// source document is reported but must not abort extraction.
					RDFDocument rdfDocument = null;
					try {
						rdfDocument = ResourceFactory.getInstance()
								.createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
					} catch (Throwable e) {
						e.printStackTrace();
					}
					Provenance prov = ProvenanceFactory.getInstance()
							.createProvFromRDFDocument(rdfDocument);

					ArrayList<Constant> constants = new ArrayList<Constant>();
					constants.add(new Constant(subjectResource));
					// Object may be a URI, blank node or literal; datatype id and
					// language tag from the row disambiguate literal kinds.
					constants.add(new Constant(getResourceById(o, datatypeidString, lang)));

					hs.add(new GroundAtom(predicate, constants, prov));
				}
			}
		} catch (SQLException e) {
			e.printStackTrace();
		}

		return hs;
	}
	
	
	/**
	 * Extracts ground atoms for the fully bound pattern (constant, property,
	 * constant): checks whether a quadruple with the given predicate, subject
	 * and object exists, and if so emits a single ground atom carrying the
	 * provenance of the first matching quadruple. Literal objects additionally
	 * constrain the query by language tag (plain literals) or datatype id
	 * (typed literals).
	 *
	 * @param propertyResource the property (predicate) resource being queried
	 * @param queryTerm1       the bound subject term
	 * @param queryTerm2       the bound object term
	 * @return a set with at most one ground atom; empty when no quadruple
	 *         matches or when a SQL error occurs (the error is printed, not
	 *         propagated)
	 */
	private HashSet<GroundAtom> generateGroundAtomsFromOntologyPropertyConstantConstant(
			Resource propertyResource, Constant queryTerm1, Constant queryTerm2) {
		HashSet<GroundAtom> hs = new HashSet<GroundAtom>();

		String p = getIDByResource(propertyResource);
		Predicate predicate = new Predicate(propertyResource, 2);

		Resource subjectResource = queryTerm1.getResource();
		String s = getIDByResource(subjectResource);

		Resource objectResource = queryTerm2.getResource();
		String o = getIDByResource(objectResource);

		// Choose the SQL up front so the statement can live in
		// try-with-resources; the extra literal constraint (param 4) is bound
		// after the shared params below.
		final String sql;
		if (objectResource instanceof PlainLiteral) {
			sql = "select s,doc_id " +
					"from quadruple " +
					"where p = ? and s = ? and o = ? " +
					"		and lang = ?";
		} else if (objectResource instanceof TypedLiteral) {
			sql = "select s,doc_id " +
					"from quadruple " +
					"where p = ? and s = ? and o = ? and datatypeid = ?";
		} else { // URI or BlankNode: no extra constraint
			sql = "select s,doc_id " +
					"from quadruple " +
					"where p = ? and s = ? and o = ?";
		}

		// try-with-resources guarantees the connection, statement and result
		// set are closed on every path; the previous version leaked them
		// whenever an exception escaped before the explicit close calls.
		try (Connection con = DBConnectionFactory.getConnection();
				PreparedStatement pstmt = con.prepareStatement(sql)) {
			pstmt.setString(1, p);
			pstmt.setString(2, s);
			pstmt.setString(3, o);

			if (objectResource instanceof PlainLiteral) {
				pstmt.setString(4, ((PlainLiteral) objectResource).getLanguageTag());
			} else if (objectResource instanceof TypedLiteral) {
				// Best-effort datatype-id lookup; on failure fall back to 0 and
				// report, matching the original behavior.
				int datatypeid = 0;
				try {
					datatypeid = URIIDCache.getInstance().getURIID(
							(((TypedLiteral) objectResource).getDatatypeURI().getURI()), false);
				} catch (Throwable e) {
					e.printStackTrace();
				}
				pstmt.setInt(4, datatypeid);
			}

			try (ResultSet rs = pstmt.executeQuery()) {
				// Both terms are bound, so one hit suffices; provenance comes
				// from the first matching quadruple only.
				if (rs.next()) {
					int doc_id = rs.getInt("doc_id");

					// Best-effort provenance lookup: a failure to resolve the
					// source document is reported but must not abort extraction.
					RDFDocument rdfDocument = null;
					try {
						rdfDocument = ResourceFactory.getInstance()
								.createRDFDocument(URIIDCache.getInstance().getURI(doc_id));
					} catch (Throwable e) {
						e.printStackTrace();
					}
					Provenance prov = ProvenanceFactory.getInstance()
							.createProvFromRDFDocument(rdfDocument);

					ArrayList<Constant> constants = new ArrayList<Constant>();
					constants.add(new Constant(subjectResource));
					constants.add(new Constant(objectResource));

					hs.add(new GroundAtom(predicate, constants, prov));
				}
			}
		} catch (SQLException e) {
			e.printStackTrace();
		}

		return hs;
	}
}