package org.NooLab.itexx.storage.nodes;

import java.io.File;
import java.io.FileNotFoundException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
 
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;
import org.apache.commons.dbutils.handlers.BeanListHandler;
import org.h2.jdbc.JdbcSQLException;
import org.h2.tools.Server;
import org.math.array.StatisticSample;

import com.iciql.Db;

import org.NooLab.utilities.nums.NumUtils;
import org.NooLab.utilities.datatypes.IndexDistance;
import org.NooLab.utilities.datatypes.ValuePair;
import org.NooLab.utilities.files.DFutils;
import org.NooLab.utilities.logging.PrintLog;
import org.NooLab.utilities.objects.StringedObjects;
import org.NooLab.utilities.strings.ArrUtilities;



import org.NooLab.astor.storage.iciql.NodeContent;
import org.NooLab.astor.storage.iciql.NodeContentX;
 

import org.NooLab.itexx.storage.ConnectorClientIntf;
import org.NooLab.itexx.storage.DataBaseAccessDefinitionIntf;
import org.NooLab.itexx.storage.DataBaseMaintenance;
import org.NooLab.itexx.storage.DatabaseMgmt;
import org.NooLab.itexx.storage.DbConnector;
import org.NooLab.itexx.storage.DbLogin;
import org.NooLab.itexx.storage.MetaData;



 

/**
 * Database access layer for the node contents of a SOM;
 * instances are created by SomNodesDataConverter.
 */
public class AstorNodesDataBase implements ConnectorClientIntf{

	
	SomTexxPropertiesIntf stxProperties;
	DataBaseAccessDefinitionIntf dba;
	
	String cfgResourceJarPath = "";
	// in factory, it is set to persistence settings, from where we fetch it here
	// "org/NooLab/texx/resources/sql/" ; // trailing / needed !!   
	
 
	private boolean dbFileExists;
	private boolean isOpen;
	boolean dbRecreated = false;
	
	 
	String databaseName = "";
	String configFile = "";
	String storageDir = "" , h2Dir=""; 
	int _DB_TARGET_LOCATING=0;
	
	String databaseUrl="";

	String accessMode = "tcp"; // "http" = external server via 8082, "file" 
	Server server;
	Connection connection;
	Db iciDb;
	DatabaseMetaData jdbMetaData ;
	String dbCatalog ;
	MetaData metaData ; 
	
	DbConnector dbConnector ;
	DataBaseHandler dbHandler ;

	DataBaseMaintenance dataBaseBasics;
	
	protected String internalCfgStoreName;
	

	String user = "";
	String password = "" ;
	
	String databaseFile="";
	
	
	StatisticSample sampler ;
	DFutils fileutil = new DFutils();
	PrintLog out = new PrintLog(2,false);
	StringedObjects strObj = new StringedObjects();
 
	 
	
	// ========================================================================
	public AstorNodesDataBase( SomTexxPropertiesIntf stxProps ) throws Exception{
		
		// TODO : storageDir = "" , h2Dir=""; must be provided 
		
		// sth like "rg-fingerprints";
		// databaseName =  ps.getDatabaseName();
		
		stxProperties = stxProps;
		 
		
		dba = stxProperties.getDatabaseAccessSettings() ;
		
		user = dba.getDbUser() ;          // the database name in dba is astordocs, which does not really match :( 
		password = dba.getDbpassword() ;
		
		databaseName = stxProperties.getNodesSomDataBaseName();
		storageDir = stxProperties.getStorageDir() ;
		h2Dir = storageDir;
		
		//String dbfile = DFutils.createPath(storageDir, databaseName+".h2.db") ;
		
		dbHandler = new DataBaseHandler(this) ; 
		dbConnector = new DbConnector( (ConnectorClientIntf)this);
		connect( databaseName , storageDir );
		open(connection);
	
		sampler = new StatisticSample(172838);
		
		dataBaseBasics = new DataBaseMaintenance( (ConnectorClientIntf)this );
		
		out = dataBaseBasics.getOut() ;
		out.setPrefix("[ASTOR-DB]");
	}
 
	// ========================================================================
	
	public boolean dbFileExists(){
		return dbFileExists;
	}
	
	public boolean isOpen(){
		return isOpen;
	}
	
	
	/**
	 * Connects to (and, if necessary, prepares) the given target database.
	 *
	 * If a connection already exists, the method returns true immediately.
	 * Otherwise stale lock files are removed, the connection is established
	 * and the handler / metadata infrastructure is rebuilt.
	 *
	 * @param targetDB             name of the database to connect to
	 * @param keepOpenAfterPrepare &gt;=1 keeps the connection open on return,
	 *                             0 closes it after preparation
	 * @return true if the database file could be located / created
	 */
	public boolean prepareDatabase( String targetDB, int keepOpenAfterPrepare ) throws Exception {
		boolean rB=false;
		 
		try {
			// remove any lock, but only while there is no live connection yet;
			// removeLock() is expected to cope with a null connection argument
			if (connection==null){
				dataBaseBasics.removeLock(connection , storageDir);
				dataBaseBasics.removeLock(connection , getRelocatedH2Dir(storageDir) );
			}else{
				// already connected -> nothing to prepare
				return true;
			}
			
			databaseName = targetDB;
		
			connect( databaseName , storageDir );
			open(connection);
			
			int servermodeFlag=1;
			
			// outside of the "itexx" context the h2 directory may be relocated
			if (stxProperties.getContext().contentEquals("itexx") == false){
				getRelocatedH2Dir(storageDir) ;
			}
			databaseFile = connect( databaseName, h2Dir, servermodeFlag ) ;
			
			rB = databaseFile.length()>0;
			
			if (rB){
				
				dbHandler = new DataBaseHandler( this ) ;
		
				{
					metaData = dbHandler.getMetadata();
					metaData.retrieveMetaData();
					
					String str = ArrUtilities.arr2Text( metaData.getTableNames(1),";");

					out.printErr(2, "Tables : "+str) ;
				}
				
			}
			
			// bug fix: close only when the caller did NOT ask to keep the
			// connection open — the previous condition (>=1) was inverted and
			// closed the connection exactly when callers wanted it kept open
			if ((keepOpenAfterPrepare<1) && (connection!=null) && (connection.isClosed()==false)){
				connection.close() ;
			}
			
		} catch (FileNotFoundException e) {
			rB=false;
			e.printStackTrace();
		}
		 
		return rB;
	}

	//             e.g. resourceName = "create-db-sql-xml" ;
	
	/**
	 * Makes sure the cached h2Dir points at a usable storage location and
	 * returns it. The directory is (re)derived when it is empty, missing on
	 * disk, or still identical to the raw storage directory.
	 *
	 * @param storageDir base directory used to derive the h2 location
	 * @return the (possibly relocated) h2 storage directory
	 */
	public String getRelocatedH2Dir(String storageDir){
		
		try {
			boolean needsRelocation =
					h2Dir.isEmpty()
					|| (DFutils.folderExists(h2Dir)==false)
					|| h2Dir.contentEquals(storageDir);
			
			if (needsRelocation){
				String baseDir = DatabaseMgmt.setH2BaseDir(storageDir, _DB_TARGET_LOCATING);
				h2Dir = DFutils.createPath(baseDir, "storage/");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		
		return h2Dir;
	}

	 
	
	/**
	 * Disconnects from the database, clears the cached JDBC metadata and
	 * stops the embedded H2 server if one was started by this instance.
	 */
	public void close() {
		
		try{

			if (dataBaseBasics!=null){
				dataBaseBasics.disconnect(connection);
			}
			
			jdbMetaData = null;
			dbCatalog = "";
			
			if (server != null){
				server.stop() ;
			}
		
		}catch(Exception e){
			// bug fix: failures were silently swallowed before; report them
			out.printErr(2, "close() raised: " + e.getMessage());
		}
		
	}


	/** Caches JDBC metadata and the catalog name from the given connection. */
	protected void getJdbMeta(Connection c) throws SQLException{
		this.jdbMetaData = c.getMetaData();
		this.dbCatalog = c.getCatalog();
	}

	/** Injects externally obtained JDBC metadata. */
	public void setDbMetaData(DatabaseMetaData dbMetaData) {
		this.jdbMetaData = dbMetaData;
	}

	/**
	 * Refreshes the cached JDBC metadata and the handler's view of the schema.
	 *
	 * @param c the live connection to read metadata from
	 * @throws IllegalStateException if the database handler was never created
	 */
	public void updateInfraStructure(Connection c ) throws Exception{

		getJdbMeta(c);
		
		if (dbHandler==null){
			out.printErr(1, "Database has not been properly instantiated.");
			// bug fix: execution previously continued past the error message
			// and died with a NullPointerException on the next line
			throw new IllegalStateException("Database has not been properly instantiated (dbHandler==null).");
		}
		dbHandler.setiDb( iciDb ) ;

		// creates a new instance for new MetaData and retrieves the latest info again
		dbHandler.updateMetaData(c) ; 
	}
	
	
	/**
	 * Opens the iciql layer on the given connection; when the cached
	 * connection is missing or closed, a new one is established first.
	 * On failure a single retry via prepareDatabase() is attempted.
	 *
	 * @param c connection to open the iciql Db on; replaced by the freshly
	 *          created field connection when null/closed
	 * @throws Exception if the retry path also fails
	 */
	public void open(Connection c) throws Exception{
		
		try{
			
			if ((connection==null) || (connection.isClosed())){
				
				connect(this.databaseName, getRelocatedH2Dir(storageDir));
				c=connection;  // continue with the freshly established connection
			}
			iciDb = Db.open(c);
			
		}catch(Exception e){
			e.printStackTrace() ;
			out.printErr(1, "\n\nretry...");
			
			// force a full re-preparation of the database, then try once more
			connection=null;
			prepareDatabase(databaseName,1) ;
			
			iciDb = Db.open(connection);
			getJdbMeta(connection);
		}
		// NOTE(review): in the retry path c may still reference the old
		// (failed) connection while the field holds the new one — verify
		updateInfraStructure(c);
	}

	/**
	 * Opens the iciql layer via the tcp jdbc url, building and caching the
	 * url on first use, and refreshes the metadata infrastructure.
	 */
	public void open() throws Exception{
		
		if (databaseUrl.isEmpty()){
			
			String dir = getRelocatedH2Dir(storageDir);
			
			// avoid a double slash in the url
			if (dir.endsWith("/")){
				dir = dir.substring(0, dir.length()-1);
			}
			
			databaseUrl = "jdbc:h2:tcp://localhost/" + dir + "/" + databaseName; // +";AUTO_SERVER=TRUE" ;
		}
		
		iciDb = Db.open(databaseUrl, user, password);
		connection = iciDb.getConnection();
		
		updateInfraStructure(connection);
	}

	  	
	/**
	 * Closes the current connection (if any) and recreates handler and
	 * connector so that a fresh connection can be established later.
	 */
	public void resetConnection() {
		
		if (connection != null){
			try{
				
				connection.close();
				
				dbHandler = new DataBaseHandler(this) ; 

				connection = null;
				dbConnector = new DbConnector( (ConnectorClientIntf)this);
			
			}catch(Exception e){
				// bug fix: was an empty catch — at least report what went wrong
				out.printErr(2, "resetConnection() failed: " + e.getMessage());
			}
		}
		
	}

	/**
	 * Clears stale H2 lock files and then prepares / starts the database
	 * matching the given name pattern, keeping the connection open.
	 */
	public void startServer( String dbNamePattern ) {
		
		final String relocatedDir = getRelocatedH2Dir(storageDir);
		dataBaseBasics.removeLock(connection, storageDir);
		dataBaseBasics.removeLock(connection, relocatedDir);

		try {
			prepareDatabase( dbNamePattern, 1 );
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/*
SELECT * FROM INFORMATION_SCHEMA.ROLES ;
SELECT * FROM INFORMATION_SCHEMA.RIGHTS ;
SELECT * FROM INFORMATION_SCHEMA.USERS ;
SELECT * FROM INFORMATION_SCHEMA.TABLE_PRIVILEGES ;
	 */
	/**
	 * Connects to a local H2 tcp server under the user's home directory and
	 * recreates the handler — and then ALWAYS throws, signalling that this
	 * db mode is not supported here. Apparently kept for reference/debugging.
	 *
	 * @throws Exception always (after the connection attempt)
	 */
	public Connection getWireLessDatabaseServer(String user, String password) throws Exception{
		
		String url;
		
		
		 
		try {
			// "jdbc:h2:"+
			url = 	"jdbc:h2:tcp://localhost/~/"+databaseName ;
			
			DbLogin login = new DbLogin(user,password) ; // may contain a pool of users
			connection = dbConnector.getConnection(url, login);

			
			dbHandler = new DataBaseHandler( this ) ;
			
		}catch(Exception e){
			e.printStackTrace() ;
		}
		
		// deliberate: reaching this point is treated as a configuration error
		throw(new Exception("wrong db mode in getWireLessDatabaseServer() !!!!!!!!!!!"));
		// return c;
	}
	
	/**
	 * Starts an H2 tcp server on port 8052, creating the database file
	 * first via a throw-away connection if it does not exist yet.
	 *
	 * @param dbname     database name (without ".h2.db" suffix)
	 * @param storageDir directory where the database file lives
	 * @return 0 if the server is running, 1/2 for partial progress,
	 *         negative values on failure
	 */
	public int createServer(String dbname, String storageDir){
		

		// 
		// start the TCP Server
		String url;
		int r=-1;
		
		try {
			// "jdbc:h2:"+
			url = 	storageDir+"/"+dbname+".h2.db"; // +";AUTO_SERVER=TRUE" ;
			url = url.replace("//", "/") ;

			// create the database file by connecting once, then drop the connection
			if (DFutils.fileExists(url)==false){
				dbRecreated = true;
				connect(dbname, storageDir);
				if (connection!=null){
					connection.close();
				}else{
					throw(new Exception("It was not possible to create a new database, or to connect to the new database\n"+
							            "           Also check the installation of H2, or whether its serive is running at all.\n"+
							            "           Requested name was <"+dbname+">, "+
							            "           requested folder was "+storageDir+"\n\n")) ;
				}
				connection = null ;
				r=1;
			}

			String[] args = new String[]{"-tcpAllowOthers","-tcpPort","8052"};

			server = Server.createTcpServer( args );
			// this starts the console in the browser Server.main(args);
			server.start() ;
			
			r=2; 
			
			int port = server.getPort();
			// Service srvc = server.getService();
			// boolean allowOthers = srvc.getAllowOthers() ;
			
			if (server.isRunning(true)){
				out.print(2, "H2 server is running on port "+port) ;
				r=0;
			}
			 
			
		} catch (Exception e) {
			r = -7;
			out.printlnErr(1, "Potential Problem met in createServer(): "+ e.getMessage());
		}
		//createTcpServer(args).start();

		// stop the TCP Server
		// server.stop();


		return r;
	}

	
	/**
	 * Convenience overload of connect(String, String, int) with serverMode 0.
	 */
	public String connect( String dbname, String filepath) throws FileNotFoundException{
		return connect( dbname, filepath, 0);
	}
	/**
	 * Establishes a connection to the named H2 database via tcp and returns
	 * the path of the database file, or "" on failure.
	 *
	 * @param dbname     database name (without ".h2.db" suffix)
	 * @param filepath   directory expected to contain the database file
	 * @param serverMode currently unused — TODO confirm intended semantics
	 * @return path of the database file, or the empty string if the
	 *         connection attempt failed
	 * @throws FileNotFoundException declared, but see the NOTE below — the
	 *         throw site is swallowed by the generic catch
	 */
	@SuppressWarnings({ "unused", "rawtypes" })
	public String connect( String dbname, String filepath, int serverMode) throws FileNotFoundException{
		
		String dbfile ="";
		int err=0;
		
		
		
		try{
 			
			// already connected? then just report the file location
			if ( ((connection!=null) && (connection.isClosed()==false))){
				dbfile = DFutils.createPath(filepath,dbname+".h2.db") ;
				if (DFutils.fileExists(dbfile)){
					return dbfile ;
				}
			}

			
			// String docoservUrl = connection.
			// "jdbc:h2:tcp://localhost/~/docoserv"
			// :nio
			
			String _h2Dir = getRelocatedH2Dir(storageDir);
			
			// the url must not contain a trailing slash before the db name
			if (_h2Dir.endsWith("/")){
				_h2Dir = _h2Dir.substring(0,_h2Dir.length()-1);
			}

			String url = 	"jdbc:h2:tcp://localhost/"+_h2Dir+"/"+dbname; // +";AUTO_SERVER=TRUE" ;
			
						
			databaseUrl = "";
				        /* further url options, kept for reference:
				            ";MODE=MYSQL"
		    				";FILE_LOCK=FS" +
		    				";PAGE_SIZE=1024" +
		    				";CACHE_SIZE=8192";
						    DB_CLOSE_DELAY=10
				        */
		
			// registers the H2 jdbc driver as a side effect of class loading
			Class h2Driver = Class.forName("org.h2.Driver");
	        
			dbfile = DFutils.createPath(filepath,dbname+".h2.db") ;
			
			if (connection==null){
				// fall back to the default sa/sa credentials if none are configured
				if ((user==null) || (user.length()==0)){
					user="sa"; password="sa";
				}
				
				DbLogin login = new DbLogin(user,password) ; // may contain a pool of users
				connection = dbConnector.getConnection(url, login);
				
			}
			
			
			if (connection != null){
				// no-op: placeholder, presumably for a connection health check
			}
			
			File fil = new File(dbfile);
			if (fil.exists()){
				if (connection.isClosed()==false){
					databaseUrl = url;
				}
				
			}
			 
			// CALL DATABASE_PATH(); 
			
			// NOTE(review): this exception is caught by the generic
			// catch(Exception) below and therefore never propagates — verify intent
			if (fil.exists()==false){
				throw new FileNotFoundException("\nDatabase file not found (err="+err+")\n"+
											    "expected directory : " + filepath+"\n"+
											    "expected db file   : " + dbname+".h2.db\n");
			}

			out.print(1,"database <"+dbname+"> has been started ...");
			out.print(3,"...its connection url is : "+ databaseUrl) ;

			
		}catch(JdbcSQLException jx){
			
			// typically a stale lock: report, remove the lock file and give up
			System.err.println("Connecting to database <"+dbname+"> failed \n"+jx.getMessage() );
			String lockfile = filepath+"/"+dbname+".lock.db";
			File fil = new File(lockfile);
			fil.deleteOnExit();
			fil.delete();
			
			dbfile = "";
			
		}catch( SQLException sx){
			System.err.println ("Cannot connect to database server");
			dbfile = "";
			sx.printStackTrace();
			
		}catch( ClassNotFoundException e){
			// H2 driver class not on the classpath
			dbfile = "";
			e.printStackTrace();
		}catch( Exception e){
			e.printStackTrace();
		} 
		
		 
		return dbfile;
	}


	/**
	 * Ensures that both the JDBC connection and the iciql layer are
	 * available, re-establishing either one when necessary.
	 */
	public void checkForDbAvailability() { 
		
		try {
			
			boolean noLiveConnection = (connection==null) || (connection.isClosed());
			
			if (noLiveConnection){
				connect( databaseName, getRelocatedH2Dir(storageDir)) ;
			}

			if (iciDb==null){
				open( connection );
			}
			
		} catch (Exception e) {
			// identical handling for FileNotFoundException, SQLException and the rest
			e.printStackTrace();
		}
		
	}

	/** @return the live JDBC connection (may be null or closed). */
	public Connection getConnection(){
		return this.connection;
	}

	/** @return the name of the currently targeted database. */
	public String getDatabaseName() {
		return this.databaseName;
	}

	/** Sets the name of the database to target. */
	public void setDatabaseName(String databaseName) {
		this.databaseName = databaseName;
	}

	/** @return the jdbc url of the database; "" if not yet determined. */
	public String getDatabaseUrl() {
		return this.databaseUrl;
	}

	/** Sets the name of the internal configuration store. */
	public void setInternalCfgStoreName(String fname) {
		this.internalCfgStoreName = fname;
	}

	/** @return the jar-internal resource path for sql configuration. */
	public String getCfgResourceJarPath() {
		return this.cfgResourceJarPath;
	}

	/** Sets the jar-internal resource path for sql configuration. */
	public void setCfgResourceJarPath(String cfgResourceJarPath) {
		this.cfgResourceJarPath = cfgResourceJarPath;
	}

	/** @return the iciql database facade. */
	public Db getIciDb() {
		return this.iciDb;
	}

	/** @return the cached JDBC metadata. */
	public DatabaseMetaData getDbMetaData() {
		return this.jdbMetaData;
	}

	/** @return the JDBC catalog name. */
	public String getDbCatalog() {
		return this.dbCatalog;
	}

	/** @return the database handler. */
	public DataBaseHandler getDbHandler() {
		return this.dbHandler;
	}

	/** @return the resolved path of the database file. */
	public String getDatabaseFile() {
		return this.databaseFile;
	}

	/** @return the logger used by this instance. */
	public PrintLog getOut() {
		return this.out;
	}
  
	/**
	 * Inserts a node-content record (somid, nodeid, docid, contextid)
	 * unless an identical record already exists.
	 *
	 * @return the database key of the inserted record, the id of the
	 *         already existing record, or -1 on failure
	 */
	@SuppressWarnings("unused")
	public long insertUpdateNodeContent(long somID, long nodeNumGuid, long docid, long contextId, long fpindex) {
		 
		long dbKey=-1L;
		long _max = 0L;
		
		NodeContent nc = new NodeContent();
		List<NodeContent> ncs;
		
		try{
			
			if (iciDb.getConnection().isClosed()){
				iciDb = dataBaseBasics.iciOpenTolerant( databaseUrl, user, password);
			}	
			
			// is it already contained?
			ncs = iciDb.from(nc).where(nc.somid).is(somID )
			                    .and(nc.nodeid).is(nodeNumGuid)
			                    .and(nc.docid).is(docid)
			                    .and(nc.contextid).is(contextId)
			                    .select() ;
			
			
			if ((ncs!=null) && (ncs.size()>0)){
				// bug fix: report the key of the existing record;
				// previously -1 was returned even though the record was present
				nc = ncs.get(0);
				dbKey = nc.id;
			}else{
				// not known yet.... determine the next id ourselves
				try{
					List<Object> indexes = iciDb.from(nc).select(nc.id) ;
					
					if (indexes.size()==0){
						_max=1L;
					}else{
						// bug fix: scan for the true maximum; the previous code
						// used the LAST list element, but the select order is
						// not guaranteed, which risked duplicate ids
						long maxId = 0L;
						for (Object obj : indexes){
							long v = (Long)obj;
							if (v>maxId){ maxId = v; }
						}
						_max = maxId + 1L;
					}
					
				}catch(Exception e){
					// best effort, matching the previous behavior: continue with _max as is
					out.printErr(2, "insertUpdateNodeContent(): id scan failed: "+e.getMessage());
				}
				
				nc.id = _max;
				// now the data
				nc.somid = somID  ;
				nc.nodeid = nodeNumGuid  ;
				nc.docid = docid ;
				nc.contextid = contextId ;
				
				dbKey = iciDb.insertAndGetKey(nc);
			}
			
		}catch(Exception e){
			e.printStackTrace();
		}
		 
		return dbKey;
	}
	
	

	/**
	 * Returns the index of the first entry that ends with the given snippet
	 * AND contains an underscore; -1 if no such entry exists.
	 *
	 * @param strs    list of column header strings (cast to String per entry)
	 * @param endsnip required suffix of the column name
	 */
	private int getFirstValueColumn(ArrayList<?> strs, String endsnip) {
		
		final int n = strs.size();
		
		for (int ix = 0; ix < n; ix++) {
			String candidate = (String) strs.get(ix);
			if (candidate.endsWith(endsnip) && candidate.contains("_")) {
				return ix;
			}
		}
		
		return -1;
	}

	

	/**
	 * Collects the indexes of all non-string-typed columns whose header
	 * contains one of the given indicator substrings.
	 *
	 * NOTE(review): an index can be added more than once when several
	 * indicators match the same column — presumably tolerated downstream.
	 *
	 * @param strs       column header strings
	 * @param numvalues  sample values, used only to probe the column type
	 * @param indicators substrings marking index-like columns
	 */
	private ArrayList<Integer> getIndexColumns(ArrayList<?> strs, ArrayList<?> numvalues, String[] indicators) {
		
		ArrayList<Integer> ixcols = new ArrayList<Integer>();
		
		for (String indicator : indicators) {
			
			for (int i = 0; i < strs.size(); i++) {
				
				// derive a lowercase simple type name, e.g. "long", "double", "string"
				String typeName = numvalues.get(i).getClass().getName()
						.replace("java.lang.", "").toLowerCase();
				
				String header = (String) strs.get(i);
				
				if ((typeName.contains("string") == false) && header.contains(indicator)) {
					ixcols.add(i);
				}
				
			}// i->
		} // indicator->
		
		return ixcols;
	}
	
	
	/**
	 * Returns the distinct documents of a SOM whose node coverage reaches
	 * the requested minimal abundance.
	 *
	 * @param somid        the SOM to inspect
	 * @param minAbundance minimal number of distinct nodes a document must appear in
	 * @return distinct doc ids, ordered by descending abundance
	 */
	protected ArrayList<Long> getListOfDocs(long somid, int minAbundance){
		
		ArrayList<Long> doclist = new ArrayList<Long>();
		NodeContentX nc = new NodeContentX();
		String sqlstr="";
		
		
		try{
			
			if (iciDb.getConnection().isClosed()){
				iciDb = dataBaseBasics.iciOpenTolerant( databaseUrl, user, password);
				connection = iciDb.getConnection() ;
			}	
			
			// bug fix: the abundance threshold was hard-coded to 5; the
			// minAbundance parameter was silently ignored before
			sqlstr = "SELECT * from (SELECT somid,docid,count(distinct nodeid) AS cc "+  
		                            " from NODECONTENT group by somid,docid) "+ 
		                            " where (cc>="+minAbundance+") and (somid="+somid+") order by cc desc;" ; 
			
			Long sid ;
			QueryRunner run = new QueryRunner();
			
			// BeanListHandler (DbUtils) converts all ResultSet rows into NodeContentX beans
			ResultSetHandler<List<NodeContentX>> ncs ;
			ncs = new BeanListHandler<NodeContentX>(NodeContentX.class);

			// works with any valid SQL statement, not just "select *"
			List<NodeContentX> nodecs = run.query( connection,sqlstr, ncs);
			
			for (int i=0;i<nodecs.size();i++){
				nc = nodecs.get(i);
			
				if (nc!=null){
					sid = (Long)nc.docid ;
					if (doclist.indexOf(sid)<0){
						doclist.add(sid) ;
					}
				}
			}
			

		}catch(Exception e){
			e.printStackTrace();
		}
		
		return doclist;
	}

	
	/**
	 * Returns the distinct node ids of a SOM.
	 *
	 * @param somid     the SOM to inspect
	 * @param tablename table to query; falls back to "NODECONTENT" when
	 *                  null or empty
	 * @return distinct node ids, ordered by node id
	 */
	protected ArrayList<Long> getListOfNodes(long somid, String tablename){

		ArrayList<Long> nodelist = new ArrayList<Long>();
		
		// list of distinct nodes in the table:
		// SELECT DISTINCT nodeid from NODECONTENT order by nodeid
		
		NodeContentX nc = new NodeContentX();
		String sqlstr="";
		
		
		try{

			if (iciDb.getConnection().isClosed()){
				iciDb = dataBaseBasics.iciOpenTolerant( databaseUrl, user, password);
				connection = iciDb.getConnection() ;
				
			}
			if (connection.isClosed()){
				open(connection);
			}
			
			// bug fix: the tablename parameter was silently ignored before;
			// it is honored now, keeping NODECONTENT as the default
			String table = ((tablename==null) || (tablename.trim().length()==0)) ? "NODECONTENT" : tablename.trim();
			
			// cc is part of the extended class NodeContentX 
			sqlstr = "SELECT somid,nodeid, count(nodeid) as cc from "+table+" where somid="+somid+" group by nodeid,somid order by nodeid ;" ;
			// it is easy to add the docid as further constraint
			
			Long sid ;
			QueryRunner run = new QueryRunner();
			
			// BeanListHandler (DbUtils) converts all ResultSet rows into NodeContentX beans
			ResultSetHandler<List<NodeContentX>> ncs ;
			ncs = new BeanListHandler<NodeContentX>(NodeContentX.class);

			List<NodeContentX> nodecs = run.query( connection,sqlstr, ncs);
			
			for (int i=0;i<nodecs.size();i++){
				nc = nodecs.get(i);
			
				if (nc!=null){
					sid = (Long)nc.nodeid ;
					if (nodelist.indexOf(sid)<0){
						nodelist.add(sid) ;
					}
				}
			}
			

		}catch(Exception e){
			e.printStackTrace();
		}
		
		return nodelist;
	}

	/*
	 * 
	 */
	/**
	 * For a given document, determines its frequency in all affected nodes.
	 * 
	 * Note that the resulting list most likely does NOT contain all available
	 * nodes of the SOM.
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public DocNodesDescription getRawHistoList(long somId, long docid, ArrayList<Long> globalNodeList) {
		
		// ValuePairs adHocHisto = new ValuePairs();
		DocNodesDescription docnodedescr = new DocNodesDescription();
		
		String sqlstr="";
		double v;
		long cc,nid ;
		
		NodeContentX nc = new NodeContentX(), nt = new NodeContentX();
		List<NodeContentX> nodecs , nodedn, nodetn;
		ValuePair vp;
		QueryRunner run ;
		
		
		try{

			// re-establish iciql layer / connection if either one went away
			if (iciDb.getConnection().isClosed()){
				iciDb = dataBaseBasics.iciOpenTolerant( databaseUrl, user, password);
				connection = iciDb.getConnection() ;
				
			}
			if (connection.isClosed()){
				open(connection);
			}
			 	
			
			// histogram query: per-node count of the given doc in the given SOM;
			// cc is part of the extended class NodeContentX 
			sqlstr = "SELECT docid, nodeid,COUNT(nodeid) AS cc FROM NODECONTENT where somid="+somId+" and docid="+docid+" GROUP BY nodeid,docid order by docid, nodeid ;" ;
			// it is easy to add the docid as further constraint

			
			
			run = new QueryRunner();
			
			// Use the BeanListHandler implementation of DbUtils to convert all
			// ResultSet rows into a List of NodeContentX JavaBeans.
			
			ResultSetHandler<List<NodeContentX>> ncs ;
			ncs = new BeanListHandler<NodeContentX>(NodeContentX.class);

			
			// Execute the SQL statement and return the results in a List of NodeContent objects generated by the BeanListHandler.
			nodecs = run.query( connection,sqlstr, ncs);
			// this is almost the same procedure as with iciQl... and it works with any valid SQL statement, not just the "select *" !!!

			/* example result row:
			DOCID  	NODEID  	        CC  
			491	    100102531015355101	8
			*/
			for (int i=0;i<nodecs.size();i++){
				nc = nodecs.get(i);
			
				if (nc!=null){
					cc  = nc.getCc() ;
					nid = (Long)nc.nodeid ;
					 
					vp = new ValuePair(nid,cc) ;
					docnodedescr.histogram.add(vp) ;
				}
			}
			
			
			
			// number of distinct documents for a particular node or for all nodes (without nodeid constraint)
			
			sqlstr = "SELECT somid,nodeid,count(distinct docid) AS cc "+  
			                               " from NODECONTENT  where somid = "+somId+" "+  // 509897495454545553
			                                                  // and  nodeid=100102531015355101  
			                               " group by somid,nodeid order by nodeid; ";
			/* example result:
			  		SOMID  				NODEID  			CC  
					509897495454545553	100102531015355101	4
					509897495454545553	100494898101995356	3
					509897495454545553	100495551561029854	1
					509897495454545553	100565010151991014	2
			 */
			
			run = new QueryRunner();
			nodedn = run.query( connection,sqlstr, ncs);

			
			// total number of documents across all nodes of a given SOM
			
			sqlstr = "SELECT somid,nodeid,count(docid) AS cc "+  
			                        " from NODECONTENT  where somid = "+somId+" "+ // 509897495454545553  
			                        //    and  nodeid=100102531015355101  
			                        "group by somid,nodeid order by nodeid;";

			run = new QueryRunner();
			nodetn = run.query( connection,sqlstr, ncs);
			 
			/* example result:
			 
			 		SOMID  				NODEID  			CC  
					509897495454545553	100102531015355101	33
					509897495454545553	100494898101995356	40
					509897495454545553	100495551561029854	1
					509897495454545553	100565010151991014	2 
			 */
			
			
			/*
			 * now we have 5 values
			 *       - prevalence of the doc in a node = from the histogram = 20 [...]
			 *       - size of the node = number of entries in a node       = 33 [...]
			 *       - number of different documents in a node              = 4  [...]
			 *       - size of the histogram                                = 62 
			 *       - total number of nodes                                = 78
			 *       
			 * from that we can fill the following fields = calculate the following descriptives

			 *       - nodesizetotal     33
			 *       
			 *       - docnvariety       avg of the number of different docs in the nodes where we found the doc of the record ; 
			 *       - docnvarivar		 variance of that number
			 *       
			 *       - docpvariety		 avg of the proportion the doc of the record takes in the nodes where it is found;
			 *       - docpvarivar		 variance of that number 
			 *       
			 *       - docabundance		 describes the probability to find this doc in a particular node, normalized by its length and the size of the SOM
			 *       
			 *       - histogramsize	
			 *       
			 */

			double 	sum=0.0, qsum = 0.0, 
					docnvarivar=0.0,docpvarivar=0.0,docabundance=0.0,
					docnvariety = -1.0, docpvariety = -1.0 ;
			
			// avg of the number of different docs in the nodes where we found the doc of the record ;
			for (int i=0;i<nodedn.size();i++){
				nc = nodedn.get(i);  // this returns only nodes where the count >0
				v = (double)nc.getCc() ;
				sum = sum + v;
				qsum = qsum + (v*v);
			}// i-> nodedn
			
			if (nodedn.size()>0){
				docnvariety = sum/((double)nodedn.size());
				docnvarivar = NumUtils.lazyVariance(sum,qsum, nodedn.size());
			}
			
			
			// avg of the proportion the doc of the record takes in the nodes where it is found;
			// NOTE(review): assumes nodedn and nodetn have identical length and
			// node ordering (both are grouped/ordered by nodeid) — verify
			sum=0.0; qsum = 0.0;
			for (int i=0;i<nodedn.size();i++){
				nc = nodedn.get(i);
				nt = nodetn.get(i);
				v = (double)nt.getCc();
				if (v!=0){
					v = (double)nc.getCc()/v ;
					sum = sum + v;
					qsum = qsum + (v*v);
				}
			}// i-> nodedn
			
			if (nodedn.size()>0){
				docpvariety = sum/((double)nodedn.size());
				docpvarivar = NumUtils.lazyVariance(sum,qsum, nodedn.size());
			}
			

			//describes the probability to find this doc upon a selection of size n=1 from an arbitrary node
			// NOTE(review): yields NaN/Infinity when globalNodeList is empty — verify callers
			docabundance = (double)nodecs.size()/(double)globalNodeList.size() * docpvariety;
			
			
			// recording....
			docnodedescr.docsomStats.add( new IndexDistance( 0, (double)1.0*globalNodeList.size() , "nodesizetotal") );
			docnodedescr.docsomStats.add( new IndexDistance( 1, docnvariety, "docnvariety") );
			docnodedescr.docsomStats.add( new IndexDistance( 2, docnvarivar, "docnvarivar") );
			docnodedescr.docsomStats.add( new IndexDistance( 3, docpvariety, "docpvariety") );
			docnodedescr.docsomStats.add( new IndexDistance( 4, docpvarivar, "docpvarivar") );
			docnodedescr.docsomStats.add( new IndexDistance( 5, docabundance, "docabundance") );
			docnodedescr.docsomStats.add( new IndexDistance( 6, (double)nodecs.size(), "histogramsize") );
			
		}catch(Exception e){
			e.printStackTrace();
		}finally{
			try {
				
			} catch (Exception e) {
			}
			
		}
		 
		return docnodedescr;
	}

}











