package com.toutiao.store

import java.sql.Timestamp
import java.util.Date

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

import org.apache.log4j.Logger

import org.elasticsearch.action.search.SearchType
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.common.settings.{Settings, ImmutableSettings}
import org.elasticsearch.common.transport.InetSocketTransportAddress
import org.elasticsearch.common.unit.TimeValue
import org.elasticsearch.index.query.FilterBuilders._
import org.elasticsearch.index.query.QueryBuilders._
import org.elasticsearch.search.aggregations.AggregationBuilders.max
import org.elasticsearch.search.aggregations.metrics.max.Max

import net.liftweb.json.{JsonAST, Extraction, Printer, parse}
import net.liftweb.json.JsonDSL._

import com.toutiao.issue.SimpleDate
import com.toutiao.time.RawDocument



/**
 * Read-only accessor over a single Elasticsearch index/type (ES 1.x
 * TransportClient API). Retrieves raw documents in a time window and the
 * newest timestamp present in the index.
 *
 * @param esName      ES cluster name (`cluster.name` setting)
 * @param esUri       transport host of one cluster node
 * @param esPort      transport port (typically 9300 for ES 1.x)
 * @param esIndexName index queried by all methods
 * @param esTypeName  mapping type queried by all methods
 */
class DocumentStore(
      esName: String,
      esUri: String,
      esPort: Int,
      esIndexName: String,
      esTypeName: String
    ) {

  private
  val _log = Logger.getLogger(this.getClass.getName)
  _log.info(s"esName $esName")
  _log.info(s"esUri $esUri")
  _log.info(s"esPort $esPort")
  _log.info(s"esIndex $esIndexName")
  _log.info(s"esType $esTypeName")

  private
  val s = ImmutableSettings.settingsBuilder().
        put("cluster.name", esName).build()

  // Connects eagerly at construction time.
  private
  val ES = new TransportClient(s).addTransportAddress(
        new InetSocketTransportAddress(esUri, esPort))

  // Formats used by lift-json `extract` in retrieve(); was `implicit val _`,
  // which is both unreferenceable and deprecated as an identifier.
  implicit val formats = net.liftweb.json.DefaultFormats

  /**
   * Returns the maximum value of the date field `tt` over the whole
   * index/type, shifted back by 8 hours.
   *
   * NOTE(review): the -8h shift suggests the stored timestamps are UTC+8
   * and callers want UTC — confirm against the indexing pipeline (the
   * original comment flagged this as uncertain). Also note Max.getValue
   * is -Infinity on an empty index, which overflows `toLong`.
   *
   * @param tt name of the date field to aggregate on
   */
  def getMaxTime(tt: String) = {
    val resp = ES.prepareSearch(esIndexName).setTypes(esTypeName)
          .addAggregation(max("maxTime").field(tt)).execute.actionGet
    val agg: Max = resp.getAggregations.get("maxTime")
    val millis = agg.getValue.toLong
    val when = new Date(new Timestamp(millis).getTime)
    SimpleDate.increment(when, -8 * 60) // increment is in minutes
  }

  /**
   * Fetches every document whose "TIME" field falls in [st, et)
   * (lower bound inclusive, upper exclusive), via a scan/scroll query,
   * and returns them sorted by ascending date.
   *
   * Documents whose "KEYWORD" field fails to parse as a JSON
   * Map[String,Double] get an empty keyword map rather than aborting
   * the whole retrieval (best-effort, preserved from the original).
   *
   * @param st window start (inclusive)
   * @param et window end (exclusive)
   * @return RawDocuments sorted by ascending `date`
   */
  def retrieve(st: Date, et: Date) = {
    val timeField = "TIME"
    val docs = new ArrayBuffer[RawDocument]
    // SCAN returns no hits on the initial request; hits arrive only from
    // the scroll calls below (ES 1.x behavior).
    var scrollResp = ES.prepareSearch(esIndexName).
          setTypes(esTypeName).
          setSearchType(SearchType.SCAN).
          setScroll(new TimeValue(60000)).
          setPostFilter(rangeFilter(timeField).
              from(SimpleDate.format(st)).
              to(SimpleDate.format(et)).
              includeLower(true).
              includeUpper(false)).
          setSize(200).
          execute().actionGet()
    var exhausted = false
    while (!exhausted) {
      scrollResp = ES.prepareSearchScroll(scrollResp.getScrollId).
            setScroll(new TimeValue(60000)).
            execute.actionGet
      for (hit <- scrollResp.getHits.hits) {
        val src = hit.getSource
        val name = src.get("NAME").toString
        val t = SimpleDate.parse(src.get("TIME").asInstanceOf[String])
        val title = src.get("CUT_TITLE").asInstanceOf[String]
        val content = src.get("CUT_CONTENT").asInstanceOf[String]
        val keywords =
          try {
            parse(src.get("KEYWORD").asInstanceOf[String]).
                extract[Map[String, Double]].
                map { case (k, w) => (k.replace(":", ""), w) }
          } catch {
            // Was `case _: Throwable`: NonFatal keeps the best-effort
            // fallback but lets OOM / interrupts propagate.
            case NonFatal(_) => Map[String, Double]()
          }
        val url = src.get("URL").asInstanceOf[String]
        docs += new RawDocument(name, title, content, keywords, url, t)
      }
      // An empty scroll page means the cursor is drained.
      if (scrollResp.getHits.hits.length == 0)
        exhausted = true
    }
    docs.toArray.sortBy(_.date.getTime)
  }

  /** Releases the transport client's connections. */
  def close(): Unit = ES.close()

}
