package com.qing.search

import java.io.File
import java.nio.file.Paths

import org.apache.hadoop.conf.Configuration
import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.index.{DirectoryReader, IndexWriter, IndexWriterConfig}
import org.apache.lucene.search.IndexSearcher
import org.apache.lucene.store.FSDirectory
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}
import spark.{Request, Response, Route, Spark}

import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks.break

/**
  * HTTP front-end that executes Lucene searches on a Spark cluster.
  * Each worker node hosts Lucene index directories under `lucene.path`
  * (one directory per node index: path + 0 .. path + (node count - 1)).
  *
  * Created by wuliao on 2017/8/17.
  */
object SearchServer {
  def main(args: Array[String]): Unit = {

    // Make HDFS and local file-system implementations resolvable explicitly
    // (avoids ServiceLoader issues with shaded/assembled jars).
    val hadoopConfig: Configuration = sc.hadoopConfiguration
    hadoopConfig.set("fs.hdfs.impl", classOf[org.apache.hadoop.hdfs.DistributedFileSystem].getName)
    hadoopConfig.set("fs.file.impl", classOf[org.apache.hadoop.fs.LocalFileSystem].getName)

    // Read configuration on the DRIVER. SparkContext is not usable inside
    // task closures (it must never be serialized and shipped to executors),
    // so the closures below capture these plain values instead of `sc`.
    val lucenePath = sc.getConf.get("lucene.path")
    val luceneNodes = sc.getConf.get("lucene.node").toInt

    // Deletes any stale Lucene "lock" file in each node index directory so a
    // fresh reader/writer can be opened on the executor that runs this.
    // Defined as a function value closing over only a String and an Int, so
    // it is safely serializable into task closures.
    val clearLocks: () => Unit = () => {
      for (a <- 0 until luceneNodes) {
        val indexDir = new File(lucenePath + a)
        val lock = new File(lucenePath + a + "/lock")
        if (indexDir.exists() && lock.exists()) {
          lock.delete()
        }
      }
    }

    // POST /search/:page — run a paged Lucene search over all node indexes
    // and return the collected hits plus elapsed time in milliseconds.
    Spark.post("/search/:page", new Route {
      override def handle(request: Request, response: Response): String = {
        val page = request.params(":page")
        val begin = System.currentTimeMillis()
        val map = request.queryMap().toMap
        val rdd = new LucenePageRDD(sc, map, Integer.parseInt(page))
        // First pass over the partitions: clear stale index locks on every
        // executor before the actual search materializes results.
        rdd.mapPartitionsWithIndex((_, it) => {
          clearLocks()
          it
        }).count()

        rdd.collect.toBuffer.toString() + "..." + (System.currentTimeMillis() - begin) + "..."
      }
    })

    // POST /parse/:page — placeholder for QueryParser-syntax searches.
    // Example of the intended syntax: size between 145 and 150, name similar
    // to "javv", creation date in [1, 1334550379954] excluded, and "lucene"
    // within distance 1 of "action":
    //   "size:[145 TO 150] + name:javv~ - date:[1 TO 1334550379954] + \"lucene action\"~1"
    Spark.post("/parse/:page", new Route {
      override def handle(request: Request, response: Response) = {

        ""
      }
    })

    // POST /analysis/ipcount — count every hit matching the query parameters,
    // returning the count plus elapsed time in milliseconds.
    Spark.post("/analysis/ipcount", new Route {
      override def handle(request: Request, response: Response): String = {
        val begin = System.currentTimeMillis()
        val map = request.queryMap().toMap

        // Dummy 30-partition pass: clear stale locks on every executor
        // before the Lucene indexes are opened.
        sc.parallelize(Seq("apark-lucene is a good util"), 30)
          .mapPartitions(it => {
            clearLocks()
            it
          }).count()

        val rdd = new LuceneRDD(sc, map)
          .flatMap(s => {
            println("..." + s.toIterator.length)
            s.iterator
          }).persist(StorageLevel.MEMORY_ONLY)
        val count = rdd.count()

        // Clear locks again so the next request starts from a clean state.
        sc.parallelize(Seq("apark-lucene is a good util"), 30)
          .mapPartitions(it => {
            clearLocks()
            it
          }).count()

        count + "..." + (System.currentTimeMillis() - begin) + "..."
      }
    })


  }

  // Shared SparkContext for the whole server, built once on first access.
  // NOTE(review): master URL, jar path and lucene path are hard-coded —
  // consider moving them to external configuration.
  val sc = {
    //    val hdfsPath = "/Users/wuliao/Desktop/lucene/"
    val hdfsPath = "/mnt/disk/lucene/"
    val conf = new SparkConf()
      .setAppName("hello")
      .set("lucene.node", "10")
      .set("spark.driver.cores", "1")
      .set("lucene.path", hdfsPath)
      .set("spark.driver.memory", "4096m")
      .set("spark.executor.memory", "4096m")
      .setMaster("spark://175.102.18.112:7077")
      .setJars(Array("file:///mnt/disk/jar/spark-lucene-1.0-SNAPSHOT-jar-with-dependencies.jar"))
    //      .setMaster("local")
    new SparkContext(conf)
  }


}
