import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat
import org.apache.spark.{SparkConf, SparkContext}
import org.rocksdb.{Options, RocksDB}
object RocksDBSparkExample {

  /**
   * Lexicographic, unsigned byte-wise comparison of two keys — the default
   * RocksDB (BytewiseComparator) key order. Needed because `Array[Byte]`
   * has no `compareTo` in Scala/Java.
   *
   * @return negative / zero / positive as `a` sorts before / equal to / after `b`
   */
  private def compareKeys(a: Array[Byte], b: Array[Byte]): Int = {
    val len = math.min(a.length, b.length)
    var i = 0
    var diff = 0
    while (diff == 0 && i < len) {
      diff = (a(i) & 0xff) - (b(i) & 0xff)
      i += 1
    }
    if (diff != 0) diff else a.length - b.length
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RocksDB Example")
    val sc = new SparkContext(conf)

    // Input paths. Range bounds may be overridden from the command line:
    //   args(0) = start key, args(1) = end key (inclusive).
    val inputPath   = "/path/to/rocksdb/data"
    val rocksDbPath = "/path/to/rocksdb"
    val startKey = args.lift(0).getOrElse("key1").getBytes("UTF-8")
    val endKey   = args.lift(1).getOrElse("key9").getBytes("UTF-8")

    // Load the RocksDB data file from HDFS as lines of text.
    // NOTE: newAPIHadoopFile requires the *new* Hadoop API input format
    // (mapreduce.lib.input.TextInputFormat), not the old mapred one.
    val rocksdbRDD = sc
      .newAPIHadoopFile[LongWritable, Text, org.apache.hadoop.mapreduce.lib.input.TextInputFormat](inputPath)
      .map(_._2.toString)

    // ---- Point reads ---------------------------------------------------
    // Open the DB once per partition (not once per record). Options is
    // created inside the executor closure because it wraps a native handle
    // and is not serializable; loadLibrary() must run on the executor JVM.
    val keysToGet = Seq("key1", "key2", "key3")
    val resultsRDD = rocksdbRDD.mapPartitions { _ =>
      RocksDB.loadLibrary()
      val options = new Options().setCreateIfMissing(false)
      val db = RocksDB.open(options, rocksDbPath)
      try {
        // Seq.map is eager, so all reads complete before `finally` closes
        // the DB; db.get returns null for a missing key.
        keysToGet.map { key =>
          val raw = db.get(key.getBytes("UTF-8"))
          (key, if (raw == null) null else new String(raw, "UTF-8"))
        }.iterator
      } finally {
        db.close()
        options.close() // native resource — must be released explicitly
      }
    }

    // ---- Range query [startKey, endKey] --------------------------------
    val rangeResultsRDD = rocksdbRDD.mapPartitions { _ =>
      RocksDB.loadLibrary()
      val options = new Options().setCreateIfMissing(false)
      val db = RocksDB.open(options, rocksDbPath)
      try {
        val iter = db.newIterator()
        try {
          iter.seek(startKey)
          // Materialize results before the DB/iterator are closed.
          val results = scala.collection.mutable.ListBuffer.empty[(String, String)]
          while (iter.isValid && compareKeys(iter.key(), endKey) <= 0) {
            results += ((new String(iter.key(), "UTF-8"), new String(iter.value(), "UTF-8")))
            iter.next()
          }
          results.iterator
        } finally {
          iter.close()
        }
      } finally {
        db.close()
        options.close()
      }
    }

    // Persist query results to HDFS.
    resultsRDD.saveAsTextFile("/path/to/point-read-results")
    rangeResultsRDD.saveAsTextFile("/path/to/range-query-results")

    sc.stop()
  }
}