package to_hbase_xiemingyang
import org.apache.hadoop.hbase.client.{Put, Result}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.HashMap
import scala.util.parsing.json.JSON

/**
 * Spark driver that loads line-delimited JSON records (UNICEF web-source
 * metadata) from a classpath resource and bulk-writes them into the HBase
 * table "BaseMapInfos", one Put per record, column family "WebInfo".
 */
object make_webinfo {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("HBaseTest").setMaster("local")
    val sc = new SparkContext(sparkConf)
    // Target HBase table for the write.
    val table = "BaseMapInfos"
    sc.hadoopConfiguration.set("hbase.zookeeper.quorum", "10.120.65.181,10.120.67.164,10.120.64.243,10.120.64.144")
    sc.hadoopConfiguration.set("hbase.zookeeper.property.clientPort", "2181")
    sc.hadoopConfiguration.set(TableOutputFormat.OUTPUT_TABLE, table)

    val job = Job.getInstance(sc.hadoopConfiguration)
    job.setOutputKeyClass(classOf[ImmutableBytesWritable])
    // BUG FIX: the RDD below emits Put values, not Result — the output value
    // class must match what is actually written.
    job.setOutputValueClass(classOf[Put])
    job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])

    // Resource shipped on the classpath; one JSON document per line.
    val thepath = makeConceptDic_step02.getClass.getClassLoader().getResource("Map01_02_dealTypesList/unicef_webinfo.txt").getPath

    val jsonStr = sc.textFile(thepath) //.sample(true,0.01);

    // JSON.parseFull yields None for malformed input; keep the Option so the
    // next stage can drop bad lines instead of crashing.
    val result = jsonStr.map(s => JSON.parseFull(s))

    //WebCode ,SourceCode , ConceptId, DetailId,DetailName
    // Build one (key, Put) pair per successfully parsed record. flatMap with
    // an explicit fallback replaces the previous non-exhaustive match, which
    // threw MatchError on any line JSON.parseFull could not parse.
    val rdds = result.flatMap {
      // Map[_, _] (not Map[String, Any]) avoids an unchecked type-erased
      // match; keys/values are stringified below exactly as before.
      case Some(maps: Map[_, _]) =>
        val thashmap: HashMap[String, String] = new HashMap[String, String]()
        maps.foreach { case (k, v) =>
          thashmap += (k.toString -> v.toString.replace("\n", ""))
        }
        // Extract the 24-char id from the stringified "_id" field —
        // presumably a Mongo ObjectId rendered with a wrapper prefix of
        // 12 chars; TODO confirm against the source file format.
        thashmap("_id") = thashmap("_id").substring(12, 36)

        // Row key: "<WebCode>_<SourceCode>".
        val rowkey = thashmap("WebCode") + "_" + thashmap("SourceCode")
        val put = new Put(Bytes.toBytes(rowkey))

        put.addColumn(Bytes.toBytes("WebInfo"), Bytes.toBytes("WebCode"), Bytes.toBytes(thashmap("WebCode")))
        put.addColumn(Bytes.toBytes("WebInfo"), Bytes.toBytes("WebName"), Bytes.toBytes(thashmap("WebName")))
        put.addColumn(Bytes.toBytes("WebInfo"), Bytes.toBytes("SourceCode"), Bytes.toBytes(thashmap("SourceCode")))
        put.addColumn(Bytes.toBytes("WebInfo"), Bytes.toBytes("SourceName"), Bytes.toBytes(thashmap("SourceName")))
        // TableOutputFormat ignores the key; an empty writable is sufficient.
        Some((new ImmutableBytesWritable, put))
      case _ =>
        // Unparseable line: skip it rather than fail the whole job.
        None
    }

    rdds.saveAsNewAPIHadoopDataset(job.getConfiguration())

    sc.stop()
  }

}
