# Trace every command as it runs (debug aid for this ops/benchmark script).
set -x

submit(){
   # Retired invocations, kept for reference:
   #   rm -rf output
   #   spark-submit \
   #      --packages com.databricks:spark-xml_2.12:0.12.0,mysql:mysql-connector-java:5.1.49 \
   #      --master local[4] 32.py
   #   --packages org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.2,com.databricks:spark-xml_2.12:0.12.0,mysql:mysql-connector-java:5.1.49

   # Submit the streaming job; trailing args are broker, mode, topic.
   spark-submit  \
      --packages org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.2,mysql:mysql-connector-java:5.1.49 \
      --master local[4] 1712.py \
      172.16.0.59:9092 subscribe topic1
}
prepare_little_file(){
   # Populate one directory per count with <count> copies of books.xml
   # (many-small-files ingestion fixture).
   # Optional args override the default count list, e.g.:
   #   prepare_little_file 100 200
   #arr=(10000 20000 50000 100000)
   local arr=(10)
   if [ "$#" -gt 0 ]; then
      arr=("$@")
   fi

   local k i
   for k in "${arr[@]}"; do
      rm -rf -- "$k"
      mkdir -p -- "$k"
      for ((i=0; i<k; i++)); do
         # Copy straight into the target dir; the old cp-then-mv two-step
         # spawned an extra process per file and could fail silently.
         cp books.xml "$k/$i.xml" || return 1
      done
   done
}

prepare_large_file(){
   # Synthesize one large XML file per count:
   #   <count>.xml = head.xml + <count> copies of body.xml + tail.xml
   # Optional args override the default count list, e.g.:
   #   prepare_large_file 100
   local arr=(10000 20000 50000 100000)
   if [ "$#" -gt 0 ]; then
      arr=("$@")
   fi

   local k i
   for k in "${arr[@]}"; do
      # One redirection over the whole group replaces the old
      # rm/touch plus a separate open-append per fragment.
      {
         cat head.xml
         for ((i=0; i<k; i++)); do
            cat body.xml
         done
         cat tail.xml
      } > "$k.xml"
   done
}

sshell(){
   # Earlier package combinations, kept for reference:
   #   spark-shell --packages com.databricks:spark-xml_2.11:0.12.0
   #   spark-shell --packages com.databricks:spark-xml_2.12:0.12.0,org.json4s:json4s-xml_2.12:4.0.1
   #   spark-shell --packages com.databricks:spark-xml_2.12:0.12.0,org.json4s:json4s-native_2.12:4.0.2
   #   --packages com.databricks:spark-xml_2.12:0.12.0,org.json4s:json4s-jackson_2.12:4.0.3,org.json4s:json4s-xml_2.12:4.0.1

   # Interactive shell with the XML reader and MySQL JDBC driver on the classpath.
   spark-shell \
      --packages com.databricks:spark-xml_2.12:0.12.0,mysql:mysql-connector-java:5.1.49
}

clean(){
   # Drop all sbt build state plus the job's output directory.
   sbt clean
   rm -rf -- output target project
}

zoo_off(){
   # Force-stop ZooKeeper. pgrep replaces the ps|grep|grep -v grep pipeline,
   # and an empty match no longer triggers a `kill` usage error.
   local pids
   pids=$(pgrep -f zookeeper)
   if [ -n "$pids" ]; then
      # Word-splitting on $pids is intentional: one PID per argument.
      # NOTE(review): SIGKILL kept from the original; SIGTERM first would be gentler.
      kill -9 $pids
   fi
}

kafka_daemon(){
   # Restart the local ZooKeeper + Kafka pair from a clean slate.
   # (Dead empty `:<<EOF`/`EOF` here-doc removed.)
   clear_up

   # ZooKeeper must be up before the broker starts; 1s is a crude wait.
   nohup ./bin/zookeeper-server-start.sh ./config/zookeeper.properties >1.log 2>&1 &
   sleep 1
   nohup ./bin/kafka-server-start.sh ./config/server.properties >2.log 2>&1 &
}

kafka_off(){
   # Force-stop Kafka. pgrep replaces the ps|grep|grep -v grep pipeline,
   # and an empty match no longer triggers a `kill` usage error.
   local pids
   pids=$(pgrep -f kafka)
   if [ -n "$pids" ]; then
      # Word-splitting on $pids is intentional: one PID per argument.
      # NOTE(review): SIGKILL kept from the original; SIGTERM first would be gentler.
      kill -9 $pids
   fi
}

clear_up(){
   # Stop Kafka and ZooKeeper via their own scripts, then wipe local state.
   ./bin/kafka-server-stop.sh
   ./bin/zookeeper-server-stop.sh

   # Old kill-based shutdown, kept for reference. BUG FIX: this used to sit
   # in an UNQUOTED `:<<EOF` here-doc, so the backtick command substitutions
   # below were still executed on every call; plain comments avoid that.
   #   pid=$(ps -ef|grep kafka |grep -v grep|awk '{print $2}')
   #   kill -9 $pid
   #   pid=$(ps -ef|grep zookeeper |grep -v grep|awk '{print $2}')
   #   kill -9 $pid

   rm -rf logs nohup.out /tmp/kafka-logs /tmp/zookeeper *.log
}

create_topic(){
   # One-partition, unreplicated test topic (legacy --zookeeper CLI form).
   kafka-topics.sh --create --zookeeper localhost:2181 \
      --replication-factor 1 --partitions 1 --topic topic1
}

produce(){
   # Interactive console producer on the test topic.
   /usr/local/kafka/bin/kafka-console-producer.sh \
      --broker-list localhost:9092 --topic topic1
}

put(){
   # Flatten 10.xml onto a single line (strip LF and CR) and push it to Kafka.
   # Single tr with input redirection replaces the old cat|tr|tr pipeline.
   tr -d '\n\r' < 10.xml > 1.xml

   # Retired file-drop variant, kept for reference. BUG FIX: this used to sit
   # in an UNQUOTED `:<<EOF` here-doc, so `date +%s` was still executed on
   # every call; plain comments avoid that.
   #   rm -rf input
   #   mkdir input
   #   mv 1.xml input/$(date +%s)

   /usr/local/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic topic1 <1.xml
}

consumer(){
   # Tail the test topic from the local broker.
   /usr/local/kafka/bin/kafka-console-consumer.sh \
      --bootstrap-server localhost:9092  --topic topic1
}

reboot(){
   # Rebuild the jar from scratch, then (re)launch the streaming job.
   rm -rf -- target
   sbt package
   boot
}

toggle(){
   # Swap which main source is active: Put.scala vs DirectKafkaWordCount.scala.
   # Exactly one of the pair is live; the other is parked as *.bk.
   # Runs in a subshell so the caller's cwd is restored even on error, and
   # BUG FIX: the cd is now checked — previously a failed cd let the
   # destructive mv commands run in the wrong directory.
   (
      cd src/main/scala || exit 1
      if [ -f "Put.scala" ]; then
         mv Put.scala Put.scala.bk
         mv DirectKafkaWordCount.scala.bk DirectKafkaWordCount.scala
      else
         mv Put.scala.bk Put.scala
         mv DirectKafkaWordCount.scala DirectKafkaWordCount.scala.bk
      fi
      ls -l
   )
}

boot(){
   # Launch the streaming job in the background from the freshest build.
   # Alternative --packages sets, kept for reference:
   #   --packages com.databricks:dbutils-api_2.12:0.0.5
   #   --packages org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.2,com.databricks:spark-xml_2.12:0.12.0,mysql:mysql-connector-java:5.1.49

   # Derive the main class from whichever source defines `def main`:
   # awk field 4 of src/main/scala/<Name>.scala split on '/' and '.' is <Name>.
   MAIN_CLASS=$(find src -type f -name '*.scala' | xargs grep -l 'def main' | awk -F '[/.]' '{print $4}')
   if [ -z "$MAIN_CLASS" ]; then
      # `exit -1` is not a valid exit status (0-255); use 1.
      exit 1
   fi
   ARTIFACT=$(find target/scala-* -name '*.jar')
   if [ -n "$ARTIFACT" ]; then
      rm -f nohup.out
      nohup spark-submit \
         --packages org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.2,mysql:mysql-connector-java:5.1.49,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.1.2,com.databricks:spark-xml_2.12:0.12.0 \
         --class "$MAIN_CLASS" --master local[*] $ARTIFACT "172.16.0.59:9092" "1" "topic1" &
   fi
}

downgrade(){
   # Point the toolchain symlinks at Scala 2.11 / Spark 2.1.
   # `ln -sfn` replaces an existing symlink in one step: the old rm -f + ln -s
   # pair silently failed when the target was a real directory (rm -f cannot
   # remove a directory) and then created the link *inside* that directory.
   ln -sfn /usr/local/scala-2.11.8 /usr/local/scala
   ln -sfn /usr/local/spark-2.1.1-bin-hadoop2.3 /usr/local/spark
   ln -sfn build.sbt.2.11 build.sbt
}

upgrade(){
   # Point the toolchain symlinks at Scala 2.12 / Spark 3.1.
   # `ln -sfn` replaces an existing symlink in one step: the old rm -f + ln -s
   # pair silently failed when the target was a real directory (rm -f cannot
   # remove a directory) and then created the link *inside* that directory.
   ln -sfn /usr/local/scala-2.12.14 /usr/local/scala
   ln -sfn /usr/local/spark-3.1.2 /usr/local/spark
   ln -sfn build.sbt.2.12 build.sbt
}

# Dispatch: first CLI arg names the function to run, the rest are its args.
# Quoting "$@" keeps multi-word arguments intact (unquoted $@ re-splits them).
"$@"
