#!/bin/bash
# Merge the exported data into a single file; run hive_filter_words.sql first.
# Rebuild ~/words.csv from the Hive export parts, then split it into
# 2000-line chunks under ~/data/words for the worker JVMs below.
rm -f ~/words.csv
cat ~/data/*/000000_0 >> ~/words.csv
# Recreate the working tree (log output, hyperlink output, split chunks).
rm -rf ~/data
mkdir -p ~/data/log ~/data/hy ~/data/words
cd ~/data/words || exit 1
# -d: numeric suffixes (x00, x01, ...). Read the file we just wrote
# (~/words.csv) instead of a hard-coded /home/liu path.
split -d -l 2000 ~/words.csv
# Count the chunks with a glob instead of parsing `ls`; nullglob makes an
# empty directory yield a count of 0 rather than the literal pattern.
shopt -s nullglob
chunks=(x*)
shopt -u nullglob
line_number=${#chunks[@]}

# Prerequisite: package the GivingHyperlinksData module, copy the jar to
# /home/liu, and rename it GivingHyperlinksData.jar.
#
# Launch one worker JVM per split chunk, keeping at most $max_jobs alive
# at once. Iterating over the actual x* files (instead of computing names
# from numeric indices) handles any chunk count — this fixes the old
# limitation with fewer than 10 or more than 99 files, including GNU
# split's widened suffixes (x9000, ...).
max_jobs=10
for chunk in x*; do
  [ -e "$chunk" ] || continue   # no chunks: glob stayed literal
  # Throttle: block until fewer than $max_jobs workers are running.
  # pgrep -fc matches the full command line and prints the count,
  # replacing the old ps | grep | grep | wc -l pipeline.
  while [ "$(pgrep -fc 'GivingHyperlinksData.jar')" -ge "$max_jobs" ]; do
    sleep 5
  done
  suffix=${chunk#x}             # e.g. x07 -> 07
  nohup java -jar /home/liu/GivingHyperlinksData.jar "$chunk" \
    "../hy/hyperlink${suffix}.txt" > "../log/log${suffix}.file" 2>&1 &
done

# start
# nohup ~/properties/giving.sh > ~/giving.log 2>&1 &
# kill
# ps -ef | grep giving.sh | grep -v grep | awk '{print $2}' |xargs kill -9
# ps -ef | grep GivingHyperlinksData.jar | grep -v grep | awk '{print $2}' |xargs kill -9
#
#
# Loading Redis
# Filter out invalid lines first: those that do not contain a hyperlink.
# sed -i -e 's/^/hsetnx hash_hyperlink_key /' redis_keyword.txt
# cat redis_keyword.txt | /opt/bigdata/redis-6.0.10/src/redis-cli -h liu -p 6381 --pipe