#!/bin/bash
# Master control script.
# Each business user defines their own category list (activelist) in conf/run.conf
#

# Detect which host we are on from the eth0 IP address and select the
# matching home directory / run configuration. Loading results into Hive
# is only done on the production hadoop host.
ip=$(ifconfig eth0 | grep "inet addr" | cut -f 2 -d ":" | cut -f 1 -d " ")
hadoop_home_dir=/home/horae/shell-horae/mr/aa/IEHACK/url_tag_mining
test_home_dir=/root/wengyunhe/svn/gsvn/url-tag-minning
execute_sql=false
if [ x"$ip" = x"10.133.10.18" ];then
	home_dir=$hadoop_home_dir
	run_conf="run_hadoop.conf"
	execute_sql=true
else
	home_dir=$test_home_dir
	run_conf="run_test.conf"
fi
echo "home dir: $home_dir"

# Normalise line endings of all config files, then load the host-specific
# configuration (defines $output, $bin, $pattern, $result, $total_result,
# $thconf, $data, $base_threshold, $context_weight, $default_list, ...).
dos2unix "$home_dir"/conf/*
source "$home_dir/conf/${run_conf}"

# Parse command-line options:
#   -r           restart the whole process (reparse raw input)
#   -u <user>    use the category list variable named <user> from the run conf
#   -t <date>    partition date (defaults to today, YYYY-mm-dd)
echo "params: $*"
refresh=false

date_time=$(date '+%Y-%m-%d')
activelist=$default_list
while [[ $# -gt 0 ]]
do
	if [[ "$1" == "-r" ]];then
		refresh=true
	elif [[ "$1" == "-u" ]];then
		shift
		# Indirect expansion: the value of the conf variable whose name is
		# in $1. (Replaces the previous `eval`, which executed arbitrary
		# user input as shell code.)
		activelist=${!1}
	elif [[ "$1" == "-t" ]];then
		shift
		date_time=$1
	else
		echo "usage: sh ./run.sh -r -u user -t datetime"
	fi
	shift
done
echo "refresh: $refresh"
echo "activelist: $activelist"
echo "datetime: $date_time"

# contains LIST ITEM -- succeed (0) when ITEM matches one of the
# whitespace-separated words in LIST, fail (1) otherwise. The word is
# intentionally compared unquoted on the right-hand side, so entries in
# LIST may act as glob patterns.
function contains() {
  local candidate
  for candidate in $1; do
    if [[ $candidate = $2 ]]; then
      return 0
    fi
  done
  return 1
}

# Make sure the aggregate output directory exists and start from an empty
# combined result file.
if [ ! -e "$output/total/" ]; then mkdir -p "$output/total/";fi
chmod -R 777 "$output"
: > "$total_result"
# If we only need to change the threshold, then input nothing,
# but if the whole process needs to restart, then "-r" param should be added.
if [[ "$refresh" == "true" ]]; then
# One sub-directory per category under input/; only the categories listed
# in $activelist are (re)parsed. Globbing replaces the previous `dir`
# output parsing (same entries, no word-splitting surprises).
for entry in "$home_dir"/input/*; do
	dir=${entry##*/}
	if contains "$activelist" "$dir"; then
		if [ -e "$home_dir/input/$dir/$pattern" ] && [ -e "$home_dir/input/$dir/$context_weight" ]; then
			echo "parsing $dir"
			if [ ! -e "$output/$dir/" ]; then mkdir -p "$output/$dir/";fi
			# $bin/$data/$base_threshold come from the sourced run conf;
			# left unquoted in case the conf relies on word-splitting.
			$bin "$home_dir/input/$dir/$pattern" "$home_dir/input/$dir/$context_weight" $data $base_threshold > "$output/$dir/$result"
		else
			echo "$home_dir/input/$dir/$pattern or $home_dir/input/$dir/$context_weight does not exist."
		fi
	fi
done
fi

#python scripts/addpv.py $urldata ./$output/$dir/$result | sort -k3 -nr > ./$output/$dir/$result.sorted
# Second pass: sort, filter and truncate each active category's result,
# then append its rows (tagged with the category name) to $total_result.
for entry in "$output"/*; do
	dir=${entry##*/}
	if contains "$activelist" "$dir"; then
		if [ -e "$output/$dir/$result" ]; then
			echo "filtering $dir"
			# Sort by column 4, numeric, descending, tab-separated.
			sort -k4 -nr -t$'\t' "$output/$dir/$result" > "$output/$dir/$result.sorted"
			# Per-category top-N limit is column 5 of conf/threshold for
			# the row whose column 1 equals the category name. Passing the
			# name via -v avoids splicing it into the awk program text.
			topn=$(awk -F"\t" -v d="$dir" '$1 == d {print $5}' "$home_dir/conf/threshold")
			python "$home_dir/scripts/filter.py" "$output/$dir/$result.sorted" "$thconf" "$dir" > "$output/$dir/$result.final"
			head -n "$topn" "$output/$dir/$result.final" > "$output/$dir/$result.filtered"
			awk -v tag="$dir" 'BEGIN{OFS="\t"}{print $1,$2,$3,$4,$5,tag}' "$output/$dir/$result.filtered" >> "$total_result"
		else
			echo "$output/$dir/$result does not exist."
		fi
	fi
done

chmod -R 777 "$output"

# Everything below only runs on the production host (execute_sql=true).
if [ x"$execute_sql" = x"false" ];then exit; fi

# Load the combined result into Hive: stage it through a temporary
# external text table, then insert into the partitioned RCFile table
# under the requested (default: today's) partition.
groupname=cug_p_sdo_data
/opt/app/hive-0.7.0-rc1/bin/hive \
        --hiveconf user.group=d_sdo_data \
        --hiveconf mapred.job.queue.name=$groupname \
        --hiveconf mapred.fairscheduler.pool=$groupname \
        -e \
"

use p_sdo_data_etl;
set hive.business.name='aa';
set mapred.compress.map.output=false;
set hive.exec.compress.output=false;
set mapred.output.compress=false;
drop table if exists t_aa_iehack_type_url_tmp;
create external table if not exists t_aa_iehack_type_url_tmp
(
  score     string,
  url       string,
  pv        string,
  uv        string,
  title     string,
  tag       string
)
row format delimited
fields terminated by '\t'
lines terminated by '\n'
stored as textfile;
LOAD DATA LOCAL INPATH '$total_result' OVERWRITE INTO TABLE t_aa_iehack_type_url_tmp ;

create table if not exists t_aa_iehack_type_url
(
  score     string,
  url       string,
  pv        string,
  uv        string,
  title     string,
  tag       string
)
PARTITIONED BY (pt string)
-- Java class names are case-sensitive: the previous all-lowercase
-- 'lazysimpleserde' cannot be loaded by reflection.
row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
with serdeproperties ('serialization.null.format'='')
stored as rcfile;
insert overwrite table t_aa_iehack_type_url partition (pt ='${date_time}') 
select *  from t_aa_iehack_type_url_tmp;

"
echo "all done!"
