# ~/.bash_profile — login-shell environment for the hadoop/QA account.
unset USERNAME


# local variables
export PATH=/bin:/sbin:/usr/sbin:/usr/bin:/usr/local/bin:~/python
export LD_LIBRARY_PATH=/usr/local/lib:/usr/lib

export HADOOP_HOME=/home/tscadmin/hadoop/current
export HADOOP_ROOT=/home/tscadmin/hadoop/current
export JAVA_HOME=/home/tscadmin/java/current
# NOTE(review): HIVE_HOME points at hive/bin, not the hive root — it is
# appended directly to PATH below, which only works if it is the bin dir.
export HIVE_HOME=/home/tscadmin/hive/bin
#export JRE_HOME=${JAVA_HOME}/jre

# JRE_HOME is commented out above; ${JRE_HOME:+...} only appends the JRE
# segments when it is actually set, so an unset JRE_HOME no longer injects
# bogus "/lib" (CLASSPATH) and "/bin" (PATH) entries.
export CLASSPATH=$HADOOP_HOME/lib:$JAVA_HOME/lib${JRE_HOME:+:$JRE_HOME/lib}
#:$HOME"/QA/Scripts/SearchCenter/algorithm/hadoop/tools/compress/HDFSClient.jar"

export PATH=$PATH:$HADOOP_HOME/bin:$JAVA_HOME/bin${JRE_HOME:+:$JRE_HOME/bin}:$HIVE_HOME

# ls directory colors
#export LSCOLORS=Dxfxcxdxbxegedabagacad

# Host short name and OS kind, used by the prompt and the Linux-only
# overrides later in this file.  $() instead of backticks; no stray ';'.
export HOSTSHORT=$(hostname)
export OS=$(uname)
#export LC_ALL='zh_CN.GBK'
export LC_ALL='en_US.UTF-8'
#export LANG='zh_CN.GBK'
export LANG='en_US.UTF-8'
export SVN_EDITOR=vim

#short commands
# Default 'ls' uses BSD-style color (-G); the Linux branch later in this
# file re-aliases it with GNU flags.
alias ls='ls -GF'
alias s='sudo'
alias st='sudo -u tscadmin'
alias m='mysql -u root'
alias gu='iconv -f gbk -t utf-8'
alias ug='iconv -f utf-8 -t gbk'
# NOTE(review): "-2" (force SSH protocol 2) is obsolete and rejected by
# recent OpenSSH releases — confirm the installed version still accepts it.
alias ssh='ssh -2'
# Reuse the 'ls' alias (aliases expand recursively at use time) rather than
# hard-coding GNU's --color=tty, which fails on BSD/macOS ls.
alias ll='ls -al'

# go to google for a definition
#define() {
#  local LNG=$(echo $LANG | cut -d '_' -f 1)
#  local CHARSET=$(echo $LANG | cut -d '.' -f 2)
#  lynx -accept_all_cookies -dump -hiddenlinks=ignore -nonumbers -assume_charset="$CHARSET" -display_charset="$CHARSET" "http://www.google.com/search?hl=${LNG}&q=define%3A+${1}&btnG=Google+Search" | grep -m 5 -C 2 -A 5 -w "*" > /tmp/deleteme
#  if [ ! -s /tmp/deleteme ]; then
#    echo "Sorry, google doesn't know this one..."
#  else
#    cat /tmp/deleteme | grep -v Search
#    echo ""
#  fi
#  rm -f /tmp/deleteme
#}

#lingyue.wkl added ----------------------------------
#hadoop
# Generate the h<verb> shortcuts for "hadoop dfs -<verb>".
# Read/inspect/local-copy verbs run as the current user:
for _hcmd in ls lsr du dus count expunge copyFromLocal moveFromLocal get \
             getmerge cat text copyToLocal moveToLocal setrep test stat \
             tail chmod chown chgrp help; do
    alias "h${_hcmd}=hadoop dfs -${_hcmd}"
done
# Mutating verbs are wrapped in sudo so they run as tscadmin:
for _hcmd in mv cp rm rmr put mkdir touchz; do
    alias "h${_hcmd}=sudo -u tscadmin hadoop dfs -${_hcmd}"
done
unset _hcmd

#alias lftp="lftp pubftp:look@172.24.23.127"
alias lftp="lftp pubftp:look@110.75.5.128"

# GNU/Linux-specific overrides.  "$OS" is quoted so an unset or empty value
# compares cleanly instead of raising a '[: unary operator expected' error.
if [ "$OS" = "Linux" ]; then
    alias ls='ls --color -F'
    # NOTE(review): scp -1 forces SSH protocol 1, long removed from OpenSSH —
    # confirm this is still wanted on these hosts.
    alias scp='scp -1'
fi

# Shell prompt and xterm title.
# Title format: "[host:cwd-basename$]full-path".  $(...) replaces backticks
# (quotes nest cleanly) and "$PWD" is quoted so directories containing
# spaces don't word-split inside the basename call.
PROMPT_COMMAND='echo -ne "\033]0;[${HOSTSHORT}:$(basename "$PWD")\$]${PWD}\007"'
PS1='[\u@\h:${YROOT_NAME}\w\$] '

#private key manager.
#keychain=`which keychain 2>/dev/null`;
#if [ -f $keychain ]; then
#    $keychain ~/.ssh/identity 1>/dev/null 2>&1
#    if [ -f ~/.ssh-agent-${HOSTNAME} ]; then
#        source ~/.ssh-agent-${HOSTNAME}
#    else
#        source ~/.keychain/${HOSTNAME}-sh        
#    fi
#fi    

# Pull in the interactive shell config, local aliases, and svn command
# completion, each only if the file exists.  Order preserved:
# .bashrc, .alias, .bash_svn_completion.
for _rc in ~/.bashrc ~/.alias ~/.bash_svn_completion; do
    if [ -f "$_rc" ]; then
        . "$_rc"
    fi
done
unset _rc

#cMatch testing hosts.
#for((i=1; i<=3; i++)); do
#    export cmtest${i}=cmtest${i}.corp.cnb
#done
# Shortcut to the QA search-center test-script repository URL.
export S_P_T='http://svn.simba.taobao.com/svn/QA/Scripts/SearchCenter/algorithm/test'
# Personal bin dir; PATH was already exported earlier, so a plain
# assignment is enough here.
PATH="$PATH:$HOME/bin"
