#!/usr/bin/env python
# -*- coding: gbk -*- 

import os.path
import sys
import subprocess

import yaml

class config:
    """Thin wrapper around a YAML configuration file.

    The whole file is parsed once in the constructor; values are exposed
    through get().  Both the constructor and get() terminate the process
    on failure, preserving this script's original fail-fast behaviour.
    """

    def __init__(self, config_file):
        # config_file: path to a YAML file containing a top-level mapping.
        try:
            # 'with' guarantees the handle is closed on every path; the
            # original code could hit an unbound 'f' in its finally clause
            # when open() itself failed, masking the real IOError.
            with open(config_file, 'r') as f:
                # NOTE(review): yaml.load() without an explicit Loader is
                # deprecated (PyYAML >= 5.1) and unsafe on untrusted input;
                # safe_load is sufficient for a plain key/value config file.
                self.__config_dict = yaml.safe_load(f)
        except IOError as message:
            sys.stderr.write("File could not be opened %s\n" % message)
            sys.exit(1)

    def get(self, key):
        """Return the value stored under *key*.

        Exits the process (status 1) when the key is missing.  Note this
        differs from the original (garbled) comment, which promised a null
        return -- exiting is what the code has always actually done.
        """
        try:
            return self.__config_dict[key]
        except KeyError:
            sys.stderr.write("%s could not be found in config file.\n" % key)
            sys.exit(1)

class hadoop:
    """Wrap a handful of ``hadoop dfs`` commands behind Python methods.

    (Translated from the original GBK comment: "wraps some hadoop
    commands with python".)

    Every operation shells out to the hadoop binary via subprocess and
    reports failures on stderr.  None of the methods raise on a non-zero
    exit status, with the single exception of test(path, flag=True).
    """

    def __init__(self, config):
        """Build the command templates from a config object.

        config: any object exposing get(key); HADOOP_SITE_XML and
        HADOOP_HOME_PATH are both required (config.get exits the
        process when a key is missing).
        """
        self.__site_xml = config.get("HADOOP_SITE_XML")
        self.__home = config.get("HADOOP_HOME_PATH")
        self.__exe = self.__home + "/bin/hadoop --config " + self.__site_xml
        self.__ls = self.__exe + " dfs -ls "
        self.__mv = self.__exe + " dfs -mv "
        self.__put = self.__exe + " dfs -put "
        self.__get = self.__exe + " dfs -get "
        self.__rmdir = self.__exe + " dfs -rmr "
        self.__rmfile = self.__exe + " dfs -rm "
        self.__mkdir = self.__exe + " dfs -mkdir "
        self.__cat = self.__exe + " dfs -cat "
        self.__getmerge = self.__exe + " dfs -getmerge "
        self.__test = self.__exe + " dfs -test -e "

    def __run(self, *parts):
        """Join *parts* into one shell command line and execute it.

        Returns True on exit status 0; otherwise prints a diagnostic to
        stderr and returns False.  Factored out of the nine methods below,
        which previously each duplicated this Popen/wait/report pattern.
        """
        cmd = " ".join(parts)
        # NOTE(review): shell=True with string-joined paths is injection-
        # prone if paths ever come from untrusted input; kept as-is to
        # preserve the original behaviour.
        p = subprocess.Popen(cmd, shell=True)
        if p.wait() != 0:
            sys.stderr.write("%s failed\n" % cmd)
            return False
        return True

    def debug(self):
        # Dump every composed command template; also lists getmerge,
        # which the original version overlooked.
        for value in (self.__site_xml, self.__home, self.__exe, self.__ls,
                      self.__mv, self.__put, self.__get, self.__rmdir,
                      self.__rmfile, self.__mkdir, self.__cat,
                      self.__getmerge, self.__test):
            print(value)

    def ls(self, path):
        """List *path* on DFS (hadoop dfs -ls)."""
        self.__run(self.__ls, path)

    def mv(self, src, dest):
        """Move *src* to *dest* on DFS (hadoop dfs -mv)."""
        self.__run(self.__mv, src, dest)

    def put(self, src, dest):
        """Upload local *src* to DFS *dest* (hadoop dfs -put)."""
        self.__run(self.__put, src, dest)

    def get(self, src, dest):
        """Download DFS *src* to local *dest* (hadoop dfs -get)."""
        self.__run(self.__get, src, dest)

    def rmdir(self, path):
        """Recursively remove DFS directory *path* (hadoop dfs -rmr)."""
        self.__run(self.__rmdir, path)

    def rmfile(self, path):
        """Remove DFS file *path* (hadoop dfs -rm)."""
        self.__run(self.__rmfile, path)

    def mkdir(self, path):
        """Create DFS directory *path* (hadoop dfs -mkdir)."""
        self.__run(self.__mkdir, path)

    def cat(self, path):
        """Print the contents of DFS *path* (hadoop dfs -cat)."""
        self.__run(self.__cat, path)

    def getmerge(self, src, dest):
        """Merge DFS files under *src* into local *dest* (dfs -getmerge)."""
        self.__run(self.__getmerge, src, dest)

    def test(self, path, flag=False):
        """Return True when *path* exists on DFS (hadoop dfs -test -e).

        When flag is True, a missing path raises RuntimeError instead of
        merely returning False.  (The original raised an undefined name
        'Error', which was itself a NameError at runtime.)
        """
        cmd = " ".join([self.__test, path])
        p = subprocess.Popen(cmd, shell=True)
        if p.wait() != 0:
            print(cmd)
            sys.stderr.write("%s does not exist\n" % path)
            if flag:
                raise RuntimeError("%s does not exist" % path)
            return False
        return True

if __name__ == "__main__":
    # Smoke test: load the config file from the working directory and
    # exercise a couple of DFS commands.  The instance is named 'client'
    # so it no longer shadows the hadoop class itself (the original did
    # 'hadoop = hadoop(conf)', making the class unreachable afterwards).
    conf = config("hadoop_conf.yaml")
    client = hadoop(conf)
    client.debug()
    client.mkdir("/user/spider/atscript")
    client.ls("/user/spider")
