# -*- coding:utf-8 -*-
import paramiko
import datetime
import time
import os
import re

# Remote Linux host SSH connection settings: ip, user, password, port.
# NOTE(review): credentials are hard-coded in source — consider environment
# variables or a protected config file instead.
hostname = '192.168.110.211'
username = 'root'
password = 'k1k2k3&*('
port = 22

# Payload selection: the first entry of the current working directory whose
# name does not contain 'exe'.
dir_list = os.listdir(os.getcwd())
file_name = [entry for entry in dir_list if 'exe' not in entry][0]

# Destination directory on the Linux server (HDFS path kept for reference).
linux_site = '/root/linhl_work_space/upload_file/'
#hdfs_site = '/tmp/external4/tempdata/load_data/'

# Absolute local source path and the matching remote destination path.
local_file = os.path.join(os.getcwd(), file_name)
remote_path = os.path.join(linux_site, file_name)

# Upload to the Linux server
def upload_linux(local_file, remote_path):
    """Upload *local_file* to *remote_path* on the remote host via SFTP.

    If the first ``put`` fails (typically because the remote directory does
    not exist yet), the parent directory is created and the upload retried
    once.  All errors are printed rather than raised (best-effort behaviour
    kept from the original script).
    """
    t = None
    try:
        t = paramiko.Transport((hostname, port))
        t.connect(username=username, password=password)
        sftp = paramiko.SFTPClient.from_transport(t)
        print('linux开始上传文件%s ' % datetime.datetime.now())

        try:
            sftp.put(local_file, remote_path)
        except IOError:
            # Remote directory is probably missing: create it, retry once.
            sftp.mkdir(os.path.split(remote_path)[0])
            sftp.put(local_file, remote_path)
        # BUG FIX: this message was only printed on the mkdir-retry path;
        # now it reports every successful upload.
        print("从本地： %s 上传到： %s" % (local_file, remote_path))
        print('linux文件上传成功 %s ' % datetime.datetime.now())
    except Exception as e:
        print(repr(e))
    finally:
        # BUG FIX: the transport was leaked when any exception fired before
        # t.close(); always release it.
        if t is not None:
            t.close()

# Clear the matching file directory on the server
def linux_clear(linux_site):
    """Delete every plain file directly under *linux_site* on the remote host.

    Errors are printed, not raised.  The ``rm -f <dir>*`` command is
    non-recursive, so subdirectories are left untouched.
    """
    # Construct the client before the try so ssh is always bound for finally.
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(hostname, port, username, password)
        ssh.exec_command('rm -f ' + linux_site + '*')
        print("operate successfully!\n")
    except Exception as e:
        print(repr(e))
    finally:
        # BUG FIX: close() used to sit after the except block at function
        # scope — a failure constructing/connecting could leave ssh unbound
        # or unclosed.  finally guarantees the session is released.
        ssh.close()
    
# Clear the HDFS target directory on hadoop and upload the file
def upload_hdfs(file_name):
    """Clear the HDFS target directory, push *file_name* into HDFS via the
    server-side helper scripts, then list the directory for visual check.

    Errors are printed, not raised.  Relies on three scripts already present
    on the remote host: clearn.sh, upload.sh and ls_result.sh.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(hostname, port, username, password)
        order_clear = '/root/linhl_work_space/clearn.sh'
        # BUG FIX: a space is required between the script path and its
        # argument — the original concatenated them into 'upload.sh<file>'.
        order_upload = '/root/linhl_work_space/upload.sh ' + file_name
        order_list = '/root/linhl_work_space/ls_result.sh'

        ssh.exec_command("bash --login -c " + order_clear)
        print('HDFS清理成功')
        time.sleep(3)

        # Commands with arguments must be quoted so `bash -c` receives them
        # as one string (this was the original author's "why?" observation).
        stdin, stdout, stderr = ssh.exec_command("bash --login -c '" + order_upload + "'")
        print('HDFS文件上传成功')

        # BUG FIX: the original command string was missing its closing quote.
        stdin1, stdout1, stderr1 = ssh.exec_command("bash --login -c '" + order_list + "'")
        print("hdsf目录文件如下，如上传不成功或者有误请重跑该程序")
        print(stdout1.read())
        time.sleep(5)
    except Exception as e:
        print(repr(e))
    finally:
        # BUG FIX: close() moved into finally; it previously ran at function
        # scope where a construction failure would leave ssh unbound.
        # Also fixed: lines here were tab-indented in the original, which is
        # a SyntaxError under Python 3's tab/space rules.
        ssh.close()
    
if __name__ == '__main__':
    # Pipeline order matters: first empty the server staging directory,
    # then SFTP-upload the local file into it, and finally run the
    # server-side scripts that push the staged file into HDFS.
    linux_clear(linux_site)
    upload_linux(local_file, remote_path)
    upload_hdfs(file_name)