from pyhive import hive


class Hive:
    """Thin convenience wrapper around a PyHive (HiveServer2) connection.

    Each public method opens a fresh connection, runs a single statement,
    and closes both cursor and connection again, so instances never hold
    open resources between calls.
    """

    def __init__(self, host='192.168.62.164', port=10000, username='root', database='douban'):
        # Only stores connection parameters; no connection is opened here.
        self.host = host
        self.port = port
        self.username = username
        self.database = database

    def _connect(self):
        """Open and return a new Hive connection using the stored settings."""
        return hive.Connection(host=self.host, port=self.port,
                               username=self.username, database=self.database)

    @staticmethod
    def _build_load_sql(file_dir, table):
        """Build the LOAD DATA statement loading *file_dir* into *table*.

        Bug fixed vs. the original: a space was missing before 'overwrite',
        producing invalid HiveQL like ``'...path'overwrite into table ...``.

        NOTE(review): the path and table name are interpolated directly into
        the SQL text (HiveQL offers no parameter binding for LOAD DATA);
        never pass untrusted input here.
        """
        return "load data inpath '" + file_dir + "' overwrite into table " + table

    # Query data
    def select_pyhive(self, sql):
        """Execute a SELECT statement, print each row, and return all rows.

        Returning the rows (instead of discarding them as before) is
        backward-compatible and makes the method usable programmatically.
        """
        conn = self._connect()
        try:
            cursor = conn.cursor()
            try:
                cursor.execute(sql)
                rows = cursor.fetchall()
                for row in rows:
                    print(row)
                return rows
            finally:
                cursor.close()
        finally:
            conn.close()

    # Create a database or table
    def create_hive(self, sql):
        """Execute a DDL statement (e.g. CREATE DATABASE / CREATE TABLE)."""
        conn = self._connect()
        try:
            cursor = conn.cursor()
            try:
                cursor.execute(sql)
            finally:
                cursor.close()
        finally:
            conn.close()

    # Load data that is already on HDFS into a Hive table
    def upload_hive(self, file_dir, table='orders'):
        """Load the HDFS file at *file_dir* into *table*, overwriting it.

        *table* defaults to 'orders' to stay compatible with existing
        callers, but any target table may now be named.
        """
        conn = self._connect()
        try:
            cursor = conn.cursor()
            try:
                cursor.execute(self._build_load_sql(file_dir, table))
            finally:
                cursor.close()
        finally:
            conn.close()
