from confluent_kafka import Consumer, KafkaError, KafkaException
from hdfs.client import Client, InsecureClient
from impala.dbapi import connect
from influxdb import InfluxDBClient
import pymysql



def hive_query():
    """Connect to HiveServer2 on hadoop128:10000 and print 10 rows of px_dim.dim_sal_shop.

    Uses PLAIN SASL authentication against the 'default' database. The cursor
    and connection are closed even if the query raises (the original leaked
    both on any exception between connect() and close()).
    """
    conn = connect(
        host='hadoop128',
        port=10000,
        database='default',
        auth_mechanism='PLAIN'
    )
    try:
        cursor = conn.cursor()
        try:
            cursor.execute("SELECT * FROM px_dim.dim_sal_shop LIMIT 10")
            for row in cursor.fetchall():
                print(row)
        finally:
            cursor.close()
    finally:
        # Release the HiveServer2 session even when the query fails.
        conn.close()


def hdfs_query():
    """List the HDFS root directory and stream the first 100 bytes of /test/1.txt.

    Talks to the WebHDFS endpoint on hadoop128:9870 as user 'bigdata' with no
    authentication (InsecureClient).
    """
    url = 'http://hadoop128:9870'
    filename = '/test/1.txt'

    # WebHDFS address and the user to act as.
    client = InsecureClient(url=url, user='bigdata')
    # List every entry directly under the root directory.
    print(client.list('/'))

    # Probe the file; strict=False returns None instead of raising when the
    # path is absent. The original bound the result to an unused variable —
    # the RPC is kept so network behavior is unchanged, the binding dropped.
    client.status(hdfs_path=filename, strict=False)

    # Stream the first 100 bytes, yielding records split on ','.
    with client.read(filename, offset=0, length=100, delimiter=',', encoding='utf-8') as reader:
        for record in reader:
            print(record)

    # Download example (intentionally disabled):
    # client.download(hdfs_path=filename, overwrite=True, local_path='D:/tmp')



def influxdb_query():
    """Fetch 2 points from the performance_glink measurement and print time/type.

    Connects to InfluxDB on hadoop102:8086 as admin against the 'valor'
    database. The client is closed even if the query raises (the original
    leaked the HTTP session on any exception before close()).
    """
    host = 'hadoop102'
    port = 8086  # pass an int, not a string, for the port
    database = 'valor'
    username = 'admin'
    password = 'Xnetworks.c0M'

    client = InfluxDBClient(host=host, port=port, username=username, password=password, database=database)
    try:
        query = "SELECT * FROM performance_glink limit 2;"
        result = client.query(query)

        for point in result.get_points():
            time = point['time']
            field_value = point['type']  # replace with the field name you want to read
            print(f"Time: {time}, Field Value: {field_value}")
    finally:
        client.close()


def kafka_query():
    """Consume messages from the 'test' topic and print them until interrupted.

    Authenticates with SASL_PLAINTEXT as admin/admin and reads from the
    earliest offset for the consumer group. Partition-EOF events are skipped;
    any other broker error stops the loop. Ctrl-C exits cleanly and the
    consumer is always closed.
    """
    conf = {
        'bootstrap.servers': '192.168.0.41:9092',
        'security.protocol': 'SASL_PLAINTEXT',
        'sasl.mechanisms': 'PLAIN',
        'sasl.username': 'admin',
        'sasl.password': 'admin',
        'group.id': 'my_consumer_group3',
        'auto.offset.reset': 'earliest'
    }

    consumer = Consumer(conf)
    topics = ['test']
    consumer.subscribe(topics)

    try:
        while True:
            msg = consumer.poll(1.0)  # poll timeout in seconds
            if msg is None:
                continue
            if msg.error():
                # BUG FIX: the _PARTITION_EOF sentinel lives on KafkaError,
                # not KafkaException — the original raised AttributeError
                # whenever any message carried an error.
                if msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    print(msg.error())
                    break

            print('Received message: {0}'.format(msg.value().decode('utf-8')))

    except KeyboardInterrupt:
        pass

    finally:
        consumer.close()



def mysql_query():
    """Print 10 rows of pinshang.t_pos_invoice, each as a dict.

    Connects to MySQL on 192.168.0.40 as qingmei; DictCursor makes fetchall()
    return dicts keyed by column name. On any pymysql error the original
    Chinese failure message is printed. The connection is closed even when the
    query (as opposed to the connect) fails — the original leaked it in that
    case because the broad except skipped conn.close().
    """
    try:
        # Establish the database connection.
        conn = pymysql.connect(
            host='192.168.0.40',
            user='qingmei',
            password='123456',
            database='pinshang',
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor  # rows come back as dicts
        )
    except pymysql.Error as e:
        print(f"连接失败: {e}")
        return

    print("连接成功！")
    try:
        # pymysql cursors are context managers: closed automatically.
        with conn.cursor() as mycursor:
            mycursor.execute("SELECT * FROM t_pos_invoice limit 10")
            for row in mycursor.fetchall():
                print(row)
    except pymysql.Error as e:
        print(f"连接失败: {e}")
    finally:
        conn.close()

hive_query()