from re import split
import os, sys

sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from monitoring import sh, get_mysql_data_management

TABLE = 'item_records'


def hadoop_fs_du_0():
    """Collect 'HDFS file size (excluding replicas)' metrics and store them.

    Reads every (item_id, item_cmd) row of that item type from MySQL, runs
    each command through the shared shell helper, and inserts one record
    per item into the `item_records` table.
    """
    def hadoop_fs_du(item_id: int, cmd: str) -> dict:
        """Run *cmd* and build a monitoring record.

        Parses the first whitespace-separated field of the command output
        as the size in bytes (for `hadoop fs -du`, the first column is the
        size without replicas — presumably; confirm against item_cmd).
        """
        result = sh.evaluate(cmd)
        now = sh.now
        print(result)
        value = split(r'\s+', result)[0]
        return {
            'item_id': item_id,
            'monitoring_time': now,
            'monitoring_value': int(value)}

    db1 = get_mysql_data_management()
    db2 = get_mysql_data_management()
    for i, c in db1.fetchone('SELECT item_id,item_cmd FROM item_info WHERE item_type="HDFS文件大小(不含副本)"'):
        # Compute the record ONCE and reuse it: the original code called
        # hadoop_fs_du(i, c) a second time for the insert, re-running the
        # (expensive) HDFS shell command and producing a fresh timestamp,
        # so the printed and stored records could disagree.
        dt = hadoop_fs_du(i, c)
        print(dt)
        db2.insert(dt, TABLE)


def hadoop_fs_du_1():
    """Collect 'HDFS directory size' metrics and store them.

    Reads every (item_id, item_cmd) row of that item type from MySQL, runs
    each command through the shared shell helper, and inserts one record
    per item into the `item_records` table.
    """
    def hadoop_fs_du(item_id: int, cmd: str) -> dict:
        """Run *cmd* and build a monitoring record.

        Parses the second whitespace-separated field of the command output
        as the size in bytes (for `hadoop fs -du`, the second column is the
        disk space consumed including replicas — presumably; confirm
        against item_cmd).
        """
        result = sh.evaluate(cmd)
        now = sh.now
        print(result)
        value = split(r'\s+', result)[1]
        return {
            'item_id': item_id,
            'monitoring_time': now,
            'monitoring_value': int(value)}

    db1 = get_mysql_data_management()
    db2 = get_mysql_data_management()
    for i, c in db1.fetchone('SELECT item_id,item_cmd FROM item_info WHERE item_type="HDFS文件大小"'):
        # Compute the record ONCE and reuse it: the original code called
        # hadoop_fs_du(i, c) a second time for the insert, re-running the
        # (expensive) HDFS shell command and producing a fresh timestamp,
        # so the printed and stored records could disagree.
        dt = hadoop_fs_du(i, c)
        print(dt)
        db2.insert(dt, TABLE)


if __name__ == '__main__':
    # Run both collectors in sequence: size-without-replicas, then
    # directory size.
    for collector in (hadoop_fs_du_0, hadoop_fs_du_1):
        collector()
