#!/usr/bin/python
# coding=utf-8
# Sync node-red flow/script data into MySQL

import pymysql
import requests
import json

from dbutils.pooled_db import PooledDB

# proactive_service_conf 数据源
def getConfConnection():
    """Open a pooled connection and cursor against the proactive_service_conf DB (test env).

    Returns:
        (connection, cursor) tuple; the caller is responsible for closing both.
    """
    # Dev environment (kept for reference):
    # pool = PooledDB(pymysql, 1, host='172.20.135.96', user='pushdb', passwd='SkYWOrTh$TcOs',
    #                 db='proactive_service_conf', port=3306)
    # Test environment; the second argument (1) is the pool's minimum connection count.
    pool = PooledDB(
        pymysql,
        1,
        host='172.20.150.109',
        user='test_dmp',
        passwd='DghHC3lFM1KzT3ZJ',
        db='proactive_service_conf',
        port=3307,
    )
    connection = pool.connection()
    cursor = connection.cursor()
    return connection, cursor

# proactive_service_data 数据源
def getDataConnection():
    """Open a pooled connection and cursor against the proactive_service_data DB (test env).

    Returns:
        (connection, cursor) tuple; the caller is responsible for closing both.
    """
    # Dev environment (kept for reference):
    # pool = PooledDB(pymysql, 1, host='172.20.151.80', user='pushdb', passwd='SkYWOrTh$TcOs',
    #                 db='proactive_service_data', port=3306)
    # Test environment; the second argument (1) is the pool's minimum connection count.
    pool = PooledDB(
        pymysql,
        1,
        host='172.20.154.103',
        user='test_dmp',
        passwd='DghHC3lFM1KzT3ZJ',
        db='proactive_service_data',
        port=3407,
    )
    connection = pool.connection()
    cursor = connection.cursor()
    return connection, cursor

#同步node-red tab数据
# Sync node-red tab data
def syncNodeRedTab():
    """Fetch the node-red flow list and insert every 'tab' node into inter_actify_service.

    Each tab is stored with service_type=2; the node-red tab id is used both as
    service_key and node_red_id. Errors during the DB phase are printed, not raised.
    """
    url = "http://172.20.151.17:1880/flows"

    response = requests.get(url)
    print(response.text)
    data = json.loads(response.text)
    tabs = [d for d in data if d['type'] == 'tab']

    # Build the SQL statement and row tuples.
    sql = 'INSERT INTO inter_actify_service (service_key, service_name, service_type, node_red_id) VALUES (%s, %s, %s, %s)'
    values = [(d['id'], d['label'], 2, d['id']) for d in tabs]

    # Execute the inserts. Pre-bind conn/cur so the finally block cannot hit a
    # NameError if getConfConnection() itself fails (bug in the original: the
    # cleanup would mask the real connection error).
    conn = cur = None
    try:
        conn, cur = getConfConnection()
        # NOTE: the original created a second cursor here and leaked the one
        # returned by getConfConnection(); we reuse the returned cursor instead.
        cur.executemany(sql, values)
        conn.commit()
    except Exception as e:
        print(e)
    finally:
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()

#同步node-red 所有flow数据
# Sync all node-red flow data
def syncNodeRedFlow():
    """Fetch every node-red node and rebuild the node_ai_nodered_flows table.

    The table is fully deleted and repopulated in one transaction. For
    'myhttpin' nodes the service URL is synthesized as '/start_<node_id>';
    other nodes keep their own 'url' field (may be None). Errors during the
    DB phase are printed, not raised.
    """
    url = "http://172.20.151.17:1880/flows"

    response = requests.get(url)
    print(response.text)
    data = json.loads(response.text)

    values = []
    for d in data:
        node_id = d.get('id')
        node_type = d.get('type')  # fetched once (original read 'type' twice)
        # myhttpin nodes are reached through a synthesized start endpoint.
        if node_type == 'myhttpin':
            service_url = f"/start_{node_id}"
        else:
            service_url = d.get('url')
        values.append((node_id, d.get('name'), d.get('z'), service_url, node_type))

    delSql = 'delete from node_ai_nodered_flows'
    sql = 'INSERT INTO node_ai_nodered_flows (node_id,node_name, node_red_id, service_url, type) VALUES (%s, %s, %s, %s, %s)'
    print(sql)

    # Skip the delete+insert entirely when nothing was fetched, so an empty
    # (or failed) response never wipes the table.
    if values:
        # Pre-bind conn/cur so the finally block cannot raise NameError when
        # getConfConnection() itself fails (bug in the original).
        conn = cur = None
        try:
            conn, cur = getConfConnection()
            # Reuse the cursor from getConfConnection(); the original opened a
            # second cursor and leaked the first.
            cur.execute(delSql)
            cur.executemany(sql, values)
            conn.commit()
        except Exception as e:
            print(e)
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

if __name__ == '__main__':
    # Script entry point: mirror all node-red flow nodes into MySQL.
    syncNodeRedFlow()