# coding=utf-8

import argparse
import copy
import json
import os.path
import re
import requests
import sys
import time
from time import strftime
import pymysql.cursors

import csv
from requests import HTTPError
from f5.bigip import ManagementRoot
from f5_openstack_agent.client.encrypt import decrypt_data
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# The inventory BIG-IPs are reached over https with self-signed certificates,
# so silence urllib3's InsecureRequestWarning for the whole run.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# English translation of the migration plan below:
# 1. From lbaas_loadbalanceragentbindings, find every row whose device_id is NULL.
# 2. Query all loadbalancers from every active BIG-IP in the inventory DB and
#    build a dict {loadbalancer_id: device_id}.
# 3. For each loadbalancer_id from step 1, if it appears in the dict from
#    step 2, write that device_id back to the table.
# 4. Save all records (updated and not updatable) to /tmp/device_migration.csv.
"""
1. 从lbaas_loadbalanceragentbindings表找出所有device_id为空的行
2. 从inventory db中所有active bigip查询所有的loadbalancer，存入dict {loadbalancer_id: device_id}
3. 遍历从1中得到的loadbalancer_id，判断其是否在2中的dict，若存在则把device_id更新到表中。
4. 保存所有更新成功与不能更新的记录到/tmp/device_migration.csv
"""

# Per-run CSV report path, e.g. /tmp/device_migration_20240101-120000.csv;
# the timestamp makes each run write a fresh file.
OUTPUT_PATH = os.path.join('/tmp', 'device_migration_' + strftime("%Y%m%d-%H%M%S") + '.csv')

# Canonical 8-4-4-4-12 hex UUID.
uuid_regex = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"
# Virtual-address names created by the agent look like "<prefix>_<uuid>",
# where <uuid> is the Neutron loadbalancer id.
vip_regex = "^[a-zA-Z0-9]+_" + uuid_regex + "$"


def retry_icontrol(icontrol, **kwargs):
    """Invoke an iControl REST callable, retrying on transient HTTP 401.

    BIG-IP occasionally rejects an otherwise valid token with 401; a short
    pause and retry normally succeeds.

    :param icontrol: the SDK callable to invoke (e.g. a get_collection method)
    :param kwargs: keyword arguments forwarded verbatim to ``icontrol``
    :returns: whatever ``icontrol`` returns
    :raises HTTPError: immediately, for any status other than 401
    :raises Exception: after 3 consecutive 401 responses
    """
    max_retry = 3
    interval = 1  # seconds between attempts

    retry = 0
    while retry < max_retry:
        try:
            return icontrol(**kwargs)
        except HTTPError as ex:
            # Retry only on 401; anything else is a real error.
            if ex.response.status_code == 401:
                # sys.stderr.write instead of the Py2-only "print >>" syntax;
                # behaves identically on Python 2 and 3.
                sys.stderr.write("BIG-IP returns HTTP 401\n")
                retry += 1
                time.sleep(interval)
            else:
                # Bare raise preserves the original traceback ("raise ex"
                # re-raised from here instead, losing context on Python 2).
                raise

    raise Exception("Too many retries")


class BigipResource:
    """Reads BIG-IP device records from the inventory tables and collects
    the loadbalancers currently configured on each active device."""

    def __init__(self, conn):
        # pymysql connection shared with the rest of the script.
        self.conn = conn

    def get_bigips_from_db(self):
        """Fetch every device row from lbaas_device_members."""
        with self.conn:
            with self.conn.cursor() as cursor:
                sql = """
                SELECT device_id, mgmt_ipv4, device_info, operating_status FROM lbaas_device_members;
                """
                cursor.execute(sql)
                rows = cursor.fetchall()
        return rows

    def get_bigips(self):
        """Return connection credentials for every device whose failover
        state is 'active'; username/password are decrypted with the device
        serial number as key."""
        active = []
        for row in self.get_bigips_from_db():
            info = json.loads(row['device_info'])
            status = json.loads(row['operating_status'])
            if status['failover_state'] != 'active':
                continue
            serial = info['serial_number']
            active.append({
                'device_id': row['device_id'],
                'hostname': row['mgmt_ipv4'],
                'username': decrypt_data(serial, info['username']),
                'password': decrypt_data(serial, info['password']),
                'port': info['port'],
            })
        return active

    def load_lb_resources(self):
        """Build {loadbalancer_id: device_id} by listing the virtual
        addresses on every active BIG-IP and extracting the loadbalancer
        UUID from names matching "<prefix>_<uuid>"."""
        matcher = re.compile(vip_regex)
        mapping = dict()
        for creds in self.get_bigips():
            print ("get lb from bigip: %s" % creds)
            mgmt = ManagementRoot(creds['hostname'],
                                  creds['username'],
                                  creds['password'],
                                  port=creds['port'])
            vips = retry_icontrol(mgmt.tm.ltm.virtual_address_s.get_collection)
            for vip in vips:
                if matcher.match(vip.name):
                    # The UUID after the first '_' is the loadbalancer id.
                    mapping[vip.name.split('_')[1]] = creds['device_id']
        return mapping


def connect_neutron_db(db_conn):
    """Open a pymysql connection from a SQLAlchemy-style connection string.

    Accepted forms: ``mysql+<driver>://user:password@host/database`` or
    ``mysql://user:password@host/database``.

    :param db_conn: the Neutron DB connection string
    :returns: a pymysql connection using DictCursor rows
    :exits: with status 1 if the scheme is not "mysql"
    """
    # Scheme is everything before '+' (driver suffix); if there is no '+',
    # fall back to everything before '://'. The original "[0:find('+')]"
    # mis-sliced to "[0:-1]" for plain "mysql://..." strings.
    plus = db_conn.find("+")
    db_type = db_conn[:plus] if plus != -1 else db_conn[:db_conn.find("://")]
    if db_type != "mysql":
        # sys.stderr.write instead of the Py2-only "print >>" syntax.
        sys.stderr.write("Only support MySQL DB\n")
        sys.exit(1)

    u_start = db_conn.find("//") + 2
    u_end = db_conn.find(":", u_start)
    p_start = u_end + 1
    # rfind: the host part never contains '@', so this still parses
    # correctly when the password itself contains '@'.
    p_end = db_conn.rfind("@")
    h_start = p_end + 1
    h_end = db_conn.find("/", h_start)
    d_start = h_end + 1

    return pymysql.connect(host=db_conn[h_start:h_end],
                           user=db_conn[u_start:u_end],
                           password=db_conn[p_start:p_end],
                           database=db_conn[d_start:],
                           cursorclass=pymysql.cursors.DictCursor)


def load_records_from_db(connection):
    """Return bindings that still need a device_id.

    Selects every lbaas_loadbalanceragentbindings row whose device_id is
    NULL, excluding loadbalancers owned by the 'eslb' provider.
    """
    query = """
            SELECT loadbalancer_id, agent_id, device_id
            FROM lbaas_loadbalanceragentbindings
            WHERE device_id is NULL and loadbalancer_id not in 
            (select resource_id from providerresourceassociations where provider_name = 'eslb');
            """
    with connection:
        with connection.cursor() as cur:
            cur.execute(query)
            rows = cur.fetchall()
    return rows


def migrate_device_id(conn, db_records, device_map):
    """Write device ids back to the bindings table and build the CSV report.

    For every record, look up its loadbalancer in *device_map*; update the
    DB row (device_id stays NULL when not found) and collect a report row
    whose device_id column carries either the id or an explanatory message.

    :returns: list of report dicts for output_result()
    """
    report = []
    for record in db_records:
        lb_id = record['loadbalancer_id']
        agent = record['agent_id']
        if lb_id in device_map:
            device_id = device_map[lb_id]
            note = device_id
        else:
            device_id = None
            note = "Could not find this loadbalancer in inventory BIG-IP"
        # Deep-copy so the caller's record is left untouched.
        row = copy.deepcopy(record)
        row['device_id'] = note
        report.append(row)
        update_db(conn, lb_id, agent, device_id)

    return report


def update_db(connection, loadbalancer_id, agent_id, device_id):
    """Persist *device_id* for one (loadbalancer, agent) binding row,
    using a parameterized UPDATE."""
    statement = """
            UPDATE lbaas_loadbalanceragentbindings
            SET device_id=%s
            WHERE loadbalancer_id=%s AND agent_id=%s
            """
    params = (device_id, loadbalancer_id, agent_id)
    with connection:
        with connection.cursor() as cur:
            cur.execute(statement, params)


def output_result(result):
    """Append the migration report to OUTPUT_PATH as CSV (header first).

    OUTPUT_PATH is timestamped per run, so append mode effectively writes
    a fresh file each execution.
    """
    columns = ['loadbalancer_id', 'agent_id', 'device_id']
    with open(OUTPUT_PATH, mode='a') as out:
        report = csv.DictWriter(out, fieldnames=columns)
        report.writeheader()
        for row in result:
            report.writerow(row)


def main():
    """Entry point: copy device ids from the inventory BIG-IPs into the
    lbaas_loadbalanceragentbindings table and write a CSV report."""
    arg_parser = argparse.ArgumentParser(
        description="BIG-IP Device_id Migration Tool")
    arg_parser.add_argument("--db-connection", type=str, required=True,
                            help="Neutron DB connection string")
    options = arg_parser.parse_args()

    # Step 1: bindings whose device_id is still NULL.
    connection = connect_neutron_db(options.db_connection)
    null_records = load_records_from_db(connection)
    print("################# get %s db records that device_id is NULL" % len(null_records))

    # Step 2: {loadbalancer_id: device_id} from every active inventory BIG-IP.
    lb_device_map = BigipResource(conn=connection).load_lb_resources()
    print("################# get %s loadbalancer from bigip" % len(lb_device_map))

    # Step 3: write the found device ids back to the bindings table.
    csv_rows = migrate_device_id(connection, null_records, lb_device_map)
    print("################# update lbaas_loadbalanceragentbindings done")

    # Step 4: record the outcome (updated and not-found rows alike).
    output_result(csv_rows)
    print("################# device id migration finished, result saved in %s" % OUTPUT_PATH)


# Script entry point: run the migration when executed directly.
if __name__ == '__main__':
    main()
