package ods_odb

import (
	"database/sql"
	"encoding/json"
	"fmt"
	"github.com/Shopify/sarama"
	"go.uber.org/zap"
	xcanal "server/dbtools/xcanal/canal"
	"server/global"
	"server/utils"
	"strconv"
	"strings"
	"time"
)


// OdsOdb is the registration record for a source database that feeds the
// ODS layer. It is persisted in the ods_db_info table (see TableName) and
// drives both plain SQL access (GetConn) and real-time binlog capture
// (AddCanalTb).
type OdsOdb struct {
	// Id of the user who supplied this database (提供人id).
	SupplierId uint `json:"supplierId" form:"supplierId" gorm:"column:supplierId;comment:提供人id;"`

	// Basic connection settings: driver type ("mysql"/"oracle"), address,
	// credentials and schema name. All are required by GetDsn.
	DbType     string `json:"dbType" form:"dbType" gorm:"not null;column:dbType;comment:数据库类型;size:100;"`
	DbIp       string `json:"dbIp" form:"dbIp" gorm:"not null;column:dbIp;comment:数据库ip;size:100;"`
	DbPort     string `json:"dbPort" form:"dbPort" gorm:"not null;column:dbPort;comment:数据库端口;size:50;"`
	DbUser     string `json:"dbUser" form:"dbUser" gorm:"not null;column:dbUser;comment:数据库用户名;size:50;"`
	DbPassword string `json:"dbPassword" form:"dbPassword" gorm:"not null;column:dbPassword;comment:数据库密码;size:100;"`
	DbName     string `json:"dbName" form:"dbName" gorm:"not null;column:dbName;comment:数据库名;size:50;"`
	// DbParm is appended verbatim to the mysql DSN by GetDsn, so it is
	// expected to carry its own leading "?" — TODO confirm with callers.
	DbParm     string `json:"dbParm" form:"dbParm" gorm:"column:dbParm;comment:url参数;size:100;"`

	// Separate credentials used by the real-time (binlog) replication
	// user — see AddCanalTb.
	RtUser     string `json:"rtUser" form:"rtUser" gorm:"not null;column:rtUser;comment:实时同步用户名;size:50;"`
	RtPassword string `json:"rtPassword" form:"rtPassword" gorm:"not null;column:rtPassword;comment:实时同步密码;size:100;"`

	// Connection-pool tuning and host/version metadata, stored as strings.
	MaxIdle     string `json:"maxIdle" form:"maxIdle" gorm:"column:maxIdle;comment:最大空闲数;size:100;"`
	MaxOpen     string `json:"maxOpen" form:"maxOpen" gorm:"column:maxOpen;comment:最大连接数;size:100;"`
	MaxLifetime string `json:"maxLifetime" form:"maxLifetime" gorm:"column:maxLifetime;comment:最大生命周期;size:100;"`
	DbOs        string `json:"os" form:"os" gorm:"column:os;comment:操作系统;size:200;"`
	DbVersion   string `json:"dbVersion" form:"dbVersion" gorm:"column:dbVersion;comment:数据库版本;size:100;"`

	// Dsn is the cached connection string built by SetDsn; Ping records
	// the last connectivity check ("ok" after a successful GetConn).
	Dsn     string `json:"dsn" form:"dsn" gorm:"column:dsn;comment:连接字符串;size:200;"`
	Ping    string `json:"ping" form:"ping" gorm:"column:ping;comment:Ping结果;size:200;"`
	DbState int    `json:"dbState" form:"dbState" gorm:"column:dbState;comment:数据库状态;"`
	DbTopic string `json:"dbTopic" form:"dbTopic" gorm:"not null;column:dbTopic;comment:数据库主题;size:50;"`

	// Runtime-only fields, never persisted (gorm:"-").
	InOds     int64  `json:"inOds" gorm:"-"`
	Connected string `json:"Connected" gorm:"-"`

	// Free-form remarks (备注).
	Remarks string `json:"remarks" form:"remarks" gorm:"column:remarks;comment:备注;size:250;"`

	global.BaseModel
}


// TableName maps OdsOdb onto the ods_db_info table for GORM.
func (m *OdsOdb) TableName() string {
	const table = "ods_db_info"
	return table
}


// Save upserts this record through the shared system DB handle and
// returns any persistence error.
func (m *OdsOdb) Save() error {
	return global.SysDB.Save(&m).Error
}


// GetDsn builds a driver-specific connection string from the stored
// credentials. It returns "" when any required field is empty or when
// the database type is unsupported (only mysql and oracle are known).
func (m *OdsOdb) GetDsn() string {
	for _, field := range []string{m.DbUser, m.DbPassword, m.DbIp, m.DbPort, m.DbName} {
		if field == "" {
			return ""
		}
	}

	switch m.DbType {
	case "mysql":
		// DbParm is appended verbatim, so it must include its own "?" prefix.
		return fmt.Sprintf("%s:%s@tcp(%s:%s)/%s%s", m.DbUser, m.DbPassword, m.DbIp, m.DbPort, m.DbName, m.DbParm)
	case "oracle":
		return fmt.Sprintf("oracle://%s:%s@%s:%s/%s", m.DbUser, m.DbPassword, m.DbIp, m.DbPort, m.DbName)
	}
	return ""
}


// SetDsn computes the DSN and caches it on the receiver. It returns an
// error when the DSN cannot be built from the current fields.
func (m *OdsOdb) SetDsn() error {
	if dsn := m.GetDsn(); dsn != "" {
		m.Dsn = dsn
		return nil
	}
	return fmt.Errorf("get db dsn failed")
}


// GetConn returns a *sql.DB for this database, opening and verifying a
// new connection on first use and caching it in global.SqlDBList keyed
// by DSN. On success it also records Ping = "ok" on the receiver.
//
// NOTE(review): global.SqlDBList is read and written without a lock —
// confirm all callers run on a single goroutine, or add a mutex.
func (m *OdsOdb) GetConn() (*sql.DB, error) {
	if m.Dsn == "" {
		if err := m.SetDsn(); err != nil {
			return nil, err
		}
	}

	// Reuse an already-opened handle for this DSN.
	if conn, ok := global.SqlDBList[m.Dsn]; ok {
		return conn, nil
	}

	conn, err := sql.Open(m.DbType, m.Dsn)
	if err != nil {
		return nil, err
	}
	if err = conn.Ping(); err != nil {
		// BUG FIX: close the freshly opened handle on ping failure; the
		// original returned without closing and leaked the pool.
		conn.Close()
		return nil, err
	}
	m.Ping = "ok"

	global.SqlDBList[m.Dsn] = conn
	return conn, nil
}


// AddCanalTb registers db.tb for real-time binlog capture. If a canal
// already exists for this DSN the table is simply added to its include
// list; otherwise a new canal is configured from the receiver's
// real-time credentials, an initial dump is taken, and replication is
// started from the current master position in a background goroutine.
func (m *OdsOdb) AddCanalTb(db, tb string) error {
	dt := fmt.Sprintf("%s.%s", db, tb)
	dsn := m.Dsn

	// Reuse an existing canal for this DSN; just register the table.
	if cn, ok := global.CanalList[dsn]; ok {
		if !cn.FindIncludeTable(dt) {
			cn.AddIncludeTable(dt)
		}
		return nil
	}

	cfg := xcanal.NewDefaultConfig()
	cfg.Addr = fmt.Sprintf("%s:%s", m.DbIp, m.DbPort)
	cfg.User = m.RtUser
	cfg.Password = m.RtPassword
	cfg.Flavor = m.DbType
	cfg.UseDecimal = true
	cfg.Dump.ExecutionPath = global.XidwConfig.Canal.MysqlDump
	cfg.Dump.DiscardErr = false

	c, err := xcanal.NewCanal(cfg)
	if err != nil {
		return err
	}

	c.OdbId = m.ID
	// BUG FIX: the original called AddIncludeTable(dt) twice (before the
	// dump and again before caching); once is enough.
	c.AddIncludeTable(dt)

	if err = c.Dump(); err != nil {
		return err
	}

	c.SetEventHandler(&MyEventHandler{})
	pos, err := c.GetMasterPos()
	if err != nil {
		return err
	}

	go func() {
		// BUG FIX: register Close before RunFrom so the canal is released
		// on every exit path; the original only deferred Close after a
		// nil-error return and leaked the canal when RunFrom failed.
		defer c.Close()
		if err := c.RunFrom(pos); err != nil {
			return
		}
	}()
	global.CanalList[dsn] = c

	return nil
}


// RemoveCanalTb unregisters db.tb from the canal bound to this DSN.
// It is a no-op (returning nil) when no canal exists for the DSN or the
// table was never included.
func (m *OdsOdb) RemoveCanalTb(db, tb string) error {
	dt := fmt.Sprintf("%s.%s", db, tb)

	cn, ok := global.CanalList[m.Dsn]
	if !ok {
		return nil
	}
	if cn.FindIncludeTable(dt) {
		cn.RemoveIncludeTable(dt)
	}
	return nil
}

// MyEventHandler receives row-change events from canal. It embeds the
// no-op DummyEventHandler so only OnRow (and String) need real
// implementations.
type MyEventHandler struct {
	xcanal.DummyEventHandler
}

// OnRow forwards a binlog row event to the Kafka producer belonging to
// the event's source database (e.Header.OdbId). On first sight of a new
// odbId it lazily creates both the producer and the matching consumer
// and caches them in the global lists. It always returns nil so a
// single failed event never stops the canal; failures are only logged.
func (h *MyEventHandler) OnRow(e *xcanal.RowsEvent) error {
	listKey := strconv.Itoa(int(e.Header.OdbId))

	// Fast path: a producer for this database already exists.
	if kfk, ok := global.KafkaProducerList[listKey]; ok {
		if err := KfkSendMessage(kfk, e); err != nil {
			global.Logger.Error(fmt.Sprintf("KfkSendMessage err:%s", err))
		}
		return nil
	}

	newKfk, err := NewKafkaProducer(e.Header.OdbId)
	if err != nil {
		global.Logger.Error(fmt.Sprintf("创建kafka失败,odbId:%d", e.Header.OdbId))
		return nil
	}
	if newKfk == nil {
		return nil
	}

	global.KafkaProducerList[listKey] = newKfk

	if err = KfkSendMessage(newKfk, e); err != nil {
		global.Logger.Error(fmt.Sprintf("KfkSendMessage err:%s", err))
	}

	// Also stand up the consumer side that replays this topic into ODS.
	consumer, err := NewKafkaConsumer(e.Header.OdbId)
	if err != nil {
		global.Logger.Error(fmt.Sprintf("创建kafka消费失败,odbId:%v", e.Header.OdbId), zap.Error(err))
	}
	if consumer != nil {
		global.KafkaConsumerList[listKey] = consumer
	} else {
		global.Logger.Error(fmt.Sprintf("创建kafka消费失败,odbId:%v, consumer is nil", e.Header.OdbId))
	}

	return nil
}

// String identifies this handler in canal's logging output.
func (h *MyEventHandler) String() string {
	const name = "MyEventHandler"
	return name
}

// NewKafkaProducer creates an async producer for the broker configured
// in global.XidwConfig.Kafka.Dsn. The id must be a non-zero source
// database id (callers key the producer by it). The producer is created
// with Return.Successes/Errors enabled because KfkSendMessage reads
// both channels to block for an ack.
func NewKafkaProducer(id uint) (*sarama.AsyncProducer, error) {
	if id == 0 { // id is unsigned, so zero is the only invalid value
		return nil, fmt.Errorf("odbId 为0,创建kfk producer失败")
	}

	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll // wait for all in-sync replicas
	config.Producer.Partitioner = sarama.NewRandomPartitioner
	config.Producer.Return.Successes = true // KfkSendMessage reads Successes()
	config.Producer.Return.Errors = true    // ... and Errors()
	config.Producer.Timeout = 3 * time.Second

	// The config value is already a string; the original round-tripped it
	// through fmt.Sprintf("%s", …) for no effect.
	dsn := global.XidwConfig.Kafka.Dsn
	producer, err := sarama.NewAsyncProducer([]string{dsn}, config)
	if err != nil {
		return nil, fmt.Errorf("创建kfk producer失败 :%v\n", err)
	}

	return &producer, nil
}

// KfkEvent is the JSON payload published to the per-database Kafka
// topic ("odb_<odbId>") for every binlog row change, and decoded again
// by Bk2Ods on the consumer side.
type KfkEvent struct {
	Action string          `json:"action"` // binlog action: "insert", "update" or "delete"
	OdbId  uint            `json:"odbId"`  // source database id
	Table  string          `json:"table"`  // "schema.table" of the changed table
	Rows   [][]interface{} `json:"rows"`   // row images; Bk2Ods reads index 0 (insert/delete) or 1 (update)
	Cols   []string        `json:"cols"`   // column names, in row-value order
	Pk     string          `json:"pk"`     // primary-key column name (first index column)
	Pki    int             `json:"pki"`    // index of the pk column within a row
}

// KfkSendMessage serializes one row event as a KfkEvent and publishes
// it to topic "odb_<odbId>". Although the producer is asynchronous,
// this call blocks until the broker acks or rejects the message
// (Return.Successes/Errors are enabled in NewKafkaProducer).
func KfkSendMessage(producer *sarama.AsyncProducer, e *xcanal.RowsEvent) error {
	if producer == nil || e == nil {
		return fmt.Errorf("kafka 发送消息失败,p为%v 或e为 %v\n", producer, e)
	}

	topic := fmt.Sprintf("odb_%d", e.Header.OdbId)

	var kmsg KfkEvent
	kmsg.Action = e.Action
	kmsg.OdbId = e.Header.OdbId
	kmsg.Table = fmt.Sprintf("%s", e.Table)
	kmsg.Rows = e.Rows

	for _, c := range e.Table.Columns {
		kmsg.Cols = append(kmsg.Cols, c.Name)
	}

	// Record the primary key (first column of the first index) so the
	// consumer can build delete statements.
	if len(e.Table.Indexes) > 0 {
		kmsg.Pk = e.Table.Indexes[0].Columns[0]
		kmsg.Pki = e.Table.PKColumns[0]
	}

	kmsgJ, err := json.Marshal(kmsg)
	if err != nil {
		return fmt.Errorf("kfkMsg转json失败, err:%s\n", err)
	}

	msg := &sarama.ProducerMessage{
		// BUG FIX: use topic directly. The original passed it through
		// fmt.Sprintf(topic), treating a runtime value as a format string
		// (go vet `printf` violation; corrupts topics containing '%').
		Topic: topic,
		Key:   sarama.StringEncoder(topic),
		Value: sarama.StringEncoder(kmsgJ),
	}

	(*producer).Input() <- msg
	select {
	case <-(*producer).Successes():
		// BUG FIX: dropped the original's pointless `suc.Offset++`, which
		// mutated the acked message to no effect.
		return nil
	case fail := <-(*producer).Errors():
		return fmt.Errorf("err: %s\n", fail.Err.Error())
	}
}

// NewKafkaConsumer builds a consumer for topic "odb_<id>" and starts one
// goroutine per partition that replays binlog events into the ODS store
// via Bk2Ods, checkpointing the last handled offset in etcd under
// "bin/kfk/<topic>/<partition>".
//
// NOTE(review): the per-partition goroutines have no stop signal and run
// for the life of the process — confirm that is intended.
func NewKafkaConsumer(id uint) (*sarama.Consumer, error) {
	if id == 0 { // id is unsigned, so zero is the only invalid value
		return nil, fmt.Errorf("odbId 为0,创建kfk producer失败")
	}

	config := sarama.NewConfig()
	config.Consumer.Return.Errors = true

	dsn := global.XidwConfig.Kafka.Dsn
	consumer, err := sarama.NewConsumer([]string{dsn}, config)
	if err != nil {
		return nil, fmt.Errorf("create consumer error %s\n", err.Error())
	}

	topic := fmt.Sprintf("odb_%d", id)
	partitionList, err := consumer.Partitions(topic)
	if err != nil {
		return nil, fmt.Errorf("创建%s kafka消费者失败,err %s\n", topic, err.Error())
	}

	// BUG FIX: iterate the partition IDs themselves. The original ranged
	// over the slice and used the *index* as the partition number, which
	// only works by accident when IDs happen to be 0..n-1.
	for _, partition := range partitionList {
		offset := sarama.OffsetOldest

		key := fmt.Sprintf("bin/kfk/%s/%d", topic, partition)
		kps, err := utils.EtcdGet(global.Etcd, key)
		if err != nil {
			global.Logger.Info(fmt.Sprintf("etcd获取%s kafka分区信息失败，默认为0: %s\n", topic, err.Error()))
		}

		// BUG FIX: guard against a nil response and actually parse the
		// stored checkpoint. The original read kv.Value, printed it, and
		// re-checked a stale err — offset was never updated, so the
		// resume logic below was dead and every restart replayed from the
		// oldest offset.
		if kps == nil || len(kps.Kvs) == 0 {
			global.Logger.Warn(fmt.Sprintf("etcd key(%s) 没有值\n", key))
		} else {
			for _, kv := range kps.Kvs {
				saved, perr := strconv.ParseInt(string(kv.Value), 10, 64)
				if perr != nil {
					global.Logger.Error(fmt.Sprintf("etcd获取%s kafka分区offset失败，默认为0: %s\n", topic, perr.Error()))
					continue
				}
				offset = saved
			}
		}

		pc, err := consumer.ConsumePartition(topic, partition, sarama.OffsetOldest)
		if err != nil {
			return nil, fmt.Errorf("默认连接%s kafka消费者失败,p:%d,s:%d,err:%s\n", topic, partition, offset, err.Error())
		}

		// When messages beyond the checkpoint exist, reconnect at the
		// remembered offset instead of replaying from the beginning.
		if pc.HighWaterMarkOffset() > offset {
			pc.Close()
			pc, err = consumer.ConsumePartition(topic, partition, offset)
			if err != nil {
				return nil, fmt.Errorf("记忆连接%s kafka消费者失败,p:%d,s:%d,err:%s\n", topic, partition, offset, err.Error())
			}
		}

		go func(partitionConsumer sarama.PartitionConsumer, key string, offset int64) {
			for {
				select {
				case msg := <-partitionConsumer.Messages():
					// Skip anything at or below the checkpoint (dedupe).
					if msg.Offset > offset {
						if err := Bk2Ods(msg.Value); err != nil {
							global.Logger.Error(fmt.Sprintf("binlog数据处理失败 %s\n", err.Error()))
						}
						if err := utils.EtcdPut(global.Etcd, key, fmt.Sprintf("%d", msg.Offset)); err != nil {
							global.Logger.Error(fmt.Sprintf("保存%s kafka消费offset失败 %s\n", topic, err.Error()))
						}
					}
				case err := <-partitionConsumer.Errors():
					global.Logger.Error(fmt.Sprintf("binlog数据消费失败,err :%s\n", err.Error()))
				}
			}
		}(pc, key, offset)
	}

	return &consumer, err
}

// Bk2Ods applies one Kafka binlog event to the ODS database. The target
// table is named "<odbId>_<schema>_<table>". Inserts and updates are
// written as parameterized (upsert) inserts; deletes remove the row by
// its primary key.
func Bk2Ods(binlog []byte) error {
	var e KfkEvent

	if global.OdsDB == nil {
		return fmt.Errorf("odsDB 为nil")
	}

	err := json.Unmarshal(binlog, &e)
	if err != nil {
		return fmt.Errorf("解析binlog JSON 数据失败,err:%v", err)
	}

	// ODS table name: "<odbId>_<schema>_<table>".
	schemaTb := fmt.Sprintf("%d_%s", e.OdbId, strings.Replace(e.Table, ".", "_", 1))
	colsStr := strings.Join(e.Cols, ",")

	// "?,?,...,?" placeholder list, one per column.
	qStr := strings.TrimSuffix(strings.Repeat("?,", len(e.Cols)), ",")

	switch e.Action {
	case "insert":
		if len(e.Rows) == 0 {
			return fmt.Errorf("binlog写入%s数据不存在,values:%v,err:%v\n", schemaTb, e.Rows, err)
		}
		sqle := fmt.Sprintf("insert into %s(%s) value(%s)", schemaTb, colsStr, qStr)
		stmt, perr := global.OdsDB.Prepare(sqle)
		if perr != nil {
			return fmt.Errorf("sql prepare失败:%s,err:%v\n", sqle, perr)
		}
		// BUG FIX: close the prepared statement; the original leaked one
		// server-side statement per event.
		defer stmt.Close()
		if _, err = stmt.Exec(e.Rows[0]...); err != nil {
			return fmt.Errorf("binlog写入%s数据失败,err:%v\n", schemaTb, err)
		}

	case "update":
		if len(e.Rows) < 2 {
			return fmt.Errorf("binlog更新%s数据不存在,values:%v,err:%v\n", schemaTb, e.Rows, err)
		}
		// Build the "col=values(`col`)" assignment list for the upsert.
		sets := make([]string, 0, len(e.Cols))
		for _, c := range e.Cols {
			sets = append(sets, c+fmt.Sprintf("=values(`%s`)", c))
		}
		sqle := fmt.Sprintf("insert into %s(%s) values(%s) on duplicate key update %s", schemaTb, colsStr, qStr, strings.Join(sets, ","))
		stmt, perr := global.OdsDB.Prepare(sqle)
		if perr != nil {
			return fmt.Errorf("sql prepare失败:%s,err:%v\n", sqle, perr)
		}
		defer stmt.Close()
		// Rows[1] is the post-update row image.
		if _, err = stmt.Exec(e.Rows[1]...); err != nil {
			return fmt.Errorf("binlog写入%s数据失败,err:%v\n", schemaTb, err)
		}

	case "delete":
		if len(e.Rows) == 0 {
			return fmt.Errorf("binlog删除%s数据不存在,values:%v,err:%v\n", schemaTb, e.Rows, err)
		}
		// BUG FIX: bind the pk value as a parameter instead of
		// interpolating it into the SQL text — the original broke on
		// string keys (no quoting) and was injectable.
		sqle := fmt.Sprintf("delete from %s where %s = ?", schemaTb, e.Pk)
		if _, err = global.OdsDB.Exec(sqle, e.Rows[0][e.Pki]); err != nil {
			return fmt.Errorf("binlog删除%s数据失败,col:%s,val:%v,err:%v\n", schemaTb, e.Pk, e.Rows[0][e.Pki], err)
		}

	default:
		return fmt.Errorf("未知的binlog类型:%s\n", e.Action)
	}
	return nil
}
