package io.xxx.omni.oss.core

import com.aliyun.oss.OSS
import com.hy.corecode.idgen.WFGIdGenerator
import io.github.bucket4j.Bandwidth
import io.github.bucket4j.BlockingBucket
import io.github.bucket4j.Bucket
import io.github.bucket4j.Refill
import io.github.bucket4j.local.SynchronizationStrategy
import io.xxx.omni.oss.common.format
import io.xxx.omni.oss.common.toJsonString
import io.xxx.omni.oss.config.OssProperties
import io.xxx.omni.oss.config.ProxyProperties
import io.xxx.omni.oss.domain.*
import io.xxx.omni.oss.repository.CheckpointRepository
import io.xxx.omni.oss.repository.mapper.DocumentMapper
import org.apache.pulsar.client.api.*
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.ApplicationContext
import org.springframework.data.redis.core.StringRedisTemplate
import org.springframework.web.client.RestTemplate
import java.time.Duration
import java.time.LocalDateTime
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ThreadPoolExecutor

@Suppress("SpringJavaInjectionPointsAutowiringInspection")
abstract class Synchronizer<R> : Runnable {

    protected val log: Logger = LoggerFactory.getLogger(javaClass)

    lateinit var platform: Platform

    protected lateinit var store: Store

    private lateinit var job: Job

    @Autowired
    private lateinit var idGenerator: WFGIdGenerator

    @Autowired
    protected lateinit var executor: ThreadPoolExecutor

    @Autowired
    protected lateinit var applicationContext: ApplicationContext

    @Autowired
    private lateinit var checkpointRepository: CheckpointRepository

    @Autowired
    private lateinit var documentMapper: DocumentMapper

    @Autowired
    protected lateinit var restTemplate: RestTemplate

    @Autowired
    protected lateinit var redisTemplate: StringRedisTemplate

    @Autowired
    private lateinit var oss: OSS

    @Autowired
    private lateinit var ossProperties: OssProperties

    @Autowired
    protected lateinit var proxyProperties: ProxyProperties

    @Autowired
    private lateinit var client: PulsarClient

    // Cache of Pulsar producers keyed by "{platformId}-{documentType.desc}".
    private val producers = mutableMapOf<String, Producer<String>>()

    /**
     * Extension parameters used to invoke the remote API multiple times
     * with different arguments. Empty by default (a single call with a
     * null parameter is made).
     */
    protected open fun getParameters(): List<Any> = emptyList()

    /**
     * When true, skip execution entirely if [getParameters] returns an empty list.
     */
    protected open val checkParameters = false

    /**
     * Whether the extension parameters may be processed in parallel.
     */
    protected open val parallel = true

    /**
     * Maximum span between start time and end time allowed by the platform.
     * [Duration.ZERO] means no limit; defaults to one day.
     */
    protected open val interval: Duration = Duration.ofDays(1)

    /**
     * Data availability delay on the platform side.
     */
    protected open val delay: Duration = Duration.ZERO

    /**
     * Rate limit: API calls per minute; 0 disables throttling.
     */
    protected open val permitsPerMinutes: UInt = 1U

    // Built lazily so that subclasses with permitsPerMinutes == 0 never
    // construct a bucket (Refill rejects a zero token count); acquire()
    // guards the zero case before touching this property.
    private val rateLimiter: BlockingBucket by lazy {
        val tokensPerMinutes = permitsPerMinutes.toLong()
        val refill = Refill.intervally(tokensPerMinutes, Duration.ofMinutes(1))
        // Cap the burst capacity at half the per-minute budget (minimum 1).
        val capacity = if (tokensPerMinutes < 2) 1 else tokensPerMinutes / 2
        Bucket.builder()
            .withSynchronizationStrategy(SynchronizationStrategy.SYNCHRONIZED)
            .addLimit(Bandwidth.classic(capacity, refill))
            .build() as BlockingBucket
    }

    /**
     * Acquire permission to continue (rate limiting); blocks until a token
     * is available. No-op when throttling is disabled.
     */
    protected open fun acquire() {
        if (permitsPerMinutes > 0U) {
            rateLimiter.consume(1)
        }
    }

    /**
     * Returns a start time that satisfies the platform's look-back limit,
     * i.e. never earlier than `now - startTimeLimit` days.
     */
    protected open fun getStartTime(store: Store, checkpoint: Checkpoint): LocalDateTime {
        if (startTimeLimit == 0U) return checkpoint.time
        val minStartTime = LocalDateTime.now().minusDays(startTimeLimit.toLong())
        return maxOf(checkpoint.time, minStartTime)
    }

    /**
     * Some platforms only allow querying data within the last N days;
     * 0 (the default) means no restriction.
     */
    protected open val startTimeLimit = 0U

    /**
     * The topic is derived from the type as
     * persistent://public/default/document-{platform.id}-{type},
     * e.g. persistent://public/default/document-taobao-trade
     */
    abstract val documentType: DocumentType

    /**
     * Task entry point.
     */
    override fun run() {
        fetchAndProcess()
    }

    protected open fun fetchAndProcess() {
        fun updateCheckpoint(
            checkpoint: Checkpoint,
            startTime: LocalDateTime,
            endTime: LocalDateTime,
        ) {
            checkpointRepository.save(checkpoint.copy(time = endTime, modified = LocalDateTime.now()))
            log.info("${getLogPrefix(startTime, endTime)}数据同步完成\n")
        }

        val checkpoint = checkpointRepository.get(store, job)
        val startTime = getStartTime(store, checkpoint)
        val endTime = LocalDateTime.now().minusSeconds(delay.toSeconds())
        val totalSeconds = Duration.between(startTime, endTime).seconds
        if (totalSeconds < 1)
            return

        if (interval == Duration.ZERO) {
            fetchAndProcess(startTime, endTime)
            updateCheckpoint(checkpoint, startTime, endTime)
        } else {
            val ds = interval.seconds
            // Number of sub-ranges, rounding up for a partial trailing range.
            val ranges = totalSeconds / ds + if (totalSeconds % ds == 0L) 0 else 1
            for (i in 1..ranges) {
                val st = startTime.plusSeconds(ds * (i - 1))
                val et = if (i == ranges) endTime else startTime.plusSeconds(ds * i)
                fetchAndProcess(st, et)
                // FIX: advance the checkpoint only to the end of the range that
                // actually completed. The original saved the overall endTime on
                // the first iteration, so a failure in a later range would
                // silently skip the remaining data on the next run.
                updateCheckpoint(checkpoint, st, et)
            }
        }
    }


    private fun fetchAndProcess(startTime: LocalDateTime, endTime: LocalDateTime) {
        val parameters = getParameters()
        if (checkParameters && parameters.isEmpty())
            return

        // FIX: the original streamed listOf(null) whenever the parallel branch
        // was not taken, discarding non-empty parameters in sequential mode
        // (and in the single-parameter case).
        val stream = when {
            parameters.isEmpty() -> listOf(null).stream()
            parallel && parameters.size >= 2 -> parameters.parallelStream()
            else -> parameters.stream()
        }
        val producer = getProducer(platform, documentType)
        log.info("${getLogPrefix(startTime, endTime)}数据同步开始")
        stream.forEach {
            val context = Context(startTime, endTime, it, producer)
            fetchAndProcess(context)
        }
    }

    private fun getProducer(platform: Platform, documentType: DocumentType): Producer<String> {
        val type = "${platform.id}-${documentType.desc}"
        // Reuse one producer per platform/document-type pair.
        return producers.getOrPut(type) {
            client.newProducer(Schema.STRING)
                .topic("persistent://public/default/document-${type}")
                .blockIfQueueFull(true)
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
                .compressionType(CompressionType.LZ4)
                .create()
        }
    }

    protected fun getLogPrefix(startTime: LocalDateTime, endTime: LocalDateTime): String {
        return "${platform.name}[${store.name}:${store.id}][${startTime.format()} - ${endTime.format()}]"
    }

    /**
     * Implemented by subclasses: fetch the data for the given context and process it.
     */
    protected abstract fun fetchAndProcess(context: Context)

    /**
     * Converts a raw response into documents.
     * @see Document
     */
    protected abstract fun buildDocuments(context: Context, response: R): List<Document>

    protected open fun process(context: Context, response: R) {
        val now = LocalDateTime.now()
        val documents = buildDocuments(context, response)
        for (document in documents) {
            document.id = idGenerator.next()
            document.storeId = store.id
            document.fetchedCreated = now
            document.fetchedModified = now
        }
        sendToPulsar(context, documents)
    }

    private fun sendToPulsar(context: Context, documents: List<Document>) {
        val producer = context.producer
        val futures = mutableListOf<CompletableFuture<*>>()
        for (document in documents) {
            val future = producer.newMessage()
                .value(buildMessage(document))
                .sendAsync()
                .whenComplete { _, t ->
                    if (t != null) {
                        // FIX: on failure the completed value is null, so the
                        // original MessageId.fromByteArray(m.toByteArray()) threw
                        // an NPE inside the callback and the fallback insert
                        // never ran. Log the cause and persist the document.
                        log.error("${document.sn}消息发送失败", t)
                        documentMapper.insert(document)
                    }
                }
            futures.add(future)
        }
        // Wait for every send to finish. Failures were already handled (logged
        // and persisted) in whenComplete, so don't let join() rethrow on the
        // first failed future and skip waiting for the rest of the batch.
        CompletableFuture.allOf(*futures.toTypedArray())
            .exceptionally { null }
            .join()
    }

    private fun buildMessage(document: Document): String {
        return mapOf(
            "platformId" to platform.id,
            "storeId" to store.id,
            "containsData" to containsData,
            "document" to document.toJsonString(),
        ).toJsonString()
    }

    /**
     * Logs an error and throws a RuntimeException describing the failed call.
     * @param request the request object (omitted from the message when null)
     * @param response the error response object (omitted when null)
     */
    protected fun throwException(context: Context, request: Any?, response: Any?) {
        val (startTime, endTime) = context
        var message = getLogPrefix(startTime, endTime)
        if (request != null)
            message += "请求数据: ${request.toJsonString()}"
        if (response != null)
            message += ", 响应数据: ${response.toJsonString()}"
        log.error(message)
        throw RuntimeException(message)
    }

    /**
     * Whether historical-data messages include the payload `data`. Defaults to
     * false, because newness can usually be decided by comparing the document's
     * last-modified timestamp alone.
     */
    protected open val containsData = false

    /**
     * Whether this document is newer than [old].
     */
    protected open fun Document.isNew(old: Document) = this > old

    /**
     * Per-invocation context handed to subclasses: the time range, an optional
     * extension parameter, and the Pulsar producer to publish through.
     * A data class supplies the componentN operators the original declared by hand.
     */
    data class Context(
        val startTime: LocalDateTime,
        val endTime: LocalDateTime,
        val parameter: Any?,
        val producer: Producer<String>,
    ) : Cloneable
}