#!groovy

import com.freeleaps.devops.ArgoApplicationVersionUpdater
import com.freeleaps.devops.ChangedComponentsDetector
import com.freeleaps.devops.CodeLintExecutor
import com.freeleaps.devops.CommitMessageLinter
import com.freeleaps.devops.DependenciesResolver
import com.freeleaps.devops.ImageBuilder
import com.freeleaps.devops.SASTExecutor
import com.freeleaps.devops.SemanticReleasingExecutor
import com.freeleaps.devops.SourceFetcher

import com.freeleaps.devops.enums.CodeLinterTypes
import com.freeleaps.devops.enums.DependenciesManager
import com.freeleaps.devops.enums.ImageBuilderTypes
import com.freeleaps.devops.enums.SASTScannerTypes
import com.freeleaps.devops.enums.ServiceLanguage

import org.csanchez.jenkins.plugins.kubernetes.pod.yaml.Merge
import org.csanchez.jenkins.plugins.kubernetes.pod.yaml.YamlMergeStrategy


/**
 * Shared Kubernetes Pod YAML for every build pod spawned by this pipeline.
 * Pins pods onto nodes labeled "node-role.kubernetes.io/devops" via required
 * node affinity, and tolerates that node's NoSchedule taint.
 *
 * @return Pod spec YAML fragment merged into each podTemplate
 */
def devopsPodYaml() {
  return """
apiVersion: v1
kind: Pod
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
          - matchExpressions:
              - key: "node-role.kubernetes.io/devops"
                operator: In
                values: ["true"]
  tolerations:
    - key: "node-role.kubernetes.io/devops"
      operator: Equal
      value: "true"
      effect: "NoSchedule"
        """
}

/**
 * Decides whether a component's stages must run in this build.
 *
 * @param component component configuration object (needs `name`)
 * @return true when the pipeline is in "fully" execute mode, or the component
 *         name appears in the whitespace-separated env.changedComponents list
 */
def shouldExecuteComponent(component) {
  return env.executeMode == "fully" || env.changedComponents.split(/\s+/).toList().contains(component.name)
}

/**
 * Fetches sources and resolves the component's dependencies in the current
 * workspace. Used by the dependency-resolution, linting, and packaging stages,
 * which previously duplicated this sequence verbatim.
 *
 * @param component      component configuration (language, dependenciesManager,
 *                       root, buildCacheEnabled)
 * @param configurations global pipeline configuration passed to SourceFetcher
 */
def resolveComponentDependencies(component, configurations) {
  // Step 1: pull the latest sources into the workspace.
  def sourceFetcher = new SourceFetcher(this)
  sourceFetcher.fetch(configurations)

  // Step 2: resolve language and dependency manager from component config.
  def language = ServiceLanguage.parse(component.language)
  def depManager = DependenciesManager.parse(component.dependenciesManager)

  // Step 3: configure the resolver against the component's source root.
  def dependenciesResolver = new DependenciesResolver(this, language, env.workroot + "/" + component.root + "/")
  dependenciesResolver.useManager(depManager)

  // Step 4: caching speeds up repeat builds by reusing downloaded packages.
  if (component.buildCacheEnabled) {
    dependenciesResolver.enableCachingSupport()
  } else {
    dependenciesResolver.disableCachingSupport()
  }

  // Step 5: install all dependencies (e.g. pip install / npm install).
  dependenciesResolver.resolve(component)
}

/**
 * Generates all build stages for the given component: agent setup, dependency
 * resolution, optional lint / SAST / semantic-release stages, compilation &
 * packaging, image building & publishing, and Argo application version update.
 *
 * @param component      component configuration object
 * @param configurations global pipeline configuration object
 * @return a closure that, when invoked, runs every generated stage in order
 */
def generateComponentStages(component, configurations) {
  // Ordered list of stage closures for this component.
  def stages = []

  // --- Base stages (always present) -----------------------------------------
  stages.addAll([
    // Stage: choose the container image acting as build agent for this component.
    {stage("${component.name} :: Build Agent Setup") {
      script {
        if (shouldExecuteComponent(component)) {
          def buildAgentImage = component.buildAgentImage

          // Fall back to a language-specific default when no image is configured.
          if (buildAgentImage == null || buildAgentImage.isEmpty()) {
            log.warn("Pipeline", "Not set buildAgentImage for ${component.name}, using default build agent image")

            def language = ServiceLanguage.parse(component.language)
            switch (language) {
              case ServiceLanguage.PYTHON:
                buildAgentImage = "docker.io/python:3.10-slim-buster"
                break
              case ServiceLanguage.JS:
                buildAgentImage = "docker.io/node:lts-alpine"
                break
              default:
                error("Unknown service language")
            }
          }

          log.info("Pipeline", "Using ${buildAgentImage} as build agent image for ${component.name}")
          // Expose the chosen image to later stages via a per-component env var.
          env."${component.name}_buildAgentImage" = buildAgentImage
        }
      }
    }},
    // Stage: resolve dependencies inside a pod running the build agent image,
    // so all later stages start from a consistent, containerized environment.
    {stage("${component.name} :: Dependencies Resolving") {
      podTemplate(
        label: "dep-resolver-${component.name}",
        yaml: devopsPodYaml(),
        yamlMergeStrategy: merge(),
        containers: [
          containerTemplate(
            name: 'dep-resolver',
            image: env."${component.name}_buildAgentImage",
            ttyEnabled: true,
            command: 'sleep',
            args: 'infinity'  // keep the container alive for Jenkins to exec into
          )
        ]
      ) {
        node("dep-resolver-${component.name}") {
          container('dep-resolver') {
            script {
              if (shouldExecuteComponent(component)) {
                def buildAgentImage = env."${component.name}_buildAgentImage"
                log.info("Pipeline", "Using ${buildAgentImage} as build agent image for dependencies resolving")
                resolveComponentDependencies(component, configurations)
              }
            }
          }
        }
      }
    }},
  ])

  // --- Optional stages: code linting -----------------------------------------
  if (component.lintEnabled != null && component.lintEnabled) {
    stages.addAll([
      // Stage: validate the linter configuration and record its container image.
      {stage("${component.name} :: Code Linter Preparation") {
        script {
          if (shouldExecuteComponent(component)) {
            log.info("Pipeline", "Code linting has enabled, preparing linter...")

            if (component.linter == null || component.linter.isEmpty()) {
              log.error("Pipeline", "Not set linter for ${component.name}, using default linter settings as fallback")
            }

            def linter = CodeLinterTypes.parse(component.linter)

            // Guard chain: the original dereferenced `linter` even after a failed
            // parse, which would throw a NullPointerException.
            if (linter == null) {
              log.error("Pipeline", "Unknown linter for ${component.name}, skipping code linting")
            } else if (linter.language != ServiceLanguage.parse(component.language)) {
              log.error("Pipeline", "Linter ${linter.linter} is not supported for ${component.language}, skipping code linting")
            } else {
              log.info("Pipeline", "Using ${linter.linter} with image ${linter.containerImage} as linter for ${component.name}")
              // Expose the linter image to the linting stage below.
              env."${component.name}_linterContainerImage" = linter.containerImage
            }
          }
        }
      }},
      // Stage: run the linter inside a pod using the image resolved above.
      {stage("${component.name} :: Code Linting") {
        podTemplate(
          label: "code-linter-${component.name}",
          yaml: devopsPodYaml(),
          yamlMergeStrategy: merge(),
          containers: [
            containerTemplate(
              name: 'code-linter',
              image: env."${component.name}_linterContainerImage",
              ttyEnabled: true,
              command: 'sleep',
              args: 'infinity'
            )
          ]
        ) {
          node("code-linter-${component.name}") {
            container('code-linter') {
              script {
                if (shouldExecuteComponent(component)) {
                  log.info("Pipeline", "Code linting has enabled, linting code...")

                  // Sources + dependencies must be present before linting.
                  resolveComponentDependencies(component, configurations)

                  def linterType = CodeLinterTypes.parse(component.linter)
                  def codeLintExecutor = new CodeLintExecutor(this, env.workroot + "/" + component.root + "/", component.linterConfig, linterType, component)
                  codeLintExecutor.execute()
                }
              }
            }
          }
        }
      }}
    ])
  }

  // --- Optional stages: SAST security scanning --------------------------------
  if (component.sastEnabled != null && component.sastEnabled) {
    stages.addAll([
      // Stage: validate the scanner configuration and record its container image.
      {stage("${component.name} :: SAST Scanner Preparation") {
        script {
          if (shouldExecuteComponent(component)) {
            log.info("Pipeline", "SAST scanning has enabled, preparing scanner...")

            // FIX: the original referenced an undefined local `sastScanner` here,
            // which would fail with a MissingPropertyException at runtime.
            if (component.sastScanner == null || component.sastScanner.isEmpty()) {
              log.error("Pipeline", "Not set sastScanner for ${component.name}")
            }

            def sastScannerType = SASTScannerTypes.parse(component.sastScanner)

            if (sastScannerType == null) {
              log.error("Pipeline", "Unknown SAST scanner for ${component.name}, skipping SAST scanning")
            } else if (sastScannerType.language != ServiceLanguage.parse(component.language)) {
              log.error("Pipeline", "SAST scanner ${sastScannerType.scanner} is not supported for ${component.language}, skipping SAST scanning")
            } else {
              log.info("Pipeline", "Using ${component.sastScanner} as SAST scanner for ${component.name}")
              // Expose the scanner image to the scanning stage below.
              env."${component.name}_sastScannerContainerImage" = sastScannerType.containerImage
            }
          }
        }
      }},
      // Stage: run the SAST scan inside a pod using the resolved scanner image.
      {stage("${component.name} :: SAST Scanning") {
        // FIX: the original used a declarative `when {}` block, which is not a
        // valid step in scripted pipeline, and it read the non-namespaced
        // env.sastScannerContainerImage instead of the per-component variable
        // set by the preparation stage. Guard with a plain `if` instead, which
        // also avoids spinning up a pod when the stage would be skipped anyway.
        def scannerImage = env."${component.name}_sastScannerContainerImage"
        if (shouldExecuteComponent(component) && scannerImage != null && !scannerImage.isEmpty()) {
          podTemplate(
            label: "sast-scanner-${component.name}",
            yaml: devopsPodYaml(),
            yamlMergeStrategy: merge(),
            containers: [
              containerTemplate(
                name: 'sast-scanner',
                image: scannerImage,
                ttyEnabled: true,
                command: 'sleep',
                args: 'infinity'
              )
            ]
          ) {
            node("sast-scanner-${component.name}") {
              container('sast-scanner') {
                script {
                  log.info("Pipeline", "SAST scanning has enabled, scanning code...")

                  // Fetch sources; SAST works on source code, no deps needed.
                  def sourceFetcher = new SourceFetcher(this)
                  sourceFetcher.fetch(configurations)

                  def sastScannerType = SASTScannerTypes.parse(component.sastScanner)
                  def sastScanner = new SASTExecutor(this, env.workroot + "/" + component.root + "/", sastScannerType)
                  sastScanner.scan()
                }
              }
            }
          }
        }
      }}
    ])
  }

  // --- Optional stage: semantic release --------------------------------------
  if (component.semanticReleaseEnabled != null && component.semanticReleaseEnabled) {
    stages.addAll([
      // Stage: compute the next version, tag the repo, and generate a changelog.
      {stage("${component.name} :: Semantic Releasing") {
        podTemplate(
          label: "semantic-releasing-${component.name}",
          yaml: devopsPodYaml(),
          yamlMergeStrategy: merge(),
          containers: [
            containerTemplate(
              name: 'semantic-releasing',
              image: 'freeleaps/ci-essentials:0.0.4',  // prebuilt CI tooling image (includes semantic-release)
              ttyEnabled: true,
              command: 'sleep',
              args: 'infinity'
            )
          ]
        ) {
          node("semantic-releasing-${component.name}") {
            container('semantic-releasing') {
              script {
                if (shouldExecuteComponent(component)) {
                  log.info("Pipeline", "Semantic releasing has enabled, releasing...")

                  // Run semantic release at most once per build: any non-empty
                  // SEMANTIC_RELEASED value means a prior stage already released.
                  if (env.SEMANTIC_RELEASED != null && !env.SEMANTIC_RELEASED.isEmpty()) {
                    log.info("Pipeline", "Semantic release has been executed, skipping...")
                    return
                  }

                  def sourceFetcher = new SourceFetcher(this)
                  sourceFetcher.fetch(configurations)

                  def semanticReleasingExecutor = new SemanticReleasingExecutor(this, env.workroot)
                  semanticReleasingExecutor.release(configurations.serviceGitCredentialsId, configurations.serviceGitBranch)
                }
              }
            }
          }
        }
      }}
    ])
  }

  // --- Final stages (always present) ------------------------------------------
  stages.addAll([
    // Stage: compile the component and stash its build artifacts for the
    // image-building stage, which runs on a different pod.
    {stage("${component.name} :: Compilation & Packaging") {
      podTemplate(
        label: "build-agent-${component.name}",
        yaml: devopsPodYaml(),
        yamlMergeStrategy: merge(),
        containers: [
          containerTemplate(
            name: 'build-agent',
            image: env."${component.name}_buildAgentImage",
            ttyEnabled: true,
            command: 'sleep',
            args: 'infinity',
            resourceLimitCpu: "2",
            resourceLimitMemory: "4Gi",
            resourceRequestCpu: "1",
            resourceRequestMemory: "2Gi"
          )
        ]
      ) {
        node("build-agent-${component.name}") {
          container('build-agent') {
            script {
              if (shouldExecuteComponent(component)) {
                def buildAgentImage = env."${component.name}_buildAgentImage"
                log.info("Pipeline", "Using ${buildAgentImage} as build agent image for compilation & packaging")

                resolveComponentDependencies(component, configurations)

                dir(env.workroot + "/" + component.root) {
                  // Optional user-defined build command (e.g. npm run build).
                  if (component.buildCommand != null && !component.buildCommand.isEmpty()) {
                    sh component.buildCommand
                  }
                  component.buildArtifacts.each { artifact ->
                    log.info("Pipeline", "Stashing artifact ${artifact} for ${component.name}...")
                    def artifactList = sh(script: "ls ${artifact} -al", returnStdout: true)
                    log.info("Pipeline", "Artifacts list: ${artifactList}")
                    // Classify the path so directories get a recursive glob.
                    def targetPathType = sh(
                      script: """
                        if [ -d "${artifact}" ]; then
                          echo "dir"
                        elif [ -f "${artifact}" ]; then
                          echo "file"
                        else
                          echo "unknown"
                        fi
                      """,
                      returnStdout: true
                    )
                    if (artifact == '.' || artifact == './') {
                      log.info("Pipeline", "Stashing root directory for ${component.name}...")
                      // FIX: `includes: ""` stashed nothing; '**' captures the whole tree.
                      stash includes: '**', name: "${component.name}-root"
                    } else if (targetPathType.trim() == "dir") {
                      // Stash names must not contain '/' or '.', so normalize them.
                      def stashName = artifact.replace('/', '-').replace('.', '-')
                      log.info("Pipeline", "Stashing directory ${artifact} for ${component.name}...")
                      stash includes: "${artifact}/**", name: "${component.name}-${stashName}"
                    } else {
                      stash includes: artifact, name: "${component.name}-${artifact}"
                    }
                  }
                }
              }
            }
          }
        }
      }
    }},
    // Stage: choose the image builder tool (kaniko by default) and record its image.
    {stage("${component.name} :: Image Builder Setup") {
      script {
        if (shouldExecuteComponent(component)) {
          log.info("Pipeline", "Ready to setup image builder for ${component.name}")
          def imageBuilder
          if (component.imageBuilder == null || component.imageBuilder.isEmpty()) {
            log.info("Pipeline", "imageBuilder not set for ${component.name}, using kaniko as default image builder")
            imageBuilder = ImageBuilderTypes.KANIKO
          } else {
            imageBuilder = ImageBuilderTypes.parse(component.imageBuilder)
            if (imageBuilder == null) {
              // FIX: the original logged and then dereferenced the null below;
              // fall back to the documented default instead of throwing an NPE.
              log.error("Pipeline", "Unknown image builder for ${component.name}, skipping image building")
              imageBuilder = ImageBuilderTypes.KANIKO
            }
          }

          env."${component.name}_imageBuilderImage" = imageBuilder.image
          log.info("Pipeline", "Using ${imageBuilder.builder} (image: ${imageBuilder.image}) as image builder for ${component.name}")
        }
      }
    }},
    // Stage: unstash artifacts, build the container image, and push it.
    {stage("${component.name} :: Image Building & Publishing") {
      podTemplate(
        label: "image-builder-${component.name}",
        yaml: devopsPodYaml(),
        yamlMergeStrategy: merge(),
        containers: [
          containerTemplate(
            name: 'image-builder',
            image: env."${component.name}_imageBuilderImage",
            privileged: true,  // builder needs privileged mode / docker socket access
            ttyEnabled: true,
            command: 'sleep',
            args: 'infinity'
          )
        ],
        volumes: [
          hostPathVolume(hostPath: '/var/run/docker.sock', mountPath: '/var/run/docker.sock')
        ]
      ) {
        node("image-builder-${component.name}") {
          container('image-builder') {
            script {
              if (shouldExecuteComponent(component)) {
                def sourceFetcher = new SourceFetcher(this)
                sourceFetcher.fetch(configurations)

                dir(env.workroot + "/" + component.root) {
                  // Restore the artifacts stashed by Compilation & Packaging.
                  component.buildArtifacts.each { artifact ->
                    if (artifact == '.' || artifact == './') {
                      unstash "${component.name}-root"
                    } else {
                      def stashName = artifact.replace('/', '-').replace('.', '-')
                      log.info("Pipeline", "Fetch stashed directory ${artifact} for ${component.name}...")
                      unstash "${component.name}-${stashName}"
                      def artifactList = sh(script: "ls ${artifact} -al && pwd", returnStdout: true)
                      log.info("Pipeline", "Unstashed files: ${artifactList}")
                    }
                  }

                  // FIX: the original condition was inverted — it reported
                  // "dockerfile not set!" exactly when a dockerfile WAS configured.
                  if (component.dockerfile == null || component.dockerfile.isEmpty()) {
                    log.error("Pipeline", "Component ${component.name} dockerfile not set!")
                  }

                  // FIX: apply the same kaniko default as the setup stage, so an
                  // unset imageBuilder does not produce a null type here.
                  def imageBuilderType
                  if (component.imageBuilder == null || component.imageBuilder.isEmpty()) {
                    imageBuilderType = ImageBuilderTypes.KANIKO
                  } else {
                    imageBuilderType = ImageBuilderTypes.parse(component.imageBuilder)
                    if (imageBuilderType == null) {
                      log.error("Pipeline", "Unknown image builder for ${component.name}, skipping image building")
                      imageBuilderType = ImageBuilderTypes.KANIKO
                    }
                  }

                  def imageBuilder = new ImageBuilder(this,
                    env.workroot + "/" + component.root + "/",
                    component.imageBuildRoot,
                    component.dockerfilePath,
                    imageBuilderType
                  )

                  log.info("Pipeline", "Retrieve version of image from pervious stage...")
                  // When semantic release did not run, tag the image from the commit hash.
                  if (env.LATEST_VERSION == null || env.LATEST_VERSION.isEmpty()) {
                    log.warn("Pipeline", "LATEST_VERSION environment value not set, using 'snapshot-<BUILD_COMMIT_HASH>' as default version")
                    sh "git config --global --add safe.directory ${env.workroot}"
                    def commitHash = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
                    def shortCommitHash = commitHash.take(7)
                    env.LATEST_VERSION = "snapshot-${shortCommitHash}"
                  }

                  imageBuilder.setManifestsOfImage(component.imageRegistry, component.imageRepository, component.imageName, env.LATEST_VERSION)
                  imageBuilder.useCredentials(component.registryCredentialsId)
                  imageBuilder.setArchitectures(component.imageReleaseArchitectures)
                  imageBuilder.build()
                }
              }
            }
          }
        }
      }
    }},
    // Stage: bump the Argo CD application to the freshly published image version.
    {stage("${component.name} :: Argo Application Version Updating") {
      podTemplate(
        label: "argo-app-version-updater-${component.name}",
        yaml: devopsPodYaml(),
        yamlMergeStrategy: merge(),
        containers: [
          containerTemplate(
            name: "argo-app-version-updater",
            image: "freeleaps/ci-essentials:0.0.4",
            ttyEnabled: true,
            command: 'sleep',
            args: 'infinity'
          )
        ]
      ) {
        node("argo-app-version-updater-${component.name}") {
          container("argo-app-version-updater") {
            script {
              if (shouldExecuteComponent(component)) {
                def argoApplicationVersionUpdater = new ArgoApplicationVersionUpdater(this, configurations)
                argoApplicationVersionUpdater.update(configurations.environmentSlug, component)
              }
            }
          }
        }
      }
    }}
  ])

  // Return a closure that runs every generated stage in order when invoked.
  return {
    stages.each { stageClosure ->
      stageClosure()
    }
  }
}

/**
 * Jenkins Shared Library 的主要入口方法
 * 当在Jenkinsfile中调用 executeFreeleapsPipeline { ... } 时，Jenkins会自动调用此方法
 * 
 * @param closure 从Jenkinsfile传入的配置闭包，包含所有管道配置参数
 */
/**
 * Main entry point of the Jenkins Shared Library.
 * Invoked automatically when a Jenkinsfile calls executeFreeleapsPipeline { ... }.
 *
 * @param closure configuration closure from the Jenkinsfile; every property
 *                assigned inside it is captured into the pipeline configuration map
 */
def call(Closure closure) {
  // Holds every configuration value assigned inside the Jenkinsfile closure.
  def configurations = [:]

  // DELEGATE_FIRST: property writes inside the closure resolve against the
  // delegate (`configurations`) before the owner, so `foo = "bar"` in the
  // Jenkinsfile lands directly in the map.
  closure.resolveStrategy = Closure.DELEGATE_FIRST
  closure.delegate = configurations

  // Evaluate the user-supplied closure, populating `configurations`.
  closure()

  // Define the Jenkins pipeline.
  pipeline {
    // Run on any available agent; heavy stages declare their own K8s agents.
    agent any

    options {
      // Keep only the most recent 25 builds.
      buildDiscarder(logRotator(numToKeepStr: '25'))
      // Abort the whole run after 240 minutes.
      timeout(time: 240, unit: 'MINUTES')
      // If one parallel branch fails, abort the remaining branches immediately.
      parallelsAlwaysFailFast()
    }

    stages {
      // Stage 1: commit message linting, only when explicitly enabled.
      stage("Pipeline :: Commit Linting If Enabled") {
        when {
          expression {
            return configurations.commitMessageLintEnabled != null && configurations.commitMessageLintEnabled
          }
        }
        // Run this stage in a dedicated Kubernetes pod pinned to devops nodes.
        agent {
          kubernetes {
            defaultContainer 'commit-message-linter'
            yaml """
apiVersion: v1
kind: Pod
metadata:
  labels:
    freeleaps-devops-system/milestone: commit-message-linting
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
          - matchExpressions:
              - key: "node-role.kubernetes.io/devops"
                operator: In
                values: ["true"]
  tolerations:
    - key: "node-role.kubernetes.io/devops"
      operator: Equal
      value: "true"
      effect: "NoSchedule"
  containers:
  - name: commit-message-linter
    image: docker.io/freeleaps/ci-essentials:0.0.4
    command:
    - cat
    tty: true
    volumeMounts:
    - name: workspace
      mountPath: /workspace
  volumes:
    - name: workspace
      emptyDir: {} 
"""
          }
        }
        steps {
          script {
            log.info("Pipeline","Commit message linting is enabled")

            // Fetch the source so the linter has commits to inspect.
            def sourceFetcher = new SourceFetcher(this)
            sourceFetcher.fetch(configurations)

            // Lint the commit messages against the configured convention.
            def linter = new CommitMessageLinter(this)
            linter.lint(configurations)
          }
        }
      }

      // Stage 2: resolve the effective execute mode into env.executeMode.
      stage("Pipeline :: Execute Mode Detection") {
        steps {
          script {
            def executeMode = configurations.executeMode

            // No mode configured: default to "fully".
            if (executeMode == null || executeMode.isEmpty()) {
              log.warn("Pipeline","Not set executeMode, using fully as default execute mode")
              env.executeMode = "fully"
            } 
            // "on-demand" only makes sense for monorepos; otherwise fall back
            // to "fully". (Fixed typo: message previously said "serviceGirRepoType".)
            else if (executeMode == 'on-demand' && configurations.serviceGitRepoType != 'monorepo') {
              log.warn("Pipeline","serviceGitRepoType is not monorepo, on-demand mode is not supported, using fully mode")
              env.executeMode = "fully"
            } 
            // Use the configured mode as-is.
            else {
              log.info("Pipeline","Using ${executeMode} as execute mode")
              env.executeMode = executeMode
            }
          }
        }
      }

      // Stage 3: detect changed components (on-demand mode only).
      stage("Pipeline :: Code Changes Detection") {
        when {
          expression {
            return env.executeMode == "on-demand"
          }
        }

        steps {
          script {
            // Fetch the source so diffs can be computed.
            def sourceFetcher = new SourceFetcher(this)
            sourceFetcher.fetch(configurations)

            // Detect which configured components have changes in this build.
            def changedComponentsDetector = new ChangedComponentsDetector(this)
            def changedComponents = changedComponentsDetector.detect(env.workroot, configurations.components)

            log.info("Pipeline","Changed components: ${changedComponents}")
            // Persist as a space-separated string; env vars cannot hold lists.
            env.changedComponents = changedComponents.join(' ')
          }
        }
      }

      // Stage 4: run the dynamically generated per-component build stages.
      stage("Pipeline :: Components Build (Dynamic Generated Stages)") {
        steps {
          script {
            // Fully mode: build every configured component.
            if (env.executeMode == "fully") {
              configurations.components.each { component ->
                log.info("Pipeline", "Executing generated stages for ${component.name}...")
                // Generate the component's stages and execute them immediately.
                generateComponentStages(component, configurations)()
              }
            } 
            // On-demand mode: build only the changed components.
            // `?.trim()` guards both null and the empty string — note that
            // "".split(/\s+/) yields [""] (size 1), so the previous
            // size() > 0 check wrongly entered this branch with no changes.
            else if (env.changedComponents?.trim()) {
              def changedComponents = env.changedComponents.split(/\s+/).toList()
              configurations.components.each { component ->
                if (changedComponents.contains(component.name)) {
                  log.info("Pipeline", "Executing generated stages for ${component.name}...")
                  generateComponentStages(component, configurations)()
                }
              }
            } 
            // Nothing changed: skip the build entirely.
            else {
              log.info("Pipeline", "No components changed, skipping...")
            }
          }
        }
      }

    }
  }
}