#!/usr/bin/env ruby

require 'fileutils'
require 'digest'
require 'base64'
require 'json'
require 'time'
require 'tmpdir'
require 'securerandom'

# Builds the package/build-config upload bookkeeping on the user (client) side.
class PackUploadFields
  CACHE_FILE = 'upload_cache.json'
  CACHE_EXPIRY_DAYS = 365 # Reclaim cache entries older than 1 year

  def initialize(job)
    @job = job
  end

  def setup_file_store
    process_software_stack
    process_program_upload_files("config")  # Process program field config files
    process_program_upload_files("patchset")  # Process program field patchset files
    process_lkp_sources
  end

  # Handle missing files during retry
  def upload_missing_file_store(upload_paths)
    upload_paths.each do |path|
      case path
      # format: lkp_src/base/$base_commit_date/$base_commit.cgz
      when %r{^lkp_src/base/([\w-]+)/(\w+)\.cgz$}
        @job['need_file_store'].delete_if do |path| path.start_with? 'lkp_src/base/' end
        @job['need_file_store'] << path
        handle_missing_lkp_base($1, $2)
      # format: lkp_src/delta/$base_commit_date/$base_commit-$md5.cgz
      when %r{^lkp_src/delta/([\w-]+)/(\w+)-(\w+)\.cgz$}
        @job['need_file_store'].delete_if do |path| path.start_with? 'lkp_src/delta/' end
        process_lkp_src_delta(base_commit: $2, force_pack: true)
      when %r{^lkp_src/overlay/(\w{2})/(\w+)\.cgz$}
        @job['need_file_store'].delete_if do |path| path.start_with? 'lkp_src/overlay/' end
        process_lkp_src2_overlay(force_pack: true)
      when %r{^ss/config/([^/]+)/(.+)$}, %r{^ss/patchset/([^/]+)/(.+)$}
        handle_missing_upload_files($1, $2, path)
      else
        warn "Unknown path format: #{path}. Skipping."
      end
    end
    @job
  end

  private

  # Process software stack config files
  def process_software_stack
    ss = @job['ss']
    return unless ss

    lkp_src = ENV['LKP_SRC']
    ss.each do |upstream_project, project_params|
      pkgbuild_file = "#{lkp_src}/programs/#{upstream_project}/PKGBUILD"
      if File.exist?(pkgbuild_file)
        bb = Job.parse_pkgbuild_bashx(pkgbuild_file)
        project_params['pkgname'] ||= bb['pkgname'] # to support linux pkgname "linux-modules linux-headers"
      end

      process_ss_upload_files(upstream_project, project_params, "config")
      process_ss_upload_files(upstream_project, project_params, "patchset")
      process_ss_pkgbuild(upstream_project, project_params)
      @job['need_file_store'].uniq!
    end
  end

  # Process program field upload file (similar to software stack)
  def process_program_upload_files(file_type)
    program = @job['program']
    return unless program

    program.each do |program_name, program_params|
      # For makepkg program, treat it as a software stack project
      if program_name == 'makepkg' && program_params['project']
        project_name = program_params['project']
        process_ss_upload_files(project_name, program_params, file_type)
      end
    end
  end

  def process_ss_upload_files(upstream_project, project_params, file_type)
    upload_files = []
    if file_type == 'config'
      upload_files << project_params[file_type] || project_params['autoconfig']
    elsif file_type == 'patchset'
      upload_files = project_params[file_type].to_s.split(' ')
    end
    upload_files.compact!
    return if upload_files.empty?

    file_name_arr = []
    upload_files.each do |upload_file|
      file_name = File.basename(upload_file)
      file_name = file_name.gsub(/[^a-zA-Z0-9._-]/, ':')

      path = "ss/#{file_type}/#{upstream_project}/#{file_name}"
      @job['need_file_store'] ||= []

      # likely a reproduce job, the upload file is already uploaded
      return if @job['need_file_store'].include? path
      @job['need_file_store'] << path

      # upload user provided ss.xxx.config or ss.xxx.patchset;
      # autoconfig is generated in PKGBUILD, so no need upload here
      return unless project_params[file_type]

      file_name_arr << file_name

      local_path = File.expand_path(upload_file)
      unless File.exist?(local_path)
        if upload_file != file_name
          warn "Warning: upload file not found: #{local_path}. Skipping upload for #{path}."
        end
        return
      end

      content = File.binread(local_path)
      content_md5 = Digest::MD5.hexdigest(content)

      unless cache_uploaded?(path, content_md5)
        @job['upload_file_store'] ||= {}
        @job['upload_file_store'][path] = Base64.encode64(content)
        update_cache(path, content_md5)
        puts "Added #{path} to upload_file_store (MD5: #{content_md5})."
      else
        puts "Skipping upload for #{path} (already uploaded)."
      end
    end
    project_params[file_type] = file_name_arr.join(" ")
  end

  def process_ss_pkgbuild(upstream_project, project_params)
    # Extract pkgname or default to upstream_project
    pkgname = project_params["pkgname"] || upstream_project

    # Extract pkgver or fallback to commit/tag/branch, raising an error if none is found
    pkgver = project_params["pkgver"] || project_params["commit"] || project_params["tag"] || project_params["branch"] ||
             raise("No pkgver/commit/tag/branch in ss.#{upstream_project}")

    # Validate pkgver to prevent directory traversal
    if pkgver.include?("..")
      raise "Illegal characters '..' in pkgver: #{pkgver}"
    end

    # Sanitize pkgver to ensure it only contains safe characters
    pkgver = pkgver.gsub(/[^a-zA-Z0-9._-]/, ':')

    # Determine config name, defaulting to "defconfig"
    config_name = project_params["config"] || project_params["autoconfig"] || "defconfig"

    # Generate package names
    package_names = pkgname.split.map { |pname| "#{pname}.cgz" }

    # Determine directory based on upstream_project
    dir = if upstream_project.match?(/^(linux|kernel)/)
            @job["arch"]
          else
            "#{@job['os']}-#{@job['os_version']}-#{@job['os_arch']}"
          end

    # Add "vmlinuz" to package_names if it's a kernel project
    package_names << "vmlinuz" if upstream_project.match?(/^(linux|kernel)/)

    # Generate file store paths
    need_file_store = package_names.map do |pname|
      "ss/pkgbuild/#{upstream_project}/#{dir}/#{config_name}/#{pkgver}/#{pname}"
    end

    # Update @job["need_file_store"] with the generated paths
    if @job.key?("need_file_store")
      @job["need_file_store"].concat(need_file_store)
    else
      @job["need_file_store"] = need_file_store
    end

    need_file_store
  end

  # Process LKP_SRC and LKP_SRC2 directories
  def process_lkp_sources
    process_lkp_src_delta
    process_lkp_src2_overlay
  end

  # Process LKP_SRC delta changes
  def process_lkp_src_delta(base_commit: nil, force_pack: false)
    lkp_src = ENV['LKP_SRC']
    unless lkp_src
      warn "LKP_SRC environment variable not set. Skipping LKP_SRC delta processing."
      return
    end

    base_commit ||= find_base_commit(lkp_src)
    unless base_commit
      warn "Failed to retrieve base commit from LKP_SRC. Skipping LKP_SRC delta processing."
      return
    end

    base_commit_date = get_commit_date(lkp_src, base_commit)
    unless base_commit_date
      warn "Failed to retrieve commit date for #{base_commit}. Skipping LKP_SRC delta processing."
      return
    end

    @job['need_file_store'] ||= []
    @job['need_file_store'] << "lkp_src/base/#{base_commit_date}/#{base_commit}.cgz"

    cgz_file = pack_lkp_delta(base_commit)
    return unless cgz_file
    delta_content = File.binread(cgz_file)
    delta_md5 = Digest::MD5.hexdigest(delta_content)
    delta_path = "lkp_src/delta/#{base_commit_date}/#{base_commit}-#{delta_md5}.cgz"
    @job['need_file_store'] << delta_path

    unless force_pack || cache_uploaded?(delta_path, delta_md5)
      @job['upload_file_store'] ||= {}
      @job['upload_file_store'][delta_path] = Base64.encode64(delta_content)
      update_cache(delta_path, delta_md5) unless force_pack
      puts "Added #{delta_path} to upload_file_store (MD5: #{delta_md5})."
    else
      puts "Skipping upload for #{delta_path} (already uploaded)."
    end
  end

  # Process LKP_SRC2 overlay
  def process_lkp_src2_overlay(force_pack: false)
    lkp_src2 = ENV['LKP_SRC2']
    unless lkp_src2
      # warn "LKP_SRC2 environment variable not set. Skipping LKP_SRC2 overlay processing."
      return
    end

    overlay_content = `find * | cpio -o -H newc | gzip -n -9`.b
    overlay_md5 = Digest::MD5.hexdigest(overlay_content)
    overlay_path = "lkp_src/overlay/#{overlay_md5[0..1]}/#{overlay_md5}.cgz"

    @job['need_file_store'] ||= []
    @job['need_file_store'] << overlay_path

    unless force_pack || cache_uploaded?(overlay_path, overlay_md5)
      @job['upload_file_store'] ||= {}
      @job['upload_file_store'][overlay_path] = Base64.encode64(overlay_content)
      update_cache(overlay_path, overlay_md5) unless force_pack
      puts "Added #{overlay_path} to upload_file_store (MD5: #{overlay_md5})."
    else
      puts "Skipping upload for #{overlay_path} (already uploaded)."
    end
  end

  # Handle missing LKP base file
  def handle_missing_lkp_base(base_commit_date, base_commit)
    lkp_src = ENV['LKP_SRC']
    unless lkp_src
      warn "LKP_SRC environment variable not set. Cannot handle missing LKP base file."
      return
    end

    cgz_file = pack_lkp_delta(base_commit)
    return unless cgz_file
    delta_content = File.binread(cgz_file)
    delta_md5 = Digest::MD5.hexdigest(delta_content)
    delta_path = "lkp_src/delta/#{base_commit_date}/#{base_commit}-#{delta_md5}.cgz"

    @job['upload_file_store'] ||= {}
    @job['upload_file_store'][delta_path] = Base64.encode64(delta_content)
    @job['need_file_store'] << delta_path
    puts "Added missing delta file #{delta_path} to upload_file_store."
  end

  # Handle missing upload files
  def handle_missing_upload_files(software, file_name, path)
    local_path = File.expand_path(file_name)
    unless File.exist?(local_path)
      warn "Upload file not found: #{local_path}. Cannot upload #{path}."
      return
    end

    content = File.binread(local_path)
    content_md5 = Digest::MD5.hexdigest(content)
    @job['upload_file_store'] ||= {}
    @job['upload_file_store'][path] = Base64.encode64(content)
    update_cache(path, content_md5)
    puts "Added missing upload files #{path} to upload_file_store."
  end

  # Retrieve base commit from LKP_SRC
  def find_base_commit(repo_path)
    head_commit = `git -C #{repo_path} rev-parse origin/master`.chomp

    # if nothing changed, use the head_commit as base, so there will be no delta
    if `git -C #{repo_path} diff --stat origin/master..`.chomp.empty? &&
       `git -C #{repo_path} status -s lib bin daemon programs setup monitors`.chomp.empty?
      return head_commit
    end

    # Traverse Git log to find the first commit where the month part of CommitDate changes
    previous_month = nil
    base_commit = nil

    # Use git log to iterate through commits
    log_output = `git -C #{repo_path} log --pretty=format:"%H %ci" --date=format:"%Y-%m"`
    log_output.each_line do |line|
      commit_hash, commit_date = line.chomp.split(' ', 2)
      current_month = commit_date.split('-')[1] # Extract month part (e.g., "01" for January)

      if previous_month.nil?
        previous_month = current_month
      elsif current_month != previous_month
        base_commit = commit_hash
        break
      end
    end

    base_commit
  rescue
    warn "Failed to retrieve base commit from #{repo_path}."
    nil
  end

  # Retrieve commit date in YYYY-MM-DD format
  def get_commit_date(repo_path, commit)
    date_str = `git -C #{repo_path} show -s --format=%ci #{commit}`.chomp
    date_str[0..9] # YYYY-MM-DD
  rescue
    warn "Failed to retrieve commit date for #{commit}."
    nil
  end

  # Check if a file is already uploaded based on MD5
  def cache_uploaded?(key, identifier)
    cache = load_cache
    entry = cache[key]
    entry && entry['md5'] == identifier
  end

  # Update cache with new entry
  def update_cache(key, identifier)
    cache = load_cache
    cache[key] = { 'md5' => identifier, 'created_at' => Time.now.iso8601 }
    save_cache(cache)
  end

  # Load cache from JSON file
  def load_cache
    File.exist?(CACHE_FILE) ? JSON.parse(File.read(CACHE_FILE)) : {}
  rescue JSON::ParserError
    warn "Failed to parse cache file. Starting with an empty cache."
    {}
  end

  # (no shared locking) If concurrent writes happen, last-writer wins by design

  # Save cache to JSON file
  def save_cache(cache)
    reclaim_old_cache(cache)
    atomic_write_json(CACHE_FILE, cache)
  end

  def reclaim_old_cache(cache)
    return unless cache.size > 1000

    # delete if the cache entry is too old
    cache.delete_if do |k, v|
      Time.now - Time.parse(v['created_at']) > CACHE_EXPIRY_DAYS * 86400
    end
  end

  # Atomically write JSON to a file using a temp file + rename + fsync
  def atomic_write_json(path, object)
    dir = File.dirname(File.expand_path(path))
    FileUtils.mkdir_p(dir)
    tmp_basename = ".#{File.basename(path)}.tmp.#{$$}.#{SecureRandom.hex(4)}"
    tmp_path = File.join(dir, tmp_basename)

    File.open(tmp_path, 'w', 0o600) do |f|
      f.write(JSON.pretty_generate(object))
      f.flush
    end

    File.rename(tmp_path, path)
  ensure
    FileUtils.rm_f(tmp_path) if defined?(tmp_path) && File.exist?(tmp_path)
  end

  public def pack(upload_fields)
    packed_upload_fields = {}
    # iter every item in upload_fields from server, generate upload_fields pack of content
    upload_fields.each do |upload_field|
      # puts upload_field
      if upload_field =~ /ss\..*\.config.*/
        ss = @job['ss']
        key_arr = upload_field.split('.')
        file_path = ss[key_arr[1]][key_arr[2]]
      else
        file_path = @job[upload_field]
      end
      # if file not exist, raise error
      unless File.exist?(file_path)
        raise "\n #{upload_field}: #{file_path}
          This file not found in server, so we need upload it, but we not found in local.
          Please check the filepath \"#{file_path}\"  is exist in your file system."
      end
      packed_upload_fields.merge!(generate_upload_field_hash(upload_field, file_path))
    end
    packed_upload_fields
  end

  def generate_upload_field_hash(field_name, file_path)
    upload_field = {}
    # set upload file MD5
    md5 = Digest::MD5.hexdigest File.read(file_path)
    upload_field['md5'] = md5
    # set field name
    upload_field['field_name'] = field_name
    # set upload file name
    file_name = File.basename(file_path)
    upload_field['file_name'] = file_name
    # set upload file content (base64)
    content = Base64.encode64(File.read(file_path)).chomp
    upload_field['content'] = content
    {field_name => upload_field}
  end
end
