#!/bin/bash

# DM (达梦) database bulk-insert script.
# Generates random test rows and POSTs them in batches to the
# DMHR.JOB table through the REST API.

# Defaults — each can be overridden on the command line (see show_help).
API_URL="http://127.0.0.1:55000/api/DMHR.JOB/bulk" # bulk-insert endpoint
BATCH_SIZE=100                                     # rows per request
BATCH_COUNT=10                                     # number of requests
START_JOB_ID=100                                   # first generated job_id

# ANSI color codes for log output; never reassigned, hence readonly.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Pool of candidate job titles; one is drawn at random for each generated row.
job_titles=(
    # management
    "总经理" "副总经理" "部门经理" "项目经理" "技术经理"
    "销售经理" "市场经理" "人事经理" "财务经理" "运营经理"
    # engineering / product
    "软件工程师" "前端工程师" "后端工程师" "测试工程师" "运维工程师"
    "数据分析师" "产品经理" "UI设计师" "架构师" "技术总监"
    # sales / back office
    "销售代表" "客户经理" "市场专员" "人事专员" "财务专员"
    "行政助理" "秘书" "出纳" "会计" "审计员"
    "业务员" "客服代表" "培训师" "质量工程师" "系统管理员"
    # infrastructure / specialized tech
    "网络工程师" "安全工程师" "DBA" "DevOps工程师" "算法工程师"
    "机器学习工程师" "数据工程师" "云架构师" "移动开发工程师" "游戏开发工程师"
    "区块链工程师" "AI工程师" "大数据工程师" "物联网工程师" "嵌入式工程师"
)

# Print a timestamped message to stdout.
# %b interprets backslash escape sequences, so the ANSI color codes
# embedded in $1 are rendered.
log() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '%b\n' "${stamp} - $1"
}

# Log a fatal error in red and terminate the script with status 1.
# Diagnostics go to stderr so they are not mixed into captured stdout.
# Arguments: $1 - human-readable error description.
error_exit() {
    log "${RED}错误: $1${NC}" >&2
    exit 1
}

# Print usage information (options, defaults, examples) to stdout.
show_help() {
    cat <<EOF
达梦数据库批量数据插入脚本

用法:
  $0 [选项]

选项:
  -s, --start-id <ID>     起始job_id (默认: $START_JOB_ID)
  -b, --batch-size <SIZE> 每批数据量 (默认: $BATCH_SIZE)
  -c, --batch-count <COUNT> 批次数量 (默认: $BATCH_COUNT)
  -u, --url <URL>         API地址 (默认: $API_URL)
  -h, --help              显示此帮助信息

示例:
  $0                      # 使用默认参数
  $0 -s 200 -b 50 -c 5   # 从200开始，每批50条，共5批
EOF
}

# Emit a random "min,max" salary pair on stdout.
# min is drawn from [3000, 7999]; max is min times a factor in [2, 4].
generate_salary() {
    local floor factor
    floor=$((3000 + RANDOM % 5000))
    factor=$((2 + RANDOM % 3))
    printf '%s,%s\n' "$floor" "$((floor * factor))"
}

# Print one title chosen uniformly at random from the job_titles array.
get_random_job_title() {
    local pick
    pick=$((RANDOM % ${#job_titles[@]}))
    printf '%s\n' "${job_titles[pick]}"
}

# Build a JSON array of generated job rows on stdout.
# Arguments:
#   $1 - job_id assigned to the first row (subsequent rows increment by 1)
#   $2 - number of rows to generate
# Output object shape:
#   {"job_id":"<id>","job_title":"<title>","min_salary":N,"max_salary":N}
generate_batch_data() {
    local start_id=$1
    local batch_size=$2
    local json="[" sep=""
    local i job_id job_title min_salary max_salary

    for ((i = 0; i < batch_size; i++)); do
        job_id=$((start_id + i))
        job_title=$(get_random_job_title)
        # Split the "min,max" pair in-shell instead of forking
        # `echo | cut` twice per row.
        IFS=',' read -r min_salary max_salary <<<"$(generate_salary)"

        json+="${sep}{\"job_id\":\"$job_id\",\"job_title\":\"$job_title\",\"min_salary\":$min_salary,\"max_salary\":$max_salary}"
        sep=","
    done

    printf '%s]\n' "$json"
}

# POST one JSON batch to $API_URL and log the outcome.
# Arguments:
#   $1 - batch number (used in log messages only)
#   $2 - JSON array payload
# Returns 0 on HTTP 200/201, 1 on any other status or transport failure.
execute_batch_insert() {
    local batch_num=$1
    local json_data=$2
    local response http_code response_body

    log "${BLUE}执行第 $batch_num 批数据插入...${NC}"

    # -w appends the status code on its own line after the response body.
    if ! response=$(curl -s -w "\n%{http_code}" \
        -X POST \
        -H "Content-Type: application/json" \
        -d "$json_data" \
        "$API_URL" 2>/dev/null); then
        log "${RED}✗ 第 $batch_num 批插入失败 (curl 请求失败)${NC}"
        return 1
    fi

    # Split body and status code with parameter expansion: portable
    # (`head -n -1` is GNU-only) and safe if the body is empty.
    http_code=${response##*$'\n'}
    response_body=${response%$'\n'*}

    # String comparison avoids `[ -eq ]` blowing up on a non-numeric code.
    if [[ "$http_code" == "200" || "$http_code" == "201" ]]; then
        log "${GREEN}✓ 第 $batch_num 批插入成功 (HTTP $http_code)${NC}"
        return 0
    else
        log "${RED}✗ 第 $batch_num 批插入失败 (HTTP $http_code)${NC}"
        log "${YELLOW}响应: $response_body${NC}"
        return 1
    fi
}

# Parse command-line options.
# Every value-taking option verifies its argument is present: without the
# check, `shift 2` is a silent no-op when $# == 1, and the while loop would
# spin forever on the same option token.
while [[ $# -gt 0 ]]; do
    case $1 in
    -s | --start-id)
        [[ $# -ge 2 ]] || error_exit "选项 $1 需要一个参数"
        START_JOB_ID="$2"
        shift 2
        ;;
    -b | --batch-size)
        [[ $# -ge 2 ]] || error_exit "选项 $1 需要一个参数"
        BATCH_SIZE="$2"
        shift 2
        ;;
    -c | --batch-count)
        [[ $# -ge 2 ]] || error_exit "选项 $1 需要一个参数"
        BATCH_COUNT="$2"
        shift 2
        ;;
    -u | --url)
        [[ $# -ge 2 ]] || error_exit "选项 $1 需要一个参数"
        API_URL="$2"
        shift 2
        ;;
    -h | --help)
        show_help
        exit 0
        ;;
    *)
        echo "未知参数: $1" >&2
        show_help
        exit 1
        ;;
    esac
done

# Validate parameters: all three numeric settings must be positive integers.
# The `||` inside [[ ]] short-circuits, so -lt only runs on numeric input.
if [[ ! "$START_JOB_ID" =~ ^[0-9]+$ || "$START_JOB_ID" -lt 1 ]]; then
    error_exit "起始job_id必须是正整数"
fi

if [[ ! "$BATCH_SIZE" =~ ^[0-9]+$ || "$BATCH_SIZE" -lt 1 ]]; then
    error_exit "批量大小必须是正整数"
fi

if [[ ! "$BATCH_COUNT" =~ ^[0-9]+$ || "$BATCH_COUNT" -lt 1 ]]; then
    error_exit "批次数量必须是正整数"
fi

# Dependency check: curl is required for all API calls.
command -v curl >/dev/null 2>&1 || error_exit "curl 未安装，请先安装 curl"

# Show the effective configuration before doing any work.
log "${GREEN}===== 达梦数据库批量数据插入开始 =====${NC}"
log "API地址: $API_URL"
log "起始job_id: $START_JOB_ID"
log "每批数据量: $BATCH_SIZE"
log "批次数量: $BATCH_COUNT"
log "总计数据量: $((BATCH_SIZE * BATCH_COUNT))"
log ""

# Probe API reachability. The probe URL is derived from the configured
# API_URL (trailing /bulk stripped) so a -u/--url override is honored
# instead of always hitting the default host.
log "${BLUE}测试API连通性...${NC}"
test_url="${API_URL%/bulk}"
test_response=$(curl -s -w "%{http_code}" -X GET "$test_url" -o /dev/null) || test_response="000"
# 2>/dev/null guards [ -eq ] against a non-numeric probe result.
if [ "$test_response" -eq 200 ] 2>/dev/null; then
    log "${GREEN}✓ API连通性测试通过${NC}"
else
    log "${YELLOW}⚠ API连通性测试返回HTTP $test_response，继续执行...${NC}"
fi
log ""

# Main loop: generate and insert BATCH_COUNT batches, tracking outcomes.
successful_batches=0
failed_batches=0
current_job_id=$START_JOB_ID

for ((batch = 1; batch <= BATCH_COUNT; batch++)); do
    last_id=$((current_job_id + BATCH_SIZE - 1))
    log "${BLUE}准备第 $batch 批数据 (job_id: $current_job_id - $last_id)${NC}"

    json_data=$(generate_batch_data "$current_job_id" "$BATCH_SIZE")

    # Log only the first 200 bytes of the payload as a sample.
    log "示例数据: $(echo "$json_data" | head -c 200)..."

    if execute_batch_insert "$batch" "$json_data"; then
        successful_batches=$((successful_batches + 1))
    else
        failed_batches=$((failed_batches + 1))
    fi

    # Advance the id window for the next batch.
    current_job_id=$((current_job_id + BATCH_SIZE))

    log ""
done

# Final statistics; exit status reflects whether every batch succeeded.
log "${GREEN}===== 批量插入完成 =====${NC}"
log "总批次: $BATCH_COUNT"
log "成功批次: $successful_batches"
log "失败批次: $failed_batches"
log "成功插入数据: $((successful_batches * BATCH_SIZE)) 条"

if [ "$failed_batches" -eq 0 ]; then
    log "${GREEN}🎉 所有数据插入成功！${NC}"
    exit 0
fi

log "${YELLOW}⚠ 有 $failed_batches 个批次插入失败${NC}"
exit 1
