#!/usr/bin/env bash
#
# Pull per-5-minute traffic history for one Zabbix item over a date range,
# write raw/aggregated CSVs, then run a set of Prometheus range queries.
#
# pipefail: make a pipeline report failure if any stage fails, so errors in
# producers (curl/jq) are not silently masked by a succeeding consumer.
set -o pipefail

# --- Configuration ---
# SECURITY NOTE(review): a live session cookie and API auth token are
# hardcoded below. Prefer reading them from environment variables or a
# permission-restricted file so they are not leaked via VCS or `ps`.
ZABBIX_URL="https://1012.s.kuaicdn.cn:10128/zabbix/api_jsonrpc.php"
ITEM_ID="497140"
COOKIE_HEADER="tab=6; zbx_session=eyJzZXNzaW9uaWQiOiIxNGM3OTVlNzg0NmFkZmFhZmE3YWI5NjM5ZTdlMjhiOCIsInNlcnZlckNoZWNrUmVzdWx0Ijp0cnVlLCJzZXJ2ZXJDaGVja1RpbWUiOjE3NDkyMTEyMTEsInNpZ24iOiIxZDRhMzBjMTdhNTE0ODlkYjNmODJjYzg1NGUxNDM5MGY1NjI1ZWE2ZTBkNzY1MzJiMzhlOTg0ZmNiZjQzMzYxIn0%3D; auth=YjljMDIxYjA3YmZiZjM1NTk2MmU5NzMyY2MwMGJmYTI=; user=d2FuZ3NlbmRp; _tea_utm_cache_10000007=undefined"
AUTH_TOKEN="a831d9d9a469f95f47e5c7e17edc8f32f9d45840296db2b804ba8d18d4c8ecab"

# Unit conversion: bytes -> MB (1 MB = 1048576 bytes). Constant, so readonly.
readonly BYTES_TO_MB=1048576
JSON_PATH="./zabbix_data.json"
CSV_PATH="./zabbix_5min_data.csv"
RAW_PATH="./zabbix_raw_data.csv"

# Ensure the output directory exists (currently ".", kept so the paths above
# stay configurable).
mkdir -p "$(dirname "$JSON_PATH")"

# Initialize output files (truncate and write CSV headers).
echo "时间戳,日期时间,流量(MB)" >"$CSV_PATH"
echo "日期时间,流量(MB)" >"$RAW_PATH"

# Accumulators keyed by "YYYY-MM-DD". They MUST be declared associative:
# without `declare -A`, bash treats the subscript as an arithmetic expression,
# and "2025-08-22" fails ("08" is an invalid octal literal), aborting every
# sum/count update in this loop and in the summary below.
declare -A daily_sums daily_counts

# Loop over each date (2025-08-22 .. 2025-08-31), fetching the 18:00-23:00
# UTC window for the configured item via the Zabbix JSON-RPC API.
for day in $(seq -f "%02g" 22 31); do
  date_key="2025-08-${day}"
  # GNU date (-d): convert the window boundaries to epoch seconds.
  TIME_FROM=$(date -u -d "2025-08-${day} 18:00:00" +%s)
  TIME_TILL=$(date -u -d "2025-08-${day} 23:00:00" +%s)

  echo "正在从 Zabbix 获取 2025-08-${day} 数据 (18:00 - 23:00)..."

  # Build the JSON-RPC request body in a private temp file. mktemp avoids
  # races/clobbering that a fixed "temp.json" name would allow.
  req_file=$(mktemp) || { echo "mktemp 失败" >&2; exit 1; }
  cat <<EOF >"$req_file"
{
  "jsonrpc": "2.0",
  "id": 1,
  "method": "history.get",
  "params": {
    "output": "extend",
    "itemids": "$ITEM_ID",
    "history": 3,
    "time_from": $TIME_FROM,
    "time_till": $TIME_TILL,
    "sortfield": ["clock"],
    "sortorder": "ASC"
  },
  "auth": "$AUTH_TOKEN"
}
EOF

  # Send the request; response is stored for the jq passes below.
  curl -s "$ZABBIX_URL" \
    -H "Accept: */*" \
    -H "Accept-Language: zh-CN,zh;q=0.9" \
    -H "Cache-Control: no-cache" \
    -H "Connection: keep-alive" \
    -H "Content-Type: application/json" \
    -H "Cookie: $COOKIE_HEADER" \
    -H "Origin: https://1012.s.kuaicdn.cn:10128" \
    -H "Referer: https://1012.s.kuaicdn.cn:10128/web/v10/" \
    -H "User-Agent: Mozilla/5.0" \
    -d @"$req_file" >"$JSON_PATH"

  rm -f -- "$req_file"

  # A missing .result key means an API error (bad auth, bad params, ...).
  if ! jq -e '.result' "$JSON_PATH" >/dev/null; then
    echo "错误：获取 2025-08-${day} 数据失败！响应内容："
    cat "$JSON_PATH"
    continue
  fi

  # Extract clock and value in ONE jq pass ("clock value" per line) instead
  # of spawning two jq processes per data point.
  while read -r clock value; do
    # Pass numbers via awk -v rather than interpolating them into the
    # program text (robust against unexpected response content).
    value_mb=$(awk -v v="$value" -v d="$BYTES_TO_MB" 'BEGIN {printf "%.2f", v / d}')

    datetime=$(date -u -d "@$clock" "+%Y-%m-%d %H:%M:%S")
    echo "$datetime,$value_mb" >>"$RAW_PATH"

    daily_sums[$date_key]=$(awk -v a="${daily_sums[$date_key]:-0}" -v b="$value_mb" \
      'BEGIN {printf "%.2f", a + b}')
    daily_counts[$date_key]=$(( ${daily_counts[$date_key]:-0} + 1 ))
  done < <(jq -r '.result[] | "\(.clock) \(.value)"' "$JSON_PATH")
done

# Print the per-day average of the collected values (sum / count per date).
printf '\n✅ 数据统计：\n'
# Brace expansion replaces the `seq` subprocess; the range is literal.
for day in {22..31}; do
  date_key="2025-08-${day}"
  if [[ -n "${daily_counts[$date_key]}" && ${daily_counts[$date_key]} -gt 0 ]]; then
    # Pass operands via -v instead of splicing shell values into the awk
    # program text (avoids syntax errors / injection on odd values).
    avg_value=$(awk -v s="${daily_sums[$date_key]}" -v c="${daily_counts[$date_key]}" \
      'BEGIN {printf "%.2f", s / c}')
    echo "日期 $date_key (18:00-23:00): 平均流量值 = $avg_value MB (数据点: ${daily_counts[$date_key]})"
  else
    echo "日期 $date_key (18:00-23:00): 无数据"
  fi
done

# Report which files were produced.
printf '\n生成文件：\n'
echo "原始数据: $RAW_PATH"

# --- Prometheus query section ---
# One query per ID listed (one per line) in the input file.
PROMETHEUS_FILE="/apps/data/workspace/default/bd/11.txt"
if [[ -f "$PROMETHEUS_FILE" ]]; then
  # Read the file directly (no useless `cat |` subshell). IFS= with -r keeps
  # each line verbatim; the `|| [[ -n ... ]]` clause also processes a final
  # line that lacks a trailing newline.
  while IFS= read -r id || [[ -n "$id" ]]; do
    [[ -z "$id" ]] && continue  # skip blank lines instead of querying val=""
    # NOTE(review): /api/v1/query_range normally requires `start` and `end`
    # parameters in addition to `step`; as written the server is likely to
    # return a 400 — confirm the intended time window and add them.
    # SECURITY NOTE(review): credentials are hardcoded on the command line
    # (visible in `ps`); prefer a netrc file or environment variables.
    prometheus_result=$(curl -s -G 'http://172.22.12.50:12505/api/v1/query_range' \
      -u 'kuaicdn:9bbfb29d00913b79' \
      --data-urlencode "query=m_250415_bbiz_id_map{key=\"ksyun\",val=\"$id\"}" \
      --data-urlencode 'step=15s')
    # printf is safe even if the response begins with "-n"/"-e".
    printf '%s\n' "$prometheus_result"
  done <"$PROMETHEUS_FILE"
else
  echo "错误：Prometheus 数据文件 $PROMETHEUS_FILE 不存在，跳过 Prometheus 查询。"
fi