import json
import random
import time
from datetime import datetime
from kafka import KafkaProducer

# 1. Base data definitions
# Road-section names used by all generators below (Hunan expressway network).
roads = [
    # Original road sections
    "G4京港澳高速长沙段", "G60沪昆高速邵阳段", "G5513长张高速益阳段",
    "G0421许广高速衡阳段", "G55二广高速永州段", "S50长芷高速娄底段",

    # Sections newly opened to traffic in 2024 (refs: web pages 2, 7)
    "G0422武深高速株洲段（醴娄高速）", "G72泉南高速安仁段（茶常高速）",
    "S76衡白高速衡山段（白南高速）", "S99永新高速东安段",
    "G5517长常北线高速益常段", "S50长芷高速沅辰段",
    "G0421许广高速永零段", "S71华常高速衡永段"
]
# Toll stations with their peak daily vehicle flow (used as an upper bound
# when simulating in/out flows in generate_toll_flow).
toll_stations = [
    # Original toll stations
    {"name": "长沙西收费站（G5513）", "peak_flow": 96000},
    {"name": "长沙收费站（G6021）", "peak_flow": 66000},
    {"name": "学士收费站（S41）", "peak_flow": 64000},

    # Newly added high-traffic toll stations (ref: web page 4)
    {"name": "雨花收费站（G4京港澳）", "peak_flow": 54600},
    {"name": "石鼓收费站（S51南岳）", "peak_flow": 50100},

    # Stations renamed after red-culture landmarks (ref: web page 1)
    {"name": "杨家界收费站（G5515张南）", "peak_flow": 20000},
    {"name": "武陵源收费站（G5513长张）", "peak_flow": 18000},
    {"name": "沙洲收费站（G72厦蓉）", "peak_flow": 22000}
]
# Incident categories drawn at random by generate_incident.
incidents_types = [
    "车辆追尾", "货车侧翻", "充电排队拥堵", "施工管制",
    "冰雪路段缓行", "危化品泄漏", "团雾引发连环事故",  # weather-driven incidents (ref: web page 9)
    "ETC系统故障导致拥堵"  # technical failure (ref: web page 3)
]

# 2. Generate a real-time congestion-detection record
def generate_traffic_status():
    """Build one simulated traffic-status snapshot for a randomly chosen road."""
    levels = ["严重拥堵", "拥堵", "缓行", "畅通"]
    status = {}
    status["time"] = datetime.now().strftime("%H:%M:%S")
    status["road"] = random.choice(roads)
    status["speed"] = round(random.uniform(20, 100), 1)
    status["congestion_level"] = random.choice(levels)
    # Longest observed congestion stretch was ~28 km (ref: web page 4)
    status["congestion_mileage"] = round(random.uniform(1, 30), 1)
    status["source"] = "湖南高速路网监测中心"
    return status

# 3. Toll-station flow simulation (includes year-over-year change rate)
def generate_toll_flow():
    """Return one simulated in/out flow record per toll station."""
    records = []
    for station in toll_stations:
        peak = station["peak_flow"]
        records.append({
            "name": station["name"],
            # Peak-period flow approaches the station's daily peak (ref: web page 6)
            "in_flow": random.randint(1000, int(peak * 0.8)),
            "out_flow": random.randint(800, int(peak * 0.7)),
            # Spring Festival traffic grew 8.37% year-over-year (ref: web page 4)
            "change_rate": round(random.uniform(-5, 15), 1)
        })
    return records

# 4. Incident generation (linked to a camera on the same road)
def generate_incident(cameras=None):
    """Build one simulated incident record on a randomly chosen road.

    Args:
        cameras: Optional camera list as returned by ``generate_cameras()``.
            When ``None`` (the default, preserving the old call signature) a
            fresh camera list is generated. Passing an existing list keeps
            incident camera IDs consistent with that camera set and avoids
            regenerating random cameras for every incident.

    Returns:
        dict with the time, incident type, kilometre-post location, handling
        status, and the ID of a camera located on the affected road.
    """
    if cameras is None:
        cameras = generate_cameras()
    road = random.choice(roads)
    # Cameras on the chosen road; generate_cameras covers every road three
    # times, so this list is never empty.
    road_cameras = [c["camera_id"] for c in cameras
                    if c["location"].startswith(road)]
    return {
        "time": datetime.now().strftime("%H:%M"),
        "type": random.choice(incidents_types),
        "location": f"{road} K{random.randint(10, 100)}+{random.randint(100, 900)}M",
        "status": random.choice(["处置中", "已疏通"]),
        # BUG FIX: camera_id values already carry the "CAM_" prefix; the old
        # f"CAM_{...}" wrapper produced doubled IDs such as "CAM_CAM_3" that
        # matched no camera in the generated camera list.
        "camera_id": random.choice(road_cameras),
    }

# 5. Camera simulation (covers the newly added road sections)
def generate_cameras():
    """Simulate three cameras per road (high-density monitoring, ref: web page 3)."""
    cameras = []
    for idx, road in enumerate(roads * 3):
        cameras.append({
            "camera_id": f"CAM_{idx}",
            "location": f"{road} K{random.randint(1, 50)}",
            # Roughly 10% of cameras are simulated as offline
            "status": "在线" if random.random() > 0.1 else "故障",
            "video_url": f"rtsp://video.hngs.cn/{idx}"  # simulated stream address
        })
    return cameras

# 6. Assemble the full data payload
def generate_all_data():
    """Combine every simulated feed into a single payload dictionary."""
    payload = {}
    payload["traffic_status"] = generate_traffic_status()
    ranking = [
        {"road": r, "congestion_mileage": round(random.uniform(5, 30), 1)}
        for r in roads
    ]
    ranking.sort(key=lambda item: item["congestion_mileage"], reverse=True)
    payload["congestion_ranking"] = ranking
    payload["toll_flow"] = generate_toll_flow()
    # Incidents cluster in certain periods (ref: web page 4): 1-5 per batch
    payload["incidents"] = [generate_incident() for _ in range(random.randint(1, 5))]
    payload["cameras"] = generate_cameras()
    return payload

# # 7. Simulated data push (updated every 3 seconds) — legacy file-based
# # output, superseded by the Kafka producer below.
# if __name__ == "__main__":
#     while True:
#         data = generate_all_data()
#         with open("hunan_highway_data.json", "a") as f:
#             json.dump(data, f, indent=2, ensure_ascii=False)
#         print(f"数据更新于 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
#         time.sleep(3)
# Kafka configuration (module-level scope)
KAFKA_BROKER = "node101:9092,node102:9092,node103:9092"
TOPIC_NAME = "hunan_highway_topic"

# Create the Kafka producer (global instance).
# NOTE(review): instantiated at import time, so merely importing this module
# attempts to reach the brokers — confirm this is intended if the module is
# ever imported rather than run as a script.
producer = KafkaProducer(
    bootstrap_servers=KAFKA_BROKER,
    # Serialize payloads as UTF-8 JSON; ensure_ascii=False keeps Chinese text readable.
    value_serializer=lambda v: json.dumps(v, ensure_ascii=False).encode('utf-8'),
    retries=3  # retry transient send failures up to 3 times
)

# 7. Simulated data push (sends each payload to Kafka)
if __name__ == "__main__":
    try:
        while True:
            payload = generate_all_data()
            try:
                # Block up to 10 s waiting for broker acknowledgement.
                meta = producer.send(TOPIC_NAME, value=payload).get(timeout=10)
                print(f"数据发送成功：分区 {meta.partition}，偏移量 {meta.offset}")
            except Exception as err:
                print(f"发送失败：{str(err)}")

            print(f"数据更新于 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
            time.sleep(3)
    finally:
        # Always release the Kafka connection on exit.
        if producer:
            producer.close()
