#!/usr/bin/env python

import carla
import numpy as np
import time
import random
from queue import Queue, Empty
# import cv2
import os
import copy
import logging
from carla import Transform, Rotation, Location
import shutil
import stat
import json
from carla import Map
from carla import Vector3D
import math
from carla import ColorConverter as cc
import matplotlib.pyplot as plt

############################# NOTE: cv2 and matplotlib cannot be imported at the same time

def points2pcd(PCD_FILE_PATH, points):
    """Write an (N, 4) point cloud array to an ASCII PCD v0.7 file.

    :param PCD_FILE_PATH: destination file path; overwritten if it exists
    :param points: numpy array of shape (N, 4) with columns x, y, z, intensity
    """
    # number of points in the cloud
    point_num = points.shape[0]
    # PCD header (important): declares four float32 fields per point.
    header = (
        '# .PCD v0.7 - Point Cloud Data file format\n'
        'VERSION 0.7\n'
        'FIELDS x y z intensity\n'
        'SIZE 4 4 4 4\n'
        'TYPE F F F F\n'
        'COUNT 1 1 1 1\n'
        'WIDTH ' + str(point_num) + '\n'
        'HEIGHT 1\n'
        'VIEWPOINT 0 0 0 1 0 0 0\n'
        'POINTS ' + str(point_num) + '\n'
        'DATA ascii'
    )
    # 'w' mode truncates an existing file, so no need to remove it first;
    # the context manager guarantees the handle is closed even on error.
    with open(PCD_FILE_PATH, 'w') as handle:
        handle.write(header)
        # write one point per line: "x y z intensity"
        for i in range(point_num):
            handle.write('\n' + ' '.join(str(points[i, j]) for j in range(4)))

def points2plt(plt_path, points):
    """Render radar detections as a 3D scatter plot and save it to *plt_path*.

    :param plt_path: output image path for plt.savefig
    :param points: (N, 4) array; columns 1/2/3 look like azimuth, altitude,
        depth of CARLA radar detections — TODO confirm against the caller
    """
    # Project the angular measurements onto 3D axes (unit-distance bearing
    # for x/y, depth-scaled altitude for z).
    z = np.sin(points[:, 2]) * points[:, 3]
    y = np.cos(points[:, 1]) * 1
    x = np.sin(points[:, 1]) * 1
    fig = plt.figure("3D Scatter", facecolor="lightgray", figsize=(20, 20), dpi=80)
    # plt.gca(projection=...) was removed in matplotlib 3.6; add_subplot is
    # the supported way to get a 3D axes.
    ax3d = fig.add_subplot(projection="3d")
    ax3d.scatter(x, y, z, s=10, cmap="jet", marker="o")
    ax3d.view_init(elev=0, azim=-70)
    plt.savefig(plt_path)
    # Close the figure so repeated calls do not accumulate open figures.
    plt.close(fig)


def get_time_stamp(ct):
    """Format a float epoch time as "HH-MM-SS-mmm".

    :param ct: float epoch time (e.g. from time.time())
    :return: clock-of-day string with a zero-padded millisecond suffix
    """
    # Fractional part of the second, expressed in milliseconds.
    millis = (ct - int(ct)) * 1000
    # Keep only the clock portion of the formatted local time (drop the date).
    clock_part = time.strftime("%Y-%m-%d %H-%M-%S", time.localtime(ct)).split(' ')[-1]
    return "%s-%03d" % (clock_part, millis)

def sensor_callback(sensor_data, sensor_queue, sensor_name):
    """Queue a (frame, timestamp, name, data) record for one sensor reading.

    Runs on the sensor's callback thread; the main tick loop drains the
    queue after each world tick.
    """
    record = (sensor_data.frame, sensor_data.timestamp, sensor_name, sensor_data)
    sensor_queue.put(record)

# modify from leaderboard
def _parse_lidar_cb(lidar_data):
    points = np.frombuffer(lidar_data.raw_data, dtype=np.dtype('f4'))
    points = copy.deepcopy(points)
    points = np.reshape(points, (int(points.shape[0] / 4), 4))
    return points

def _parse_radar_cb(radar_data):
    points = np.frombuffer(radar_data.raw_data, dtype=np.dtype('f4'))
    points = copy.deepcopy(points)
    points = np.reshape(points, (len(radar_data), 4))
    return points

# modify from world on rail code
def lidar_to_bev(lidar, min_x=-24, max_x=24, min_y=-16, max_y=16, pixels_per_meter=4, hist_max_per_pixel=10):
    xbins = np.linspace(
        min_x, max_x + 1,
               (max_x - min_x) * pixels_per_meter + 1,
    )
    ybins = np.linspace(
        min_y, max_y + 1,
               (max_y - min_y) * pixels_per_meter + 1,
    )
    # Compute histogram of x and y coordinates of points.
    hist = np.histogramdd(lidar[..., :2], bins=(xbins, ybins))[0]
    # Clip histogram
    hist[hist > hist_max_per_pixel] = hist_max_per_pixel
    # Normalize histogram by the maximum number of points in a bin we care about.
    overhead_splat = hist / hist_max_per_pixel * 255.
    # Return splat in X x Y orientation, with X parallel to car axis, Y perp, both parallel to ground.
    return overhead_splat[::-1, :]


# # 可视化数据
# def visualize_data(rgb, lidars, text_args=(cv2.FONT_HERSHEY_SIMPLEX, 0.3, (255, 255, 255), 1)):
#     rgb_canvas = np.array(rgb[..., ::-1])
#     # print(rgb_canvas.shape, rgb_canvas.size)
#     canvas_list = []
#     lidar_canvas = None
#     if lidars is not None:
#         for lidar in lidars:
#             lidar_viz = lidar_to_bev(lidar).astype(np.uint8)
#             lidar_viz = cv2.cvtColor(lidar_viz, cv2.COLOR_GRAY2RGB)
#             canvas = cv2.resize(lidar_viz.astype(np.uint8), (rgb_canvas.shape[0], rgb_canvas.shape[0]))
#             canvas_list.append(canvas)
#         lidar_canvas = np.concatenate(canvas_list, axis=1)
#     # cv2.putText(canvas, f'yaw angle: {imu_yaw:.3f}', (4, 10), *text_args)
#     # cv2.putText(canvas, f'log: {gnss[0]:.3f} alt: {gnss[1]:.3f} brake: {gnss[2]:.3f}', (4, 20), *text_args)
#     return lidar_canvas


# def mkdir_folder(path):
#     for s_type in sensor_type:
#         if not os.path.isdir(os.path.join(path, s_type)):
#             os.makedirs(os.path.join(path, s_type))
#     return True


def draw_waypoints(world, waypoints, road_id=None, life_time=50.0):
    """Highlight waypoints in the world with a green 'O' debug marker.

    :param world: carla.World used for debug drawing
    :param waypoints: list of map waypoints to consider
    :param road_id: if given, only waypoints on this road are drawn;
        None (the default) draws every waypoint — matches the previous
        behavior, where the filter was disabled
    :param life_time: seconds the debug markers stay visible
    :return: list of the waypoints that were drawn
    """
    obj_waypoints = []

    for waypoint in waypoints:
        # Skip waypoints that are not on the requested road (when filtering).
        if road_id is not None and waypoint.road_id != road_id:
            continue
        obj_waypoints.append(waypoint)
        world.debug.draw_string(waypoint.transform.location, 'O', draw_shadow=False,
                                color=carla.Color(r=0, g=255, b=0), life_time=life_time,
                                persistent_lines=True)
    return obj_waypoints


def main():
    """Connect to CARLA, spawn one vehicle and a stack of roadside lidar
    sensors at fixed transforms, run the world in synchronous mode, and
    (when SAVE_PCD is set) merge each tick's lidar returns into a PCD file.

    Relies on module-level configuration defined in the __main__ block:
    save_path, recorder_path, SAVE_PCD, RECORDER, the sensor/vehicle
    transforms, and the shared actor_list, sensor_list and sensor_queue.
    """
    #  ====================================== connect to the CARLA server =============================================
    client = carla.Client("localhost", 2000)
    client.set_timeout(20.0)  # timeout for this client connection
    # with open(word_path) as od_file:
    #     data = od_file.read()
    # use a custom OpenDRIVE map
    # world = client.generate_opendrive_world(data,
    #                                         carla.OpendriveGenerationParameters(
    #                                             wall_height=0))
    ## load a built-in CARLA map; the list can be queried via client.get_available_maps()
    ## print(client.get_available_maps())  # list available maps (built-in only, no custom maps)
    world = client.load_world('shangdiwr')
    # world = client.get_world()
    synchronous_master = False

    ##  show lane-center waypoint coordinates
    # waypoints = world.get_map().generate_waypoints(0.2)
    # v_waypoints = []
    # for waypoint in waypoints:
    #     if waypoint.transform.location.x < 399 and waypoint.transform.location.x > 300:
    #         if waypoint.transform.location.y < -600 and waypoint.transform.location.y > -699:
    #             v_waypoints.append(waypoint)

    # obj_waypoints = draw_waypoints(world, v_waypoints, life_time=100)


    try:
        # ================================= adjust visibility of environment objects ======================================
        # # make the traffic lights auto-generated with the road network invisible
        # env_objs1 = world.get_environment_objects(carla.CityObjectLabel.TrafficLight)
        # env_objs2 = world.get_environment_objects(carla.CityObjectLabel.Poles)

        # env_list = []
        # for env_obj in env_objs1:
        #     env_list.append(env_obj.id)
        # for env_obj in env_objs2:
        #     env_list.append(env_obj.id)
        # world.enable_environment_objects(env_list, False)


        # ================================== create the PCD output folder ==========================================
        pt = world.get_snapshot().timestamp.platform_timestamp
        ptime = int(pt)
        if SAVE_PCD:
            # create a per-run folder named after the integer platform timestamp
            save_pcd_path = os.path.join(save_path, str(ptime))
            os.makedirs(save_pcd_path)

        if RECORDER:
            # start writing the CARLA recorder log file
            client.start_recorder(recorder_path + str(ptime) + '.log', True)


        # ==================================== spectator (world view) =============================================
        spectator = world.get_spectator()
        world.debug.draw_string(vehicle_trans1.location, 'M', draw_shadow=False,
                                color=carla.Color(r=0, g=255, b=0), life_time=1500, persistent_lines=True)
        spectator.set_transform(vehicle_trans1)

        world.debug.draw_string(p.location, 'X', draw_shadow=False,
                                color=carla.Color(r=255, g=0, b=0), life_time=1500, persistent_lines=True)


        # ============================== world settings: fixed timestep and synchronous mode ===================================

        # # let vehicles drive around the world obeying traffic rules
        # Traffic Manager port on the server side; 8000 is the default
        traffic_manager = client.get_trafficmanager(8000)
        # every TM-controlled vehicle keeps at least 2 m distance to the leader
        traffic_manager.set_global_distance_to_leading_vehicle(2.0)
        # every TM vehicle drives at 40% below its default speed (default limit 30 km/h)
        traffic_manager.global_percentage_speed_difference(40.0)
        if True:
            # enable TM synchronous mode
            settings = world.get_settings()
            traffic_manager.set_synchronous_mode(True)
            if not settings.synchronous_mode:
                print("开启同步模式")
                # this client becomes the synchronous master and must tick the world
                synchronous_master = True
                settings.synchronous_mode = True
                settings.fixed_delta_seconds = 0.1
                world.apply_settings(settings)
            else:
                synchronous_master = False

        # ============================= weather =================================

        # # control the world's weather and time of day (sun position):
        # # cloudless, no rain, sun altitude 90 degrees
        weather = carla.WeatherParameters(
            cloudiness=0.0,  # 0-100: 0 is a clear sky, 100 fully overcast
            precipitation=0.0,  # 0 means no rain, 100 heavy rain
            precipitation_deposits=0.0, # 0 no puddles on the road, 100 road fully covered with water
            wind_intensity=0.0, # 0 calm, 100 strong wind; wind affects rain direction and leaves
            sun_azimuth_angle=0.0, # sun azimuth angle, 0-360
            sun_altitude_angle=90.0,  # sun altitude angle: 90 is noon, -90 is midnight
            fog_density=0.0, # 0-100 fog density/thickness; only affects RGB camera sensors
            fog_distance=0.0, # distance at which fog starts, in meters
            wetness=0.0, # 0-100 road wetness percentage; only affects RGB camera sensors
            fog_falloff=0.0, # fog falloff, 0 to infinity: 0 = fog lighter than air, covers the whole scene; 1 = same density as air, covers normal-sized buildings
            scattering_intensity=0.0, # how strongly light penetrates the fog
            mie_scattering_scale=0.0, # light interaction with large particles (pollen, dust): hazy look with halos around lights; 0 = no effect
            rayleigh_scattering_scale=0.0331, # light interaction with small particles (air molecules); wavelength-dependent: blue sky by day, red sky at dusk
            )
        world.set_weather(weather)

        # ====================== look up vehicle models in the blueprint library ===============
        blueprint_library = world.get_blueprint_library()

        # ============================= vehicles =================================

        # wbp = world.get_blueprint_library().find('walker.pedestrian.0004') # add a pedestrian
        # wbp.set_attribute('speed', '0.0')
        # walker = world.spawn_actor(wbp, walker_trans)
        # actor_list.append(walker)

        # add vehicles
        bp1 = blueprint_library.find('vehicle.lincoln.mkz_2017')
        bp2 = blueprint_library.find('vehicle.gazelle.omafiets')
        # bp1 = world.get_blueprint_library().find('vehicle.v5.v5')
        bp1.set_attribute('color', '0, 0, 0')
        ego_vehicle1 = world.spawn_actor(bp1, vehicle_trans1)
        # ego_vehicle2 = world.spawn_actor(bp1, vehicle_trans2)
        # ego_vehicle3 = world.spawn_actor(bp1, vehicle_trans3)
        # ego_vehicle4 = world.spawn_actor(bp1, vehicle_trans4)
        # ego_vehicle5 = world.spawn_actor(bp1, vehicle_trans5)
        # ego_vehicle6 = world.spawn_actor(bp1, vehicle_trans6)
        print("单独放置车辆")
        actor_list.append(ego_vehicle1)
        # actor_list.append(ego_vehicle2)
        # actor_list.append(ego_vehicle3)
        # actor_list.append(ego_vehicle4)
        # actor_list.append(ego_vehicle5)
        # actor_list.append(ego_vehicle6)

        # # get the vehicle bounding-box length / width / height
        # length = ego_vehicle1.bounding_box.extent.x * 2.0
        # width = ego_vehicle1.bounding_box.extent.y * 2.0
        # higth = ego_vehicle1.bounding_box.extent.z * 2.0
        # print(length, width, higth)

        # vehicle control: keep the spawned vehicle stationary via hand brake
        # ego_vehicle1.set_autopilot(True)
        ego_vehicle1.apply_control(carla.VehicleControl(hand_brake = True))
        # ego_vehicle2.apply_control(carla.VehicleControl(hand_brake = True))
        # ego_vehicle3.apply_control(carla.VehicleControl(hand_brake = True))
        # ego_vehicle4.apply_control(carla.VehicleControl(hand_brake = True))
        # ego_vehicle5.apply_control(carla.VehicleControl(hand_brake = True))
        # ego_vehicle6.apply_control(carla.VehicleControl(hand_brake = True))

        # vehicle physics settings (torque, center of mass, etc.)
        # ego_vehicle1.apply_physics_control(
        #     carla.VehiclePhysicsControl(center_of_mass = carla.Vector3D(-1.0, 0.0, 0.0)))

        # =========================== instantiate the sensor models ============================
        # ======================================= sensors ==================================

        # One ray-cast lidar blueprint per channel bank; together the banks
        # stack up non-overlapping vertical FOV slices (presumably emulating
        # one physical multi-beam roadside lidar — TODO confirm).
        lidar_bp_2_1 = blueprint_library.find('sensor.lidar.ray_cast')  # lidar
        lidar_bp_2_2 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_2_3 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_4_1 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_4_2 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_4_3 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_6_1 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_6_2 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_6_3 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_28 = blueprint_library.find('sensor.lidar.ray_cast')
        lidar_bp_64 = blueprint_library.find('sensor.lidar.ray_cast')

        # shared lidar parameters: no drop-off and no noise, so output is deterministic
        atmosphere_attenuation_rate = '0.004'
        dropoff_general_rate = '0.0'
        dropoff_intensity_limit = '0.0'
        dropoff_zero_intensity = '0.0'
        noise_seed = '0.0'
        noise_stddev = '0.0'

        # 64-channel reference lidar covering the full vertical FOV (-1.5 to -42 deg);
        # rotation frequency matches the fixed timestep so each tick yields a full sweep
        lidar_bp_64.set_attribute('channels', '64')
        lidar_bp_64.set_attribute('upper_fov', '-1.5')
        lidar_bp_64.set_attribute('lower_fov', '-42')
        lidar_bp_64.set_attribute('points_per_second', '1152000')
        lidar_bp_64.set_attribute('range', '200')
        lidar_bp_64.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_64.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_64.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_64.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_64.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_64.set_attribute('noise_seed', noise_seed)
        lidar_bp_64.set_attribute('noise_stddev', noise_stddev)


        lidar_bp_2_1.set_attribute('channels', '2')
        lidar_bp_2_1.set_attribute('upper_fov', '-1.5')
        lidar_bp_2_1.set_attribute('lower_fov', '-1.7')
        lidar_bp_2_1.set_attribute('points_per_second', '36000')
        lidar_bp_2_1.set_attribute('range', '200')
        lidar_bp_2_1.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_2_1.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_2_1.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_2_1.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_2_1.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_2_1.set_attribute('noise_seed', noise_seed)
        lidar_bp_2_1.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_28.set_attribute('channels', '28')
        lidar_bp_28.set_attribute('upper_fov', '-1.9')
        lidar_bp_28.set_attribute('lower_fov', '-4.6')
        lidar_bp_28.set_attribute('points_per_second', '504000')
        lidar_bp_28.set_attribute('range', '200')
        lidar_bp_28.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_28.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_28.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_28.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_28.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_28.set_attribute('noise_seed', noise_seed)
        lidar_bp_28.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_6_1.set_attribute('channels', '6')
        lidar_bp_6_1.set_attribute('upper_fov', '-4.8')
        lidar_bp_6_1.set_attribute('lower_fov', '-5.8')
        lidar_bp_6_1.set_attribute('points_per_second', '108000')
        lidar_bp_6_1.set_attribute('range', '200')
        lidar_bp_6_1.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_6_1.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_6_1.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_6_1.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_6_1.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_6_1.set_attribute('noise_seed', noise_seed)
        lidar_bp_6_1.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_4_1.set_attribute('channels', '4')
        lidar_bp_4_1.set_attribute('upper_fov', '-6.1')
        lidar_bp_4_1.set_attribute('lower_fov', '-7')
        lidar_bp_4_1.set_attribute('points_per_second', '72000')
        lidar_bp_4_1.set_attribute('range', '200')
        lidar_bp_4_1.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_4_1.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_4_1.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_4_1.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_4_1.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_4_1.set_attribute('noise_seed', noise_seed)
        lidar_bp_4_1.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_4_2.set_attribute('channels', '4')
        lidar_bp_4_2.set_attribute('upper_fov', '-7.4')
        lidar_bp_4_2.set_attribute('lower_fov', '-8.6')
        lidar_bp_4_2.set_attribute('points_per_second', '72000')
        lidar_bp_4_2.set_attribute('range', '200')
        lidar_bp_4_2.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_4_2.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_4_2.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_4_2.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_4_2.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_4_2.set_attribute('noise_seed', noise_seed)
        lidar_bp_4_2.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_4_3.set_attribute('channels', '4')
        lidar_bp_4_3.set_attribute('upper_fov', '-9.2')
        lidar_bp_4_3.set_attribute('lower_fov', '-11')
        lidar_bp_4_3.set_attribute('points_per_second', '72000')
        lidar_bp_4_3.set_attribute('range', '200')
        lidar_bp_4_3.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_4_3.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_4_3.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_4_3.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_4_3.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_4_3.set_attribute('noise_seed', noise_seed)
        lidar_bp_4_3.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_6_2.set_attribute('channels', '6')
        lidar_bp_6_2.set_attribute('upper_fov', '-12')
        lidar_bp_6_2.set_attribute('lower_fov', '-17')
        lidar_bp_6_2.set_attribute('points_per_second', '108000')
        lidar_bp_6_2.set_attribute('range', '200')
        lidar_bp_6_2.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_6_2.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_6_2.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_6_2.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_6_2.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_6_2.set_attribute('noise_seed', noise_seed)
        lidar_bp_6_2.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_6_3.set_attribute('channels', '6')
        lidar_bp_6_3.set_attribute('upper_fov', '-19')
        lidar_bp_6_3.set_attribute('lower_fov', '-29')
        lidar_bp_6_3.set_attribute('points_per_second', '108000')
        lidar_bp_6_3.set_attribute('range', '200')
        lidar_bp_6_3.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_6_3.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_6_3.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_6_3.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_6_3.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_6_3.set_attribute('noise_seed', noise_seed)
        lidar_bp_6_3.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_2_2.set_attribute('channels', '2')
        lidar_bp_2_2.set_attribute('upper_fov', '-32')
        lidar_bp_2_2.set_attribute('lower_fov', '-35')
        lidar_bp_2_2.set_attribute('points_per_second', '36000')
        lidar_bp_2_2.set_attribute('range', '200')
        lidar_bp_2_2.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_2_2.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_2_2.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_2_2.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_2_2.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_2_2.set_attribute('noise_seed', noise_seed)
        lidar_bp_2_2.set_attribute('noise_stddev', noise_stddev)

        lidar_bp_2_3.set_attribute('channels', '2')
        lidar_bp_2_3.set_attribute('upper_fov', '-38')
        lidar_bp_2_3.set_attribute('lower_fov', '-42')
        lidar_bp_2_3.set_attribute('points_per_second', '36000')
        lidar_bp_2_3.set_attribute('range', '200')
        lidar_bp_2_3.set_attribute('rotation_frequency', str(int(1 / settings.fixed_delta_seconds)))
        lidar_bp_2_3.set_attribute('atmosphere_attenuation_rate', atmosphere_attenuation_rate)
        lidar_bp_2_3.set_attribute('dropoff_general_rate', dropoff_general_rate)
        lidar_bp_2_3.set_attribute('dropoff_intensity_limit', dropoff_intensity_limit)
        lidar_bp_2_3.set_attribute('dropoff_zero_intensity', dropoff_zero_intensity)
        lidar_bp_2_3.set_attribute('noise_seed', noise_seed)
        lidar_bp_2_3.set_attribute('noise_stddev', noise_stddev)


        # All sensors share the same fixed roadside transform (attach_to=None,
        # i.e. world-anchored rather than attached to the vehicle).
        lidar_2_1 = world.spawn_actor(lidar_bp_2_1, lidar_trans, attach_to=None)
        lidar_2_2 = world.spawn_actor(lidar_bp_2_2, lidar_trans, attach_to=None)
        lidar_2_3 = world.spawn_actor(lidar_bp_2_3, lidar_trans, attach_to=None)
        lidar_4_1 = world.spawn_actor(lidar_bp_4_1, lidar_trans, attach_to=None)
        lidar_4_2 = world.spawn_actor(lidar_bp_4_2, lidar_trans, attach_to=None)
        lidar_4_3 = world.spawn_actor(lidar_bp_4_3, lidar_trans, attach_to=None)
        lidar_6_1 = world.spawn_actor(lidar_bp_6_1, lidar_trans, attach_to=None)
        lidar_6_2 = world.spawn_actor(lidar_bp_6_2, lidar_trans, attach_to=None)
        lidar_6_3 = world.spawn_actor(lidar_bp_6_3, lidar_trans, attach_to=None)
        lidar_28 = world.spawn_actor(lidar_bp_28, lidar_trans, attach_to=None)
        lidar_64 = world.spawn_actor(lidar_bp_64, lidar_trans, attach_to=None)

        # Each sensor pushes (frame, timestamp, name, data) into sensor_queue.
        lidar_2_1.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_2_1"))
        lidar_2_2.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_2_2"))
        lidar_2_3.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_2_3"))
        lidar_4_1.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_4_1"))
        lidar_4_2.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_4_2"))
        lidar_4_3.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_4_3"))
        lidar_6_1.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_6_1"))
        lidar_6_2.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_6_2"))
        lidar_6_3.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_6_3"))
        lidar_28.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_28"))
        lidar_64.listen(lambda data: sensor_callback(data, sensor_queue, "lidar_64"))

        sensor_list.append(lidar_2_1)
        sensor_list.append(lidar_2_2)
        sensor_list.append(lidar_2_3)
        sensor_list.append(lidar_4_1)
        sensor_list.append(lidar_4_2)
        sensor_list.append(lidar_4_3)
        sensor_list.append(lidar_6_1)
        sensor_list.append(lidar_6_2)
        sensor_list.append(lidar_6_3)
        sensor_list.append(lidar_28)
        sensor_list.append(lidar_64)


        # Main loop: tick the world, drain one reading per sensor, merge all
        # non-64-channel clouds and write them out as one PCD per tick.
        while True:
            # print(sensor_queue.qsize())
            world.tick()
            w_frame = world.get_snapshot().frame
            p_timestamp = world.get_snapshot().timestamp.platform_timestamp
            w_timestamp = get_time_stamp(p_timestamp)
            print("\nWorld's frame:{0}, time: {1}".format(w_frame, w_timestamp))

            try:
                lidars = []
                splicing = []

                # save sensor data: expect exactly one queue entry per sensor per tick
                for i in range(0, len(sensor_list)):
                    s_frame, s_timestamp, s_name, s_data = sensor_queue.get(True, 1.0)
                    # if s_name == 'radar':
                    #     radar_data = _parse_radar_cb(s_data)
                    #     radar.append(radar_data)
                    #     radar2.append(radar_data.reshape(1, -1))
                    #     save_radar_path = save_path + "radar/" + str(w_timestamp) +".jpg"
                    #     points2plt(save_radar_path, radar_data)

                    if s_name != 'lidar_64':
                        splicing.append(_parse_lidar_cb(s_data))
                    else:
                        lidars.append(_parse_lidar_cb(s_data))
                # save_radar_path = save_path + "radar.txt"
                # for r in range(0, len(radar2)):
                #     np.savetxt(save_radar_path, radar2[r], fmt='%f')
                # print(radar)

                if splicing and SAVE_PCD:
                    concat_points = np.concatenate(splicing, axis=0)
                    concat_points1 = np.concatenate(splicing, axis=0)
                    concat_points[:, 1] = [-p for p in concat_points[:, 1]] #  flip the Y values of the merged cloud: CARLA's coordinate system has Y reversed
                    pcd_path = save_pcd_path + "/" + str(w_timestamp) + ".pcd"
                    points2pcd(pcd_path, concat_points)
                    # pcd_path = save_path + str(w_timestamp) + "_" + "64" + ".pcd"
                    # points2pcd(pcd_path, concat_points)

                # sensor data visualization (disabled; needs cv2 — see module note)
                # rgb = np.concatenate(rgbs, axis=1)[..., :3]
                # lidar32 = visualize_data(rgb, lidars)
                # lidarsplice = visualize_data(rgb, [concat_points1])
                # cv2.imshow('rgb_vizs', rgb)
                # cv2.imshow('lidar_32', lidar32)
                # cv2.imshow('lidar_splice', lidarsplice)
                # cv2.waitKey(1)

            except Empty:
                print("[Warning] Some of the sensor information is missed")
            time.sleep(0.1)

    finally:
        # Restore asynchronous mode only if this client enabled sync mode.
        if synchronous_master:
            settings = world.get_settings()
            settings.synchronous_mode = False
            settings.fixed_delta_seconds = None
            world.apply_settings(settings)

        # Destroy everything we spawned so the server is left clean.
        for sensor in sensor_list:
            sensor.destroy()
        for actor in actor_list:
            if actor.is_alive:
                actor.destroy()
        # client.apply_batch([carla.command.DestroyActor(x) for x in actor_list])
        print("All cleaned up!")
        if RECORDER:
            # stop the recorder
            client.stop_recorder()

if __name__ == "__main__":
    # Module-level configuration consumed by main().
    # Path to a custom OpenDRIVE network file (only used by the commented-out
    # generate_opendrive_world branch in main()).
    word_path = r"/home/wanji/下载/carla_test/net_files/SDOpenDrive.xodr"
    sensor_type = ['rgb', 'lidar']
    # Filled by sensor callbacks, drained once per world tick in main().
    sensor_queue = Queue()
    actor_list, sensor_list = [], []
    save_path = '/home/wanji/下载/carla_test/output/real_car_simu/'
    recorder_path = '/home/wanji/carla/recorder_files/'
    # realdata = '/home/wanji/下载/carla_test/output/toveiw.json'
    # if os.path.exists(save_path):
    #     for fileList in os.walk(save_path):
    #         for name in fileList[2]:
    #             os.chmod(os.path.join(fileList[0], name), stat.S_IWRITE)
    #             os.remove(os.path.join(fileList[0], name))
    #     shutil.rmtree(save_path)
    # spawed_ids = {}

    # os.mkdir(save_path)
    IM_WIDTH = 256 * 1
    IM_HEIGHT = 256 * 1

    RECORDER = False  #  whether to record a CARLA replay log
    SAVE_PCD = True  #  whether to save the lidar point clouds as PCD files

    view_transform = Transform(Location(x=350, y=-630, z=80), Rotation(pitch=-90, yaw=-13, roll=0.000000))

    # Compared with 51simone, the heading angle converts as yaw_carla = 360 - yaw_51.
    lidar_trans = Transform(Location(x=371.9938132585651, y=-643.7199381087686, z=43.017),
                            Rotation(pitch=-0.1249, yaw=260.3, roll=-0.2931))

    walker_trans = Transform(Location(x=371.9938132585651, y=-643.7199381087686, z=43.017),
                            Rotation(pitch=-0.1249, yaw=260.3, roll=-0.2931))

    radar_trans = Transform(Location(x=371.9938132585651, y=-643.7199381087686, z=43.017),
                            Rotation(pitch=-0.1249, yaw=260.3, roll=-0.2931))

    v_x= 372.25 # geometric center of the vehicle in CARLA 369.3125
    v_y= -624 # -626.0625
    v_yaw = 159.7  # vehicle heading angle 189.7
    l = 1.52   # distance between bounding-box center and the rear axle (meters)
    v_xr = v_x + l * math.cos(v_yaw * math.pi / 180.0)   #  rear-axle center coordinates synchronized to 51simone
    v_yr = v_y + l * math.sin(v_yaw * math.pi / 180.0)

    # Compared with 51simone, the heading angle converts as yaw_carla = 360 - yaw_51.
    # scenario: different heading angles
    # vehicle_trans1 = Transform(Location(x=338.8991255401931, y=-581.3421246371704, z=36.68),
    #                            Rotation(pitch=0.000000, yaw = 331, roll=0.000000))
    # vehicle_trans2 = Transform(Location(x=374.41312684734, y=-572.2943866374178, z=36.559),
    #                            Rotation(pitch=0.000000, yaw=260, roll=0.000000))
    # vehicle_trans3 = Transform(Location(x=336.0095616463592, y=-705.1134438309988, z=36.559),
    #                            Rotation(pitch=0.000000, yaw=72, roll=0.000000))
    # vehicle_trans4 = Transform(Location(x=338.5867939906663, y= -696.9080224175414, z=36.559),
    #                            Rotation(pitch=0.000000, yaw=74, roll=0.000000))
    # # scenario: different distances
    # vehicle_trans1 = Transform(Location(x=352.36835232995634, y=-669.3162864851029, z=36.68),
    #                            Rotation(pitch=0.000000, yaw = 71, roll=0.000000))
    # vehicle_trans2 = Transform(Location(x=334.4478501787186, y=-710.3986727539285, z=36.68),
    #                            Rotation(pitch=0.000000, yaw=75, roll=0.000000))
    # vehicle_trans3 = Transform(Location(x=339.99493866076335, y=-691.2674734506796, z=36.68),
    #                            Rotation(pitch=0.000000, yaw=69, roll=0.000000))
    # vehicle_trans4 = Transform(Location(x=360.1344801310995, y= -640.6139326233, z=36.68),
    #                            Rotation(pitch=0.000000, yaw=74, roll=0.000000))
    # scenario: different vehicle models
    # vehicle_trans1 = Transform(Location(x=383.82181745261005, y=-631.8765862260593, z=36.68),
    #                            Rotation(pitch=0.000000, yaw = 169, roll=0.000000))
    # vehicle_trans2 = Transform(Location(x=356.20874447979, y=-654.981761750198, z=36.68),
    #                            Rotation(pitch=0.000000, yaw=75, roll=0.000000))
    # scenario: occlusion
    # vehicle_trans1 = Transform(Location(x=349.1003111740208, y= -669.5937247679345, z=36.68),
    #                            Rotation(pitch=0.000000, yaw = 72, roll=0.000000))
    # vehicle_trans2 = Transform(Location(x=353.6486032446715, y=-661.9213709129963, z=36.68),
    #                            Rotation(pitch=0.000000, yaw=73, roll=0.000000))
    # scenario: target 100 m away
    vehicle_trans1 = Transform(Location(x=331.34087338600546, y= -735.3149412717362, z=36.68),
                               Rotation(pitch=0.000000, yaw = 77, roll=0.000000))

    # Marker position drawn with an 'X' in main() (presumably the sensor
    # mast location projected to the ground — TODO confirm).
    p = Transform(Location(x=371.9938132585651, y=-643.7199381087686, z=36),
                               Rotation(pitch=-45.000000, yaw=0.0, roll=0.000000))

    try:
        main()
    except KeyboardInterrupt:
        print(' - Exited by user.')