# 1960s-style graphics renderer
# (written with AI assistance)

import multiprocessing
import multiprocessing.shared_memory
import time

import numpy as np
import scipy
import PIL.Image

# When True, every surface normal gets a small random jitter (a crude fake
# normal map). Read by the get_norm methods of the primitives below.
allow_normal_map = False

# Set to True only if the optional PBR shading module imports successfully.
is_pbr_ready = False

try:
    import drawsphere_pbr
    is_pbr_ready = True
except Exception as e:
    # Best-effort import: fall back to the built-in Blinn-Phong shader.
    print(f"Failed to Import PBR Module: {e}. Use Blinn-Phong instead. ")

class ADVMAT:
    """Material parameters for PBR shading: F0 (base reflectance), metallic, roughness."""

    def __init__(self, F0=None, metallic=0.0, roughness=0.5):
        # 0.04 is the conventional dielectric base reflectance.
        self.F0 = np.full(3, 0.04)
        if F0 is not None:
            self.F0[:] = F0
        self.metallic = float(metallic)
        self.roughness = float(roughness)
        
class GROUNDS:
    """Infinite horizontal checkerboard plane at y = ground_y."""

    def __init__(self, ground_y, advmat=None):
        self.ground_y = ground_y
        self.advmat = advmat if advmat else ADVMAT()

    def is_ray_collided_with_me(self, point, direction):
        """Intersect a ray with the plane.

        Returns (is_collided, hit_coord, normal, color, t); the last four are
        None/None/None/0 on a miss.
        """
        if point.ndim != 1 or direction.ndim != 1:
            raise ValueError('123')

        origin = point.copy()
        ray = direction.copy() / np.linalg.norm(direction)

        hit = False
        hit_point = None
        hit_normal = None
        hit_color = None
        t_hit = 0

        # A ray (nearly) parallel to the plane is treated as a miss, even if
        # its origin lies exactly in the plane.
        if not np.isclose(ray[1], 0.0):
            t = (self.ground_y - origin[1]) / ray[1]
            if t > 0:  # only intersections in front of the ray origin count
                hit = True
                t_hit = t
                hit_point = origin + t * ray
                hit_normal = self.get_norm()
                hit_color = self.get_color(hit_point)

        return hit, hit_point, hit_normal, hit_color, t_hit

    def get_norm(self):
        """Upward unit normal, optionally jittered by the fake normal map."""
        n = np.array([0.0, 1.0, 0.0])
        if allow_normal_map:
            n = n + 0.05 * (2 * np.random.rand(3) - 1)
        return n / np.linalg.norm(n)

    def get_color(self, coord):
        """Checkerboard color at a world-space point (pattern rotated 30 deg about y)."""
        if coord.ndim != 1:
            raise ValueError('123')

        theta = np.pi / 6
        c, s = np.cos(theta), np.sin(theta)
        rot = np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
        rotated = rot @ coord

        # 2x2-unit cells: cells where both parities agree are gray, others white.
        cell_x = np.trunc(rotated[0] % 2)
        cell_z = np.trunc(rotated[2] % 2)

        if cell_x == cell_z:
            return np.array([0.5, 0.5, 0.5])
        return np.array([1.0, 1.0, 1.0])

class SPHERES:
    """Sphere primitive: center `coord`, radius `r`, RGB `color`, PBR material `advmat`."""

    def __init__(self, coord, r, color=None, advmat=None):
        self.coord = np.array(coord)
        self.color = np.ones(3) if color is None else np.array(color)
        self.r = float(r)

        self.advmat = advmat if advmat else ADVMAT()
        return

    def is_ray_collided_with_me(self, point, direction):
        """Ray-sphere intersection.

        Returns (is_collided, hit_coord, normal, color, t); on a miss the
        coord/normal are None and t is 0.
        """
        if point.ndim != 1 or direction.ndim != 1:
            raise ValueError('123')

        point = point.copy()
        direction = direction.copy() / np.linalg.norm(direction)

        is_collided = False
        collided_coord = None
        normal = None
        color = self.color.copy()
        t_final = 0

        # Quadratic for |P + t*D - C|^2 = r^2.
        A = np.sum(direction**2)  # == 1 after normalization
        B = 2.0 * np.dot(direction, point - self.coord)
        C = np.sum((point - self.coord) ** 2) - self.r**2
        D = B**2 - 4*A*C

        if D > 0:
            sqrtD = np.sqrt(D)
            t_near = (-B - sqrtD) / (2*A)  # entry point
            t_far = (-B + sqrtD) / (2*A)   # exit point; t_far >= t_near since A > 0

            # BUG FIX: the original compared the roots the wrong way round
            # (t_near was always taken), so a ray starting INSIDE the sphere
            # "collided" at a negative t behind its origin, which wrongly
            # shadowed points in the any-hit shadow pass. Take the smallest
            # positive root instead: the entry point when in front of the
            # origin, otherwise the exit point.
            if t_far > 0:
                is_collided = True
                t_final = t_near if t_near > 0 else t_far
                collided_coord = point + t_final * direction

        if is_collided:
            normal = self.get_norm(collided_coord)

        return is_collided, collided_coord, normal, color, t_final

    def get_norm(self, point):
        """Outward unit normal at a surface point, optionally jittered."""
        if point.ndim != 1:
            raise ValueError('123')

        normal = point - self.coord
        if allow_normal_map:
            normal += 0.05*(2*np.random.rand(3)-1)
        normal /= np.linalg.norm(normal)
        return normal

class CUBES:
    """Axis-aligned box primitive using slab-method ray intersection (AI-implemented)."""

    def __init__(self, coord, lwh, color=None, advmat=None):
        self.coord = np.array(coord)
        self.lwh = np.array(lwh)
        self.color = np.ones(3) if color is None else np.array(color)
        self.advmat = advmat if advmat else ADVMAT()

    def is_ray_collided_with_me(self, point, direction):
        """Intersect a ray with the box.

        Returns (is_collided, hit_coord, normal, color, t); all-None tail on a miss.
        """
        if point.ndim != 1 or direction.ndim != 1:
            raise ValueError('123')

        origin = point.copy()
        ray = direction.copy() / np.linalg.norm(direction)

        # Work in the box's local frame.
        rel = origin - self.coord
        half = self.lwh / 2.0

        t_enter, t_exit = -np.inf, np.inf

        for axis in range(3):
            if np.abs(ray[axis]) < 1e-6:
                # Ray parallel to this slab: miss unless the origin sits between the faces.
                if not (-half[axis] <= rel[axis] <= half[axis]):
                    return False, None, None, None, None
                continue

            # Parameters of the two face planes on this axis.
            near = (-half[axis] - rel[axis]) / ray[axis]
            far = (half[axis] - rel[axis]) / ray[axis]
            if near > far:
                near, far = far, near

            # Intersection interval across all slabs.
            t_enter = max(t_enter, near)
            t_exit = min(t_exit, far)

        # Empty interval, or the box is entirely behind the ray.
        if t_enter > t_exit or t_exit < 0:
            return False, None, None, None, None

        # Entry point when in front of the origin; otherwise origin is inside the box.
        t_final = t_enter if t_enter > 0 else t_exit
        hit_point = origin + t_final * ray

        normal = self.get_norm(hit_point - self.coord)
        return True, hit_point, normal, self.color.copy(), t_final

    def get_norm(self, local_coord):
        """Outward face normal, chosen from the dominant local-coordinate axis."""
        half = self.lwh / 2.0

        scaled = local_coord / half
        face_axis = int(np.argmax(np.abs(scaled)))

        n = np.zeros(3)
        n[face_axis] = np.sign(scaled[face_axis])
        if allow_normal_map:
            n = n + 0.05 * (2 * np.random.rand(3) - 1)
        return n / np.linalg.norm(n)

class SPRITERS:
    """Textured billboard sprite lying in the z = coord[2] plane, facing +z."""

    def __init__(self, coord, height, texture, advmat=None):
        self.coord = np.zeros((3))
        self.coord[:] = np.array(coord)

        # Reorient the texture so index 0 runs along x and index 1 along y (bottom-up).
        self.texture = np.flip(np.array(texture).transpose([1, 0, 2]), axis=1)
        if self.texture.ndim != 3 or self.texture.shape[2] != 3:
            raise ValueError('Texture must be RGB image')

        self.height = float(height)
        # World units per texel, chosen so the sprite is `height` units tall.
        self.scale = float(height) / self.texture.shape[1]

        self.advmat = advmat if advmat else ADVMAT()

    def is_ray_collided_with_me(self, point, direction):
        """Intersect a ray with the sprite plane, then sample the texture.

        Returns (is_collided, hit_coord, normal, color, t); near-black or
        out-of-bounds texels count as a miss.
        """
        if point.ndim != 1 or direction.ndim != 1:
            raise ValueError('123')

        origin = point.copy()
        ray = direction.copy() / np.linalg.norm(direction)

        hit = False
        hit_point = None
        normal = np.array([0.0, 0.0, 1.0])  # sprite always faces +z
        color = None
        t_hit = 0

        if np.abs(ray[2]) > 1e-3:
            t = (self.coord[2] - origin[2]) / ray[2]
            if t > 0:
                hit_point = origin + t * ray
                t_hit = t
                hit, color = self.get_color(hit_point - self.coord)

        return hit, hit_point, normal, color, t_hit

    def get_color(self, local_xy):
        """Map a local (x, y) offset to a texel; returns (is_opaque, rgb_or_None)."""
        u = int(local_xy[0] / self.scale + self.texture.shape[0] // 2)
        v = int(local_xy[1] / self.scale + self.texture.shape[1] // 2)

        in_bounds = 0 <= u < self.texture.shape[0] and 0 <= v < self.texture.shape[1]
        if not in_bounds or np.linalg.norm(self.texture[u, v, :]) < 0.2:
            return False, None
        return True, self.texture[u, v, :].copy()

class LIGHTS:
    """Point light: world position, RGB color (default white), scalar intensity."""

    def __init__(self, coord, color=None, intensity=1):
        self.coord = np.zeros((3))
        self.coord[:] = coord

        self.color = np.ones((3))
        if color is not None:
            self.color[:] = color

        self.intensity = intensity

def load_texture(filename, target_res_height=800):
    """Load an image file as a float RGB array in [0, 1].

    The image is resized (LANCZOS) so its height equals target_res_height,
    preserving aspect ratio.
    """
    img = PIL.Image.open(filename).convert('RGB')
    w, h = img.size
    factor = target_res_height / h
    resized = img.resize((int(w * factor), int(h * factor)), PIL.Image.LANCZOS)
    pixels = np.array(resized, dtype=np.float64) / 255
    return np.clip(pixels, 0, 1)

def get_brightness(point, normal, view_direction, basecolor):
    """Blinn-Phong shading at a surface point, summed over all lights in light_list.

    Uses the module-level `ao`, `ambient_light_color`, `light_list` and
    `is_ray_collided_with_any_obj` (shadow test). Returns an RGB array
    clipped to [0, 1].
    """
    if point.ndim != 1 or normal.ndim != 1 or basecolor.ndim != 1:
        raise ValueError('123')

    n = normal.copy() / np.linalg.norm(normal)
    # View vector points from the surface back towards the camera.
    v = -view_direction.copy() / np.linalg.norm(view_direction)

    total = np.zeros((3))
    total += ao * ambient_light_color * basecolor  # ambient term

    for light in light_list:
        to_light = light.coord - point
        dist2 = np.sum(to_light**2)
        to_light = to_light / np.linalg.norm(to_light)
        # Semi-empirical falloff: a pure inverse-square law decays too fast far
        # away and diverges near the light, so it is softened to 1/(1+0.05*d^2).
        intensity = light.intensity * (1 / (1 + 0.05 * dist2))

        # Shadow ray: start slightly off the surface to avoid self-intersection.
        if is_ray_collided_with_any_obj(point + 0.1 * to_light, to_light, use_Zbuffer=False)[0]:
            continue

        half_vec = (v + to_light) / 2
        half_vec /= np.linalg.norm(half_vec)
        n_dot_h = np.clip(np.dot(n, half_vec), 1e-3, 1)
        n_dot_l = np.clip(np.dot(n, to_light), 1e-3, 1)

        # Lambertian diffuse (NdotL) plus a Blinn-Phong specular lobe (NdotH^32).
        total += intensity * light.color * basecolor * n_dot_l * (1 + 0.5 * n_dot_h**32)

        # All channels saturated: further lights cannot change the clipped result.
        if np.all(total >= 1):
            break

    return np.clip(total, 0, 1)

def is_ray_collided_with_any_obj(point, direction, use_Zbuffer=True):
    """Test a ray against every object in the module-level obj_list.

    With use_Zbuffer=True the nearest hit (smallest t) wins; with
    use_Zbuffer=False the search stops at the first hit found (any-hit
    mode, used for shadow rays).

    Returns (is_collided, coord, normal, color, z, obj_id); z stays np.inf
    and obj_id stays -1 when nothing is hit.
    """
    if point.ndim != 1 or direction.ndim != 1:
        raise ValueError('123')

    hit = False
    hit_id = -1
    hit_coord = None
    hit_normal = None
    hit_color = None
    depth = np.inf

    for idx, obj in enumerate(obj_list):
        collided, coord, norm_vec, col, t = obj.is_ray_collided_with_me(point, direction)

        if collided and (t < depth or use_Zbuffer == False):
            hit = True
            hit_id = idx
            hit_coord = coord
            hit_normal = norm_vec
            depth = float(t)
            hit_color = col

        if hit and use_Zbuffer == False:
            break

    return hit, hit_coord, hit_normal, hit_color, depth, hit_id

def start_raytracing(start_point, direction):
    """Trace one primary ray and shade the nearest hit.

    Returns (rgb, is_collided, t, obj_id); rgb is black when the ray misses
    or the hit lies behind the ray origin.
    """
    shaded = np.array([0.0, 0.0, 0.0])
    is_collided, hit_coord, hit_normal, hit_color, t_obj, obj_id = is_ray_collided_with_any_obj(start_point, direction)

    if is_collided and t_obj > 0:
        if is_pbr_ready:
            # Optional PBR path: forward the hit material's parameters.
            mat = obj_list[obj_id].advmat
            shaded = drawsphere_pbr.get_brightness_pbr(
                hit_coord, hit_normal, direction, basecolor=hit_color,
                F0=mat.F0.copy(), metallic=mat.metallic, roughness=mat.roughness)
        else:
            # Fallback: built-in Blinn-Phong shader.
            shaded = get_brightness(hit_coord, hit_normal, direction, basecolor=hit_color)

    return shaded, is_collided, t_obj, obj_id

# Pinhole camera distance: the camera sits at (0, 0, ncam) and rays pass
# through the z = 0 image plane towards -z.
ncam = 1.7

# Scene objects, tested in list order by is_ray_collided_with_any_obj.
obj_list = []
obj_list.append(SPHERES(np.array([0, 0, -5]), 1))
'数据来源：Palmqvist, A. (2025) Physically Based - The PBR Values Database Available at: https://physicallybased.info/ (Accessed: 14 November 2025).'
# Gold-like sphere: F0/metallic/roughness values taken from the database cited above.
obj_list.append(SPHERES(np.array([1, 1, -4]), 0.5, color = np.array([0.910,0.778,0.423]), advmat = ADVMAT(F0 = np.array([0.995,0.974,0.747]), metallic = 1.0, roughness = 0.6)))
obj_list.append(CUBES(np.array([0, -1.5, -9]), np.array([10, 1, 1]), color=np.array([0.0, 1.0, 0.0])))
obj_list.append(SPRITERS(np.array([3, -2 + 1.5, -6]), 3, load_texture('image.jpg')))
obj_list.append(GROUNDS(-2))

light_list = []
light_list.append(LIGHTS(np.array([5,5,0]),color = np.array([1.0,1.0,1.0]),intensity = 4))

# Global ambient term: color and ambient-occlusion strength.
ambient_light_color = np.array([1.0,1.0,1.0])
ao = 0.1

# Hand the shared scene state to the optional PBR module so it shades and
# casts shadow rays against the same scene.
if is_pbr_ready:
    drawsphere_pbr.ao = ao
    drawsphere_pbr.ambient_light_color = ambient_light_color
    drawsphere_pbr.light_list = light_list
    drawsphere_pbr.is_ray_collided_with_any_obj = is_ray_collided_with_any_obj

def worker(i, shm1_name, shm2_name, ns, i_start, i_end):
    """Render the sample range [i_start, i_end) into the shared result buffer.

    Args:
        i: worker index (identification/debugging only).
        shm1_name: shared-memory block holding the (ns, 2) screen-space
            sample coordinates.
        shm2_name: shared-memory block holding the (ns, 5) output rows,
            laid out as [r, g, b, depth_t, obj_id].
        ns: total number of samples.
        i_start, i_end: half-open index range this worker owns.
    """
    shm1 = multiprocessing.shared_memory.SharedMemory(name=shm1_name)
    shm2 = multiprocessing.shared_memory.SharedMemory(name=shm2_name)

    sxy = np.ndarray((ns, 2), dtype=np.float64, buffer=shm1.buf)
    us = np.ndarray((ns, 5), dtype=np.float64, buffer=shm2.buf)

    # FIX: the loop variable was previously also named `i`, silently
    # shadowing the worker-index parameter; renamed so the parameter
    # keeps its meaning throughout the function.
    for idx in range(i_start, i_end):
        xs = sxy[idx, 0]
        ys = sxy[idx, 1]

        # Pinhole camera at (0, 0, ncam); the ray passes through (xs, ys, 0).
        start_point = np.array([0, 0, ncam])
        direction = np.array([xs, ys, -ncam])

        b, is_collided, t_obj, obj_id = start_raytracing(start_point, direction)

        us[idx, 0:3] = b
        us[idx, 3] = t_obj
        us[idx, 4] = obj_id

    shm1.close()
    shm2.close()
    return

def apply_simple_fxaa(image, edge_threshold=0.125, edge_threshold_min=0.03125):
    """Simplified FXAA-style anti-aliasing (AI-implemented).

    Pixels flagged as edges (by luma gradient) are blended 50/50 with a
    Gaussian-blurred copy of the image; non-edge pixels are left untouched.

    Args:
        image: RGB image of shape (H, W, 3) with values in [0, 1].
        edge_threshold: relative edge-detection threshold.
        edge_threshold_min: absolute floor for the edge threshold.

    Returns:
        The processed image array, clipped to [0, 1].

    Raises:
        ValueError: if image is not an (H, W, 3) array.
    """
    from scipy.ndimage import gaussian_filter

    if image.ndim != 3 or image.shape[2] != 3:
        raise ValueError("输入图像必须是RGB格式(H,W,3)")

    # 1. Luma channel (Rec. 601 weights).
    luma = np.dot(image[..., :3], [0.299, 0.587, 0.114])

    # 2. Central-difference gradients; borders stay zero.
    gx = np.zeros_like(luma)
    gy = np.zeros_like(luma)
    gx[:, 1:-1] = luma[:, 2:] - luma[:, :-2]
    gy[1:-1, :] = luma[2:, :] - luma[:-2, :]

    # 3-4. Edge strength against a global threshold (simplified detector).
    strength = np.abs(gx) + np.abs(gy)
    is_edge = strength > max(edge_threshold_min, edge_threshold * np.max(luma))

    # 5. Blend weight per pixel: 1.0 keeps the original, 0.5 mixes half blurred.
    weights = np.where(is_edge, 0.5, 1.0)

    # 6. Gaussian blur per channel serves as the anti-aliasing filter.
    blurred = np.stack(
        [gaussian_filter(image[..., c], sigma=0.75) for c in range(3)], axis=-1)

    # 7. Mix original and blurred images according to the edge mask.
    mixed = image * weights[..., None] + blurred * (1 - weights)[..., None]
    return np.clip(mixed, 0, 1)

if __name__ == '__main__':
    # Screen-space sampling grid: pixels cover [-L, L) x [-L, L) at spacing dx.
    dx = 0.0025
    L = 1

    xs,ys = np.meshgrid(np.arange(-L,L,dx),np.arange(-L,L,dx))
    n = xs.shape[0]
    sxy = np.column_stack((xs.flatten(),ys.flatten()))
    del xs,ys

    ns = n**2

    # Per-sample output rows: [r, g, b, depth_t, obj_id].
    us = np.zeros((ns,5))

    # Shared-memory buffers so worker processes can read coordinates and
    # write results in place without pickling large arrays.
    shm1 = multiprocessing.shared_memory.SharedMemory(create=True, size=sxy.nbytes)
    shm2 = multiprocessing.shared_memory.SharedMemory(create=True, size=us.nbytes)
    shared_sxy = np.ndarray((ns,2), dtype=np.float64, buffer=shm1.buf)
    shared_us = np.ndarray((ns,5), dtype=np.float64, buffer=shm2.buf)
    shared_sxy[:,:] = sxy[:,:]
    shared_us[:,:] = 0

    print(ns)

    num_proc = 8
    process = []
    i_start = []
    i_end = []

    start_time = time.time()

    # Split the ns samples into num_proc contiguous ranges; the last worker
    # also takes the remainder of the integer division.
    for i in range(num_proc):
        work_range = int(ns/num_proc)
        i_start.append(work_range*i)
        i_end.append(work_range*(i+1) if i < num_proc - 1 else ns)

        p = multiprocessing.Process(target=worker,args=(i,shm1.name,shm2.name,ns,i_start[i],i_end[i]))
        process.append(p)
        p.start()

    print(i_start)
    print(i_end)

    for p in process:
        p.join()

    # Color pass: FXAA, 8-bit conversion, and a vertical flip because image
    # rows grow downward while screen-space y grows upward.
    image = shared_us[:,0:3].reshape((n, n, 3)).copy()
    image = apply_simple_fxaa(image)
    image = (image * 255).astype(np.uint8)
    image = PIL.Image.fromarray(image).transpose(PIL.Image.FLIP_TOP_BOTTOM)

    # Depth pass: misses produce inf (and possibly NaN) depths — zero them,
    # then clamp to [0, 20] and normalize for display.
    image_deep = shared_us[:,3].reshape((n, n)).copy()
    image_deep[np.isnan(image_deep) | np.isinf(image_deep)] = 0
    image_deep = np.clip(image_deep,0,20) / 20
    image_deep = (image_deep * 255).astype(np.uint8)
    image_deep = PIL.Image.fromarray(image_deep).transpose(PIL.Image.FLIP_TOP_BOTTOM)


    # Object-id pass: shift ids by 1 so "no hit" (-1) maps to 0.
    image_objid = shared_us[:,4].reshape((n, n)).copy()
    image_objid = (image_objid+1)/(len(obj_list)+1)
    image_objid = (image_objid * 255).astype(np.uint8)
    image_objid = PIL.Image.fromarray(image_objid).transpose(PIL.Image.FLIP_TOP_BOTTOM)

    # Release and destroy the shared-memory blocks.
    shm1.close()
    shm1.unlink()
    shm2.close()
    shm2.unlink()

    end_time = time.time()
    print(f"Execution time: {end_time - start_time:.2f} seconds")

    image.save('output_image.png')
    image_deep.save('output_image_deep.png')
    image_objid.save('output_image_objid.png')
    image.show()
    