# trace generated using paraview version 5.10.0
#
import os
import numpy as np
from paraview.simple import *
from DEAD.AutoDecoder.ParaviewMacro.NameInterpreter import get_n_slots,get_lz,get_m
from Utility.Csv import write_curve_to_csv
import concurrent.futures


def burning_surface(working_dir: str, id: str):
    """Compute the burning-surface regression curves for one grain model.

    Loads ``<working_dir>/<id>.vts``, builds a ParaView pipeline, and sweeps
    the burned-web distance over 50 samples, integrating the burning surface
    area and the exit-port area at each step.

    Parameters:
        working_dir: Directory containing the ``.vts`` structured-grid file.
        id: Base file name (no extension); also encodes geometry parameters
            decoded by get_n_slots / get_lz / get_m.

    Returns:
        Tuple ``(burned_web_array, burning_surface_area_array,
        exit_port_area_array, id)`` — three parallel lists of 50 samples
        plus the id (returned so the parallel caller can match results).
    """
    # To ensure correct image size when batch processing, please search
    # for and uncomment the line `# renderView*.ViewSize = [*,*]`

    # import the simple module from the paraview
    # disable automatic camera reset on 'Show'
    paraview.simple._DisableFirstRenderCameraReset()

    # Geometry parameters are encoded in the file name itself.
    n_slots = get_n_slots(id)
    lz = get_lz(id)
    m = get_m(id)

    # create a new 'XML Structured Grid Reader'
    target_vts = XMLStructuredGridReader(
        FileName=[working_dir+"/"+id+".vts"])

    # get active view
    renderView1 = GetActiveViewOrCreate('RenderView')

    # show data in view
    target_vtsDisplay = Show(target_vts, renderView1)

    # create a new 'Iso Volume': keep only cells where
    # 'define_marker_theta' is in [0, 50]
    isoVolume1 = IsoVolume(Input=target_vts)

    # Properties modified on isoVolume1
    isoVolume1.InputScalars = ['POINTS', 'define_marker_theta']
    isoVolume1.ThresholdRange = [0.0, 50.0]

    # create a new 'Iso Volume': further restrict by 'define_marker_rho'
    isoVolume2 = IsoVolume(Input=isoVolume1)

    # Properties modified on isoVolume2
    isoVolume2.InputScalars = ['POINTS', 'define_marker_rho']
    isoVolume2.ThresholdRange = [0.0, 50.0]

    # Largest value of the 'u' field — the sweep upper bound below.
    # NOTE(review): 'u' presumably is the burn-back distance field written
    # by the decoder — confirm against the .vts writer.
    max_u = isoVolume2.GetPointDataInformation().GetArray('u').GetRange()[1]
    #print("max_u is: " + str(max_u))

    # create a new 'Contour': the u == web isosurface is the burning surface.
    # Isosurfaces value is assigned inside the sweep loop below.
    contour1 = Contour(Input=isoVolume2)

    # Properties modified on contour1
    contour1.ComputeNormals = 1
    contour1.GenerateTriangles = 0

    # create a new 'Integrate Variables': yields the 'Area' of contour1
    integrateVariables1 = IntegrateVariables(Input=contour1)

    # create a new 'Slice': cross-section just below the top of the grain
    # (z = lz - 0.5/m), used to measure the exit-port area.
    slice1 = Slice(registrationName='Slice1', Input=isoVolume2)

    # Properties modified on slice1.SliceType
    slice1.SliceType.Origin = [0.0, 0.0, lz-0.5/m]
    slice1.SliceType.Normal = [0.0, 0.0, 1.0]

    # create a new 'Iso Volume': the already-burned part of the slice
    # (u <= current web); upper bound is assigned inside the loop.
    isoVolume3 = IsoVolume(Input=slice1)

    # Properties modified on isoVolume3
    isoVolume3.InputScalars = ['POINTS', 'u']
    isoVolume3.ThresholdRange = [-50.0, 0.0]

    # create a new 'Integrate Variables': yields the 'Area' of isoVolume3
    integrateVariables2 = IntegrateVariables(Input=isoVolume3)

    burned_web_array = []
    burning_surface_area_array = []
    exit_port_area_array = []
    data_points_number = 50
    # 0.999 keeps the last sample strictly below max_u — presumably so the
    # final contour/threshold is never empty at the exact extremum.
    web_step_size = max_u/(data_points_number-1)*0.999
    for i in range(data_points_number):
        web = i*web_step_size
        # Re-parameterize the pipeline for this burn-back distance.
        # NOTE(review): no explicit UpdatePipeline() — this relies on the
        # GetCellDataInformation() calls below re-executing the pipeline;
        # confirm this holds for the ParaView version in use (5.10).
        contour1.Isosurfaces = [web]
        isoVolume3.ThresholdRange = [-50.0, web]
        # The integrated 'Area' array holds a single value, so GetRange()[0]
        # reads it. The *2*n_slots factor suggests the model is a symmetric
        # sector of the full grain — TODO confirm the sector fraction.
        burning_surface_area = integrateVariables1.GetCellDataInformation().GetArray(
            'Area').GetRange()[0]*2*n_slots
        exit_port_area_object = integrateVariables2.GetCellDataInformation().GetArray(
            'Area')
        if exit_port_area_object is not None:
            exit_port_area = exit_port_area_object.GetRange()[0]*2*n_slots
        else:
            # No 'Area' array (empty port region): fall back to pi.
            # NOTE(review): magic fallback — presumably a unit-circle port
            # area; verify the intended default.
            exit_port_area = 3.1415926

        # integrated_filter = paraview.servermanager.Fetch(integrateVariables1)
        # area_data = integrated_filter.GetCellData().GetArray('Area').GetValue(0)
        burned_web_array.append(web)
        burning_surface_area_array.append(burning_surface_area)
        exit_port_area_array.append(exit_port_area)

    # Tear the pipeline down sink-first so each Delete() removes a proxy
    # with no remaining consumers; `del` drops the Python reference too.
    Delete(integrateVariables2)
    del integrateVariables2

    Delete(isoVolume3)
    del isoVolume3

    Delete(slice1)
    del slice1

    Delete(integrateVariables1)
    del integrateVariables1

    Delete(contour1)
    del contour1

    Delete(isoVolume2)
    del isoVolume2

    Delete(isoVolume1)
    del isoVolume1

    Delete(target_vts)
    del target_vts
    print("finish burning surface regression for "+id)

    return burned_web_array, burning_surface_area_array, exit_port_area_array, id

def burning_surface_all(working_dir):
    """Run the burning-surface regression for every .vts file in a directory.

    Each model is processed in its own worker process via
    :func:`burning_surface`; as results complete, they are written next to
    the input as ``<id>.csv`` (web vs. burning-surface-area curve) and
    ``<id>.npz`` (all three arrays, compressed).

    Parameters:
        working_dir: Directory containing the ``.vts`` model files; output
            files are written to the same directory.
    """
    # Model ids are the .vts base names (the id encodes geometry parameters
    # that burning_surface() decodes).
    model_ids = [os.path.splitext(file_name)[0]
                 for file_name in os.listdir(working_dir)
                 if file_name.endswith('.vts')]

    # burning_surface() drives a full ParaView pipeline, so fan out across
    # processes (not threads) to get real parallelism and isolated state.
    with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor:
        futures = [executor.submit(burning_surface, working_dir, model_id)
                   for model_id in model_ids]

        # Persist each result as soon as its worker finishes.
        for future in concurrent.futures.as_completed(futures):
            (burned_web_array, burning_surface_area_array,
             exit_port_area_array, model_id) = future.result()
            base_path = os.path.join(working_dir, model_id)
            write_curve_to_csv('burned_web',
                               'burning_surface_area',
                               burned_web_array,
                               burning_surface_area_array,
                               f"{base_path}.csv")
            np.savez_compressed(f"{base_path}.npz",
                                burned_web_array=burned_web_array,
                                burning_surface_area_array=burning_surface_area_array,
                                exit_port_area_array=exit_port_area_array)

if __name__ == "__main__":
    # Script entry point: regress every decoded .vts model found in the
    # configured decode-output directory.
    from DEAD.AutoDecoder.Config import decode_result_save_path

    target_dir = f"{decode_result_save_path}"
    burning_surface_all(target_dir)
