# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

# Script copyright (C) Wengtong Yu
# Contributors: Wengtong Yu

import os
import bpy
import bmesh
import bmesh.ops
import mathutils
import csv

# Maps csv column base name -> (vector_size, start_col, semantic, normalized, enabled).
# Populated by load_csv_token() and read by togetherVertexAttr().
token_name_description = {}

# Known vertex-attribute semantics; csv column groups are assigned these
# names in order, and extra groups reuse the last entry ("ATTRIBUTE").
default_tokens = [
    "POSITION",
    "COLOR",
    "NORMAL",
    "UV_PACKED",
    "TEXCOORD",
    "BONEINDICES",
    "BONEWEIGHTS",
    "ATTRIBUTE",
]

def load_csv_token(context, filepath):
    """
    Read the csv header row and derive the importable vertex tokens.

    Consecutive header cells that share a base name (the cell text minus
    its trailing two characters, e.g. "POSITION.x" -> "POSITION" —
    assumes a 2-character component suffix; TODO confirm against real
    exports) are grouped into a single token.  Columns 0-1 are skipped;
    column 2 seeds the first group.  Results are written into the
    module-level token_name_description dict and into
    context.scene.import_csv_settings.base_tokens.
    """
    if filepath is None:
        print("filepath is null!")
        return
    scn = context.scene
    settings = scn.import_csv_settings
    base_tokens = settings.base_tokens
    try:
        with open(filepath) as file:
            base_tokens.clear()
            token_name_description.clear()
            reader = csv.reader(file)
            row = next(reader, None)
            if row is None:
                # empty file: the bare next() would raise StopIteration
                print('Error While Opening the file!')
                return
            name_col = row[2][:-2]
            # description tuple layout:
            # (vector_size, start_col, semantic, normalized, enabled)
            token_name_description[name_col] = (0, 2, default_tokens[0], False, True)
            first = base_tokens.add()
            first.token_name = default_tokens[0]
            first.csv_name = name_col
            lastTokenId = len(default_tokens) - 1
            j = 0  # index of the semantic assigned to the current group
            for i, n in enumerate(row):
                if i <= 2:
                    continue
                name = n[:-2]
                # count this column toward the current group's vector size
                tmp = (token_name_description[name_col][0] + 1,
                       token_name_description[name_col][1],
                       default_tokens[lastTokenId if j >= len(default_tokens) else j],
                       False, True)
                token_name_description[name_col] = tmp
                if name_col != name:
                    # the base name changed: open the next token group
                    j += 1
                    token = base_tokens.add()
                    token.csv_name = name
                    if j >= len(default_tokens):
                        token.token_name = default_tokens[lastTokenId]
                    else:
                        token.token_name = default_tokens[j]
                    token_name_description[name] = (0, i, token.token_name, False, True)
                    name_col = name
            # account for the final column of the last group
            tmp = (token_name_description[name_col][0] + 1,
                   token_name_description[name_col][1],
                   default_tokens[lastTokenId if j >= len(default_tokens) else j],
                   False, True)
            token_name_description[name_col] = tmp
    except IOError:
        print('Error While Opening the file!')


def load_csv_data(
        context, filepath, 
        config_settings = None,
        global_matrix = None
    ):
    """
    Load an entire csv file and build a mesh from it.

    Every data row is one vertex; each run of three consecutive rows
    forms one triangle (1-2 leftover rows at the end are dropped).

    context         -- Blender context (kept for the operator API)
    filepath        -- path of the csv file to import
    config_settings -- import options (mirror_x, vertical_uv,
                       vertex_order, merge_vertex); required in practice
    global_matrix   -- world transform for the new object (identity if None)
    """
    # ensure there is a matrix for translation
    if global_matrix is None:
        global_matrix = mathutils.Matrix()

    if filepath is None:
        print("filepath is null!")
        return
    if config_settings is None:
        # the settings are dereferenced unconditionally below; fail fast
        # instead of raising AttributeError half-way through the file
        print("config_settings is null!")
        return

    faces = []
    name = os.path.basename(filepath)[:-4]  # strip the ".csv" extension
    try:
        with open(filepath) as file:
            reader = csv.reader(file)
            next(reader, None)  # skip the header row (tolerate empty file)
            x_mod = -1 if config_settings.mirror_x else 1
            current_face = []
            i = 0
            # one attribute dict per csv row / vertex
            vertex_attr_array = []
            for id, row in enumerate(reader):
                vertex_attr_array.append(togetherVertexAttr(row, config_settings.vertical_uv, x_mod))
                current_face.append(id)
                if i < 2:
                    i += 1
                else:
                    if config_settings.vertex_order:
                        # flip the winding order of every triangle
                        current_face.reverse()
                    faces.append(tuple(current_face))
                    i = 0
                    current_face.clear()

            make_mesh(name, global_matrix, config_settings.merge_vertex, vertex_attr_array, faces)

    except IOError:
        print('Error While Opening the file!')

def getTokenDescVectorSize(token):
    """Number of float components in the token's vector (element 0)."""
    size, *_ = token
    return size

def getTokenDescStart(token):
    """Starting csv column index of the token (element 1)."""
    _, start, *_ = token
    return start

def getTokenDescSemantic(token):
    """Semantic name of the token, e.g. "POSITION" (element 2)."""
    _, _, semantic, *_ = token
    return semantic

def getTokenDescNormalized(token):
    """Whether components are remapped from [-1, 1] to [0, 1] (element 3)."""
    _, _, _, normalized, *_ = token
    return normalized

def getTokenEnabled(token):
    """Whether the token takes part in the import (element 4)."""
    _, _, _, _, enabled, *_ = token
    return enabled

def getSingleData(row, s, o, op, normalized):
    """
    Extract one attribute vector from a csv row.

    row        -- list of csv cell strings
    s          -- start column index
    o          -- number of components to read
    op         -- optional callable applied to the float list
    normalized -- remap each component from [-1, 1] to [0, 1]
    """
    vals = [float(row[s + k].strip()) for k in range(o)]
    if op is not None:
        vals = op(vals)
    if normalized:
        vals = [v * 0.5 + 0.5 for v in vals]
    return mathutils.Vector(tuple(vals))

def togetherVertexAttr(row, vertical_uv, x):
    """
    Collect one csv row into a {semantic+index: data} attribute dict.

    row         -- list of csv cell strings for one vertex
    vertical_uv -- flip the V coordinate of TEXCOORD data (v -> 1 - v)
    x           -- multiplier applied to POSITION.x (-1 mirrors the mesh)
    """
    # running index per semantic so repeated semantics get unique keys
    count = {token: 0 for token in default_tokens}

    # flip V for DirectX-style UV origins
    uvOp = lambda vals: [vals[0], 1.0 - vals[1] if vertical_uv else vals[1]]
    # mirror on X; positions are always padded to 3 components
    mirrorOp = lambda vals: [vals[0]*x, vals[1], vals[2]] if len(vals) > 2 else [vals[0]*x, vals[1], 0.0]

    vertex_attr_dict = {}
    for token in token_name_description.values():
        semantics = getTokenDescSemantic(token)
        if not getTokenEnabled(token) or semantics not in count:
            continue
        key = semantics + str(count[semantics])
        op = None
        if semantics == "POSITION":
            op = mirrorOp
        if semantics == "TEXCOORD":
            op = uvOp
        data = getSingleData(row, getTokenDescStart(token),
                             getTokenDescVectorSize(token), op,
                             getTokenDescNormalized(token))
        if semantics == "UV_PACKED":
            # Split packed data into zero-padded 2-component uv tuples.
            # The original emitted a bare float for a 3-component vector
            # (`(data[2])` is not a tuple) and left 2-component data
            # unsplit; both broke make_mesh's layer-size computation.
            data = [(data[k], data[k + 1] if k + 1 < len(data) else 0.0)
                    for k in range(0, len(data), 2)]
        # Ensure 3-comp normal is assigned
        if semantics == "NORMAL":
            data = (data[0], data[1], data[2])
        if semantics == "ATTRIBUTE":
            # float_vector layers want exactly 3 components; pad with 0
            data = tuple(data[k] if k < len(data) else 0 for k in range(3))
        vertex_attr_dict[key] = data
        count[semantics] += 1
    return vertex_attr_dict
 


def make_mesh(name, global_matrix, merge, vtx_attr_arr, faces):
    """
    Build a Blender mesh object from parsed csv vertex data.

    name          -- mesh/object name
    global_matrix -- world transform applied to the new object
    merge         -- remove doubled vertices after construction
    vtx_attr_arr  -- per-vertex attribute dicts (see togetherVertexAttr)
    faces         -- list of vertex-index tuples, one per triangle
    """
    if faces is None:
        print("The vertex data(face is none) is not complete!")
        return
    vtx_number = len(vtx_attr_arr)
    if vtx_number == 0:
        # the layer-count probes below index vertex 0 unconditionally
        print("The vertex data is empty!")
        return

    # Create a new mesh from the vertices and faces
    mesh = bpy.data.meshes.new(name)
    obj = bpy.data.objects.new(name, mesh)      # Create the mesh object for the imported mesh
    obj.matrix_world = global_matrix            # Apply transformation matrix
    bpy.context.collection.objects.link(obj)    # Link object to scene
    bpy.context.view_layer.objects.active = obj

    def getVertexSingleAttr(semantic, id):
        # All attribute values of vertex `id` whose key contains `semantic`
        attrs = []
        for k in vtx_attr_arr[id].keys():
            if k.find(semantic) != -1:
                attrs.append(vtx_attr_arr[id][k])
        return attrs

    uv_number = len(getVertexSingleAttr('TEXCOORD', 0))
    color_number = len(getVertexSingleAttr('COLOR', 0))
    packed_number = len(getVertexSingleAttr('UV_PACKED', 0))
    attr_number = len(getVertexSingleAttr('ATTRIBUTE', 0))

    # Bone indices address vertex groups, so a max index of m needs m + 1
    # groups (the original created only m, leaving the last index dangling)
    groupCount = 0
    for i in range(vtx_number):
        for vals in getVertexSingleAttr('BONEINDICES', i):
            groupCount = max(int(max(vals)) + 1, groupCount)

    for i in range(groupCount):
        bpy.ops.object.vertex_group_add()

    bm = bmesh.new()
    bm.from_mesh(mesh)

    for i in range(vtx_number):
        pos_attr = getVertexSingleAttr('POSITION', i)
        nor_attr = getVertexSingleAttr('NORMAL', i)
        if len(pos_attr) > 0:
            vert = bm.verts.new(pos_attr[0])
            # assign the normal only when a vertex was created here; the
            # original could touch an unbound/stale `vert` otherwise
            if len(nor_attr) > 0:
                vert.normal = nor_attr[0]

    bm.verts.ensure_lookup_table()

    for f_idx in faces:
        bm.faces.new([bm.verts[i] for i in f_idx])

    bm.faces.ensure_lookup_table()

    # UV layers, one per TEXCOORD attribute
    uv_layers = [bm.loops.layers.uv.new("UV" + str(i)) for i in range(uv_number)]
    j = 0
    for face in bm.faces:
        for loop in face.loops:
            uv_attrs = getVertexSingleAttr('TEXCOORD', j)
            for i, layer in enumerate(uv_layers):
                loop[layer].uv = uv_attrs[i]
            j = j + 1

    # Vertex color layers, one per COLOR attribute
    col_layers = [bm.loops.layers.color.new("Col" + str(i)) for i in range(color_number)]
    j = 0
    for face in bm.faces:
        for loop in face.loops:
            col_attrs = getVertexSingleAttr('COLOR', j)
            for i, layer in enumerate(col_layers):
                loop[layer] = col_attrs[i]
            j = j + 1

    # Generic float-vector layers, one per ATTRIBUTE attribute
    attr_layers = [bm.loops.layers.float_vector.new("Attribute" + str(i)) for i in range(attr_number)]
    j = 0
    for face in bm.faces:
        for loop in face.loops:
            attrs = getVertexSingleAttr('ATTRIBUTE', j)
            for i, layer in enumerate(attr_layers):
                loop[layer] = attrs[i]
            j = j + 1

    # Packed UV layers: exactly one layer group per packed attribute, with
    # an extra ".zw" layer when the attribute is wider than 2 components.
    # (The original nested the two loops, creating N x N layer groups and
    # desynchronizing layers from attributes when several were present.)
    packed_data_size = []
    for data in getVertexSingleAttr('UV_PACKED', 0):
        size = 0
        for val in data:
            size += len(val)
        packed_data_size.append(size)
    packed_layers = []
    for i in range(packed_number):
        if packed_data_size[i] > 2:
            packed_layers.append(
                [
                    bm.loops.layers.uv.new("Custom" + str(i) + ".xy"),
                    bm.loops.layers.uv.new("Custom" + str(i) + ".zw")
                ]
            )
        else:
            packed_layers.append([bm.loops.layers.uv.new("Custom" + str(i) + ".xy")])

    j = 0
    for face in bm.faces:
        for loop in face.loops:
            packed_attrs = getVertexSingleAttr('UV_PACKED', j)
            for i, layer in enumerate(packed_layers):
                for k, la in enumerate(layer):
                    loop[la].uv = mathutils.Vector(packed_attrs[i][k])
            j = j + 1

    # Bone weights: write (group index -> weight) into the deform layer
    skin_layer = bm.verts.layers.deform.verify()
    if skin_layer is not None:
        for i in range(vtx_number):
            devt = bm.verts[i][skin_layer]
            bone_idx_attrs = getVertexSingleAttr('BONEINDICES', i)
            bone_wei_attrs = getVertexSingleAttr('BONEWEIGHTS', i)
            for k, idx in enumerate(bone_idx_attrs):
                for id in range(len(idx)):
                    devt[int(idx[id])] = bone_wei_attrs[k][id]

    bm.verts.ensure_lookup_table()

    if merge:
        bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.00001)

    bm.to_mesh(mesh)
    bm.free()

    # select everything so the imported mesh is ready for editing
    bpy.ops.object.editmode_toggle()
    bpy.ops.mesh.select_all(action='SELECT')
    bpy.ops.object.editmode_toggle()
