﻿using Tesla.Content;
using Tesla.Core;
using Tesla.Graphics;
using Tesla.Math;

namespace TeslaSamples.Renderer {
    [AppDescription("Vertex Buffer Sample", PlatformCapabilities.Direct3D10 | PlatformCapabilities.XNA4, "VertexBufferImage", "VertexBufferDesc", "VertexBufferSource")]
    public class VertexBufferSample : BasicApp {
        // GPU resources for the quad, and the shader used to draw it.
        private VertexBuffer quadVertices;
        private IndexBuffer quadIndices;
        private Effect litEffect;

        protected override void LoadContent() {
            ClearColor = Color.CornflowerBlue;
            Window.Title = "Vertex Buffer Sample";

            // Build the four corners of a quad in the XY plane, plus a
            // matching normal (all facing +Z) for each corner. DataBuffers are
            // more or less wrappers over plain arrays that expose their
            // contents generically as byte data for multiple element types.
            DataBuffer<Vector3> positions = new DataBuffer<Vector3>(4);
            positions.Set(new Vector3(-1, 1, 0));
            positions.Set(new Vector3(1, 1, 0));
            positions.Set(new Vector3(1, -1, 0));
            positions.Set(new Vector3(-1, -1, 0));

            DataBuffer<Vector3> normals = new DataBuffer<Vector3>(4);
            for (int i = 0; i < 4; i++) {
                normals.Set(new Vector3(0, 0, 1));
            }

            // Describe the interleaved vertex layout: a Vector3 position at
            // byte offset 0, followed by a Vector3 normal at byte offset 12.
            // VertexBuffer can take several DataBuffers and interleave them for
            // us, so we don't have to do it by hand. This is the sort of work
            // Mesh and MeshData do behind the scenes, and it could equally be
            // expressed with a custom vertex struct (e.g. VertexPositionNormal).
            VertexDeclaration decl = new VertexDeclaration(new VertexElement[] {
                    new VertexElement(VertexSemantic.Position, 0, VertexFormat.Vector3, 0),
                    new VertexElement(VertexSemantic.Normal, 0, VertexFormat.Vector3, 12),
            });

            quadVertices = new VertexBuffer(decl, 4, ResourceUsage.Static);
            quadVertices.SetInterleavedData(positions, normals);

            // The quad is two triangles sharing the four vertices above, so an
            // index buffer avoids duplicating corners. Winding is clockwise
            // (the engine's default vertex winding).
            quadIndices = new IndexBuffer(IndexFormat.SixteenBits, 6, ResourceUsage.Static);
            quadIndices.SetData<short>(new short[] { 0, 1, 2, 0, 2, 3 });

            // Geometry is ready; now load one of the precompiled shaders that
            // ships with the engine. The platform-specific implementations of
            // the shader library are embedded in the engine DLLs and served by
            // the render system's default content manager.
            litEffect = ContentManager.Load<Effect>("LitBasicEffect.tebo").Clone();
            litEffect.CurrentTechnique = litEffect.Techniques["LitBasicColor"];
            litEffect.Parameters["MatDiffuse"].SetValue(Color.Crimson.ToVector3());

            // This effect expects a light to render properly, so set one up
            // manually and push it into the shader's light list.
            PointLight light = new PointLight {
                Position = new Vector3(0, 0, 50),
                Attenuate = false
            };
            litEffect.Parameters.GetParameterBySemantic("LIGHTCOUNT").SetValue(1);
            ApplyLight(litEffect.Parameters.GetParameterBySemantic("LIGHTLIST").Elements[0], light);
        }

        // Mirrors the lighting logic used by the material system: when a
        // renderable's light collection has changed since it was last rendered,
        // the engine copies each light's fields into the corresponding shader
        // structure members, exactly as done here.
        private void ApplyLight(IEffectParameter lightParam, PointLight light) {
            lightParam.StructureMembers[0].SetValue(light.Ambient.ToVector3());
            lightParam.StructureMembers[1].SetValue(light.Diffuse.ToVector3());
            lightParam.StructureMembers[2].SetValue(light.Specular.ToVector3());
            lightParam.StructureMembers[3].SetValue(light.Attenuate);
            lightParam.StructureMembers[4].SetValue(light.Constant);
            lightParam.StructureMembers[5].SetValue(light.Linear);
            lightParam.StructureMembers[6].SetValue(light.Quadratic);
            // NOTE(review): the position is packed with w = 0, which shaders
            // often reserve for directions — confirm the shader reads this
            // slot as a position.
            lightParam.StructureMembers[7].SetValue(new Vector4(light.Position, 0));
            lightParam.StructureMembers[8].SetValue(Vector3.Down);
            lightParam.StructureMembers[9].SetValue(0f);
            lightParam.StructureMembers[10].SetValue(0f);
        }

        // Invoked by BasicApp during Render(), between clearing the swapchain
        // and presenting the backbuffer.
        protected override void OnRenderPost(IRenderer renderer) {
            // The scene graph does this before drawing a mesh: publish the
            // mesh's compiled world transform to the engine value map.
            Engine.ValueMap.WorldMatrix = Matrix.FromScale(25f) * Matrix.FromTranslation(new Vector3(0, 10, 0));

            // The engine value system tracks values shared across the engine
            // (W-V-P matrices, current time, ...). When a material is applied,
            // these are bound to the shader automatically; here we bind them
            // by hand. The same result could be had by building the view and
            // projection matrices (these come from the renderer's camera,
            // created in the base class) and concatenating them.
            litEffect.Parameters["WVP"].SetValue(Engine.ValueMap.WorldViewProjection);

            // The world inverse-transpose keeps lighting correct under
            // non-uniform scaling, as opposed to using the world matrix directly.
            litEffect.Parameters["WorldIT"].SetValue(Engine.ValueMap.WorldInverseTranspose);
            litEffect.Parameters["EyePos"].SetValue(renderer.CurrentCamera.Position);

            litEffect.CurrentTechnique.Passes[0].Apply();

            // Engine policy for render states: "apply only the states you
            // need". Applied states are cached, so setting CullNone twice only
            // hits the device once — important when the render queue groups
            // renderables with similar materials (shader, textures, states)
            // together. Any state left unset falls back to the engine default
            // (see the individual state classes for those defaults).
            renderer.RasterizerState = RasterizerState.CullNone;

            // Bind the buffers and issue the indexed draw call.
            renderer.SetVertexBuffer(quadVertices);
            renderer.SetIndexBuffer(quadIndices);
            renderer.DrawIndexed(PrimitiveType.TriangleList, quadIndices.IndexCount, 0, 0);
        }
    }
}
