package ballena.activities;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.StreamCorruptedException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.util.List;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import ballena.utiles.Global;
import ballena.utiles.Servlet;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.ViewGroup.LayoutParams;
import android.widget.Toast;

public class EncuentraAmigo extends Activity {
	private SensorManager mSensorManager;
	private CameraView mCameraView;
	private GLSurfaceView mGLSurfaceView;

	private Localizacion loc;
	CuadradoRenderer cuadradoRenderer;

	/**
	 * Augmented-reality screen: a translucent GL surface (rendering a quad
	 * that points toward a friend's position) layered over a live camera
	 * preview. The friend's id is read from the launching Intent's extras.
	 */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// getExtras() may be null if the activity is launched without
		// extras; the original code would NPE here in that case.
		Bundle bundle = getIntent().getExtras();
		int id = (bundle != null) ? bundle.getInt("id") : 0;

		// GL surface with an alpha channel so the camera preview behind it
		// stays visible (TRANSLUCENT holder format).
		mGLSurfaceView = new GLSurfaceView(this);
		mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
		cuadradoRenderer = new CuadradoRenderer(this, id);
		mGLSurfaceView.setRenderer(cuadradoRenderer);
		mGLSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);

		setContentView(mGLSurfaceView);

		// Camera preview added on top of the (translucent) GL surface.
		mCameraView = new CameraView(this);
		addContentView(mCameraView, new LayoutParams(LayoutParams.WRAP_CONTENT,
				LayoutParams.WRAP_CONTENT));

		mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);

		// NOTE(review): listeners are registered only here but unregistered
		// in onStop(), so after a stop/restart cycle the compass no longer
		// updates. Moving registration to onResume() and unregistration to
		// onPause() would be the conventional fix.
		registerFirstSensor(Sensor.TYPE_ACCELEROMETER);
		registerFirstSensor(Sensor.TYPE_MAGNETIC_FIELD);
	}

	/** Registers the renderer on the first available sensor of the given type. */
	private void registerFirstSensor(int sensorType) {
		List<Sensor> listSensors = mSensorManager.getSensorList(sensorType);
		if (!listSensors.isEmpty()) {
			mSensorManager.registerListener(cuadradoRenderer,
					listSensors.get(0), SensorManager.SENSOR_DELAY_UI);
		}
	}

	@Override
	protected void onResume() {
		super.onResume();
		mGLSurfaceView.onResume();
	}

	@Override
	protected void onPause() {
		super.onPause();
		mGLSurfaceView.onPause();
	}

	@Override
	protected void onStop() {
		super.onStop();
		mSensorManager.unregisterListener(cuadradoRenderer);
		// Stop the background location-polling thread and wait for it to
		// finish so it does not keep querying the servlet after we stop.
		loc = cuadradoRenderer.getLocalizacion();
		if (loc != null) {
			loc.ejecutar(false);
			try {
				loc.join();
			} catch (InterruptedException e) {
				// Preserve the interrupt status instead of swallowing it.
				Thread.currentThread().interrupt();
			}
		}
	}
}

/**
 * Full-screen live camera preview used as the background of the AR view.
 * Owns the (exclusive) Camera resource for the lifetime of its surface.
 */
class CameraView extends SurfaceView implements SurfaceHolder.Callback {
	SurfaceHolder mHolder;
	Camera mCamera;
	Context context;

	CameraView(Context context) {
		super(context);
		this.context = context;

		// Install a SurfaceHolder.Callback so we get notified when the
		// underlying surface is created and destroyed.
		mHolder = getHolder();
		mHolder.addCallback(this);
		mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
	}

	public void surfaceCreated(SurfaceHolder holder) {
		// The Surface exists now: acquire the camera and tell it where to
		// draw. Camera.open() may return null if no back camera is
		// available or it is already in use.
		mCamera = Camera.open();
		if (mCamera == null) {
			Log.e("Camera", "Camera.open() returned null");
			return;
		}
		try {
			mCamera.setPreviewDisplay(holder);
		} catch (IOException exception) {
			mCamera.release();
			mCamera = null;
			Log.e("Camera", "Error en SurfaceCreated", exception);
		}
	}

	public void surfaceDestroyed(SurfaceHolder holder) {
		// The surface is going away: stop the preview and release the
		// camera (it is an exclusive device resource). The original code
		// created a Toast here with an invalid duration constant and never
		// called show() on it, so a log line is the observable equivalent.
		Log.d("Camera", "surfdes");
		if (mCamera != null) {
			mCamera.stopPreview();
			mCamera.release();
			mCamera = null;
		}
	}

	public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
		// Size is known: configure the preview size and start streaming.
		if (mCamera == null) {
			return; // surfaceCreated failed; nothing to configure.
		}
		Camera.Parameters parameters = mCamera.getParameters();
		parameters.setPreviewSize(w, h);
		mCamera.setParameters(parameters);
		mCamera.startPreview();
	}
}

/**
 * GL ES 1.x renderer that orients a textured quad toward a friend's GPS
 * position, using the accelerometer + magnetometer as a compass (device held
 * in landscape). Positions are polled by a background {@link Localizacion}
 * thread started from the constructor.
 */
class CuadradoRenderer implements GLSurfaceView.Renderer, SensorEventListener {
	private float mAccelerometerValues[] = new float[3];
	private float mMagneticValues[] = new float[3];
	private float rotationMatrix[] = new float[16];
	private float remappedRotationMatrix[] = new float[16];

	private Localizacion loc;

	private Cuadrado mCuadrado;

	private Context mContext;

	// Last known coordinates: yours (y*) and mine (m*), in the scaled
	// integer form produced by Localizacion.getPosiciones().
	private int yLat, yLon, mLat, mLon;

	public CuadradoRenderer(Context c, int id) {
		// Fix: the original never assigned mContext, leaving it null.
		mContext = c;
		mCuadrado = new Cuadrado(c, id);
		// Start the thread that polls the friend's location (and mine).
		loc = new Localizacion(id);
		loc.ejecutar(true);
		loc.start();
	}

	/** Exposes the polling thread so the Activity can stop it in onStop(). */
	public Localizacion getLocalizacion() {
		return loc;
	}

	/**
	 * Angle in degrees between north and the segment joining the two points,
	 * from the legs of the implied right triangle. Returns 0 when the points
	 * coincide (the original divided by zero and produced NaN, which then
	 * flowed into glRotatef).
	 */
	private float obtenerAnguloN(Integer lat1, Integer lat2, Integer lon1,
			Integer lon2) {
		float cateto1 = lat2 - lat1;
		float cateto2 = lon2 - lon1;

		double hipotenusa = Math.hypot(cateto1, cateto2);
		if (hipotenusa == 0.0) {
			return 0.0f;
		}
		double angulo = Math.asin(cateto2 / hipotenusa);
		return (float) Math.toDegrees(angulo);
	}

	/**
	 * Given my position and the other user's, returns the bearing in degrees
	 * relative to north, resolved into the correct quadrant.
	 */
	private float obtenerAngulo(Integer mLat, Integer mLon, Integer yLat,
			Integer yLon) {
		float angulo = 90.0f;
		if (yLat >= mLat) { // The other user is to the north
			if (yLon >= mLon) { // ... and to the east [0-90]
				angulo = this.obtenerAnguloN(Math.abs(mLat), Math.abs(yLat),
						Math.abs(yLon), Math.abs(mLon));
			} else { // ... and to the west [270-360]
				angulo = this.obtenerAnguloN(Math.abs(mLat), Math.abs(yLat),
						Math.abs(mLon), Math.abs(yLon));
				angulo = angulo + 270.0f;
			}
		} else { // The other user is to the south
			if (yLon >= mLon) { // ... and to the east [90-180]
				angulo = this.obtenerAnguloN(Math.abs(yLat), Math.abs(mLat),
						Math.abs(yLon), Math.abs(mLon));
				angulo = (90 - angulo) + 90.0f;
			} else { // ... and to the west [180-270]
				angulo = this.obtenerAnguloN(Math.abs(yLat), Math.abs(mLat),
						Math.abs(mLon), Math.abs(yLon));
				angulo = angulo + 180.0f;
			}
		}
		return angulo;
	}

	public void onDrawFrame(GL10 gl) {
		// Build the device-orientation matrix from the latest sensor data.
		SensorManager.getRotationMatrix(rotationMatrix, null,
				mAccelerometerValues, mMagneticValues);
		// Using the device as a compass in landscape mode, per the
		// SensorManager.remapCoordinateSystem documentation.
		SensorManager.remapCoordinateSystem(rotationMatrix,
				SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X,
				remappedRotationMatrix);

		// Clear color buffer
		gl.glClear(GL10.GL_COLOR_BUFFER_BIT);

		// Load remapped matrix
		gl.glMatrixMode(GL10.GL_MODELVIEW);
		gl.glLoadIdentity();
		gl.glLoadMatrixf(remappedRotationMatrix, 0);

		// Latest positions: { myLat, myLon, friendLat, friendLon }.
		Integer[] localizaciones = loc.getPosiciones();
		if (localizaciones == null || localizaciones.length < 4) {
			// Keep the previous coordinates. The original built a Toast
			// here but never show()ed it (and mContext was never assigned),
			// so logging is the observable equivalent -- and a Toast cannot
			// be raised from the GL render thread anyway.
			Log.w("CuadradoRenderer", "Fallo al obtener posiciones");
		} else {
			mLat = localizaciones[0];
			mLon = localizaciones[1];
			yLat = localizaciones[2];
			yLon = localizaciones[3];
		}
		float angulo = this.obtenerAngulo(mLat, mLon, yLat, yLon);

		// Rotate the quad so it points from my position toward the friend.
		gl.glRotatef(angulo, 0.0f, 0.0f, -1.0f);

		gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glEnableClientState(GL10.GL_COLOR_ARRAY);

		mCuadrado.draw(gl);
	}

	public void onSurfaceChanged(GL10 gl, int width, int height) {
		gl.glViewport(0, 0, width, height);

		/*
		 * Set our projection matrix. This doesn't have to be done each time
		 * we draw, but usually a new projection needs to be set when the
		 * viewport is resized.
		 */
		float ratio = (float) width / height;
		gl.glMatrixMode(GL10.GL_PROJECTION);
		gl.glLoadIdentity();
		gl.glFrustumf(-ratio, ratio, -1, 1, 1, 100);
	}

	public void onSurfaceCreated(GL10 gl, EGLConfig config) {
		/*
		 * By default, OpenGL enables features that improve quality but
		 * reduce performance. One might want to tweak that especially on a
		 * software renderer.
		 */
		gl.glDisable(GL10.GL_DITHER);

		// One-time OpenGL initialization for this context.
		gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);

		gl.glClearColor(0, 0, 0, 0);
	}

	@Override
	public void onAccuracyChanged(Sensor arg0, int arg1) {
		// Accuracy changes are irrelevant here.
	}

	@Override
	public void onSensorChanged(SensorEvent event) {
		// Synchronized so onDrawFrame (GL thread) never sees a half-written
		// array; clone() because the framework reuses event.values.
		synchronized (this) {
			switch (event.sensor.getType()) {
			case Sensor.TYPE_ACCELEROMETER:
				mAccelerometerValues = event.values.clone();
				break;
			case Sensor.TYPE_MAGNETIC_FIELD:
				mMagneticValues = event.values.clone();
				break;
			default:
				break;
			}
		}
	}
}

class Cuadrado {
	private FloatBuffer mVertexBuffer;
	private IntBuffer mColorBuffer;
	private ByteBuffer mIndexBuffer;

	// Our UV texture buffer.
	private FloatBuffer mTextureBuffer; // New variable.

	// Our texture id.
	private int mTextureId = -1; // New variable.

	private Bitmap mBitmap;

	private byte [] obtenerPrincipal(int id){
		byte [] pral = new byte[1];
 		String param[] = { "accion=" + Global.OBTENER_PRAL,
				"id=" + id};
		Servlet s = new Servlet(Global.SERVLET, param);
		s.conectar();

		ObjectInputStream in;
		try {
			in = new ObjectInputStream(s.getInputStream());
			pral = (byte []) in.readObject();
		} catch (StreamCorruptedException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} catch (ClassNotFoundException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} finally {
			s.desconectar();
		}
		return pral;
	} 
	
	public Cuadrado(Context c,int id) {

		// Aqui obtengo la imagen principal y cargo un bitmap	
		byte [] pral = this.obtenerPrincipal(id);
		
		//mBitmap = BitmapFactory.decodeResource(c.getResources(),
			//	R.drawable.enjuto);
		mBitmap = BitmapFactory.decodeByteArray(pral, 0, pral.length);

		int one = 0x10000;

		int colorLetters[] = {
				// pos
				one, one, one, one, one, one, one, one, one, one, one, one,
				one, one, one, one, };

		// Buffers to be passed to gl*Pointer() functions
		// must be direct, i.e., they must be placed on the
		// native heap where the garbage collector cannot
		// move them.
		//
		// Buffers with multi-byte datatypes (e.g., short, int, float)
		// must have their byte order set to native order

		// (( vertices_per_compass_line * coords_per_vertex * lines_number)
		// + pos_vertices * coords_per_vertex + south_vertices *
		// coords_per_vertex
		// + east_vertices * coords_per_vertex + west_vertices *
		// coords_per_vertex)
		// * bytes_per_float
		ByteBuffer vbb = ByteBuffer.allocateDirect(((2 * 3 * 16) + (6 * 3)
				+ (10 * 3) + (8 * 3) + (8 * 3)) * 4);
		vbb.order(ByteOrder.nativeOrder());
		mVertexBuffer = vbb.asFloatBuffer();

		// ((total_compass_vertices * coords_per_color) +
		// (pos_vertices * coords_per_color) + (south_vertices *
		// coords_per_color))
		// * bytes_per_int
		ByteBuffer cbb = ByteBuffer.allocateDirect(((32 * 4) + (6 * 4)
				+ (10 * 4) + (8 * 4) + (8 * 4)) * 4);
		cbb.order(ByteOrder.nativeOrder());
		mColorBuffer = cbb.asIntBuffer();

		mIndexBuffer = ByteBuffer.allocateDirect(32 + 6 + 10 + 8 + 8);

		// Vertices del cuadrado
		float pos[] = {
				-20.0f, 48.0f, -20.0f, //v0
				20.0f,48.0f, -20.0f, //v1
				-20.0f, 48.0f, 20.0f, //v2
				20.0f, 48.0f, 20.0f }; //v3

		mVertexBuffer.put(pos);

		// Como se unen los vertices
		byte indices[] = { 0, 1, 2, 1, 3, 2 };
		mIndexBuffer.put(indices);

		mColorBuffer.put(colorLetters);

		// Texturas

		// Se mepea la textura haciendo coincidir el 0,1 con el v0 y el 1,1 von el v1
		float textureCoords[] = { 
				0.0f, 1.0f, // v0 (parte baja izq)
				1.0f, 1.0f, // v1 (parte baja der)
				0.0f, 0.0f, // v2 (parte alta izq)
				1.0f, 0.0f, // v3 (parte alta der)
		};

		// float is 4 bytes, therefore we multiply the number if
		// vertices with 4.
		ByteBuffer byteBuf = ByteBuffer
				.allocateDirect(textureCoords.length * 4);
		byteBuf.order(ByteOrder.nativeOrder());
		mTextureBuffer = byteBuf.asFloatBuffer();
		mTextureBuffer.put(textureCoords);
		mTextureBuffer.position(0);

		mColorBuffer.position(0);
		mVertexBuffer.position(0);
		mIndexBuffer.position(0);

	}

	public void draw(GL10 gl) {
		loadGLTexture(gl);

		if (mTextureId != -1 && mTextureBuffer != null) {
			gl.glEnable(GL10.GL_TEXTURE_2D);
			// Enable the texture state
			gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

			// Point to our buffers
			gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, mTextureBuffer);
			gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureId);
		}

		gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
		gl.glColorPointer(4, GL10.GL_FIXED, 0, mColorBuffer);
		gl.glDrawElements(GL10.GL_TRIANGLES, 6, GL10.GL_UNSIGNED_BYTE,
				mIndexBuffer);
	}

	private void loadGLTexture(GL10 gl) {
		// Generate one texture pointer...
		int[] textures = new int[1];
		gl.glGenTextures(1, textures, 0);
		mTextureId = textures[0];

		// ...and bind it to our array
		gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureId);

		// Create Nearest Filtered Texture
		gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
				GL10.GL_LINEAR);
		gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
				GL10.GL_LINEAR);

		// Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
		gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
				GL10.GL_CLAMP_TO_EDGE);
		//gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
			//	GL10.GL_REPEAT);

		// Use the Android GLUtils to specify a two-dimensional texture image
		// from our bitmap
		GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, mBitmap, 0);
	}

}
