package uk.ac.cam.juliet.zedcat.client;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.concurrent.BlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.imageio.ImageIO;

/**
 *
 * @author David Piggott
 */
/**
 * Reads an MJPEG multipart stream from the camera, splits it into individual
 * JPEG frames at the multipart boundary markers, and pushes non-blank frames
 * onto the output queue for the rest of the pipeline to consume.
 *
 * Runs until its thread is interrupted; reconnects automatically (with a
 * 10 second back-off) whenever the stream ends or an I/O error occurs.
 *
 * @author David Piggott
 */
public class FrameSource implements Runnable {

    private final Pipeline mPipeline;
    private final Configuration mConfiguration;
    private final BlockingQueue<Frame> mOutputQueue;
    // Multipart boundary advertised by the camera in its Content-Type header.
    private byte[] mBoundary;
    // Most recently extracted JPEG payload (empty when the last part held no image).
    private byte[] mSegment = new byte[0];
    // Growable accumulation buffer for bytes read between boundary markers.
    private byte[] mBuffer = new byte[32768];
    private int mBufferCursor = 0;
    private boolean mConnected = false;
    private InputStream mInputStream;
    // JPEG SOI (start-of-image) marker, used to locate the image inside a part.
    private static final byte[] JPEG_START = new byte[]{(byte) 0xFF, (byte) 0xD8};
    // Compiled once (was compiled on every connection attempt): extracts the
    // boundary token from the Content-Type header.
    private static final Pattern BOUNDARY_PATTERN = Pattern.compile("boundary=(.*)$");

    public FrameSource(Pipeline p, Configuration c, BlockingQueue<Frame> oQ) {
        mPipeline = p;
        mConfiguration = c;
        mOutputQueue = oQ;
    }

    /**
     * Fetches a single still image from the camera's JPEG endpoint.
     *
     * @return the decoded image, or null if the data is not a recognisable image
     * @throws IOException if the endpoint cannot be read
     */
    public BufferedImage getImage() throws IOException {
        return ImageIO.read(mConfiguration.getCameraJPEGLocation());
    }

    /**
     * Main loop: connect, then read the MJPEG stream byte-by-byte, cutting it
     * into parts at each boundary marker and forwarding decoded frames.
     * Exits when the thread is interrupted.
     */
    public void run() {
        mPipeline.appendLine("Connecting to camera.");
        connect();
        int byteIn;
        boolean userNotified = false; // Used to ensure we only notify once each time the camera goes off
        while (!Thread.currentThread().isInterrupted()) {
            try {
                while ((byteIn = mInputStream.read()) != -1) {
                    append(byteIn);
                    if (checkBoundary()) {
                        // We found a boundary marker - process the segment to find the JPEG image in it
                        processSegment();
                        if (mSegment.length > 0) {
                            userNotified = handleSegment(userNotified);
                        }
                        // Clear buffer
                        mBufferCursor = 0;
                    }
                }
                // Clean end-of-stream: the camera closed the connection.
                reconnectAfterError();
            } catch (IOException e) {
                // BUG FIX: an I/O error previously fell through this (empty)
                // catch and re-read from the dead stream in a tight loop;
                // only a clean EOF triggered a reconnect. Reconnect here too.
                reconnectAfterError();
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt flag and stop instead of
                // silently swallowing the interruption.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    /** Logs the stream failure, zeroes the framerate display and reconnects. */
    private void reconnectAfterError() {
        mPipeline.appendLine("Error communicating with camera. Reconnecting.");
        mPipeline.updateFramerate(0);
        connect();
    }

    /**
     * Decodes the current segment and, if it contains a real (non-blank)
     * frame, queues it for the pipeline; otherwise tells the user (once per
     * outage) that the camera has stopped transmitting.
     *
     * @param userNotified whether the "camera not transmitting" notice is active
     * @return the updated notification flag
     * @throws IOException          if decoding fails
     * @throws InterruptedException if interrupted while queueing the frame
     */
    private boolean handleSegment(boolean userNotified) throws IOException, InterruptedException {
        BufferedImage image = ImageIO.read(new ByteArrayInputStream(mSegment));
        if (image == null) {
            // BUG FIX: ImageIO.read returns null on truncated/corrupt JPEG
            // data; previously this caused an NPE at image.getWidth().
            return userNotified;
        }
        int[] sourcePixels = image.getRGB(0, 0, image.getWidth(), image.getHeight(), null, 0, image.getWidth());

        // Set configuration so the Hotspots can be scaled - Chris
        mConfiguration.ImageWidth = image.getWidth();

        if (!isBlank(image, sourcePixels)) {
            mOutputQueue.put(new Frame(image, sourcePixels));
            if (userNotified) {
                mPipeline.appendLine("Camera is transmitting again.");
                return false;
            }
        } else {
            mPipeline.updateFramerate(0);
            if (!userNotified) {
                mPipeline.appendLine("Connection to camera is OK but camera" +
                    " is not transmitting. Try waving your hand in front" +
                    " of the passive IR sensor to trigger transmission.");
                return true;
            }
        }
        return userNotified;
    }

    /**
     * A frame is "blank" when every pixel in the sampled region (x > 70,
     * y < 20) is near-black (r, g, b all below 30); the camera emits such
     * frames while it is not transmitting. Only the sampled region is scanned
     * (the original looped over the whole image and skipped most pixels).
     */
    private boolean isBlank(BufferedImage image, int[] pixels) {
        int width = image.getWidth();
        int maxY = Math.min(20, image.getHeight());
        for (int x = 71; x < width; x++) {
            for (int y = 0; y < maxY; y++) {
                int pixel = pixels[(y * width) + x];
                int r = (pixel >> 16) & 0xFF;
                int g = (pixel >> 8) & 0xFF;
                int b = pixel & 0xFF;
                if (r >= 30 || g >= 30 || b >= 30) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * (Re)connects to the camera's MJPEG endpoint, parsing the multipart
     * boundary out of the Content-Type header. Retries every 10 seconds until
     * connected; returns early (without connecting) if interrupted.
     */
    private void connect() {
        if (mConnected) {
            try {
                mInputStream.close();
            } catch (IOException ignored) {
                // Best effort: the old stream may already be broken.
            }
            mConnected = false;
        }
        while (!mConnected) {
            try {
                HttpURLConnection httpURLConnection = (HttpURLConnection) mConfiguration.getCameraMJPEGLocation().openConnection();
                String contentType = httpURLConnection.getContentType();
                Matcher matcher = BOUNDARY_PATTERN.matcher(contentType);
                if (!matcher.find()) {
                    // BUG FIX: the find() result was previously ignored, so a
                    // missing boundary surfaced as an IllegalStateException
                    // from group(1). Fail explicitly instead.
                    throw new IOException("No multipart boundary in Content-Type: " + contentType);
                }
                mBoundary = matcher.group(1).getBytes();
                mInputStream = httpURLConnection.getInputStream();
                mConnected = true;
                mPipeline.appendLine("Connected to camera.");
            } catch (Exception e1) {
                mPipeline.appendLine("Error connecting to camera. Will wait and retry.");
                try {
                    Thread.sleep(10000);
                } catch (InterruptedException e2) {
                    // Restore the flag and give up; run() checks the interrupt
                    // status before touching the (possibly null) stream.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    /** Appends one byte to the accumulation buffer, doubling it when full. */
    private void append(int i) {
        if (mBufferCursor >= mBuffer.length) {
            // Make buffer bigger
            byte[] newBuffer = new byte[mBuffer.length * 2];
            System.arraycopy(mBuffer, 0, newBuffer, 0, mBuffer.length);
            mBuffer = newBuffer;
        }
        mBuffer[mBufferCursor++] = (byte) i;
    }

    /**
     * Scans the accumulated part for the JPEG start-of-image marker and copies
     * everything from there up to (but excluding) the trailing boundary into
     * mSegment. Leaves mSegment empty when the part holds no JPEG data.
     */
    private void processSegment() {
        int start = -1;
        // BUG FIX: was "i < mBufferCursor - JPEG_START.length", which skipped
        // the last position at which the marker could legally start.
        for (int i = 0; i <= mBufferCursor - JPEG_START.length; i++) {
            if (segmentsEqual(mBuffer, i, JPEG_START, 0, JPEG_START.length)) {
                start = i;
                break;
            }
        }
        int segmentLength = (start >= 0) ? mBufferCursor - mBoundary.length - start : 0;
        if (segmentLength > 0) {
            mSegment = new byte[segmentLength];
            System.arraycopy(mBuffer, start, mSegment, 0, segmentLength);
        } else {
            // BUG FIX: mSegment previously kept its old contents when no JPEG
            // was found, so the previous frame could be decoded and queued a
            // second time; a negative length also crashed System.arraycopy.
            mSegment = new byte[0];
        }
    }

    /** @return true when the buffer currently ends with the boundary marker. */
    private boolean checkBoundary() {
        return segmentsEqual(mBuffer, mBufferCursor - mBoundary.length, mBoundary, 0, mBoundary.length);
    }

    /**
     * Bounds-safe comparison of {@code len} bytes of {@code b1} (from
     * {@code b1Start}) against {@code b2} (from {@code b2Start}); out-of-range
     * offsets compare unequal rather than throwing.
     */
    private boolean segmentsEqual(byte[] b1, int b1Start, byte[] b2, int b2Start, int len) {
        if (b1Start < 0 || b2Start < 0 || b1Start + len > b1.length || b2Start + len > b2.length) {
            return false;
        }
        for (int i = 0; i < len; i++) {
            if (b1[b1Start + i] != b2[b2Start + i]) {
                return false;
            }
        }
        return true;
    }
}
