/*
 * This file includes all of the functions that relate to the camera.
 * 
 * It is in the same "package" as the main file, Robot2012.java, which means
 * that it will be sent to the robot along-side of it.  For this set of code,
 * we need a few more imports than in Robot2012... specifically, we need all of
 * the library files dealing with the camera and image processing.
 */

package edu.wpi.first.wpilibj.frc2012;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.camera.*;
import edu.wpi.first.wpilibj.image.*;

/*
 * This class "extends" Robot2012, the main class, which means that it can add
 * onto the code contained there.  For example, the Robot2012 allows the robot
 * to be driven, however this class also makes the robot check for button 10
 * being pressed on either joystick in order to perform the findRectangle()
 * function.  Settings is also implemented here, so that this class can use
 * the variables set in Settings.java.
 */

public class Camera2012 extends Robot2012 implements Settings {
    
/*
 * First of all, we want to sort out some variables.  We declare the camera
 * here and make it public, or accessible by other classes.  The camera should
 * be connected to the Robot's Bridge, with an IP address of 10.24.25.11.  The
 * call "getInstance()" connects the camera to the robot and driver's station.
 * 
 * We also want to use a variable called CriteriaCollection.  WPI created this
 * type of variable, literally to collect all of the criteria for image
 * recognition.  So far, we use the CriteriaCollection variable (which I've
 * called "criteria") to set the minimum width and height of a rectangle to be
 * considered as possibly the basketball hoop's backboard.
 */
    
    CriteriaCollection criteria = new CriteriaCollection();
    public AxisCamera camera = AxisCamera.getInstance("10.24.25.11");
    
/*
 * We override the robotInit() function from Robot2012 and add onto it.
 * 
 * During the initialization of the robot, we set the camera's compression rate
 * (0 = no compression, so the image is as sharp as possible) and resolution
 * (640x480, the greatest the AxisCamera supports).
 * 
 * We also take the opportunity to set the criteria for rectangle recognition:
 * a particle must be 30-400 pixels wide and 40-400 pixels tall to be kept.
 */
    
    public void robotInit() {
        camera.writeCompression(0);
        camera.writeResolution(AxisCamera.ResolutionT.k640x480);
        
        // Only particles whose bounding rectangle falls inside these pixel
        // ranges survive the particleFilter() call in findRectangle().
        criteria.addCriteria(NIVision.MeasurementType.IMAQ_MT_BOUNDING_RECT_WIDTH, 30, 400, false);
        criteria.addCriteria(NIVision.MeasurementType.IMAQ_MT_BOUNDING_RECT_HEIGHT, 40, 400, false);
    }
    
/*
 * Here's a new function!  I called it "findRectangle()" because that's all it
 * does.  It takes an image, filters it, and reports on the rectangles it
 * contains.
 * 
 * Here's the basic path that the image takes:
 * 
 * Camera Image --> Binary Image (all colors that meet the "threshold" are kept
 * as one color, and all the rest are turned black) --> Filtered Image (small
 * objects are removed) --> Convex Image (convexHull takes all outlines of
 * polygons and fills them in to make them solid) --> Final Image (now that the
 * polygons are filled in, the filter takes out any rectangles that are too
 * small) --> Particle Analysis (reports data about each "blob" in the image).
 * 
 * We want to take the "blob" with the best rectangle score (the one that is
 * most likely a rectangle in shape) and use it as our aiming point.  We'll use
 * the best_x and best_y variables to track where the best "blob" is on the
 * screen, and later determine how the turret-shooter needs to aim.
 * 
 * For the moment, a full particle analysis report is printed out to the
 * driver's station - I believe this can be viewed using the "Debug" tab.
 */
    
    public void findRectangle() {
        try {
            // Grab a frame and run it through the processing pipeline
            // described above.  The HSL threshold bounds come from Settings.
            ColorImage colorImage = camera.getImage();
            BinaryImage thresholdImage =  colorImage.thresholdHSL(hueLow, hueHigh, saturationLow, saturationHigh, luminenceLow, luminenceHigh);
            BinaryImage filterImage = thresholdImage.removeSmallObjects(false, 2);
            BinaryImage convexImage = filterImage.convexHull(false);
            BinaryImage finalImage = convexImage.particleFilter(criteria);
            
            double best_score = 0;
            int best_x = 0;
            int best_y = 0;
            
            ParticleAnalysisReport[] reports = finalImage.getOrderedParticleAnalysisReports();  // get list of results
            for (int i = 0; i < reports.length; i++) {                                // print results
                ParticleAnalysisReport r = reports[i];
                System.out.println(r.toString());
                // BUG FIX: best_score was never updated, so every particle
                // with a positive quality overwrote best_x/best_y and the
                // LAST such particle won.  Remembering the score here makes
                // the loop actually track the highest-quality particle.
                if(r.particleQuality > best_score) {
                    best_score = r.particleQuality;
                    best_x = r.center_mass_x;
                    best_y = r.center_mass_y;
                }
            }
            System.out.println("Best Scores: " + best_x + " , " + best_y);
            System.out.println(finalImage.getNumberParticles() + "  " + Timer.getFPGATimestamp());
            
            // Free the images in reverse order of creation so the cRIO's
            // limited image memory is released between frames.
            // NOTE(review): if an exception is thrown mid-pipeline, the
            // images created before it are never freed - consider moving
            // these frees into a finally block.  TODO confirm on-robot.
            finalImage.free();
            convexImage.free();
            filterImage.free();
            thresholdImage.free();
            colorImage.free();
        }
        catch (AxisCameraException ex) {
            // Camera was unreachable or returned a bad frame; report and
            // carry on so the robot keeps driving.
            ex.printStackTrace();
        }
        catch (NIVisionException ex) {
            // An image-processing step failed; report and carry on.
            ex.printStackTrace();
        }
        
    }
    
/*
 * We override this function too!  We want the robot not only to drive, but to
 * perform the findRectangle() function when the Joystick has button 10 pressed.
 * (Only while debugTesting is enabled in Settings, so the vision code can't
 * slow the robot down during a real match.)
 */
    
    public void teleopPeriodic() {
        if(debugTesting == 1) {
            if(joyLeft.getRawButton(10) || joyRight.getRawButton(10)) {
                findRectangle();
            }
        }
    }
}
