/**
 * Copyright (c) 2012 Tomi Paananen.
 *
 * Part of Quick Gestures user interface.
 */

import QtQuick 1.1
import com.nokia.symbian 1.1
import QuickGestures 1.0

Page {
    id: recognitionPage

    property QuickGesturesEngine engine: null
    property bool engineHasProvidedResult: false
    property bool finalRecognition: false
    property int recognizedId: -1
    property bool doCenter: false

    // Handling of engine signals.
    //
    // NOTE(review): "id" (in onSucceeded) and "state" (in onStateChanged)
    // are assumed to be parameters carried by the engine's signals —
    // confirm against the QuickGesturesEngine declaration; "state" in
    // particular must not resolve to the Page's own state property.
    Connections {
        target: engine

        // Recognition finished with a match: remember the ID and show the
        // matching model gesture in the recognition visualizer.
        onSucceeded: {
            console.debug("Recognized gesture with ID", id);
            recognizedId = id;
            engineHasProvidedResult = true;
            recognitionVisualizer.gesture = engine.model.gestureById(id);
            recognitionVisualizer.show();
        }

        // Recognition finished without a match: clear the visualizer
        // content but still show it (empty) and record the failure.
        onFailed: {
            console.debug("Failed to recognize the gesture!");
            recognizedId = -1;
            engineHasProvidedResult = true;
            recognitionVisualizer.gesture = null;
            recognitionVisualizer.show();
        }

        // Keep the progress label in sync with the engine state.
        onStateChanged: {
            if (state == QuickGesturesEngine.Idle
                    || state == QuickGesturesEngine.Cancelling) {
                console.debug("RecognitionPage.qml: Engine idle or cancelling.");
                progressLabel.text = "Recognized: " + recognizedId;
                progressLabel.hideDelayed(1000);
            }
            else if (state == QuickGesturesEngine.Processing
                     || state == QuickGesturesEngine.Recognizing) {
                console.debug("RecognitionPage.qml: Engine processing or recognizing.");
                progressLabel.text = "Recognizing...";
                progressLabel.shown = true;
            }
        }
    }

    // Once a re-centering has been requested (doCenter, set on orientation
    // change), perform it as soon as the page receives its new dimensions.
    onWidthChanged: if (doCenter) centerGesture();
    onHeightChanged: if (doCenter) centerGesture();

    // Request re-centering of the sketched gesture whenever the device
    // orientation changes. NOTE(review): "root" is not declared in this
    // file — presumably the application window exposing inPortrait; verify.
    // The actual centering is deferred to onWidthChanged/onHeightChanged so
    // that it runs with the post-rotation page dimensions.
    Connections {
        target: root

        onInPortraitChanged: {
            doCenter = true;
        }
    }

    /**
     * Centers the sketched gesture within the page.
     *
     * Does nothing (apart from logging) while the page has not been laid
     * out yet, i.e. when its width or height is still zero. On success the
     * pending doCenter flag is cleared.
     */
    function centerGesture()
    {
        // Use distinct local names so the page's own width/height
        // properties are not shadowed inside this function.
        var pageWidth = recognitionPage.width;
        var pageHeight = recognitionPage.height;

        if (pageWidth > 0 && pageHeight > 0) {
            helper.center(sketchGesture, pageWidth, pageHeight);
            doCenter = false;
        }
        else {
            // Fixed: the message previously said "EditorPage.qml" — an
            // apparent copy/paste leftover from another page.
            console.debug("RecognitionPage.qml: centerGesture(): Will not center",
                          "because the width or the height of the page is zero!");
        }
    }

    /**
     * Tries to cancel the recognition.
     *
     * @return True if the recognition was cancelled or already cancelling,
     *         false otherwise.
     */
    function cancelRecognition()
    {
        // Nothing to cancel when the engine is idle.
        if (engine.state == QuickGesturesEngine.Idle) {
            return false;
        }

        if (engine.state == QuickGesturesEngine.Processing
                || engine.state == QuickGesturesEngine.Recognizing) {
            // An active round can be cancelled.
            engine.cancelRecognition();
        }
        else if (engine.state == QuickGesturesEngine.Cancelling) {
            // A previous cancel request is still in flight; just log.
            console.debug("RecognitionPage.qml: cancelRecognition(): "
                          + "Still cancelling...");
        }

        return true;
    }

    // Label shown at the top of the page while the engine is working; its
    // text and visibility are driven from the engine state handler.
    ProgressLabel {
        id: progressLabel
        // NOTE(review): the explicit width is presumably overridden by the
        // left+right anchors below — confirm and drop one of the two.
        width: parent.width - 10

        anchors {
            top: parent.top
            left: parent.left
            right: parent.right
            margins: 5
        }

        z: 3
        text: "Recognizing...";
        determinate: false
    }

    // Hint shown on an empty page; doubles as an error notice when no
    // engine has been set. Hidden on the first press in the mouse area.
    EmptyPageInfo {
        id: emptyPageInfo
        text: engine ? "Sketch the gesture to recognize here"
                     : "Error: No Quick Gesture engine set!";
    }

    // The gesture currently being sketched; filled point by point from the
    // mouse area and fed to the engine for recognition.
    Gesture { id: sketchGesture }

    // Visualizer for painting the sketched gesture.
    // autoScale is disabled — presumably so that the strokes are drawn 1:1
    // at the touched coordinates; confirm against GestureVisualizer.
    GestureVisualizer {
        id: sketchVisualizer
        anchors.fill: parent
        autoScale: false

        Component.onCompleted: {
            // Start with an empty gesture and bind it to this visualizer.
            sketchGesture.clear();
            gesture = sketchGesture;
        }
    }

    // Mouse area for sketching the gesture.
    //
    // Collects the touched points into sketchGesture and pre-emptively
    // starts intermediate recognition rounds while the user is drawing.
    MouseArea {
        id: recognitionPageMouseArea

        property bool sketching: true // Set true for the first time
        // Points appended since the last pre-emptive recognition round.
        property int numOfNewPoints: 0

        anchors.fill: parent
        z: 1

        /**
         * Adds a new point to the count and takes appropriate actions based
         * on pre-emptive logic: after every 20 new points an intermediate
         * recognition round is started, provided the engine is idle
         * (cancelRecognition() returning true means the engine is still
         * busy or cancelling, so the round is postponed).
         */
        function handleNewPointAdded()
        {
            if (!sketching) {
                return;
            }

            numOfNewPoints += 1;

            if (numOfNewPoints < 20 || cancelRecognition()) {
                // Not enough new points requiring action or the current
                // recognition process has not yet been cancelled.
                return;
            }

            numOfNewPoints = 0;

            // Hide the recognition visualizer if visible.
            if (recognitionVisualizer.opacity > 0) {
                recognitionVisualizer.opacity = 0;
            }

            // Start recognition process.
            engine.recognize(sketchGesture);
        }

        /**
         * Resets the state for new recognition: clears the sketch, hides
         * the result visualizer and stops any pending final-recognition
         * timer.
         */
        function startNewGesture()
        {
            if (stopTimer.running) {
                stopTimer.stop();
            }

            sketchGesture.clear();
            recognitionVisualizer.opacity = 0;
            engineHasProvidedResult = false;
            finalRecognition = false;
            numOfNewPoints = 0;
            sketching = true;
        }

        onPressed: {
            // Lock the orientation while sketching so that a rotation does
            // not change the coordinate system mid-stroke.
            console.debug("RecognitionPage.qml: Locking the orientation.");
            recognitionPage.orientationLock = PageOrientation.LockPrevious;

            if (!sketching && sketchGesture.pointCount) {
                // Time to start a new recognition.
                startNewGesture();
            }

            if (sketchGesture.pointCount) {
                // Add a break point. NOTE(review): (-1, -1) is assumed to
                // be the stroke-separator sentinel understood by Gesture —
                // confirm against its implementation.
                sketchGesture.appendPoint(-1, -1);
            }
            else {
                // Hide the empty page info.
                emptyPageInfo.visible = false;
            }

            sketchGesture.appendPoint(mouse.x, mouse.y);

            // Pre-emptive actions.
            handleNewPointAdded();
        }
        onMousePositionChanged: {
            sketchGesture.appendPoint(mouse.x, mouse.y);

            // Pre-emptive actions.
            handleNewPointAdded();
        }
        onReleased: {
            // Arm the timer that will run the final recognition round one
            // second after the finger is lifted.
            if (sketching) {
                stopTimer.restart();
            }

            console.debug("RecognitionPage.qml: Releasing the orientation lock.");
            recognitionPage.orientationLock = PageOrientation.Automatic;
        }
    }

    // Runs the final recognition round one second after the last release
    // (restarted from onReleased in the mouse area).
    Timer {
        id: stopTimer
        running: false
        repeat: false
        interval: 1000

        onTriggered: {
            console.debug("RecognitionPage.qml: stopTimer::onTriggered");
            // The sketch is considered finished; the next press starts a
            // brand-new gesture.
            recognitionPageMouseArea.sketching = false;

            // NOTE(review): on the first trigger finalRecognition is always
            // false, so this branch re-runs recognition even when a
            // pre-emptive round already provided a result — confirm this is
            // the intended "always do one final pass" behavior, since the
            // else-branch message suggests otherwise.
            if (!engineHasProvidedResult || !finalRecognition) {
                engine.recognize(sketchGesture);
            }
            else {
                console.debug("RecognitionPage.qml: stopTimer::onTriggered:",
                              "Not enough new points to trigger recognition.");
                recognitionVisualizer.opacity = 0.8;
            }

            finalRecognition = true;
        }
    }

    // A visualizer for displaying the recognized gesture.
    GestureVisualizer {
        id: recognitionVisualizer

        /**
         * Calculates the position and the size for this item before showing
         * the visualizer.
         *
         * The opacity is raised to 0.8 for a final recognition result and
         * to a faint 0.1 for an intermediate (pre-emptive) one; hideTimer
         * is re-armed so that the visualizer fades out again later.
         */
        function show()
        {
            console.debug("RecognitionPage.qml: GestureVisualizer::show()");
            hideTimer.restart();

            if (finalRecognition && recognitionVisualizer.opacity < 0.8) {
                recognitionVisualizer.opacity = 0.8;
            }
            else if (recognitionVisualizer.opacity < 0.1) {
                recognitionVisualizer.opacity = 0.1;
            }

            recalculatePosition();
        }

        /**
         * Recalculates the visualizer position based on the dimensions of
         * the gesture and the visualizer itself.
         *
         * Centers the item in its parent; when a gesture is set, the
         * position is further offset so that the drawn content
         * (contentWidth x contentHeight) itself appears centered.
         */
        function recalculatePosition()
        {
            var absX = (parent.width - width) / 2;
            var absY = (parent.height - height) / 2;

            if (recognitionVisualizer.gesture) {
                x = absX + (width - contentWidth) / 2;
                y = absY + (height - contentHeight) / 2;
            }
            else {
                x = absX;
                y = absY;
            }
        }

        // Initial geometry before the first recalculatePosition(): roughly
        // centered, covering 4/5 of the page. Hidden until show() is called.
        x: parent.width / 10
        y: parent.height / 10
        width: parent.width * 4 / 5
        height: parent.height * 4 / 5
        opacity: 0
        z: 2
        lineWidth: 12
        lineColor: "lightgreen"

        // Fade in/out over 600 ms instead of snapping.
        Behavior on opacity {
            NumberAnimation { duration: 600 }
        }

        onContentWidthChanged: recalculatePosition();

        Component.onCompleted: {
            x = (parent.width - width) / 2;
            y = (parent.height - height) / 2;
        }
    }

    // Hides the recognized-gesture visualizer ten seconds after it was
    // last shown (re-armed by the visualizer's show() function).
    Timer {
        id: hideTimer
        interval: 10000
        repeat: false
        running: false
        onTriggered: recognitionVisualizer.opacity = 0;
    }

    // Toolbar with a single back button that pops this page off the stack.
    tools: ToolBarLayout {
        ToolButton {
            iconSource: "toolbar-back"
            onClicked: pageStack.pop();
        }
    }
}
