/* eslint-disable no-console */
/* eslint-disable no-unused-vars */
const Runtime = require('../../engine/runtime');

const ArgumentType = require('../../extension-support/argument-type');
const BlockType = require('../../extension-support/block-type');
const Clone = require('../../util/clone');
const Cast = require('../../util/cast');
const formatMessage = require('format-message');
const Video = require('../../io/video');
const VideoMotion = require('./library');
/**
 * Icon svg to be displayed in the blocks category menu, encoded as a data URI.
 * NOTE(review): currently an empty string, so no category icon is rendered —
 * confirm whether an SVG data URI was meant to be inlined here.
 * @type {string}
 */
// eslint-disable-next-line max-len
const menuIconURI = '';

/**
 * Icon svg to be displayed at the left edge of each extension block, encoded as a data URI.
 * NOTE(review): currently an empty string, so blocks render without an icon —
 * confirm whether an SVG data URI was meant to be inlined here.
 * @type {string}
 */
// eslint-disable-next-line max-len
const blockIconURI = '';

/**
 * Sensor attribute the "video [ATTRIBUTE] on [SUBJECT]" reporter can report.
 * Used as the menu values for the videoOn block.
 * @readonly
 * @enum {string}
 */
const SensingAttribute = {
    /** The amount of motion. */
    MOTION: 'motion',

    /** The direction of the motion. */
    DIRECTION: 'direction'
};

/**
 * Subject the video sensor block should report for.
 * Used as the menu values for the videoOn block.
 * @readonly
 * @enum {string}
 */
const SensingSubject = {
    /** The sensor traits of the whole stage. */
    STAGE: 'Stage',

    /** The sensor traits of the area overlapped by this sprite. */
    SPRITE: 'this sprite'
};


/**
 * States the video sensing activity can be set to.
 * @readonly
 * @enum {string}
 */
const _VideoState = {
    /** Video turned off. */
    OFF: 'off',

    /** Video turned on with default y axis mirroring. */
    ON: 'on',

    /** Video turned on without default y axis mirroring. */
    ON_FLIPPED: 'on-flipped'
};

// Alias without the leading underscore. Several methods in this file
// (e.g. the globalVideoState getter, VIDEO_STATE_INFO and getInfo) reference
// `VideoState` directly, which was a ReferenceError while only `_VideoState`
// existed at module scope. The static `VideoState` getter still returns
// `_VideoState`, so both names stay valid.
const VideoState = _VideoState;

/**
 * On/off states for the face-recognition feature (see FACE_STATE_INIT).
 * @readonly
 * @enum {string}
 */
const Facestate = {
    OFF: 'off',
    ON: 'on'
};

/**
 * Class for the motion-related blocks in Scratch 3.0
 * @param {Runtime} runtime - the runtime instantiating this block package.
 * @constructor
 */
class Scratch3VideoSensingBlocks {
    constructor (runtime) {
        /**
         * The runtime instantiating this block package.
         * @type {Runtime}
         */
        this.runtime = runtime;

        /**
         * The motion detection algorithm used to power the motion amount and
         * direction values.
         * @type {VideoMotion}
         */
        this.detect = new VideoMotion();

        /**
         * The last millisecond epoch timestamp that the video stream was
         * analyzed.
         * @type {number}
         */
        this._lastUpdate = null;

        /**
         * A flag to determine if this extension has been installed in a project.
         * It is set to false the first time getInfo is run.
         * @type {boolean}
         */
        this.firstInstall = true;

        if (this.runtime.ioDevices) {
            // Configure the video device with values from globally stored locations.
            this.runtime.on(Runtime.PROJECT_LOADED, this.updateVideoDisplay.bind(this));

            // Clear target motion state values when the project starts.
            this.runtime.on(Runtime.PROJECT_RUN_START, this.reset.bind(this));

            // Kick off looping the analysis logic.
            this._loop();
        }


        // Result holders for the AI recognition services exposed as reporter
        // blocks in getInfo. The property names are Chinese because they
        // mirror the service names. NOTE(review): nothing in this file writes
        // most of these — presumably they are filled in by network callbacks
        // defined elsewhere; confirm against the callers.
        this.人脸识别 = {}; // face recognition
        this.手势识别 = ''; // gesture recognition
        this.人体检测与属性识别 = { // body detection & attribute recognition
            lower_wear: { // lower-body clothing
                name: '不确定'
            },
            upper_wear: { // upper-body clothing
                name: '不确定'
            }
        };
        this.人体检测 = ''; // body detection
        this.人像分割 = ''; // portrait segmentation
        this.通用图像分析 = ''; // generic image analysis
        this.菜品识别 = ''; // dish recognition
        this.商标识别 = ''; // logo recognition
        this.动物识别 = ''; // animal recognition
        this.植物识别 = ''; // plant recognition
        this.花卉识别 = ''; // flower recognition
        this.地标识别 = ''; // landmark recognition
        this.车型识别 = ''; // car model recognition
        this.文字识别 = ''; // printed text OCR
        this.手写文字 = ''; // handwriting OCR
        this.名片识别 = ''; // business card recognition
        this.数字识别 = ''; // digit recognition

        this.果蔬类食材 = ''; // fruit & vegetable ingredient recognition
        this.身份证识别 = { // ID-card recognition result fields
            住址: { // address
                words: ''
            },
            公民身份证号码: { // citizen ID number
                words: ''
            },
            出生: { // date of birth
                words: ''
            },
            姓名: { // name
                words: ''
            },
            性别: { // gender
                words: ''
            },
            民族: { // ethnicity
                words: ''
            }
        };
        this.火车票识别 = ''; // train ticket recognition
        this.网络图片文字识别 = ''; // web image OCR
    }

    /**
   * After analyzing a frame the amount of milliseconds until another frame
   * is analyzed.
   * @type {number}
   */
    static get INTERVAL () {
        return 33;
    }

    /**
   * Dimensions the video stream is analyzed at after its rendered to the
   * sample canvas.
   * @type {Array.<number>}
   */
    static get DIMENSIONS () {
        return [480, 360];
    }

    /**
   * The key to load & store a target's motion-related state.
   * @type {string}
   */
    static get STATE_KEY () {
        return 'Scratch.videoSensing';
    }

    /**
   * The default motion-related state, to be used when a target has no existing motion state.
   * @type {MotionState}
   */
    static get DEFAULT_MOTION_STATE () {
        return {
            motionFrameNumber: 0,
            motionAmount: 0,
            motionDirection: 0
        };
    }

    /**
   * The transparency setting of the video preview stored in a value
   * accessible by any object connected to the virtual machine.
   * @type {number}
   */
    get globalVideoTransparency () {
        const stage = this.runtime.getTargetForStage();
        if (stage) {
            return stage.videoTransparency;
        }
        return 1;
    }

    set globalVideoTransparency (transparency) {
        const stage = this.runtime.getTargetForStage();
        if (stage) {
            stage.videoTransparency = transparency;
        }
        return transparency;
    }

    /**
   * The video state of the video preview stored in a value accessible by any
   * object connected to the virtual machine.
   * @type {number}
   */
    get globalVideoState () {
        const stage = this.runtime.getTargetForStage();
        if (stage) {
            return stage.videoState;
        }
        // Though the default value for the stage is normally 'on', we need to default
        // to 'off' here to prevent the video device from briefly activating
        // while waiting for stage targets to be installed that say it should be off
        return VideoState.OFF;
    }

    set globalVideoState (state) {
        const stage = this.runtime.getTargetForStage();
        if (stage) {
            stage.videoState = state;
        }
        return state;
    }

    /**
   * Get the latest values for video transparency and state,
   * and set the video device to use them.
   */
    updateVideoDisplay () {
        this.setVideoTransparency({
            TRANSPARENCY: this.globalVideoTransparency
        });
        this.videoToggle({
            VIDEO_STATE: this.globalVideoState
        });
    }

    /**
   * Reset the extension's data motion detection data. This will clear out
   * for example old frames, so the first analyzed frame will not be compared
   * against a frame from before reset was called.
   */
    reset () {
        this.detect.reset();

        const targets = this.runtime.targets;
        for (let i = 0; i < targets.length; i++) {
            const state = targets[i].getCustomState(Scratch3VideoSensingBlocks.STATE_KEY);
            if (state) {
                state.motionAmount = 0;
                state.motionDirection = 0;
            }
        }
    }

    /**
   * Occasionally step a loop to sample the video, stamp it to the preview
   * skin, and add a TypedArray copy of the canvas's pixel data.
   * @private
   */
    _loop () {
        setTimeout(this._loop.bind(this), Math.max(this.runtime.currentStepTime, Scratch3VideoSensingBlocks.INTERVAL));

        // Add frame to detector
        const time = Date.now();
        if (this._lastUpdate === null) {
            this._lastUpdate = time;
        }
        const offset = time - this._lastUpdate;
        if (offset > Scratch3VideoSensingBlocks.INTERVAL) {
            const frame = this.runtime.ioDevices.video.getFrame({
                format: Video.FORMAT_IMAGE_DATA,
                dimensions: Scratch3VideoSensingBlocks.DIMENSIONS
            });
            if (frame) {
                this._lastUpdate = time;
                this.detect.addFrame(frame.data);
            }
        }
    }

    /**
   * Create data for a menu in scratch-blocks format, consisting of an array
   * of objects with text and value properties. The text is a translated
   * string, and the value is one-indexed.
   * @param {object[]} info - An array of info objects each having a name
   *   property.
   * @return {array} - An array of objects with text and value properties.
   * @private
   */
    _buildMenu (info) {
        return info.map((entry, index) => {
            const obj = {};
            obj.text = entry.name;
            obj.value = entry.value || String(index + 1);
            return obj;
        });
    }

    /**
   * @param {Target} target - collect motion state for this target.
   * @returns {MotionState} the mutable motion state associated with that
   *   target. This will be created if necessary.
   * @private
   */
    _getMotionState (target) {
        let motionState = target.getCustomState(Scratch3VideoSensingBlocks.STATE_KEY);
        if (!motionState) {
            motionState = Clone.simple(Scratch3VideoSensingBlocks.DEFAULT_MOTION_STATE);
            target.setCustomState(Scratch3VideoSensingBlocks.STATE_KEY, motionState);
        }
        return motionState;
    }

    /**
     * Expose the module-level SensingAttribute enum to consumers of this
     * class.
     * @type {object}
     */
    static get SensingAttribute () {
        return SensingAttribute;
    }

    /**
     * Store the latest face-recognition response for later queries by
     * fetfaceinfo.
     * @param {object} data - raw recognition result; expected to contain a
     *   `face_list` array — TODO confirm the exact schema against the caller.
     */
    faceinfo (data) {
        this.face = data;
    }

    fetfaceinfo (i = 0, data = 'age') {
        if (this.face !== null) {
            return this.face.face_list[i][data];
        }
        return null;

    }

    /**
   * An array of choices of whether a reporter should return the frame's
   * motion amount or direction.
   * @type {object[]}
   * @param {string} name - the translatable name to display in sensor
   *   attribute menu
   * @param {string} value - the serializable value of the attribute
   */
    get ATTRIBUTE_INFO () {
        return [{
            name: formatMessage({
                id: 'videoSensing.motion',
                default: 'motion',
                description: 'Attribute for the "video [ATTRIBUTE] on [SUBJECT]" block'
            }),
            value: SensingAttribute.MOTION
        },
        {
            name: formatMessage({
                id: 'videoSensing.direction',
                default: 'direction',
                description: 'Attribute for the "video [ATTRIBUTE] on [SUBJECT]" block'
            }),
            value: SensingAttribute.DIRECTION
        }
        ];
    }

    /**
     * Expose the module-level SensingSubject enum to consumers of this class.
     * @type {object}
     */
    static get SensingSubject () {
        return SensingSubject;
    }

    /**
   * An array of info about the subject choices.
   * @type {object[]}
   * @param {string} name - the translatable name to display in the subject menu
   * @param {string} value - the serializable value of the subject
   */
    get SUBJECT_INFO () {
        return [{
            name: formatMessage({
                id: 'videoSensing.sprite',
                default: 'sprite',
                description: 'Subject for the "video [ATTRIBUTE] on [SUBJECT]" block'
            }),
            value: SensingSubject.SPRITE
        },
        {
            name: formatMessage({
                id: 'videoSensing.stage',
                default: 'stage',
                description: 'Subject for the "video [ATTRIBUTE] on [SUBJECT]" block'
            }),
            value: SensingSubject.STAGE
        }
        ];
    }

    get FACE_STATE_INIT () {
        return [{
            name: 'start',
            value: Facestate.ON
        },
        {
            name: 'stop',
            value: Facestate.OFF
        }
        ];
    }

    /**
     * States the video sensing activity can be set to. Exposes the
     * module-level `_VideoState` enum to consumers of this class.
     * @readonly
     * @enum {string}
     */
    static get VideoState () {
        return _VideoState;
    }

    /**
   * An array of info on video state options for the "turn video [STATE]" block.
   * @type {object[]}
   * @param {string} name - the translatable name to display in the video state menu
   * @param {string} value - the serializable value stored in the block
   */
    get VIDEO_STATE_INFO () {
        return [{
            name: formatMessage({
                id: 'videoSensing.off',
                default: 'off',
                description: 'Option for the "turn video [STATE]" block'
            }),
            value: VideoState.OFF
        },
        {
            name: formatMessage({
                id: 'videoSensing.on',
                default: 'on',
                description: 'Option for the "turn video [STATE]" block'
            }),
            value: VideoState.ON
        },
        {
            name: formatMessage({
                id: 'videoSensing.onFlipped',
                default: 'on flipped',
                description: 'Option for the "turn video [STATE]" block that causes the video to be flipped' +
          ' horizontally (reversed as in a mirror)'
            }),
            value: VideoState.ON_FLIPPED
        }
        ];
    }

    /**
   * @returns {object} metadata for this extension and its blocks.
   */
    getInfo () {
    // Set the video display properties to defaults the first time
    // getInfo is run. This turns on the video device when it is
    // first added to a project, and is overwritten by a PROJECT_LOADED
    // event listener that later calls updateVideoDisplay
        if (this.firstInstall) {
            this.globalVideoState = VideoState.ON_FLIPPED;
            this.globalVideoTransparency = 1;
            this.updateVideoDisplay();
            this.firstInstall = false;
        }

        // Return extension definition
        return {
            id: 'videoSensing',
            name: '视频侦测',
            blockIconURI: blockIconURI,
            menuIconURI: menuIconURI,
            blocks: [{

                // threads like Scratch 2's behaviour.
                opcode: 'whenMotionGreaterThan',
                text: formatMessage({
                    id: 'videoSensing.whenMotionGreaterThan',
                    default: 'when video motion > [REFERENCE]',
                    description: 'Event that triggers when the amount of motion is greater than [REFERENCE]'
                }),
                blockType: BlockType.HAT,
                arguments: {
                    REFERENCE: {
                        type: ArgumentType.NUMBER,
                        defaultValue: 10
                    }
                }
            },
            {
                opcode: 'videoOn',
                blockType: BlockType.REPORTER,
                text: formatMessage({
                    id: 'videoSensing.videoOn',
                    default: 'video [ATTRIBUTE] on [SUBJECT]',
                    description: 'Reporter that returns the amount of [ATTRIBUTE] for the selected [SUBJECT]'
                }),
                arguments: {
                    ATTRIBUTE: {
                        type: ArgumentType.NUMBER,
                        menu: 'ATTRIBUTE',
                        defaultValue: SensingAttribute.MOTION
                    },
                    SUBJECT: {
                        type: ArgumentType.NUMBER,
                        menu: 'SUBJECT',
                        defaultValue: SensingSubject.SPRITE
                    }
                }
            },
            {
                opcode: 'videoToggle',
                text: formatMessage({
                    id: 'videoSensing.videoToggle',
                    default: 'turn video [VIDEO_STATE]',
                    description: 'Controls display of the video preview layer'
                }),
                arguments: {
                    VIDEO_STATE: {
                        type: ArgumentType.NUMBER,
                        menu: 'VIDEO_STATE',
                        defaultValue: VideoState.ON_FLIPPED
                    }
                }
            },
            {
                opcode: 'setVideoTransparency',
                text: formatMessage({
                    id: 'videoSensing.setVideoTransparency',
                    default: 'set video transparency to [TRANSPARENCY]',
                    description: 'Controls transparency of the video preview layer'
                }),
                arguments: {
                    TRANSPARENCY: {
                        type: ArgumentType.NUMBER,
                        defaultValue: 1
                    }
                }
            },
            {
                opcode: 'setFaceinit',
                text: '[second]秒后，人脸识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getFaceage',
                text: '人脸 我的年龄',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'getFacebeauty',
                text: '人脸 我的颜值',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'getFaceexpression',
                text: '人脸 我的表情 [getFaceexpression]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFaceexpression: {
                        type: ArgumentType.STRING,
                        menu: 'faceexpressionmenus',
                        defaultValue: 'smile'
                    }
                }
            },
            {
                opcode: 'getFaceshape',
                text: '人脸 我的脸型 [getFaceshape]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFaceshape: {
                        type: ArgumentType.STRING,
                        menu: 'faceshapemenus',
                        defaultValue: 'round'
                    }
                }
            },
            {
                opcode: 'getFacegender',
                text: '人脸 我的性别 [getFacegender]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFacegender: {
                        type: ArgumentType.STRING,
                        menu: 'facegendermenus',
                        defaultValue: 'male'
                    }
                }
            },
            {
                opcode: 'getFaceglasses',
                text: '人脸 我的眼镜 [getFaceglasses]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFaceglasses: {
                        type: ArgumentType.STRING,
                        menu: 'faceglassesmenus',
                        defaultValue: 'sun'
                    }
                }
            },
            {
                opcode: 'getFacerace',
                text: '人脸 我的人种 [getFacerace]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFacerace: {
                        type: ArgumentType.STRING,
                        menu: 'faceracemenus',
                        defaultValue: 'yellow'
                    }
                }
            },
            {
                opcode: 'getFacetype',
                text: '人脸 我的脸是 [getFacetype]',
                blockType: BlockType.BOOLEAN,
                arguments: {
                    getFacetype: {
                        type: ArgumentType.STRING,
                        menu: 'facetypemenus',
                        defaultValue: 'human'
                    }
                }
            },
            {
                opcode: 'setgesture',
                text: '[second]秒后，手指识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getgesture',
                text: '人体 我的手指',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setgeneralBasic',
                text: '[second]秒后，文字识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getgeneralBasic',
                text: '文字 文字识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'sethandwriting',
                text: '[second]秒后，手写文字识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'handwriting',
                text: '文字 手写文字识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setnumbers',
                text: '[second]秒后，数字识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'numbers',
                text: '文字 数字识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setbodyAttrtopwear',
                text: '[second]秒后，上半身衣服识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getbodyAttrtopwear',
                text: '人体 我的上半身衣服',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setbodyAttrdownwear',
                text: '[second]秒后，下半身衣服识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getbodyAttrdownwear',
                text: '人体 我的下半身衣服',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setbodyNum',
                text: '[second]秒后，人流量识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getbodyNum',
                text: '人体 人流量统计',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setadvancedGeneral',
                text: '[second]秒后，通用图像识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getadvancedGeneral',
                text: '图像 通用图像分析',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setdishDetect',
                text: '[second]秒后，菜品识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getdishDetect',
                text: '图像 菜品识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setcarDetect',
                text: '[second]秒后，车型识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getcarDetect',
                text: '图像 车型识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setlogoSearch',
                text: '[second]秒后，商标识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getlogoSearch',
                text: '图像 商标识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setanimalDetect',
                text: '[second]秒后，动物识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getanimalDetect',
                text: '图像 动物识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setplantDetect',
                text: '[second]秒后，植物识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getplantDetect',
                text: '图像 植物识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setlandmark',
                text: '[second]秒后，地标识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getlandmark',
                text: '图像 地标识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setwebImage',
                text: '[second]秒后，网络图片文字识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    }
                }
            },
            {
                opcode: 'getwebImage',
                text: '文字 网络图片文字识别',
                blockType: BlockType.REPORTER
            },
            {
                opcode: 'setidcard',
                text: '[second]秒后，身份证识别',
                blockType: BlockType.COMMAND,
                arguments: {
                    second: {
                        type: ArgumentType.NUMBER,
                        menu: 'second',
                        defaultValue: 2
                    },
                    idCardSide: {
                        type: ArgumentType.String,
                        menu: 'idCardSide',
                        defaultValue: 'front'
                    }
                }
            },
            {
                opcode: 'getidcard',
                text: '文字 身份证识别 [CARDTYPE]',
                blockType: BlockType.REPORTER,
                arguments: {
                    CARDTYPE: {
                        type: ArgumentType.STRING,
                        menu: 'CARDTYPE',
                        defaultValue: '姓名'
                    }
                }
            }
            ],
            menus: {
                idCardSide: [{
                    text: '正面',
                    value: 'front'
                },
                {
                    text: '背面',
                    value: 'back'
                }
                ],
                second: [{
                    text: '1',
                    value: 1
                },
                {
                    text: '2',
                    value: 2
                },
                {
                    text: '3',
                    value: 3
                },
                {
                    text: '4',
                    value: 4
                },
                {
                    text: '5',
                    value: 5
                }
                ],
                CARDTYPE: [{
                    text: '住址',
                    value: '住址'
                },
                {
                    text: '公民身份号码',
                    value: '公民身份号码'
                },
                {
                    text: '出生日期',
                    value: '出生'
                },
                {
                    text: '姓名',
                    value: '姓名'
                },
                {
                    text: '性别',
                    value: '性别'
                },
                {
                    text: '民族',
                    value: '民族'
                }
                ],
                bodyAnalysis: [{
                    text: '左脚踝',
                    value: '左脚踝'
                },
                {
                    text: '左手肘',
                    value: '左手肘'
                },
                {
                    text: '左髋部',
                    value: '左髋部'
                },
                {
                    text: '左膝盖',
                    value: '左膝盖'
                },
                {
                    text: '左肩膀',
                    value: '左肩膀'
                },
                {
                    text: '左手腕',
                    value: '左手腕'
                },
                {
                    text: '颈部',
                    value: '颈部'
                },
                {
                    text: '鼻子',
                    value: '鼻子'
                },
                {
                    text: '右脚踝',
                    value: '右脚踝'
                },
                {
                    text: '右手肘',
                    value: '右手肘'
                },
                {
                    text: '右髋部',
                    value: '右髋部'
                },
                {
                    text: '右膝盖',
                    value: '右膝盖'
                },
                {
                    text: '右肩膀',
                    value: '右肩膀'
                },
                {
                    text: '右手腕',
                    value: '右手腕'
                }
                ],
                VIDEOAI: [{
                    text: '人脸识别',
                    value: '人脸识别'
                },
                {
                    text: '手势识别',
                    value: '手势识别'
                },
                {
                    text: '人体关键点识别',
                    value: '人体关键点识别'
                },
                {
                    text: '人体检测',
                    value: '人体检测'
                },
                {
                    text: '人像分割',
                    value: '人像分割'
                },
                {
                    text: '通用图像分析',
                    value: '通用图像分析'
                },
                {
                    text: '菜品识别',
                    value: '菜品识别'
                },
                {
                    text: '商标识别',
                    value: '商标识别'
                },
                {
                    text: '动物识别',
                    value: '动物识别'
                },
                {
                    text: '植物识别',
                    value: '植物识别'
                },
                {
                    text: '花卉识别',
                    value: '花卉识别'
                },
                {
                    text: '果蔬类食材',
                    value: '果蔬类食材'
                },
                {
                    text: '地标识别',
                    value: '地标识别'
                },
                {
                    text: '车型识别',
                    value: '车型识别'
                },
                {
                    text: '文字识别',
                    value: '文字识别'
                },
                {
                    text: '手写文字',
                    value: '手写文字'
                },
                {
                    text: '身份证识别',
                    value: '身份证识别'
                },
                {
                    text: '名片识别',
                    value: '名片识别'
                },
                {
                    text: '火车票识别',
                    value: '火车票识别'
                },
                {
                    text: '网络图片文字识别',
                    value: '网络图片文字识别'
                },

                {
                    text: '数字识别',
                    value: '数字识别'
                }
                ],
                ATTRIBUTE: this._buildMenu(this.ATTRIBUTE_INFO),
                SUBJECT: this._buildMenu(this.SUBJECT_INFO),
                VIDEO_STATE: this._buildMenu(this.VIDEO_STATE_INFO),
                FACE_STATE: this._buildMenu(this.VIDEO_STATE_INFO),
                facetypemenus: [{
                    text: '真实人脸',
                    value: 'human'
                },
                {
                    text: '卡通人脸',
                    value: 'cartoon'
                }
                ],
                faceracemenus: [{
                    text: '黄种人',
                    value: 'yellow'
                },
                {
                    text: '白种人',
                    value: 'white'
                },
                {
                    text: '黑种人',
                    value: 'black'
                },
                {
                    text: '阿拉伯人',
                    value: 'arabs'
                }
                ],
                faceglassesmenus: [{
                    text: '无眼镜',
                    value: 'none'
                },
                {
                    text: '普通眼镜',
                    value: 'common'
                },
                {
                    text: '墨镜',
                    value: 'sun'
                }
                ],
                facegendermenus: [{
                    text: '男生',
                    value: 'male'
                },
                {
                    text: '女生',
                    value: 'female'
                }
                ],
                faceshapemenus: [{
                    text: '威严面容',
                    value: 'square'
                },
                {
                    text: '瓜子脸',
                    value: 'oval'
                },
                {
                    text: '蛇精脸',
                    value: 'triangle'
                },
                {
                    text: '心形脸',
                    value: 'heart'
                },
                {
                    text: '圆圆的面容',
                    value: 'round'
                }
                ],
                faceexpressionmenus: [{
                    text: '不笑',
                    value: 'none'
                },
                {
                    text: '微笑',
                    value: 'smile'
                },
                {
                    text: '大笑',
                    value: 'laugh'
                }
                ]
            }
        };
    }

    /**
   * Analyze a part of the frame that a target overlaps.
   * @param {Target} target - a target to determine where to analyze
   * @returns {MotionState} the motion state for the given target
   */
    _analyzeLocalMotion (target) {
        const drawable = this.runtime.renderer._allDrawables[target.drawableID];
        const state = this._getMotionState(target);
        this.detect.getLocalMotion(drawable, state);
        return state;
    }

    /**
   * A scratch reporter block handle that analyzes the last two frames and
   * depending on the arguments, returns the motion or direction for the
   * whole stage or just the target sprite.
   * @param {object} args - the block arguments
   * @param {BlockUtility} util - the block utility
   * @returns {number} the motion amount or direction of the stage or sprite
   */
    videoOn (args, util) {
        this.detect.analyzeFrame();
        let state = this.detect;
        if (args.SUBJECT === SensingSubject.SPRITE) {
            state = this._analyzeLocalMotion(util.target);
        }

        if (args.ATTRIBUTE === SensingAttribute.MOTION) {
            return state.motionAmount;
        }
        return state.motionDirection;
    }

    /**
   * A scratch hat block edge handle that analyzes the last two frames where
   * the target sprite overlaps and if it has more motion than the given
   * reference value.
   * @param {object} args - the block arguments
   * @param {BlockUtility} util - the block utility
   * @returns {boolean} true if the sprite overlaps more motion than the
   *   reference
   */
    whenMotionGreaterThan (args, util) {
        this.detect.analyzeFrame();
        const state = this._analyzeLocalMotion(util.target);
        return state.motionAmount > Number(args.REFERENCE);
    }

    /**
   * A scratch command block handle that configures the video state from
   * passed arguments.
   * @param {object} args - the block arguments
   * @param {VideoState} args.VIDEO_STATE - the video state to set the device to
   */
    videoToggle (args) {
        const state = args.VIDEO_STATE;
        this.globalVideoState = state;
        if (state === VideoState.OFF) {
            this.runtime.ioDevices.video.disableVideo();
        } else {
            this.runtime.ioDevices.video.enableVideo();
            // Mirror if state is ON. Do not mirror if state is ON_FLIPPED.
            this.runtime.ioDevices.video.mirror = state === VideoState.ON;
        }
    }

    /**
   * A scratch command block handle that configures the video preview's
   * transparency from passed arguments.
   * @param {object} args - the block arguments
   * @param {number} args.TRANSPARENCY - the transparency to set the video
   *   preview to
   */
    setVideoTransparency (args) {
        const transparency = Cast.toNumber(args.TRANSPARENCY);
        this.globalVideoTransparency = transparency;
        this.runtime.ioDevices.video.setPreviewGhost(transparency);
    }

    setFaceinit (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.人脸识别 = {

        };
        return new Promise(r => {
            setTimeout(() => {
                const option = {};
                option.face_field = 'age,beauty,expression,faceshape,gender,glasses,race,facetype';
                option.face_type = 'LIVE';
                option.max_face_num = '1';
                window.electron.setFaceinit(image, option, 'a.png')
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.人脸识别 = res.result.face_list[0];
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    // age
    getFaceage () {
        const data = this.人脸识别;
        return data.age || 0;
    }

    // setFacebeauty
    getFacebeauty () {
        const data = this.人脸识别;
        return data.beauty || 0;
    }

    // expression
    getFaceexpression (arge) {
        const data = this.人脸识别;
        if (data.expression) {
            if (arge.getFaceexpression === data.expression.type) {
                return true;
            }
            return false;
        }
        return false;
    }

    // shape
    getFaceshape (arge) {
        const data = this.人脸识别;
        if (data.face_shape) {
            if (arge.getFaceshape === data.face_shape.type) {
                return true;
            }
            return false;
        }
        return false;

    }

    // gender
    getFacegender (arge) {
        const data = this.人脸识别;
        if (data.gender) {
            if (arge.getFacegender === data.gender.type) {
                return true;
            }
            return false;

        }
        return false;

    }

    // glasses
    getFaceglasses (arge) {
        const data = this.人脸识别;
        if (data.glasses) {
            if (arge.getFaceglasses === data.glasses.type) {
                return true;
            }
            return false;

        }
        return false;

    }

    // race
    getFacerace (arge) {
        const data = this.人脸识别;
        if (data.race) {
            if (arge.getFacerace === data.race.type) {
                return true;
            }
            return false;

        }
        return false;

    }


    // type
    getFacetype (arge) {
        const data = this.人脸识别;
        if (data.face_type) {
            if (arge.getFacetype === data.type) {
                return true;
            }
            return false;

        }
        return false;

    }

    setgesture (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.手势识别 = '';
        const type = {
            One: '1',
            Five: '5',
            Fist: '10',
            OK: 'OK',
            Prayer: '祈祷',
            Congratulation: '作揖',
            Honour: '告辞',
            Heart_single: '比心',
            Thumb_up: '赞',
            Thumb_down: 'Diss',
            ILY: '我爱你',
            Palm_up: '掌心向上',
            Heart_1: '比心',
            Heart_2: '比心',
            Heart_3: '比心',
            Two: '2',
            Three: '3',
            Four: '4',
            Six: '6',
            Seven: '7',
            Eight: '8',
            Nine: '9',
            Rock: '摇滚',
            Insult: '竖中指'
        };
        return new Promise(r => {
            setTimeout(() => {
                window.electron.setgesture(image, {}, 'a.png')
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.手势识别 = type[res.result[0].classname];
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getgesture () {
    // 手势检测
        return this.手势识别;
    }

    rendererelectron () {
        if (this.globalVideoState === VideoState.OFF);
        const abb = this.runtime.ioDevices.video.getFrame({
            format: Video.FORMAT_IMAGE_DATA,
            dimensions: Scratch3VideoSensingBlocks.DIMENSIONS
        });
        const x = document.getElementById('canvas');
        x.getContext('2d').putImageData(abb, 0, 0);
        const image = x.toDataURL().split(',')[1];
        return image;
    }

    setbodyAttrtopwear (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.人体检测与属性识别 = {
            lower_wear: {
                name: '不确定'
            },
            upper_wear: {
                name: '不确定'
            }
        };
        return new Promise(r => {
            setTimeout(() => {

                const option = {};
                option.type = 'lower_wear,upper_wear';

                window.electron.setbodyAttrtopwear(image, option)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.人体检测与属性识别 = res.person_info[0].attributes;
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getbodyAttrtopwear () {
    // text: '人体 我的上半身衣服',
        return this.人体检测与属性识别.upper_wear.name || '';
    }

    setbodyAttrdownwear (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.人体检测与属性识别 = {
            lower_wear: {
                name: '不确定'
            },
            upper_wear: {
                name: '不确定'
            }
        };
        return new Promise(r => {
            setTimeout(() => {

                const option = {};
                option.type = 'lower_wear,upper_wear';
                window.electron.setbodyAttrdownwear(image, option)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.人体检测与属性识别 = res.person_info[0].attributes;
                        }
                        r();

                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getbodyAttrdownwear () {
    // text: '人体 我的下半身衣服',
        return this.人体检测与属性识别.lower_wear.name || '';
    }

    setbodyNum (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.人流量统计 = '';
        return new Promise(r => {
            setTimeout(() => {


                window.electron.setbodyNum(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.人流量统计 = res.person_num;
                        }
                        r();

                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getbodyNum () {
    // 人体 人流量统计
        return this.人流量统计;
    }

    setadvancedGeneral (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.通用图像分析 = '';
        return new Promise(r => {
            setTimeout(() => {


                window.electron.setadvancedGeneral(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.通用图像分析 = res.result[0].keyword || '';
                        }
                        r();

                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getadvancedGeneral () {
    //    text: '图像 通用图像分析',
        return this.通用图像分析;
    }

    setdishDetect (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.菜品识别 = '';
        return new Promise(r => {
            setTimeout(() => {

                const option = {};
                option.top_num = '1';
                window.electron.setdishDetect(image, option)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.菜品识别 = res.result[0].name || '';
                        }
                        r();

                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getdishDetect () {
        return this.菜品识别;
    }

    setcarDetect (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.车型识别 = '';
        return new Promise(r => {
            setTimeout(() => {
                const option = {};
                option.top_num = '1';
                window.electron.setcarDetect(image, option)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.车型识别 = res.result[0].name || '';
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getcarDetect () {
    //    text: '图像 车型识别',
        return this.车型识别;

    }

    setlogoSearch (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.商标识别 = '';
        return new Promise(r => {
            setTimeout(() => {
                const option = {};
                option.top_num = '1';
                window.electron.setlogoSearch(image, option)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.商标识别 = res.result[0].name || '';
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getlogoSearch () {
    //    text: '图像 商标识别',
        return this.商标识别;
    }

    setanimalDetect (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.动物识别 = '';
        return new Promise(r => {
            setTimeout(() => {
                window.electron.setanimalDetect(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.动物识别 = res.result[0].name || '';
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });
            }, s);
        });
    }

    getanimalDetect () {
    //  text: '图像 动物识别',
        return this.动物识别;
    }

    setplantDetect (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.植物识别 = '';
        return new Promise(r => {
            setTimeout(() => {


                window.electron.setplantDetect(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.植物识别 = res.result[0].name || '';
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getplantDetect () {
    //    text: '图像 植物识别',
        return this.植物识别;

    }

    setlandmark (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.地标识别 = '';
        return new Promise(r => {
            setTimeout(() => {


                window.electron.setlandmark(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.地标识别 = res.result.landmark || '';
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getlandmark () {
    // text: '图像 地标识别',
        return this.地标识别;
    }

    setgeneralBasic (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.文字识别 = '';
        return new Promise(r => {
            setTimeout(() => {

                window.electron.setgeneralBasic(image)
                    .then(res => {
                        console.log(res);
                        if (res.words_result_num) {

                            this.文字识别 = res.words_result.join('-');
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getgeneralBasic () {
    //  text: '文字 文字识别',
        return this.文字识别;
    }

    setwebImage (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.网络图片文字识别 = '';
        return new Promise(r => {
            setTimeout(() => {

                window.electron.setwebimage(image)
                    .then(res => {
                        console.log(res);
                        if (!res.error_code) {
                            this.网络图片文字识别 = res.words_result.join('-');
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getwebImage () {
        return this.网络图片文字识别;
    }

    setidcard (arge) {
        const s = arge.second * 1000;
        const idCardSide = arge.idCardSide || 'front';
        const image = this.rendererelectron();
        this.身份证识别 = {
            住址: {
                words: ''
            },
            公民身份证号码: {
                words: ''
            },
            出生: {
                words: ''
            },
            姓名: {
                words: ''
            },
            性别: {
                words: ''
            },
            民族: {
                words: ''
            }
        };
        return new Promise(r => {
            setTimeout(() => {

                window.electron.setidcard(image, {
                    idCardSide
                })
                    .then(res => {
                        console.log(res);
                        if (res.words_result_num) {
                            this.身份证识别 = res.result.words_result;
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    getidcard (arge) {
        const data = this.身份证识别;
        if (data[arge.CARDTYPE]) {
            return this.身份证识别[arge.CARDTYPE].words || '';
        }
        return '';
    }

    sethandwriting (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.手写文字识别 = '';
        return new Promise(r => {
            setTimeout(() => {

                window.electron.sethandwriting(image)
                    .then(res => {
                        console.log(res);
                        if (res.words_result_num) {
                            this.手写文字识别 = res.words_result.join('-');
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });
            }, s);
        });
    }

    handwriting () {
        return this.手写文字识别;
    }

    setnumbers (arge) {
        const s = arge.second * 1000;
        const image = this.rendererelectron();
        this.数字识别 = '';
        return new Promise(r => {
            setTimeout(() => {
                window.electron.setnumbers(image)
                    .then(res => {
                        console.log(res);
                        if (res.words_result_num) {
                            this.数字识别 = res.words_result.join('-');
                        }
                        r();
                    })
                    .catch(() => {
                        r();
                    });

            }, s);
        });
    }

    numbers () {
        return this.数字识别;
    }


}

module.exports = Scratch3VideoSensingBlocks;
