package main
/*
#cgo CFLAGS: -I ./include -I /usr/local/opencv/include/opencv/

#cgo LDFLAGS: -L ./lib -lhcnetsdk -lPlayCtrl

// #include "HCNetSDK.h"
// #include "PlayM4.h"
// #include <stdlib.h>
// #include <stdio.h>
// #include <stdbool.h>
// #include <time.h>
// #include <cv.h>
// #include <highgui.h>
#include "lib.c"

*/
import "C"
import "unsafe"

import (
    "opencv-hk/conf"
    "fmt"
    "image"
    "image/color"
    "os"
    "strconv"
    "time"
    // "io/ioutil" // io utility package (kept for reference; used by the commented-out file dump below)

    "gocv.io/x/gocv"
)

// main captures one JPEG snapshot from a HikVision camera (HCNetSDK via cgo)
// and runs a gocv Haar-cascade face detector on it, drawing the results in a
// window until any key is pressed.
//
// Usage: facedetect [classifier XML file]
// Camera connection settings (ip/user/pass/channel) come from ./config.txt.
func main() {
	// Read camera connection settings from the local config file.
	myConfig := new(conf.Config)
	myConfig.InitConfig("./config.txt")
	ip := C.CString(myConfig.Read("default", "ip"))
	user := C.CString(myConfig.Read("default", "user"))
	pass := C.CString(myConfig.Read("default", "pass"))
	defer C.free(unsafe.Pointer(ip))
	defer C.free(unsafe.Pointer(user))
	defer C.free(unsafe.Pointer(pass))

	ch, err := strconv.Atoi(myConfig.Read("default", "channel"))
	if err != nil {
		fmt.Println("Error: invalid channel in config:", err)
		return
	}

	// Require the cascade classifier XML path as the first argument;
	// the original indexed os.Args[1] unconditionally and panicked without it.
	if len(os.Args) < 2 {
		fmt.Println("How to run:\n\tfacedetect [classifier XML file]")
		return
	}
	xmlFile := os.Args[1]

	// Log in to the device; id is the SDK session handle.
	id := C.get_Jpeg_Capture(ip, user, pass)

	// JPEG capture parameters: wPicQuality 0 = best quality,
	// wPicSize 9 = resolution index defined by HCNetSDK.
	jpegPara := new(C.NET_DVR_JPEGPARA)
	jpegPara.wPicQuality = 0
	jpegPara.wPicSize = 9

	// Destination buffer for the JPEG bytes. retLen receives the number of
	// bytes actually written; passing the address of a Go-side C.uint is
	// legal under cgo pointer rules (the callee does not retain it) and
	// replaces the original C.malloc(4)/C.free pair.
	const bufSize = 200 * 1024
	jpegBuf := new([bufSize]byte)
	var retLen C.uint

	capture := C.NET_DVR_CaptureJPEGPicture_NEW((C.int)(id), C.int(ch), jpegPara,
		(*C.char)(unsafe.Pointer(jpegBuf)), bufSize, &retLen)
	// NOTE(review): per the HCNetSDK reference, NET_DVR_CaptureJPEGPicture_NEW
	// returns BOOL — TRUE on success, FALSE (0) on failure. The original
	// `capture < 0` check could never fire.
	if capture == 0 {
		fmt.Println("Error: NET_DVR_CaptureJPEGPicture_NEW", C.NET_DVR_GetLastError())
		return
	}
	jpegLen := int(retLen)
	if jpegLen <= 0 || jpegLen > bufSize {
		fmt.Println("Error: invalid JPEG length returned:", jpegLen)
		return
	}

	// Display window for the annotated snapshot.
	window := gocv.NewWindow("Face Detect")
	defer window.Close()

	// Color used for the rectangle drawn around each detected face.
	blue := color.RGBA{0, 0, 255, 0}

	// Load the Haar cascade classifier used to recognize faces.
	classifier := gocv.NewCascadeClassifier()
	defer classifier.Close()
	if !classifier.Load(xmlFile) {
		fmt.Printf("Error reading cascade file: %v\n", xmlFile)
		return
	}

	fmt.Println("-----------------------------------------------------------------------------")

	// Decode only the bytes the SDK actually wrote, not the whole 200 KiB
	// buffer (the tail is uninitialized). The original ignored both the
	// error and the Mat's Close.
	img, err := gocv.IMDecode(jpegBuf[:jpegLen], gocv.IMReadColor)
	if err != nil {
		fmt.Println("Error: IMDecode:", err)
		return
	}
	defer img.Close()
	if img.Empty() {
		fmt.Println("Error: decoded image is empty")
		return
	}

	for {
		rects := classifier.DetectMultiScale(img)
		fmt.Printf("found %d faces\n", len(rects))

		// Draw a rectangle around each face on the original image,
		// along with text identifying it as "Human".
		for _, r := range rects {
			gocv.Rectangle(&img, r, blue, 3)

			size := gocv.GetTextSize("Human", gocv.FontHersheyPlain, 1.2, 2)
			pt := image.Pt(r.Min.X+(r.Min.X/2)-(size.X/2), r.Min.Y-2)
			gocv.PutText(&img, "Human", pt, gocv.FontHersheyPlain, 1.2, blue, 2)
		}
		window.IMShow(img)
		if window.WaitKey(1) >= 0 {
			break
		}
	}
	time.Sleep(2 * time.Second)
}
