# -*- coding: utf-8 -*-
'''
Created on 14.12.2019

@author: yu03
'''

import datetime
import os
import time

import numpy as np
from pyueye import ueye
from PyQt5 import QtGui
from PyQt5.QtWidgets import QApplication

from PyUeye_Unified.Cross_200line_SAAC import file_name, hor_index, ver_index, file_path, doc_name, hor_lines, ver_lines
from PyUeye_Unified.IDS_lib.pyueye_camera import Camera
from PyUeye_Unified.IDS_lib.pyueye_gui import PyuEyeQtApp, PyuEyeQtView
from PyUeye_Unified.IDS_lib.pyueye_utils import FrameThread


now = datetime.datetime.now()
num = 0
#f = open(file_name,'ab')

def process_image(self, image_data):
    """Per-frame callback: extract the configured horizontal/vertical
    pixel lines from the camera frame, append them with a timestamp to
    a ``.npy`` file, and return a QImage for the GUI view.

    Parameters
    ----------
    self : PyuEyeQtView
        The view invoking the callback (unused here).
    image_data : ImageData
        Frame wrapper from pyueye_utils; must expose ``as_1d_image()``
        and ``mem_info`` with ``width``/``height``.

    Returns
    -------
    QtGui.QImage
        8-bit grayscale image for display.
    """
    # NOTE: `f` was removed from the global statement -- the file handle
    # is bound by the `with` block below, never assigned as a global.
    global num, start_time
    time_stamp = time.time()
    num += 1  # frame number of this acquisition run
    print(num, time_stamp)

    image = image_data.as_1d_image()
    # Select the rows/columns of interest; the -49 offset re-centers the
    # line window around the configured index (assumes hor_lines/ver_lines
    # are index arrays from Cross_200line_SAAC -- TODO confirm).
    img_hor_lines = image[hor_lines + hor_index - 49, :]
    img_ver_lines = image[:, ver_lines + ver_index - 49].T
    # Ragged record (two 2-D arrays + a scalar): dtype=object is required
    # on NumPy >= 1.24 (older NumPy built an object array implicitly) and
    # matches the allow_pickle=True save below.
    line = np.array([img_hor_lines, img_ver_lines, time_stamp], dtype=object)
    timestr = time.strftime('%m%d-%H%M')

    if num == 1:
        start_time = time.time()
    if num == 10:  # stop after this many frames (was 350000 for SAAC)
        stop_time = time.time()
        time_diff = stop_time - start_time
        print(time_diff)
        QApplication.quit()

    # NOTE(review): the filename has minute resolution, so a long run is
    # split across several files at each minute rollover -- confirm intended.
    with open(os.path.join(file_path, doc_name + '_' + timestr + '.npy'), 'ab') as f:
        np.save(f, line, allow_pickle=True)
    return QtGui.QImage(image.data,
                        image_data.mem_info.width,
                        image_data.mem_info.height,
                        QtGui.QImage.Format_Grayscale8)

def main():
    """Configure the uEye camera, start continuous capture, and run the
    Qt event loop until process_image() stops the acquisition.

    Side effects: opens the camera, spawns a frame-grabbing thread, and
    blocks in the Qt event loop until QApplication.quit() is called.
    """
    # --- acquisition parameters --------------------------------------
    AOI = [0, 0, 1280, 1024]   # area of interest: x, y, width, height
    Exposure = 0.1             # exposure time (units per uEye API -- TODO confirm ms)
    Trigger_External = 0       # 0 = free-run, 1 = external trigger
    Blacklevel = 0, 200        # (mode flag: 0 = manual, manual black-level value)
    data_format = 'MONO8'      # 'MONO8' (8-bit mono) or 'SENSOR_RAW10'

    # Qt application and view; process_image is invoked for every frame.
    app = PyuEyeQtApp()
    view = PyuEyeQtView()
    # view.show()
    view.user_callback = process_image

    # Camera class simplifying uEye API access.
    cam = Camera()
    cam.init()

    if data_format == 'MONO8':
        cam.set_colormode(ueye.IS_CM_MONO8)
    elif data_format == 'SENSOR_RAW10':
        cam.set_colormode(ueye.IS_CM_SENSOR_RAW10)

    # Report the mode the driver actually accepted (compare against the
    # named constants instead of the magic numbers 33 / 6).
    colormode = cam.get_colormode()
    if colormode == ueye.IS_CM_SENSOR_RAW10:
        print('Color Mode: Sensor RAW 10')
    elif colormode == ueye.IS_CM_MONO8:
        print('Color Mode: MONO 8')

    if Blacklevel[0] == 0:
        # Manual mode: disable auto black level and apply the fixed value.
        cam.set_blacklevel_mode(ueye.IS_AUTO_BLACKLEVEL_OFF)
        cam.set_blacklevel(Blacklevel[1])
        print('Blacklevel Mode: Manual', cam.get_blacklevel_mode(),
              'Blacklevel:', cam.get_blacklevel())

    cam.set_aoi(AOI[0], AOI[1], AOI[2], AOI[3])
    aoi = cam.get_aoi()
    print('AOI:', aoi.x, aoi.y, aoi.width, aoi.height)

    if Trigger_External == 1:
        cam.set_external_trigger()
    elif Trigger_External == 0:
        # Hoisted: one driver query instead of three identical calls.
        frame_time_range = cam.get_FrameTimeRange()
        print('Framerate Range:', frame_time_range[0],
              frame_time_range[1], frame_time_range[2])
        cam.set_fps(25)
#         cam.set_fps(1/cam.get_FrameTimeRange()[0])

    cam.set_exposure(Exposure)
    print('Exposure Time:', cam.get_exposure())
    cam.turn_on_flash_high()
    cam.set_flash_params(-30, 500)
    cam.alloc()
    time.sleep(1)  # give the driver a moment before streaming starts
    cam.capture_video()

    # Worker thread that waits for new frames and feeds the view callback.
    thread = FrameThread(cam, view)
    thread.start()

    # Run the GUI loop; the thread is stopped when the app exits.
    app.exit_connect(thread.stop)
    app.exec_()

    # Cleanup after QApplication.quit() (triggered in process_image).
    print('Frame Rate:', cam.get_fps())
    thread.stop()
    thread.join()

    cam.stop_video()
    cam.exit()
 
if __name__ == "__main__":
    main()