import os
import json
import time
import json
import base64
from io import BytesIO
import tensorflow as tf
import numpy as np
from keras.models import load_model
import cv2
import argparse
from PIL import Image
#from multiprocessing import Queue, Process
import pynng
import opentracing
import sys
sys.path.append("/home/hydra-mini-demo")
from jtracer.tracing import DownStreamIndexCache, init_tracer

# Module-level tracer handle (opentracing's no-op default); when run as a
# script it is rebound to a real tracer via init_tracer() in __main__.
tracer = opentracing.tracer

def parse_outputs(outputs):
    """Convert raw model predictions into a steering/speed command dict.

    Parameters
    ----------
    outputs : sequence of array-like
        Model output arrays. After flattening each array's leading (batch)
        axis, element 0 is steering and element 1 is speed, both assumed to
        be normalized into [0, 1] by the network — TODO confirm against the
        trained model.

    Returns
    -------
    dict
        ``{'msg_type': 'pynq_speed', 'steering': <str>, 'speed': <str>}``
        with numeric values stringified for transport.
    """
    # Flatten the leading axis of every output array into one flat list.
    flat = [out[i] for out in outputs for i in range(out.shape[0])]
    # Map [0, 1] network outputs back to the [-1, 1] control range.
    steer = flat[0] * 2.0 - 1.0
    speed = flat[1] * 2.0 - 1.0
    # Empirical calibration: bias right turns by +0.2 (kept from original).
    if steer > 0:
        steer = steer + 0.2
    # str() instead of .__str__() — idiomatic, identical result.
    return {'msg_type': 'pynq_speed', 'steering': str(steer), 'speed': str(speed)}
#def com_with_client(img_queue,com_queue,address):
#    print("start Pair1 listen")
#    print(address)
#    
#    while True:
#        try:
#            req_package = sock.recv()
#            flag, index, span_ctx, image = index_cache.extract_pynng_req_package(req_package)
#
#            if image is not None:
#                img_queue.put(image.decode(),block=False,timeout=1)
#        except:
#            pass
#        try:
#            command = com_queue.get(block=False,timeout=1)
#            if command is not None:
#                resp_package = index_cache.gen_pynng_resp_package(False, None, json.dumps(parse_outputs(command)).encode())
#                sock.send(resp_package)
#        except:
#            pass
        

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='prediction server')
    parser.add_argument('--nng_host', type=str, default='tcp://10.16.0.181:8899', help='bind to ip:port')
    parser.add_argument('--model', type=str, default='./model.h5',help='model filename')
    args = parser.parse_args()

    # Load the Keras model once up front; capture the TF1-style default graph
    # so predict() runs inside it for every request.
    model = load_model(args.model)
    graph = tf.get_default_graph()

    # Pair1 socket with short timeouts so the serve loop never blocks forever.
    sock = pynng.Pair1(recv_timeout=10,send_timeout=10,recv_buffer_size=1,send_buffer_size=1)
    sock.listen(args.nng_host)

    tracer = init_tracer("image-process")
    index_cache = DownStreamIndexCache(tracer)
    span_inference = None
    with graph.as_default():
        while True:
            try:
                req_package = sock.recv()
                flag, index, span_ctx, image = index_cache.extract_pynng_req_package(req_package)
                if flag:
                    span_inference = tracer.start_span('image_process:hydramini',child_of=span_ctx)
            except Exception:
                # recv timeout or malformed package: skip this iteration.
                # (was a bare `except:`, which also swallowed KeyboardInterrupt)
                continue
            if image is None:
                continue
            # Payload is a base64-encoded compressed image; decode to a BGR array.
            # np.frombuffer replaces the deprecated np.fromstring (same bytes,
            # same uint8 result).
            image_array = cv2.imdecode(np.frombuffer(base64.b64decode(image), dtype=np.uint8), 1)
            # Crop the top 40 rows (presumably sky/horizon — TODO confirm) and
            # normalize pixel values to [-0.5, 0.5] as the model expects.
            image_array = image_array[40:,:]
            image_array = image_array/255.0-0.5
            outputs = model.predict(image_array[None, :, :, :])
            if outputs is not None:
                if flag:
                    span_inference.finish()
                resp_package = index_cache.gen_pynng_resp_package(flag, index, parse_outputs(outputs))
                try:
                    sock.send(resp_package)
                except Exception:
                    # Send timeout: drop this response, keep serving. Narrowed
                    # from a bare `except:` so Ctrl-C still terminates.
                    pass
