﻿#include <winsock2.h>
#include <windows.h>
#include <strsafe.h>
#include "DepthBasics.h"
#include "resource.h"
#include "intrael.h"
#pragma comment(lib, "ws2_32.lib")
#pragma comment(lib, "jpeg-static.lib")

/* Process-wide state shared between the capture thread and the network thread. */
SOCKET listener = INVALID_SOCKET, client = INVALID_SOCKET, stmp;

int connected = 0;              /* nonzero once a Kinect sensor is attached (see Run/CreateFirstConnected) */
int     optind = 1;             /* getopt-style globals; NOTE(review): no getopt use is visible in this chunk -- confirm they are used elsewhere */
int     opterr = 1;
int     optopt;
char    *optarg;
ULONG NonBlock = 1;             /* argument for ioctlsocket(FIONBIO) to make sockets non-blocking */
HANDLE winMutex;                /* NOTE(review): never initialized or used in this chunk -- confirm */


METHODDEF(void) init_destination(j_compress_ptr cinfo)
{
	/* libjpeg destination callback: reset the in-memory destination so the
	 * compressor starts writing at the beginning of the caller's buffer. */
	mem_dest_ptr d = (mem_dest_ptr)cinfo->dest;
	d->datasize = 0;
	d->pub.free_in_buffer = d->bufsize;
	d->pub.next_output_byte = d->buffer;
}

METHODDEF(boolean) empty_output_buffer(j_compress_ptr cinfo)
{
	/* libjpeg destination callback: invoked when the output buffer fills.
	 * We rewind to the start of the buffer and report success.
	 * NOTE(review): this discards bytes already written if a compressed
	 * frame ever exceeds the buffer size -- confirm frames always fit. */
	mem_dest_ptr d = (mem_dest_ptr)cinfo->dest;
	d->pub.free_in_buffer = d->bufsize;
	d->pub.next_output_byte = d->buffer;
	return TRUE;
}

METHODDEF(void) term_destination(j_compress_ptr cinfo)
{
	/* libjpeg destination callback: compression finished. Record the number
	 * of bytes actually emitted, both in the manager and through the
	 * caller-supplied length pointer set by jpeg_set_frame. */
	mem_dest_ptr d = (mem_dest_ptr)cinfo->dest;
	size_t emitted = d->bufsize - d->pub.free_in_buffer;
	d->datasize = emitted;
	*d->outsize = (int)emitted;
}

GLOBAL(void) jpeg_memory_dest(j_compress_ptr cinfo)
{
	/* Install this file's in-memory destination manager on a compressor.
	 * The manager struct is allocated once from the permanent pool and
	 * reused for the lifetime of the compressor. */
	mem_dest_ptr d;
	if (cinfo->dest == NULL) {
		cinfo->dest = (struct jpeg_destination_mgr *)
			(*cinfo->mem->alloc_small)((j_common_ptr)cinfo, JPOOL_PERMANENT, sizeof(memory_destination_mgr));
	}
	d = (mem_dest_ptr)cinfo->dest;
	d->pub.term_destination = term_destination;
	d->pub.empty_output_buffer = empty_output_buffer;
	d->pub.init_destination = init_destination;
}

METHODDEF(void) jpeg_set_frame(j_compress_ptr cinfo, char *buf, uint16_t *len)
{
	/* Aim the memory destination at the output buffer for the next frame.
	 * The compressed size is written back through `len` by term_destination. */
	mem_dest_ptr d = (mem_dest_ptr)cinfo->dest;
	d->outsize = len;
	d->bufsize = 65000;	/* fixed per-frame compressed-output budget */
	d->buffer = (unsigned char *)buf;
}

/* Two compressors: ginfo emits grayscale depth JPEGs, vinfo emits RGBX video
 * JPEGs (see the JSET calls in Run). */
struct jpeg_compress_struct ginfo,vinfo;
struct jpeg_error_mgr jerr;

/* Crop / filter window, adjustable at runtime via HTTP query parameters in
 * netfn: x..X columns (0..320), y..Y rows (0..240), z..Z depth range,
 * c = minimum component size threshold (blobs smaller than c are skipped). */
struct {
	int x, X, y, Y, z, Z,c;

} BOUNDS = {0,320,0,240,0,1200,512};
DWORD dwThreadId;        /* id of the network thread created in main */
DWORD netfn(void);       /* network thread entry point, defined below */
int main(int argc, char* argv[])
{
	
    CDepthBasics application;
	ndframe = ngframe = nvframe = NULL;

	if (CreateThread(NULL, //Choose default security
		0, //Default stack size
		(LPTHREAD_START_ROUTINE)&netfn,
		NULL, //Thread parameter
		0, //Immediately run the thread
		&dwThreadId //Thread Id
		) == NULL)
	{
		printf("Error Creating Thread");
		return(1);
	}
	application.Run();
}
/*
 * Network thread: accepts HTTP-ish clients on port 6661 in a select() loop.
 * The first character after "GET /" selects the feed ('1' = grayscale depth
 * JPEG, '2' = color JPEG, anything else = JSON blob data); optional
 * ?x=..&X=..&y=..&Y=..&z=..&Z=..&c=.. query parameters adjust BOUNDS.
 * Frames produced by the capture thread are handed over via the FSEL/FSWAP
 * macros (intrael.h) and streamed to subscribed clients.
 * Runs until the global `die` flag is set; returns 0.
 */
DWORD netfn(void)
{
	WSADATA wsa_data;
	WSAStartup(MAKEWORD(2, 2), &wsa_data);
	IOVEC sdata[2];
	fd_set rd, wr, master, streaming;
	struct timeval timeout;
	struct sockaddr_in listeneraddr;
	in_addr_t admin_addr;
	static DWORD stmp;                 /* shadows the file-scope stmp on purpose? NOTE(review): confirm */
	static int i, ni, kick, admin, itmp, yes, camera, video;
	static SOCKET listener, newfd, fdmax;   /* shadows the file-scope listener */
	static char *targ, *oarg, *admin_org, buf[4096];
	static client_t *client, tclient;
	static header_t *head403, *head404, *head503, *headreg;
	static frame_t *frame404, *frame403, *frame503, *framereg,*dframe,*vframe,*gframe,*tframe;
	int32_t c1, c2, cc;
	uint16_t lport;
	uint32_t ncount, count;
	size_t stacksize = 65536;
	cc = 1;
	/* Client lists: all clients, plus one list per feed type. */
	LIST_HEAD(, client_t) clients;
	LIST_HEAD(, client_t) dclients;
	LIST_HEAD(, client_t) vclients;
	LIST_HEAD(, client_t) gclients;
	LIST_INIT(&clients);
	LIST_INIT(&dclients);
	LIST_INIT(&vclients);
	LIST_INIT(&gclients);
	/* Canned HTTP responses for error paths. */
	HSET(head404, STR_404, STR_404, 10);
	HSET(head403, STR_403, STR_403, 10);
	HSET(head503, STR_503, STR_503, 10);
	HSET(headreg, STR_REG, STR_REG, 5);
	CFRAME(frame404, head404, 256);
	CFRAME(frame403, head403, 256);
	CFRAME(frame503, head503, 256);
	frame404->l = sprintf_s(frame404->buf,256, "%s", STR_404B);
	frame403->l = sprintf_s(frame403->buf, 256, "%s", STR_403B);
	frame503->l = sprintf_s(frame503->buf, 256, "%s", STR_503B);
	active = 0;
	admin_addr = 0;
	lport = 6661;
	yes = video = 1;
	listener =  camera =  ncount = 0;
	gframe = dframe = vframe = tframe = NULL;
	quality = 75;
	FD_ZERO(&streaming);
	FD_ZERO(&master);
	memset(depth, 0, 320 * 2);
	memset(l_count, 0, (FRAME_PIXELS / 2 + 1)*sizeof(uint32_t));
	listeneraddr.sin_addr.s_addr = INADDR_ANY;


	memset(black, 0, 320);
	/* Per-feed response headers: video JPEG, JSON data, grayscale JPEG. */
	HSET(vhead, STR_JPEG_S, STR_JPEG_M, 2);
	HSET(dhead, STR_JSON_S, STR_JSON_M, 0);
	HSET(ghead, STR_JPEG_S, STR_JPEG_M, 1);

	INITSOCKET(listener);
	NONBLOCKING(listener);
	listeneraddr.sin_family = AF_INET;
	listeneraddr.sin_port = htons(lport);
	memset(&(listeneraddr.sin_zero), '\0', 8);
	/* Fixed: SO_REUSEADDR requires a pointer to a nonzero int option value;
	 * the previous NULL/0 arguments made this call an invalid no-op. */
	setsockopt(listener,  SOL_SOCKET, SO_REUSEADDR, (const char *)&yes, sizeof(int));
	/* NOTE(review): a bind failure is only logged; execution continues to
	 * listen() on the unbound socket -- confirm this is intended. */
	if (bind(listener, (struct sockaddr *)&listeneraddr, sizeof(listeneraddr)) == SOCKET_ERROR) printf("Bind failed, aborting %d\n", WSAGetLastError());
	if (listen(listener, 50) == SOCKET_ERROR) FAIL("Listen failed, aborting\n")
		FD_SET(listener, &master);
	fdmax = listener;
	char *context=NULL;
	while (!die) {
		/* Pick up any fresh frames published by the capture thread and
		 * arm the writers subscribed to each feed. */
		FSEL(dframe, ndframe, dclients, dentries);
		FSEL(gframe, ngframe, gclients, gentries);
		FSEL(vframe, nvframe, vclients, ventries);
		rd = master;
		wr = streaming;
		timeout.tv_sec = 0;
		timeout.tv_usec = 15000;
		itmp = select(fdmax + 1, &rd, &wr, NULL, &timeout);
		if (itmp > 0) {
			fdmax = listener;
			LIST_FOREACH(client, &clients, entries) {
				kick = 0;
				oarg = NULL;
				do {
					if (FD_ISSET(client->s, &rd)) {
						if ((itmp = recv(client->s, buf, 4094, 0))>6) {
							if (client->f) {
								/* Data from a client that is already being
								 * streamed to: drop it. */
								kick = 1;
								break;
							}
							else {
								buf[itmp] = '\0';
								c1 = buf[5];        /* feed selector character */
								c2 = buf[6];        /* mode digit */
								targ = strtok_s(buf + 5, " \n\r",&context);

								if (targ) {
									/* Parse optional ?k=v&k=v query string
									 * updating the BOUNDS crop window. */
									oarg = strchr(targ, '?');
									if (oarg) {
										oarg = strtok_s(oarg, "?&", &context);
										while ((oarg)){
											if (oarg && strlen(oarg) > 2){
												switch (oarg[0]){
													case 'x':
														BOUNDS.x = atoi((const char *)oarg+2);
														CLAMP(BOUNDS.x, 0, 320);
														break;
													case 'X':
														BOUNDS.X = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.X, 0, 320);
														break;
													case 'y':
														BOUNDS.y = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.y, 0, 240);
														break;
													case 'Y':
														BOUNDS.Y = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.Y, 0, 240);
														break;
													case 'z':
														BOUNDS.z = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.z, 0, 8191);
														break;
													case 'Z':
														BOUNDS.Z = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.Z, 0, 8191);
														break;
													case 'c':
														BOUNDS.c = atoi((const char *)oarg + 2);
														CLAMP(BOUNDS.c, 0, 65535);
														break;
													default: kick = 0;  goto esc;
												}
												oarg = strtok_s(NULL, "?&", &context);
											}
											else{
												kick = 1;
												goto esc;
											}
										}
									}
								}
								if (kick) {
									kick = 1;
									goto esc;
								}
							}
						esc:
							/* Normalize the mode digit; out-of-range -> 0. */
							c2 -= '0';
							if (c2 < 0 || c2 > 9) c2 = 0;
							switch (c1) {
							case '1':
								FCHK(gclients, gentries);
								break;
							case '2':
								FCHK(vclients, ventries);
								break;
							default:
								FCHK(dclients, dentries);
								break;
							}
							client->m = c2;
						}
						else {
							/* recv <= 6 bytes: disconnect or garbage. */
							kick = 1;
						}
					}
					else if (FD_ISSET(client->s, &wr)) {
						/* NOTE(review): no break between these cases --
						 * confirm STREAM expands to include one, or that
						 * fallthrough is intended. */
						switch (client->f->h->t) {
						case 1:
							STREAM(gclients, gentries, 0, client->f->h->ml - 11);
						case 2:
							STREAM(vclients, ventries, 0, client->f->h->ml - 11);
						default:
							STREAM(dclients, dentries, 59, client->f->h->ml - 5);
						}
					}
					else {
						fdmax = MAX(client->s, fdmax);
					}
				} while (0);
				if (kick) {
					/* Tear down this client; tclient keeps the list link so
					 * LIST_FOREACH can continue after free(). */
					FD_CLR(client->s, &master);
					if (client->f) {
						DFRAME();
						FD_CLR(client->s, &streaming);
					}
					closesocket(client->s);

					tclient.entries.le_next = client->entries.le_next;
					LIST_REMOVE(client, entries);
					free(client);
					client = &tclient;
				}
			}
			if (FD_ISSET(listener, &rd)) {
				/* Fixed: accept() signals failure with INVALID_SOCKET (not
				 * SOCKET_ERROR), and an invalid handle must not be closed. */
				if ((newfd = accept(listener, NULL, NULL)) != INVALID_SOCKET) {
						NONBLOCKING(newfd);
						setsockopt(newfd, IPPROTO_TCP, TCP_NODELAY, (const char*)&yes, sizeof(int));
						client = (client_t *)calloc(1, sizeof(client_t));
						if (client == NULL) {
							/* Out of memory: refuse the connection. */
							closesocket(newfd);
						}
						else {
							client->s = newfd;
							client->b = 0;
							client->f = NULL;
							client->m = 0;
							client->c = 0;
							client->t = 0;
							fdmax = MAX(newfd, fdmax);
							FD_SET(newfd, &master);
							LIST_INSERT_HEAD(&clients, client, entries);
						}
				}
			}
		}
	}


	closesocket(listener);
	WSACleanup();
	return 0;
}

/// <summary>
/// Constructor: initializes all sensor handles to their invalid sentinels
/// and allocates the RGBX scratch buffer / clears the shared depth tables.
/// </summary>
CDepthBasics::CDepthBasics() :
    m_hNextDepthFrameEvent(INVALID_HANDLE_VALUE),
    m_hNextVideoFrameEvent(INVALID_HANDLE_VALUE),   // was uninitialized; the destructor reads it
    m_pDepthStreamHandle(INVALID_HANDLE_VALUE),
    m_pVideoStreamHandle(INVALID_HANDLE_VALUE),     // was uninitialized
    m_bNearMode(false),
    m_pNuiSensor(NULL)
{
    m_depthRGBX = new BYTE[cDepthWidth*cDepthHeight*cBytesPerPixel];
	memset(depth_ref,0,FRAME_PIXELS*2);
    memset(depth,0,320*2);
    memset(l_count,0,(FRAME_PIXELS/2+1)*sizeof(uint32_t));
}

/// <summary>
/// Destructor: shuts the sensor down, closes the frame events and releases
/// the pixel buffer allocated in the constructor.
/// </summary>
CDepthBasics::~CDepthBasics()
{
    if (m_pNuiSensor)
    {
        m_pNuiSensor->NuiShutdown();
        // NOTE(review): the INuiSensor COM reference is never Release()d
        // here -- confirm whether that is intentional at process exit.
    }

    if (m_hNextDepthFrameEvent != INVALID_HANDLE_VALUE)
    {
        CloseHandle(m_hNextDepthFrameEvent);
    }
	// NOTE(review): m_hNextVideoFrameEvent is not initialized in the
	// constructor as written -- this test may read an indeterminate value.
	if (m_hNextVideoFrameEvent != INVALID_HANDLE_VALUE)
	{
		CloseHandle(m_hNextVideoFrameEvent);
	}

	// Was leaked before: matches the new[] in the constructor.
	delete[] m_depthRGBX;
}

/// <summary>
/// Runs the capture loop: configures the two JPEG compressors, (re)connects
/// to the first available Kinect and pumps depth/video frames until the
/// global shutdown flag is set. Does not return normally (calls exit).
/// </summary>
int CDepthBasics::Run()
{
	
	u_long iMode = 1;
	
	//::NuiSetDeviceStatusCallback(&StatusProc, this);
    const int eventCount = 1;
  //  HANDLE hEvents[eventCount];
	connected = 0;
	// Main message loop
	static int count;
	
	
	JSET(ginfo, JCS_GRAYSCALE, 1);
	JSET(vinfo, JCS_EXT_RGBX, 4);
	while (!die){
		if (!connected){
			if (CreateFirstConnected() != E_FAIL){
				connected = 1;
			}
			Sleep(3000);
		}
		Update();
	}
	
	exit(0);

}


void CDepthBasics::Update()
{
	HANDLE h[2];
	h[0] = m_hNextDepthFrameEvent;
	h[1] = m_hNextVideoFrameEvent;
	DWORD ret;
	ret = WaitForMultipleObjects(2, h, FALSE, 0);
	if(ret == WAIT_OBJECT_0){
		ProcessDepth();
	}
	if (ret == WAIT_OBJECT_0 + 1){
		ProcessVideo();
	}
}



/// <summary>
/// Finds the first connected Kinect sensor, initializes it for depth+color
/// and opens both image streams.
/// </summary>
/// <returns>S_OK (or the last stream-open HRESULT) on success,
/// E_FAIL when no usable sensor is found or any step fails.</returns>
HRESULT CDepthBasics::CreateFirstConnected()
{
    INuiSensor * pNuiSensor;
    HRESULT hr;

    int iSensorCount = 0;
	// Drop any previously attached sensor before rescanning.
	if (NULL != m_pNuiSensor){
		printf("Released\n");
		m_pNuiSensor->Release();
		m_pNuiSensor->NuiShutdown();
		m_pNuiSensor = NULL;
	}
    hr = NuiGetSensorCount(&iSensorCount);
	printf("Count %d\n", iSensorCount);
    if (FAILED(hr))
    {
        return hr;
    }

    // Pick the first sensor that reports a healthy status.
    for (int i = 0; i < iSensorCount; ++i)
    {
        hr = NuiCreateSensorByIndex(i, &pNuiSensor);
        if (FAILED(hr))
        {
            continue;
        }

        hr = pNuiSensor->NuiStatus();
        if (S_OK == hr)
        {
            m_pNuiSensor = pNuiSensor;
            break;
        }

        pNuiSensor->Release();
    }
	if (NULL != m_pNuiSensor)
    do{
		hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH | NUI_INITIALIZE_FLAG_USES_COLOR);
		// Fixed: previously a failed NuiInitialize fell through and still
		// called NuiImageStreamOpen on the uninitialized sensor.
		if (FAILED(hr)) break;

		m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
		m_hNextVideoFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
		hr = m_pNuiSensor->NuiImageStreamOpen(
			NUI_IMAGE_TYPE_DEPTH,
			NUI_IMAGE_RESOLUTION_320x240,
			0,
			2,
			m_hNextDepthFrameEvent,
			&m_pDepthStreamHandle);
		if (FAILED(hr)) break;

		hr  = m_pNuiSensor->NuiImageStreamOpen(
			NUI_IMAGE_TYPE_COLOR,
			NUI_IMAGE_RESOLUTION_640x480,
			0,
			2,
			m_hNextVideoFrameEvent,
			&m_pVideoStreamHandle);
	} while (0);

    if (NULL == m_pNuiSensor || FAILED(hr))
    {
        printf("%s\n","No ready Kinect found!");
        return E_FAIL;
    }

    return hr;
}

/// <summary>
/// Grabs the next 640x480 RGBX color frame, downsamples it to 320x240
/// (every other pixel of every other row), JPEG-compresses it and publishes
/// the result to the network thread via FSWAP.
/// </summary>
void CDepthBasics::ProcessVideo()
{
	HRESULT hr;
	NUI_IMAGE_FRAME imageFrame;
	// Fixed: was `unsigned char *buf[256]` -- an array of 256 POINTERS,
	// not a 256-byte scratch buffer as the sprintf_s/memcpy below expect.
	unsigned char buf[256];
	hr = m_pNuiSensor->NuiImageStreamGetNextFrame(m_pVideoStreamHandle, 0, &imageFrame);
	if (FAILED(hr))
	{
		// Stream lost: flag for reconnection in Run().
		connected = 0;
		m_pNuiSensor->NuiShutdown();
		return;
	}
	uint16_t jlen,i,j,k;
	BOOL nearMode;
	INuiFrameTexture* pTexture;

	// Get the color image pixel texture
	pTexture = imageFrame.pFrameTexture;


	NUI_LOCKED_RECT LockedRect;
	frame_t *vframe = NULL;
	frame_t *tframe = NULL;
	// Lock the frame data so the Kinect knows not to modify it while we're reading it
	pTexture->LockRect(0, &LockedRect, NULL, 0);

	// Make sure we've received valid data
	if (LockedRect.Pitch != 0)
	{
		const BYTE* frame = (const BYTE*)LockedRect.pBits;
		CFRAME(vframe, vhead, 1);
		jpeg_set_frame(&vinfo, vframe->buf + 53, &jlen);
		jpeg_start_compress(&vinfo, TRUE);
		// One 320-pixel RGBX scanline; zero-initialized so the unused X
		// padding byte is deterministic.
		unsigned char rgb[320 * 4] = {0};
		// Source rows are 640*4 = 2560 bytes; the inner loop consumes one
		// full row (320 * 8 bytes) and the outer `frame += 2560` skips the
		// next one, yielding a 2x decimation in both axes. The source is
		// BGRX; bytes are swapped into RGB order here.
		for (i = 0; i < 240; i++,frame+=2560){
			for (j = 0,k=0; j < 320; j++,k+=4,frame+=8){
				rgb[k] = frame[2];
				rgb[k+2] = frame[0];
				rgb[k + 1] = frame[1];
			}
			unsigned char*jrgb = (unsigned char*)rgb;
			jpeg_write_scanlines(&vinfo,&jrgb, 1);
		}
		jpeg_finish_compress(&vinfo);
		Cnt++;
		// Overwrite the frame's leading bytes with the HTTP chunk header
		// carrying the compressed length (jlen was set by term_destination).
		memcpy(vframe->buf, buf, sprintf_s((char *)buf, 256, STR_JPEG, (int)jlen));
		vframe->l = jlen + 53;
		vframe->t = Cnt;
		// Publish; FSWAP hands back the previously published frame (if any)
		// for us to free.
		FSWAP(vframe, nvframe);
		if (vframe){
			free(vframe);
		}
		vframe = NULL;
	}

	pTexture->UnlockRect(0);
	m_pNuiSensor->NuiImageStreamReleaseFrame(m_pVideoStreamHandle, &imageFrame);
}
/// <summary>
/// Handle new depth data
/// </summary>
/// <summary>
/// Handle new depth data: in a single pass over the cropped depth image,
/// (1) JPEG-compresses a grayscale rendering, and (2) performs run-length
/// connected-component labeling (via the IF_THR/PROC/JUMP macros from
/// intrael.h). A second pass merges runs into blobs, and the per-blob
/// statistics are serialized as a JSON array and published via FSWAP.
/// </summary>
void CDepthBasics::ProcessDepth()
{
    HRESULT hr;
    NUI_IMAGE_FRAME imageFrame;
	// Attempt to get the depth frame
    hr = m_pNuiSensor->NuiImageStreamGetNextFrame(m_pDepthStreamHandle, 0, &imageFrame);
    if (FAILED(hr))
    {
		// Stream lost: flag for reconnection in Run().
		connected = 0;
		m_pNuiSensor->NuiShutdown();
        return;
    }
	uint16_t jlen;
    BOOL nearMode;
    INuiFrameTexture* pTexture;

    // Get the depth image pixel texture
    hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(
        m_pDepthStreamHandle, &imageFrame, &nearMode, &pTexture);
    if (FAILED(hr))
    {
        goto ReleaseFrame;
    }

    NUI_LOCKED_RECT LockedRect;
	frame_t *dframe = NULL;
	frame_t *gframe = NULL;
	frame_t *tframe = NULL;
    // Lock the frame data so the Kinect knows not to modify it while we're reading it
    pTexture->LockRect(0, &LockedRect, NULL, 0);

    // Make sure we've received valid data
    if (LockedRect.Pitch != 0)
    {
        // Get the min and max reliable depth for the current frame
        int minDepth = (nearMode ? NUI_IMAGE_DEPTH_MINIMUM_NEAR_MODE : NUI_IMAGE_DEPTH_MINIMUM) >> NUI_IMAGE_PLAYER_INDEX_SHIFT;
        int maxDepth = (nearMode ? NUI_IMAGE_DEPTH_MAXIMUM_NEAR_MODE : NUI_IMAGE_DEPTH_MAXIMUM) >> NUI_IMAGE_PLAYER_INDEX_SHIFT;
		// Scanline pointers handed to jpeg_write_scanlines: `black` for rows
		// outside the BOUNDS crop, `gray` for rows produced by the labeler.
		unsigned char*jblack = (unsigned char*)black;
		unsigned char*jgray = (unsigned char*)gray;



        BYTE * rgbrun = m_depthRGBX;
        const NUI_DEPTH_IMAGE_PIXEL * pBufferRun = reinterpret_cast<const NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits);
		// Labeling state consumed by the IF_THR/PROC/JUMP macros; static so
		// the macros can rely on it (NOTE(review): makes this non-reentrant).
		static int itmp;
		static uint16_t X,i,dz,dZ,dzv,dZv,x,y,rawz,rawZ,len;
        static int32_t ni,l,n,rs,re,rt,er,sr,label,running;
        USHORT da;
		NUI_DEPTH_IMAGE_PIXEL * frame = reinterpret_cast<NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits);
		NUI_DEPTH_IMAGE_PIXEL * depth = reinterpret_cast<NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits);
		 do {
                        // Pass 1: walk the crop window 8 pixels at a time,
                        // thresholding against BOUNDS.z..Z (PROC records runs)
                        // while streaming grayscale scanlines into the JPEG.
                       l = n = 1 ;
                        y =BOUNDS.y;
						int Y = BOUNDS.Y;
						CFRAME(gframe, ghead, 1);
						jpeg_set_frame(&ginfo, gframe->buf + 53, &jlen);
						jpeg_start_compress(&ginfo, TRUE);
						memset(black, 0, 320);
						// Emit black scanlines for the rows above the crop.
						for (i = 0; i < y; i++) jpeg_write_scanlines(&ginfo, &jblack, TRUE);
						i = BOUNDS.x;

                        running = label = dz = dZ = dzv = dZv = ni = rs = rt = re =  0;
                        itmp=i+320*y;
                        rawz = BOUNDS.z;
						rawZ = BOUNDS.Z;
						frame += itmp;
						while(1) {
                                IF_THR(0) {PROC(0);}
                                IF_THR(1) {PROC(1);}
                                IF_THR(2) {PROC(2);}
                                IF_THR(3) {PROC(3);}
                                IF_THR(4) {PROC(4);}
                                IF_THR(5) {PROC(5);}
                                IF_THR(6) {PROC(6);}
                                IF_THR(7) {PROC(7);}
                                IF_NEXT() {
                                        frame += 8;
                                }
                                else {
										// End of a row: flush it to the JPEG
										// and let JUMP() advance/terminate.
										jpeg_write_scanlines(&ginfo, &jgray, TRUE);
										memset(gray, 0, 320);
                                        JUMP();
                                }
                        }
						Cnt++;
						// Pad the rows below the crop and finalize the JPEG.
						for (; y < 240; y++) jpeg_write_scanlines(&ginfo, &jblack, TRUE);
						jpeg_finish_compress(&ginfo);
						static char buf[256];
						// Prepend the HTTP chunk header carrying the length.
						memcpy(gframe->buf, buf, sprintf_s(buf,256, STR_JPEG, jlen));
						gframe->l = jlen + 53;
						gframe->t = Cnt;
						memset(gray, 0, 320);
						// Publish; FSWAP returns the previous frame to free.
						FSWAP(gframe, ngframe);
						if (gframe){
							free(gframe);
						}
						gframe = NULL;

                        // Pass 2: resolve each run's final label through the
                        // equivalence table and accumulate per-blob stats
                        // (pixel count, centroid sums, depth extrema, runs).
                        ni=0;
                        while(--n) {
                                label = r_label[r_label[run_label[n]]];
                                rs=run_s[n];
                                re=run_e[n];
                                y=run_y[n];
                                l=re-rs;
                                if(l_count[label]) {
                                        dzv=run_zv[n];
                                        dZv=run_Zv[n];
                                        if(rs < run_s[l_pos_x[label]]) l_pos_x[label] = n;
                                        if(re > run_e[l_pos_X[label]]) l_pos_X[label] = n;
                                        if(dzv < run_zv[l_pos_z[label]]) l_pos_z[label] = n;
                                        if(dZv > run_Zv[l_pos_Z[label]]) l_pos_Z[label] = n;
                                        l_count[label] += l;
                                        l_pos_y[label] = n;
                                        l_cx[label]+=((l*(rs+re)));
                                        l_vrun[label] += y;
                                        l_cy[label]+= l*y;
                                        l_sum[label] += run_sum[n];
                                        l_runs[label]++;
                                } else {
                                        // First run seen for this label:
                                        // initialize its accumulators.
                                        l_pos_z[label] = n;
                                        l_pos_Z[label] = n;
                                        l_pos_x[label] = n;
                                        l_pos_X[label] = n;
                                        l_pos_y[label] = n;
                                        l_pos_Y[label] = n;
                                        l_count[label] = l;
                                        l_cx[label]=((l*(rs+re)));
                                        l_vrun[label] = y;
                                        l_cy[label]=l*y;
                                        l_sum[label] = run_sum[n];
                                        l_runs[label] = 1;
                                        l_checked[ni++]=label;
                                }
                        }
						// Serialize: header record first (frame metadata and
						// the current BOUNDS), then one record per blob.
						CFRAME(dframe, dhead, 0);
						len = 59 + sprintf_s(dframe->buf + 59, 512, "[%u,%d,%d,%u,%u,%u,%u,%u,%u,%u,%u,%f,%f,%f,%d,%d", 0, 0, 0, BOUNDS.x, BOUNDS.X, BOUNDS.y, BOUNDS.Y, BOUNDS.z, BOUNDS.Z, BOUNDS.c, FRAME_PIXELS, 0, 0, 0, 0, 0);
                        while(ni--) {
                                label=l_checked[ni];
                                l=l_count[label];
                                l_count[label]=0;
                                // NOTE(review): the 4096 cap and the 512-byte
                                // sprintf_s bound assume dframe->buf is large
                                // enough (CFRAME size unknown here) -- confirm.
                                if(len > 4096) continue;
                                if((l < BOUNDS.c)) continue;   // below size threshold
                                int posx = l_pos_x[label];
                                int posX = l_pos_X[label];
                                int posy = l_pos_y[label];
                                int posY = l_pos_Y[label];
                                int posz = l_pos_z[label];
                                int posZ = l_pos_Z[label];
                                // Centroid from the accumulated run sums.
                                x= l_cx[label]/(2*l);
                                y= l_cy[label]/l;
                                da = (uint32_t) (x+320*y);
                                dzv=depth_to_mm[l_sum[label]/l];
                                int rshift=0;
								len += sprintf_s(dframe->buf + len, 512, ",%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%d", x, y, dzv, depth_ref[da], run_s[posx], run_y[posx], run_sv[posx], depth_ref[320 * run_y[posx] + run_s[posx]], run_e[posX], run_y[posX], run_ev[posX], depth_ref[320 * run_y[posX] + run_e[posX]], run_s[posy], run_y[posy], run_sv[posy], depth_ref[320 * run_y[posy] + run_s[posy]], run_e[posY], run_y[posY], run_ev[posY], depth_ref[320 * run_y[posY] + run_e[posY]], run_z[posz], run_y[posz], run_zv[posz], depth_ref[320 * run_y[posz] + run_z[posz]], run_Z[posZ], run_y[posZ], run_Zv[posZ], depth_ref[320 * run_y[posZ] + run_Z[posZ]], l, l_runs[label], (uint32_t)l_vrun[label] / l_runs[label], rshift);
                        }

						dframe->buf[len++] = ']';
						dframe->buf[len++] = '\n';
						dframe->buf[len++] = '\n';
						// Prepend the HTTP header carrying the body length.
						memcpy(dframe->buf, buf, sprintf_s(buf,256, STR_JSON, len - 59));
						dframe->l = len;
                        dframe->t = Cnt;
						// Publish; FSWAP returns the previous frame to free.
						FSWAP(dframe, ndframe);
						if (dframe) free(dframe);
						dframe = NULL;
                } while(0);

    }

    // We're done with the texture so unlock it
    pTexture->UnlockRect(0);

    pTexture->Release();

ReleaseFrame:
    // Release the frame
    m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame);
}
