Kinect for Windows - Gesture Recognition, Part 1: A C++ Implementation

Implementing gesture recognition in C++ is fairly involved, so this example recognizes only two gestures: grip (closing the hand into a fist) and grip release (opening the hand). Nothing else is implemented.

First, a screenshot of the result:

[Screenshot: the demo's main window showing the RGB view, depth view, skeleton view, and gesture messages]

The program opens the Kinect RGB stream, depth stream, skeleton stream, and interaction stream. The interaction interface needs both the skeleton data and the depth data for its computation.
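
Before diving in, here is a minimal sketch of the data flow between the streams and the interaction object (simplified from the full listing below; error handling omitted):

// Feed every depth frame into the interaction stream...
pNuiIStream->ProcessDepth(lockedRect.size, PBYTE(lockedRect.pBits), imageFrame.liTimeStamp);

// ...feed every skeleton frame, together with the accelerometer (gravity) reading...
Vector4 gravity;
pSensor->NuiAccelerometerGetCurrentReading(&gravity);
pNuiIStream->ProcessSkeleton(NUI_SKELETON_COUNT, skeletonFrame.SkeletonData, &gravity, skeletonFrame.liTimeStamp);

// ...and when the interaction event is signaled, read back the computed hand events.
NUI_INTERACTION_FRAME interactionFrame;
pNuiIStream->GetNextFrame(0, &interactionFrame);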

The full code follows:

/*******************************************************************
 *  Copyright(c) 2014-2015 传智播客
 *  All rights reserved.
 *
 *  File:        main.cpp
 *  Description: Demonstrates gesture recognition with Kinect for Windows
 *
 *  Created:     2014-01-20
 *  Author:
 *  Version:     1.0
 *
 *  Modified:
 *  Author:
 *  Notes:
 ******************************************************************/
#include <Windows.h>
#include <NuiApi.h>
#include <KinectInteraction.h>
#include <d2d1.h>
#include "resource.h"

// Interaction client. INuiInteractionClient is an abstract interface that must be
// implemented to supply control information (press/grip targets) to the interaction stream.
class CIneractionClient:public INuiInteractionClient
{
public:
    CIneractionClient()
    {;}
    ~CIneractionClient()
    {;}

    STDMETHOD(GetInteractionInfoAtLocation)(THIS_ DWORD skeletonTrackingId, NUI_HAND_TYPE handType, FLOAT x, FLOAT y, _Out_ NUI_INTERACTION_INFO *pInteractionInfo)
    {
        if(pInteractionInfo)
        {
            pInteractionInfo->IsPressTarget         = false;
            pInteractionInfo->PressTargetControlId  = 0;
            pInteractionInfo->PressAttractionPointX = 0.f;
            pInteractionInfo->PressAttractionPointY = 0.f;
            pInteractionInfo->IsGripTarget          = false;
            return S_OK;
        }
        return E_POINTER;
    }

    // The client lives inside a global struct rather than on the heap, so real COM
    // reference counting is unnecessary; dummy counts are returned instead.
    STDMETHODIMP_(ULONG)    AddRef()                                    { return 2;     }
    STDMETHODIMP_(ULONG)    Release()                                   { return 1;     }
    STDMETHODIMP            QueryInterface(REFIID riid, void **ppv)     { if(!ppv) return E_POINTER; *ppv = this; return S_OK; }

};
static const float g_JointThickness = 3.0f;
static const float g_TrackedBoneThickness = 6.0f;
static const float g_InferredBoneThickness = 1.0f;
// Keep the globals together in one struct instead of scattering them
struct
{
	DWORD			width;
	DWORD			height;
	HWND			hWnd;         // main window handle
	HWND			hWndColor;    // RGB view handle
	HWND			hWndDepth;    // depth view handle
	HWND			hWndSkeleton; // skeleton view handle
	HWND			hWndEdit;     // message output (edit control) handle

	INuiSensor*		pSensor;          // the Kinect sensor
	HANDLE			hEventRGB;        // color-frame-ready event
	HANDLE			hEventDepth;      // depth-frame-ready event
	HANDLE			hEventSkeleton;   // skeleton-frame-ready event
	HANDLE			hEventInteration; // interaction-frame-ready event

	HANDLE			hColorStream;    // color stream
	HANDLE			hDepthStream;    // depth stream
	HANDLE			hSkeletonStream; // skeleton stream

	INuiInteractionStream*	pNuiIStream; // interaction stream
	CIneractionClient		nuiIClient;  // interaction client, supplies control info

	ID2D1Factory*	pD2DFactory;  // Direct2D drawing objects
	ID2D1HwndRenderTarget*		pRenderTargetRGB;
	ID2D1HwndRenderTarget*		pRenderTargetDepth;
	ID2D1HwndRenderTarget*		pRenderTargetSkeleton;

	// RGB display
	ID2D1Bitmap*	pBitmapRGB;   // bitmap cache for the color image

	BYTE*			pDepthRGBBuf; // depth image converted to BGRX pixels
	ID2D1Bitmap*	pBitmapDepth; // bitmap cache for the depth image

	// skeleton tracking
	ID2D1SolidColorBrush*    pBrushJointTracked;
    ID2D1SolidColorBrush*    pBrushJointInferred;
    ID2D1SolidColorBrush*    pBrushBoneTracked;
    ID2D1SolidColorBrush*    pBrushBoneInferred;
    D2D1_POINT_2F            Points[NUI_SKELETON_POSITION_COUNT];

	BOOL			bNearMode;  // near mode
	BOOL			bSeat;      // seated mode
} g_data;

// A small helper that releases a COM interface and nulls the pointer
template<class Interface>
inline void SafeRelease( Interface *& pInterfaceToRelease )
{
    if ( pInterfaceToRelease != NULL )
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}

// Initialize the global data
void initGlobalData()
{
	g_data.hWnd = NULL;
	g_data.pSensor = NULL;
	g_data.hEventRGB = NULL;
	g_data.hEventDepth = NULL;
	g_data.hEventSkeleton = NULL;
	g_data.bNearMode = FALSE;
	g_data.bSeat = FALSE;
	g_data.hColorStream = NULL;
	g_data.hDepthStream = NULL;
	g_data.hSkeletonStream = NULL;
	g_data.width = 640;
	g_data.height = 480;
	g_data.pDepthRGBBuf = new BYTE[g_data.width * g_data.height * 4];
}

// Message handler for the dialog window
LRESULT CALLBACK DlgFunc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
	switch(uMsg)
	{
	case WM_INITDIALOG:
		g_data.hWnd = hWnd;
		g_data.hWndColor = GetDlgItem(hWnd, IDC_RGB);
		g_data.hWndDepth = GetDlgItem(hWnd, IDC_DEPTH);
		g_data.hWndSkeleton = GetDlgItem(hWnd, IDC_SKELETON);
		g_data.hWndEdit = GetDlgItem(hWnd, IDC_MESSAGE);
		break;
	case WM_CLOSE:
	case WM_DESTROY:
		PostQuitMessage(0);
		break;
	case WM_COMMAND:
		break;
	}
	return FALSE;
}

// Initialize the Direct2D factory
HRESULT initD2D()
{
	HRESULT hr;
	hr = D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, &g_data.pD2DFactory);
	if(FAILED(hr)) return hr;

	return hr;
}
// Create the Kinect sensor object
HRESULT createSensor()
{
	INuiSensor*& pSensor = g_data.pSensor;
	HRESULT hr;
	int sensorCount = 0;
	hr = NuiGetSensorCount(&sensorCount);
	if(FAILED(hr)) return hr;
	if(sensorCount < 1) return E_FAIL;  // no sensor attached

	for(int i=0; i<sensorCount; ++i)
	{
		hr = NuiCreateSensorByIndex(i, &pSensor);
		if(FAILED(hr)) continue;

		hr = pSensor->NuiStatus();
		if(S_OK == hr)
			return S_OK;

		pSensor->Release();
	}

	return hr;
}

// Initialize the Kinect device and open the streams
HRESULT initSensor()
{
	HRESULT hr;
	INuiSensor*& pSensor = g_data.pSensor;
	// The sample needs RGB, depth (with player index) and skeleton data, so several flags are passed at initialization
	hr = pSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON);
	if(FAILED(hr))
	{
		pSensor->Release();
		return hr;
	}

	// Open the color stream
	g_data.hEventRGB = CreateEvent( NULL, TRUE, FALSE, NULL );
	hr = pSensor->NuiImageStreamOpen(
        NUI_IMAGE_TYPE_COLOR,
		NUI_IMAGE_RESOLUTION_640x480,
		0,
		2,
        g_data.hEventRGB, &g_data.hColorStream);
    if( FAILED( hr ) )
    {
        return hr;
    }

	// Open the depth stream (with player index)
	g_data.hEventDepth = CreateEvent( NULL, TRUE, FALSE, NULL );
	hr = pSensor->NuiImageStreamOpen(
        NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
        NUI_IMAGE_RESOLUTION_640x480, 0, 2,
		g_data.hEventDepth, &g_data.hDepthStream);
    if( FAILED( hr ) )
    {
        return hr;
    }

	// Enable the skeleton stream
	g_data.hEventSkeleton = CreateEvent( NULL, TRUE, FALSE, NULL );
    hr = pSensor->NuiSkeletonTrackingEnable(
        g_data.hEventSkeleton,
        NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE
        );
    if( FAILED( hr ) )
    {
        return hr;
    }

	// Create and enable the interaction stream; it consumes the depth and skeleton
	// data and signals hEventInteration when a new interaction frame is ready
	g_data.hEventInteration = CreateEvent( NULL,TRUE,FALSE,NULL );
	hr = NuiCreateInteractionStream(pSensor,(INuiInteractionClient *)&g_data.nuiIClient, &g_data.pNuiIStream);
    if( FAILED( hr ) )
    {
        return hr;
    }
    hr = g_data.pNuiIStream->Enable(g_data.hEventInteration);
	if( FAILED( hr ) )
    {
        return hr;
    }

	return hr;
}

void Cleanup()
{
	// CreateEvent returns NULL (not INVALID_HANDLE_VALUE) on failure
	if(g_data.hEventDepth) CloseHandle(g_data.hEventDepth);
	if(g_data.hEventRGB) CloseHandle(g_data.hEventRGB);
	if(g_data.hEventSkeleton) CloseHandle(g_data.hEventSkeleton);
	if(g_data.hEventInteration) CloseHandle(g_data.hEventInteration);


	SafeRelease(g_data.pD2DFactory);
	if(g_data.pSensor) g_data.pSensor->NuiShutdown();
	SafeRelease(g_data.pSensor);
}

// Append a line of text to the message edit control
void OutputMessage(LPCWSTR msg)
{
	static BOOL first = TRUE;
	if(!first)
	{
		::SendMessageW(g_data.hWndEdit, EM_REPLACESEL, 0, (LPARAM)L"\r\n");
	}
	// Move the caret to the end of the text, then insert the new text there
	::SendMessageW(g_data.hWndEdit, EM_SETSEL, -1, -1);
	::SendMessageW(g_data.hWndEdit, EM_REPLACESEL, 0, (LPARAM)msg);
	first = FALSE;
}

HRESULT EnsureResourcesDepth()
{
    HRESULT hr = S_OK;

	if (NULL == g_data.pRenderTargetDepth)
    {
		D2D1_SIZE_U size = D2D1::SizeU(g_data.width, g_data.height);

        D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties();
        rtProps.pixelFormat = D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE);
        rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE;

        // Create a hWnd render target, in order to render to the window set in initialize
		hr = g_data.pD2DFactory->CreateHwndRenderTarget(
            rtProps,
			D2D1::HwndRenderTargetProperties(g_data.hWndDepth, size),
			&g_data.pRenderTargetDepth
            );

        if ( FAILED(hr) )
        {
            return hr;
        }

        // Create a bitmap that we can copy image data into and then render to the target
		hr = g_data.pRenderTargetDepth->CreateBitmap(
            size,
            D2D1::BitmapProperties( D2D1::PixelFormat( DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE) ),
			&g_data.pBitmapDepth
            );

        if ( FAILED(hr) )
        {
			SafeRelease(g_data.pRenderTargetDepth);
            return hr;
        }
    }

    return hr;
}

void DiscardResourcesDepth()
{
	SafeRelease(g_data.pRenderTargetDepth);
	SafeRelease(g_data.pBitmapDepth);
}

// Draw the converted depth image (the BGRX buffer) into the depth render target
HRESULT DrawRGBMapDepth(BYTE* data, unsigned long size)
{
	int sourceStride = g_data.width * sizeof (long);
	HRESULT hr = EnsureResourcesDepth();
	if(FAILED(hr)) return hr;

	hr = g_data.pBitmapDepth->CopyFromMemory(NULL, data,  sourceStride);
	if(FAILED(hr)) return hr;

	g_data.pRenderTargetDepth->BeginDraw();
	g_data.pRenderTargetDepth->DrawBitmap(g_data.pBitmapDepth);
	hr = g_data.pRenderTargetDepth->EndDraw();
	if(hr == D2DERR_RECREATE_TARGET)
	{
		hr = S_OK;
		DiscardResourcesDepth();
	}
	return hr;
}
void DrawDepth(const NUI_LOCKED_RECT&  lockedRect, BOOL bNearMode)
{
	if(lockedRect.Pitch != 0)
	{
		int minDepth = (bNearMode ? NUI_IMAGE_DEPTH_MINIMUM_NEAR_MODE : NUI_IMAGE_DEPTH_MINIMUM) >> NUI_IMAGE_PLAYER_INDEX_SHIFT;
		int maxDepth = (bNearMode ? NUI_IMAGE_DEPTH_MAXIMUM_NEAR_MODE : NUI_IMAGE_DEPTH_MAXIMUM) >> NUI_IMAGE_PLAYER_INDEX_SHIFT;
		BYTE* rgbrun = g_data.pDepthRGBBuf;
		const NUI_DEPTH_IMAGE_PIXEL* pBufferRun = reinterpret_cast<const NUI_DEPTH_IMAGE_PIXEL *>(lockedRect.pBits);
		const NUI_DEPTH_IMAGE_PIXEL * pBufferEnd = pBufferRun + (g_data.width * g_data.height);

		while ( pBufferRun < pBufferEnd )
		{
			// discard the portion of the depth that contains only the player index
			USHORT depth = pBufferRun->depth;

			// To convert to a byte, we're discarding the most-significant
			// rather than least-significant bits.
			// We're preserving detail, although the intensity will "wrap."
			// Values outside the reliable depth range are mapped to 0 (black).

			// Note: Using conditionals in this loop could degrade performance.
			// Consider using a lookup table instead when writing production code.
			BYTE intensity = static_cast<BYTE>(depth >= minDepth && depth <= maxDepth ? depth % 256 : 0);

			// Write out blue byte (equal B, G and R produce a grayscale image)
			*(rgbrun++) = intensity;

			// Write out green byte
			*(rgbrun++) = intensity;

			// Write out red byte
			*(rgbrun++) = intensity;

			// We're outputting BGR, the last byte in the 32 bits is unused so skip it
			// If we were outputting BGRA, we would write alpha here.
			++rgbrun;

			// Increment our index into the Kinect's depth buffer
			++pBufferRun;
		}

		DrawRGBMapDepth(g_data.pDepthRGBBuf, g_data.width * g_data.height * 4);
	}

}

// Process a depth frame
void ProcessDepth()
{
	NUI_IMAGE_FRAME pImageFrame;
    INuiFrameTexture* pDepthImagePixelFrame;
	HRESULT hr = g_data.pSensor->NuiImageStreamGetNextFrame(g_data.hDepthStream, 0, &pImageFrame );
	if( FAILED( hr ) ) return;
    BOOL nearMode = TRUE;
    g_data.pSensor->NuiImageFrameGetDepthImagePixelFrameTexture(g_data.hDepthStream, &pImageFrame, &nearMode, &pDepthImagePixelFrame);
    INuiFrameTexture * pTexture = pDepthImagePixelFrame;
    NUI_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );  

	DrawDepth(LockedRect, nearMode);
    if( LockedRect.Pitch != 0 )
    {
		// Hand the depth data to the interaction stream. Interaction results are computed
		// from depth plus skeleton data, so every incoming depth frame is forwarded to it
		HRESULT hr = g_data.pNuiIStream->ProcessDepth(LockedRect.size,PBYTE(LockedRect.pBits),pImageFrame.liTimeStamp);
        if( FAILED( hr ) )
        {
			OutputMessage(L"error");
        }
    }
    pTexture->UnlockRect(0);
    g_data.pSensor->NuiImageStreamReleaseFrame( g_data.hDepthStream, &pImageFrame );
}
HRESULT EnsureResourcesRGB()
{
    HRESULT hr = S_OK;

	if (NULL == g_data.pRenderTargetRGB)
    {
		D2D1_SIZE_U size = D2D1::SizeU(g_data.width, g_data.height);

        D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties();
        rtProps.pixelFormat = D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE);
        rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE;

        // Create a hWnd render target, in order to render to the window set in initialize
		hr = g_data.pD2DFactory->CreateHwndRenderTarget(
            rtProps,
			D2D1::HwndRenderTargetProperties(g_data.hWndColor, size),
			&g_data.pRenderTargetRGB
            );

        if ( FAILED(hr) )
        {
            return hr;
        }

        // Create a bitmap that we can copy image data into and then render to the target
		hr = g_data.pRenderTargetRGB->CreateBitmap(
            size,
            D2D1::BitmapProperties( D2D1::PixelFormat( DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE) ),
			&g_data.pBitmapRGB
            );

        if ( FAILED(hr) )
        {
			SafeRelease(g_data.pRenderTargetRGB);
            return hr;
        }
    }

    return hr;
}
void DiscardResourcesRGB()
{
	SafeRelease(g_data.pRenderTargetRGB);
	SafeRelease(g_data.pBitmapRGB);
}
HRESULT DrawRGBMapRGB(BYTE* data, unsigned long size)
{
	int sourceStride = g_data.width * sizeof (long);
	HRESULT hr = EnsureResourcesRGB();
	if(FAILED(hr)) return hr;

	hr = g_data.pBitmapRGB->CopyFromMemory(NULL, data,  sourceStride);
	if(FAILED(hr)) return hr;

	g_data.pRenderTargetRGB->BeginDraw();
	g_data.pRenderTargetRGB->DrawBitmap(g_data.pBitmapRGB);
	hr = g_data.pRenderTargetRGB->EndDraw();
	if(hr == D2DERR_RECREATE_TARGET)
	{
		hr = S_OK;
		DiscardResourcesRGB();
	}
	return hr;
}
void ProcessRGB()
{
	// Grab and draw the next color frame; the color stream plays no part in tracking and is shown for reference only
	HRESULT hr;
	NUI_IMAGE_FRAME imageFrame;
	hr = g_data.pSensor->NuiImageStreamGetNextFrame(g_data.hColorStream, 0, &imageFrame);
	if(FAILED(hr))  return;

	INuiFrameTexture* pTexture = imageFrame.pFrameTexture;
	NUI_LOCKED_RECT lockedRect;
	pTexture->LockRect(0, &lockedRect, NULL, 0);
	if(lockedRect.Pitch != 0)
	{
		DrawRGBMapRGB(lockedRect.pBits, lockedRect.size);
	}
	pTexture->UnlockRect(0);
	g_data.pSensor->NuiImageStreamReleaseFrame(g_data.hColorStream, &imageFrame);
}

// Process an interaction frame.
// Once the interaction stream has received enough depth and skeleton data,
// it computes the hand events and signals the application.
void ProcessInteration()
{
	NUI_INTERACTION_FRAME Interaction_Frame;
	auto ret = g_data.pNuiIStream->GetNextFrame( 0,&Interaction_Frame );
    if( FAILED( ret  ) ) {
        OutputMessage(L"Failed GetNextFrame");
        return ;
    }

    int trackingID = 0;
    int event=0;

	// Report grip or grip-release events based on the data the interaction stream produced
    for(int i=0;i<NUI_SKELETON_COUNT;i++)
    {
        trackingID = Interaction_Frame.UserInfos[i].SkeletonTrackingId;

        // Each user carries an array of hand pointers (one per hand);
        // this sample only inspects the first entry
        event = Interaction_Frame.UserInfos[i].HandPointerInfos[0].HandEventType;

		WCHAR info[128];

        if ( event == NUI_HAND_EVENT_TYPE_GRIP) {
			// X and Y are FLOATs; wsprintfW cannot format floats, so use swprintf_s
			swprintf_s(info, L"Grip x=%.2f, y=%.2f", Interaction_Frame.UserInfos[i].HandPointerInfos[0].X, Interaction_Frame.UserInfos[i].HandPointerInfos[0].Y);
			OutputMessage(info);
        }
        else if ( event == NUI_HAND_EVENT_TYPE_GRIPRELEASE) {
			swprintf_s(info, L"Release x=%.2f, y=%.2f", Interaction_Frame.UserInfos[i].HandPointerInfos[0].X, Interaction_Frame.UserInfos[i].HandPointerInfos[0].Y);
			OutputMessage(info);
        }
    }

    return ;
}
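
// Note: the loop above inspects only the first hand pointer of each user.
// A sketch (not part of the original sample) of how both hands could be
// examined, assuming HandPointerInfos holds one entry per hand:
//
//     for(int h = 0; h < 2; ++h)
//     {
//         const NUI_HANDPOINTER_INFO& hand = Interaction_Frame.UserInfos[i].HandPointerInfos[h];
//         if(hand.HandEventType == NUI_HAND_EVENT_TYPE_GRIP)
//             OutputMessage(L"Grip");  // grip detected on hand h
//     }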

HRESULT EnsureResourcesSkeleton()
{
    HRESULT hr = S_OK;

	// If there isn't currently a render target, we need to create one
	if (NULL == g_data.pRenderTargetSkeleton)
    {
        RECT rc;
		GetWindowRect( g_data.hWndSkeleton, &rc );  

        int width = rc.right - rc.left;
        int height = rc.bottom - rc.top;
        D2D1_SIZE_U size = D2D1::SizeU( width, height );
        D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties();
        rtProps.pixelFormat = D2D1::PixelFormat( DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE);
        rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE;

        // Create a Hwnd render target, in order to render to the window set in initialize
		hr = g_data.pD2DFactory->CreateHwndRenderTarget(
            rtProps,
			D2D1::HwndRenderTargetProperties(g_data.hWndSkeleton, size),
            &g_data.pRenderTargetSkeleton
            );
        if ( FAILED(hr) )
        {
            return hr;
        }

        //light green
        g_data.pRenderTargetSkeleton->CreateSolidColorBrush(D2D1::ColorF(0.27f, 0.75f, 0.27f), &g_data.pBrushJointTracked);
        g_data.pRenderTargetSkeleton->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Yellow, 1.0f), &g_data.pBrushJointInferred);
        g_data.pRenderTargetSkeleton->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Green, 1.0f), &g_data.pBrushBoneTracked);
        g_data.pRenderTargetSkeleton->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Gray, 1.0f), &g_data.pBrushBoneInferred);
    }

    return hr;
}

void DiscardResourcesSkeleton()
{
	SafeRelease(g_data.pRenderTargetSkeleton);
	SafeRelease(g_data.pBrushJointTracked);
    SafeRelease(g_data.pBrushJointInferred);
    SafeRelease(g_data.pBrushBoneTracked);
    SafeRelease(g_data.pBrushBoneInferred);
}

D2D1_POINT_2F SkeletonToScreen(Vector4 skeletonPoint, int width, int height)
{
    LONG x, y;
    USHORT depth;

    // Calculate the skeleton's position on the screen
    // NuiTransformSkeletonToDepthImage returns coordinates in NUI_IMAGE_RESOLUTION_320x240 space
    NuiTransformSkeletonToDepthImage(skeletonPoint, &x, &y, &depth);

	float screenPointX = static_cast<float>(x * width) / 320;
	float screenPointY = static_cast<float>(y * height) / 240;
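
	// Example: depth-space point (160, 120) drawn into a 640x480 window
	// maps to (160*640/320, 120*480/240) = (320, 240), the window center.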

    return D2D1::Point2F(screenPointX, screenPointY);

}
void DrawBone(const NUI_SKELETON_DATA & skel, NUI_SKELETON_POSITION_INDEX joint0, NUI_SKELETON_POSITION_INDEX joint1)
{
    NUI_SKELETON_POSITION_TRACKING_STATE joint0State = skel.eSkeletonPositionTrackingState[joint0];
    NUI_SKELETON_POSITION_TRACKING_STATE joint1State = skel.eSkeletonPositionTrackingState[joint1];

    // If we can't find either of these joints, exit
    if (joint0State == NUI_SKELETON_POSITION_NOT_TRACKED || joint1State == NUI_SKELETON_POSITION_NOT_TRACKED)
    {
        return;
    }

    // Don't draw if both points are inferred
    if (joint0State == NUI_SKELETON_POSITION_INFERRED && joint1State == NUI_SKELETON_POSITION_INFERRED)
    {
        return;
    }

    // We assume all drawn bones are inferred unless BOTH joints are tracked
    if (joint0State == NUI_SKELETON_POSITION_TRACKED && joint1State == NUI_SKELETON_POSITION_TRACKED)
    {
        g_data.pRenderTargetSkeleton->DrawLine(g_data.Points[joint0], g_data.Points[joint1], g_data.pBrushBoneTracked, g_TrackedBoneThickness);
    }
    else
    {
        g_data.pRenderTargetSkeleton->DrawLine(g_data.Points[joint0], g_data.Points[joint1], g_data.pBrushBoneInferred, g_InferredBoneThickness);
    }
}
void DrawSkeleton(const NUI_SKELETON_DATA& skel, int windowWidth, int windowHeight)
{
	int i;

	// Convert each joint position into screen coordinates
    for (i = 0; i < NUI_SKELETON_POSITION_COUNT; ++i)
    {
        g_data.Points[i] = SkeletonToScreen(skel.SkeletonPositions[i], windowWidth, windowHeight);
    }

    // Draw the torso; each DrawBone call takes the skeleton data plus the two joints the bone connects
    DrawBone(skel, NUI_SKELETON_POSITION_HEAD, NUI_SKELETON_POSITION_SHOULDER_CENTER); // head to shoulder center
    DrawBone(skel, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT);  // shoulder center to left shoulder
    DrawBone(skel, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT);  // shoulder center to right shoulder, and so on below
    DrawBone(skel, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE);
    DrawBone(skel, NUI_SKELETON_POSITION_SPINE, NUI_SKELETON_POSITION_HIP_CENTER);
    DrawBone(skel, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_LEFT);
    DrawBone(skel, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_RIGHT);

    // Left Arm
    DrawBone(skel, NUI_SKELETON_POSITION_SHOULDER_LEFT, NUI_SKELETON_POSITION_ELBOW_LEFT);
    DrawBone(skel, NUI_SKELETON_POSITION_ELBOW_LEFT, NUI_SKELETON_POSITION_WRIST_LEFT);
    DrawBone(skel, NUI_SKELETON_POSITION_WRIST_LEFT, NUI_SKELETON_POSITION_HAND_LEFT);

    // Right Arm
    DrawBone(skel, NUI_SKELETON_POSITION_SHOULDER_RIGHT, NUI_SKELETON_POSITION_ELBOW_RIGHT);
    DrawBone(skel, NUI_SKELETON_POSITION_ELBOW_RIGHT, NUI_SKELETON_POSITION_WRIST_RIGHT);
    DrawBone(skel, NUI_SKELETON_POSITION_WRIST_RIGHT, NUI_SKELETON_POSITION_HAND_RIGHT);

    // Left Leg
    DrawBone(skel, NUI_SKELETON_POSITION_HIP_LEFT, NUI_SKELETON_POSITION_KNEE_LEFT);
    DrawBone(skel, NUI_SKELETON_POSITION_KNEE_LEFT, NUI_SKELETON_POSITION_ANKLE_LEFT);
    DrawBone(skel, NUI_SKELETON_POSITION_ANKLE_LEFT, NUI_SKELETON_POSITION_FOOT_LEFT);

    // Right Leg
    DrawBone(skel, NUI_SKELETON_POSITION_HIP_RIGHT, NUI_SKELETON_POSITION_KNEE_RIGHT);
    DrawBone(skel, NUI_SKELETON_POSITION_KNEE_RIGHT, NUI_SKELETON_POSITION_ANKLE_RIGHT);
    DrawBone(skel, NUI_SKELETON_POSITION_ANKLE_RIGHT, NUI_SKELETON_POSITION_FOOT_RIGHT);

    // Draw the joints
    for (i = 0; i < NUI_SKELETON_POSITION_COUNT; ++i)
    {
        D2D1_ELLIPSE ellipse = D2D1::Ellipse( g_data.Points[i], g_JointThickness, g_JointThickness );

        if ( skel.eSkeletonPositionTrackingState[i] == NUI_SKELETON_POSITION_INFERRED )
        {
            g_data.pRenderTargetSkeleton->DrawEllipse(ellipse, g_data.pBrushJointInferred);
        }
        else if ( skel.eSkeletonPositionTrackingState[i] == NUI_SKELETON_POSITION_TRACKED )
        {
            g_data.pRenderTargetSkeleton->DrawEllipse(ellipse, g_data.pBrushJointTracked);
        }
    }
}

void DrawSkeleton1(const NUI_SKELETON_FRAME& skeletonFrame)
{

	HRESULT hr;

	hr = ::EnsureResourcesSkeleton();
	if(FAILED(hr)) return;

	g_data.pRenderTargetSkeleton->BeginDraw();
	g_data.pRenderTargetSkeleton->Clear();
	RECT rc;
	GetClientRect( g_data.hWndSkeleton, &rc);
	int width = rc.right;
	int height = rc.bottom;

	for( int i=0; i < NUI_SKELETON_COUNT; ++i)
	{
		const NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
		if(NUI_SKELETON_TRACKED == trackingState)  // fully tracked: draw the whole skeleton
		{
			DrawSkeleton(skeletonFrame.SkeletonData[i], width, height);
		}
		else if(NUI_SKELETON_POSITION_ONLY == trackingState)  // position only: draw a single marker
		{
			D2D1_ELLIPSE ellipse = D2D1::Ellipse(
				SkeletonToScreen(skeletonFrame.SkeletonData[i].Position, width, height),
				g_JointThickness,
				g_JointThickness);
			g_data.pRenderTargetSkeleton->DrawEllipse(ellipse, g_data.pBrushJointTracked);
		}
	}
	hr = g_data.pRenderTargetSkeleton->EndDraw();

	if(D2DERR_RECREATE_TARGET == hr)
		::DiscardResourcesSkeleton();
}
// Process a skeleton frame
void ProcessSkeleton()
{
	NUI_SKELETON_FRAME SkeletonFrame = {0};
    HRESULT hr = g_data.pSensor->NuiSkeletonGetNextFrame( 0, &SkeletonFrame );
    if( FAILED( hr ) )
    {
        OutputMessage(L"Get Skeleton Image Frame Failed");
        return;
    }


    g_data.pSensor->NuiTransformSmooth(&SkeletonFrame,NULL);
	DrawSkeleton1(SkeletonFrame);
    // The interaction stream also needs the sensor's current accelerometer (gravity) reading
    Vector4 v;
    g_data.pSensor->NuiAccelerometerGetCurrentReading(&v);

	// Hand the skeleton data to the interaction stream. The interaction computation
	// needs both skeleton and depth data, so every skeleton frame the application
	// receives is forwarded immediately; once the stream has processed enough data,
	// it signals the application with a new interaction frame.
	hr =g_data.pNuiIStream->ProcessSkeleton(NUI_SKELETON_COUNT,
        SkeletonFrame.SkeletonData,
        &v,
        SkeletonFrame.liTimeStamp);
    if( FAILED( hr ) )
    {
        OutputMessage(L"Process Skeleton failed");
    }

}

int Run(HINSTANCE hInst, int show)
{
	MSG msg = {0};
	WNDCLASS wc;

	ZeroMemory(&wc, sizeof(wc));
	wc.style	 = CS_HREDRAW | CS_VREDRAW;
	wc.cbWndExtra    = DLGWINDOWEXTRA;
    wc.hInstance     = hInst;
    wc.hCursor       = LoadCursorW(NULL, IDC_ARROW);
    wc.hIcon         = NULL;
    wc.lpfnWndProc   = DefDlgProcW;
    wc.lpszClassName = L"KinectInteration";
	if(!RegisterClass(&wc))
	{
		return -1;
	}
	g_data.hWnd = CreateDialogParamW(hInst,MAKEINTRESOURCE(IDD_DLG), NULL, (DLGPROC)DlgFunc, NULL);
	ShowWindow(g_data.hWnd, show);

	if(FAILED(initD2D()))
	{
		MessageBox(g_data.hWnd, L"初始化DirectX失败", L"错误", MB_OK);
		return 0;
	}

	if(FAILED(createSensor()))
	{
		MessageBox(g_data.hWnd, L"没有找到体感设备", L"错误", MB_OK);
		return 0;
	}

	if(FAILED(initSensor()))
	{
		MessageBox(g_data.hWnd, L"初始化体感设备失败", L"错误", MB_OK);
		return 0;
	}

	HANDLE hEvents[4];
	hEvents[0] = g_data.hEventDepth;
	hEvents[1] = g_data.hEventInteration;
	hEvents[2] = g_data.hEventRGB;
	hEvents[3] = g_data.hEventSkeleton;

	while(WM_QUIT != msg.message)
	{
		MsgWaitForMultipleObjects(4, hEvents, FALSE, INFINITE, QS_ALLINPUT);
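		// The wait above wakes when any stream event is signaled or a window
		// message arrives; each event is then polled with a zero timeout to
		// find out which streams actually have a frame ready.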
		if(WAIT_OBJECT_0 == WaitForSingleObject(g_data.hEventDepth, 0))
		{
			ProcessDepth();
		}
		if(WAIT_OBJECT_0 == WaitForSingleObject(g_data.hEventInteration, 0))
		{
			ProcessInteration();
		}
		if(WAIT_OBJECT_0 == WaitForSingleObject(g_data.hEventRGB, 0))
		{
			ProcessRGB();
		}
		if(WAIT_OBJECT_0 == WaitForSingleObject(g_data.hEventSkeleton, 0))
		{
			ProcessSkeleton();
		}

		if(PeekMessageW(&msg, NULL, 0, 0, PM_REMOVE))
		{
			if( g_data.hWnd != NULL && IsDialogMessageW(g_data.hWnd, &msg))
			{
				continue;
			}
			TranslateMessage(&msg);
			DispatchMessageW(&msg);
		}
	}

	Cleanup();
	return (int)msg.wParam;
}

int APIENTRY wWinMain(HINSTANCE hInst, HINSTANCE hPrevInstance, LPWSTR lpCmdLine, int nCmdShow)
{
	initGlobalData();
	return Run(hInst, nCmdShow);
}
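
A note on building: KinectInteraction.h and the interaction runtime ship with the Kinect for Windows Developer Toolkit rather than the base SDK. For toolkit version 1.8 this typically means linking Kinect10.lib plus KinectInteraction180_32.lib (or the 64-bit variants) and keeping KinectInteraction180_32.dll next to the executable; check your installed toolkit for the exact file names.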