Classes
- InteractionFrame
- InteractionFrameReadyEventArgs
- InteractionHandPointer
- InteractionInfo
- InteractionStream
- KinectRuntimeExtensions
- UserInfo
Interfaces
Enumerations
可以透過Interaction去判斷目前手的狀態是握拳或是放開,不過Interaction又是由Kinect Toolkit所提供的
我們要先下載Kinect Toolkit,目前版本是1.8.0
引用『Microsoft.Kinect.dll』和『Microsoft.Kinect.Toolkit.Interaction.dll』
XAML:
<Window x:Class="WpfApplication2.MainWindow" xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" Title="MainWindow" Height="520" Width="720" Loaded="Window_Loaded"> <Grid> <Image Name="Image" Height="480" Width="640" Margin="0,0,72,10"></Image> <TextBlock Name="text1" HorizontalAlignment="Left" Margin="645,20,0,0" TextWrapping="Wrap" Text="" VerticalAlignment="Top" Width="67"/> <TextBlock Name="text2" HorizontalAlignment="Left" Margin="645,104,0,0" TextWrapping="Wrap" Text="" VerticalAlignment="Top" Width="67"/> </Grid> </Window>
程式碼:
using Microsoft.Kinect;
using Microsoft.Kinect.Toolkit.Interaction;
using System.Linq;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;

namespace WpfApplication2
{
    #region Interaction client implementation
    /// <summary>
    /// Minimal <see cref="IInteractionClient"/> implementation. The interaction
    /// stream asks this client what kind of interactive target lies under each
    /// hand pointer; answering "grip and press target everywhere" is enough to
    /// make the stream deliver Grip/GripRelease hand events.
    /// </summary>
    public class DummyInteractionClient : IInteractionClient
    {
        public InteractionInfo GetInteractionInfoAtLocation(
            int skeletonTrackingId, InteractionHandType handType, double x, double y)
        {
            return new InteractionInfo
            {
                IsGripTarget = true,
                IsPressTarget = true,
                PressAttractionPointX = 0.5,
                PressAttractionPointY = 0.5,
                PressTargetControlId = 1
            };
        }
    }
    #endregion

    public partial class MainWindow : Window
    {
        private KinectSensor kinect;                 // first connected sensor
        private InteractionStream interactionstream; // fuses depth + skeleton into hand events
        private byte[] dataPixels;                   // reusable color-frame pixel buffer

        public MainWindow()
        {
            InitializeComponent();
        }

        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            // Use the first sensor only when one is attached and not already running.
            if (KinectSensor.KinectSensors.Count > 0 && KinectSensor.KinectSensors[0].IsRunning != true)
            {
                kinect = KinectSensor.KinectSensors[0];

                // Depth stream: near range is the key setting for close-up hand tracking.
                kinect.DepthStream.Range = DepthRange.Near;
                kinect.DepthFrameReady += kinect_DepthFrameReady;
                kinect.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

                // Skeleton stream: seated mode (tracks the 10 upper-body joints)
                // plus near-range tracking so skeletons are found close to the sensor.
                kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                kinect.SkeletonStream.EnableTrackingInNearRange = true;
                kinect.SkeletonFrameReady += kinect_SkeletonFrameReady;
                kinect.SkeletonStream.Enable();

                // Color stream for the on-screen camera preview.
                kinect.ColorFrameReady += kinect_ColorFrameReady;
                kinect.ColorStream.Enable();

                // The interaction stream combines depth + skeleton data and raises
                // InteractionFrameReady with per-user hand pointer information.
                interactionstream = new InteractionStream(kinect, new DummyInteractionClient());
                interactionstream.InteractionFrameReady += interactionstream_InteractionFrameReady;

                kinect.Start();
            }
            else
            {
                MessageBox.Show("請連接上Kinect");
            }
        }

        #region 自訂方法
        /// <summary>
        /// Walks every hand pointer of the given user and reports a grip on the
        /// matching TextBlock: text1 for the right hand, text2 for the left hand.
        /// </summary>
        /// <param name="info">Interaction data of one tracked user.</param>
        private void ParseHandEvent(UserInfo info)
        {
            var hands = info.HandPointers;
            if (hands.Count > 0)
            {
                foreach (InteractionHandPointer hand in hands)
                {
                    if (hand.HandType == InteractionHandType.Right)
                    {
                        if (hand.HandEventType == InteractionHandEventType.Grip)
                            text1.Text = "右手握拳";
                    }
                    else if (hand.HandType == InteractionHandType.Left)
                    {
                        if (hand.HandEventType == InteractionHandEventType.Grip)
                            text2.Text = "左手握拳";
                    }
                }
            }
        }
        #endregion

        #region Kinect事件
        // Interaction frame ready: pick the first tracked user and parse hand state.
        private void interactionstream_InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e)
        {
            using (InteractionFrame frame = e.OpenInteractionFrame())
            {
                if (frame != null)
                {
                    UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
                    frame.CopyInteractionDataTo(userInfos);

                    // SkeletonTrackingId > 0 means the slot holds a real tracked user;
                    // untracked slots carry id 0.
                    var user = (from u in userInfos
                                where u.SkeletonTrackingId > 0
                                select u).FirstOrDefault();
                    if (user != null)
                        ParseHandEvent(user);
                }
            }
        }

        // Depth frame ready: forward raw depth pixels to the interaction stream.
        private void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame != null)
                {
                    interactionstream.ProcessDepth(frame.GetRawPixelData(), frame.Timestamp);
                }
            }
        }

        // Skeleton frame ready: forward skeleton data plus the current
        // accelerometer (gravity) reading to the interaction stream.
        private void kinect_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            using (SkeletonFrame frame = e.OpenSkeletonFrame())
            {
                if (frame != null)
                {
                    Skeleton[] skeletons = new Skeleton[frame.SkeletonArrayLength];
                    frame.CopySkeletonDataTo(skeletons);
                    Vector4 vector4 = kinect.AccelerometerGetCurrentReading();
                    interactionstream.ProcessSkeleton(skeletons, vector4, frame.Timestamp);
                }
            }
        }

        // Color frame ready: draw the camera image. The pixel buffer is reused
        // between frames (instead of a 1.2 MB allocation 30x per second), and the
        // bitmap dimensions/stride come from the frame itself rather than being
        // hard-coded to 640x480, so any enabled color format renders correctly.
        private void kinect_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            using (ColorImageFrame frame = e.OpenColorImageFrame())
            {
                if (frame != null)
                {
                    if (dataPixels == null || dataPixels.Length != frame.PixelDataLength)
                        dataPixels = new byte[frame.PixelDataLength];
                    frame.CopyPixelDataTo(dataPixels);

                    int stride = frame.Width * frame.BytesPerPixel;
                    BitmapSource source = BitmapSource.Create(
                        frame.Width, frame.Height, 96, 96,
                        PixelFormats.Bgr32, null, dataPixels, stride);
                    Image.Source = source;
                }
            }
        }
        #endregion
    }
}
程式載入時,判斷Kinect跟電腦連接了沒,以及目前的狀態
//判斷Kinect有連接上,並沒在運行 if (KinectSensor.KinectSensors.Count > 0 && KinectSensor.KinectSensors[0].IsRunning != true) { kinect = KinectSensor.KinectSensors[0];
設定深度、骨架以及色彩和互動事件
//深度追蹤 kinect.DepthStream.Range = DepthRange.Near; kinect.DepthFrameReady += kinect_DepthFrameReady; kinect.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30); //骨架追蹤 kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated; kinect.SkeletonStream.EnableTrackingInNearRange = true; kinect.SkeletonFrameReady += kinect_SkeletonFrameReady; kinect.SkeletonStream.Enable(); //色彩 kinect.ColorFrameReady += kinect_ColorFrameReady; kinect.ColorStream.Enable(); interactionstream = new InteractionStream(kinect, new DummyInteractionClient()); interactionstream.InteractionFrameReady += interactionstream_InteractionFrameReady;
關鍵在於深度距離設定為近
kinect.DepthStream.Range = DepthRange.Near;
骨架追蹤設定為坐著,支持近距離追蹤骨架,SkeletonTrackingMode為Seated最多追蹤10個點
kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated; kinect.SkeletonStream.EnableTrackingInNearRange = true;
互動串流的建構方法,帶入的DummyInteractionClient類別實做了IInteractionClient介面
interactionstream = new InteractionStream(kinect, new DummyInteractionClient());
深度事件,取得深度框架時,將框架資料和時間標記由互動串流去處理
private void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e) { using (DepthImageFrame frame = e.OpenDepthImageFrame()) { if (frame != null) { interactionstream.ProcessDepth(frame.GetRawPixelData(), frame.Timestamp); } } }
骨架事件,取得骨架框架時,將骨架的資料傳給互動串流去處理
private void kinect_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e) { using (SkeletonFrame frame = e.OpenSkeletonFrame()) { if (frame != null) { Skeleton[] skeletons = new Skeleton[frame.SkeletonArrayLength]; frame.CopySkeletonDataTo(skeletons); Vector4 vector4 = kinect.AccelerometerGetCurrentReading(); interactionstream.ProcessSkeleton(skeletons, vector4, frame.Timestamp); } } }
互動事件,取得使用者資訊後,再傳入ParseHandEvent方法作處理
private void interactionstream_InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e) { using (InteractionFrame frame = e.OpenInteractionFrame()) { if (frame != null) { UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength]; frame.CopyInteractionDataTo(userInfos); var user = (from u in userInfos where u.SkeletonTrackingId > 0 select u).FirstOrDefault(); if (user != null) ParseHandEvent(user); } } }
從使用者參數去知道,目前手的數量是否大於0,並走訪每一個手掌,從手掌的資訊去判斷手是握拳還是放開
private void ParseHandEvent(UserInfo info) { var hands = info.HandPointers; if (hands.Count > 0) { foreach (InteractionHandPointer hand in hands) { if (hand.HandType == InteractionHandType.Right) { if (hand.HandEventType == InteractionHandEventType.Grip) text1.Text = "右手握拳"; } else if (hand.HandType == InteractionHandType.Left) { if (hand.HandEventType == InteractionHandEventType.Grip) text2.Text = "左手握拳"; } } } }
如果出現InvalidOperationException,代表你的System資料夾底下沒有相對應的dll檔案
為什麼需要『KinectInteraction180_32.dll』呢?因為Kinect Toolkit的互動功能是呼叫『KinectInteraction180_32.dll』這個函式庫來實現的
把『KinectInteraction180_32.dll』和『KinectInteraction180_64.dll』分別放到System32以及System資料夾底下
參考資料:
http://msdn.microsoft.com/en-us/library/microsoft.kinect.toolkit.interaction.aspx
http://msdn.microsoft.com/en-us/library/microsoft.kinect.toolkit.interaction.interactionstream.aspx
http://msdn.microsoft.com/en-us/library/microsoft.kinect.toolkit.interaction.userinfo.aspx
http://social.msdn.microsoft.com/Forums/zh-TW/607ab2b6-3279-4238-850f-c842ba7e5309/how-to-implement-iinteractionclient-in-c
http://social.msdn.microsoft.com/Forums/en-US/e4f5a696-ed4f-4a5f-8e54-4b3706f62ad0/kinect-interactions?forum=kinectsdknuiapi
http://www.soulsolutions.com.au/Blog/tabid/73/EntryId/858/Kinect-For-Windows-Interactions-Gallery-Interaction-Stream.aspx
http://social.msdn.microsoft.com/forums/en-US/0b0b5d7d-e1e8-4498-9c10-d8088650bacb/how-to-get-the-interactionstream-working-properly