diff --git a/README.md b/README.md
index 9dadd4d4de..a042759b66 100644
--- a/README.md
+++ b/README.md
@@ -86,29 +86,32 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
Camera resolution
+ Camera stream correlation
DASH streaming
Direct2D photo adjustment
- Media editing
+ Media editing
+ Media import
Media transport controls
+
+
MIDI
Playlists
+ Simple imaging
- Simple imaging
Spatial audio
System media transport controls
+ Transcoding media
- Transcoding media
Video playback
Video playback synchronization
+ Video stabilization
- Video stabilization
Windows audio session (WASAPI)
- Windows media import
diff --git a/Samples/AllJoyn/ConsumerExperiences/cs/Scenario2ViewModel.cs b/Samples/AllJoyn/ConsumerExperiences/cs/Scenario2ViewModel.cs
index 5b0773c948..c63cde8c0e 100644
--- a/Samples/AllJoyn/ConsumerExperiences/cs/Scenario2ViewModel.cs
+++ b/Samples/AllJoyn/ConsumerExperiences/cs/Scenario2ViewModel.cs
@@ -16,7 +16,6 @@
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Runtime.CompilerServices;
-using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Input;
@@ -24,6 +23,7 @@
using Windows.Devices.WiFi;
using Windows.Networking.Connectivity;
using Windows.Security.Credentials;
+using Windows.Security.Cryptography;
using Windows.UI.Core;
using Windows.UI.Xaml;
@@ -884,7 +884,7 @@ private async void AttemptOnboardingAsync(string ssid, string password, short au
{
UpdateStatusAsync("Attempting to configure onboardee...", NotifyType.StatusMessage);
- OnboardingConfigureWiFiResult configureWifiResult = await m_consumer.ConfigureWiFiAsync(ssid, password, authType);
+ OnboardingConfigureWiFiResult configureWifiResult = await m_consumer.ConfigureWiFiAsync(ssid, ConvertUtf8ToHex(password), authType);
if (configureWifiResult.Status == AllJoynStatus.Ok)
{
UpdateStatusAsync("Onboardee sucessfully configured.", NotifyType.StatusMessage);
@@ -908,6 +908,20 @@ private async void AttemptOnboardingAsync(string ssid, string password, short au
ClearPasswords();
}
+ private static string ConvertUtf8ToHex(string inputString)
+ {
+ if (string.IsNullOrEmpty(inputString))
+ {
+ return string.Empty;
+ }
+ else
+ {
+ var tempBuffer = CryptographicBuffer.ConvertStringToBinary(inputString, BinaryStringEncoding.Utf8);
+ var hexString = CryptographicBuffer.EncodeToHexString(tempBuffer);
+ return hexString;
+ }
+ }
+
private async void AttemptConnectionAsync()
{
OnboardingConnectResult connectResult = await m_consumer.ConnectAsync();
diff --git a/Samples/AllJoyn/ConsumerExperiences/js/js/scenario2.js b/Samples/AllJoyn/ConsumerExperiences/js/js/scenario2.js
index de6dafd1a8..8e48efe73e 100644
--- a/Samples/AllJoyn/ConsumerExperiences/js/js/scenario2.js
+++ b/Samples/AllJoyn/ConsumerExperiences/js/js/scenario2.js
@@ -440,15 +440,8 @@
}
function convertUtf8ToHex(str) {
- var result = "";
- var hex;
-
- for (var i = 0; i < str.length; i++) {
- hex = str.charCodeAt(i).toString(16);
- result += hex;
- }
-
- return result;
+ var tempBuffer = Windows.Security.Cryptography.CryptographicBuffer.convertStringToBinary(str, Windows.Security.Cryptography.BinaryStringEncoding.Utf8);
+ return Windows.Security.Cryptography.CryptographicBuffer.encodeToHexString(tempBuffer);
}
function disposeConsumer() {
diff --git a/Samples/AllJoyn/ProducerExperiences/cs/Scenario2ViewModel.cs b/Samples/AllJoyn/ProducerExperiences/cs/Scenario2ViewModel.cs
index 7441c7c0af..4d60f36433 100644
--- a/Samples/AllJoyn/ProducerExperiences/cs/Scenario2ViewModel.cs
+++ b/Samples/AllJoyn/ProducerExperiences/cs/Scenario2ViewModel.cs
@@ -15,10 +15,10 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.CompilerServices;
-using System.Text;
using System.Threading.Tasks;
using System.Windows.Input;
using Windows.Devices.AllJoyn;
+using Windows.Security.Cryptography;
using Windows.UI.Core;
using Windows.UI.Xaml;
@@ -309,7 +309,7 @@ private async void OnboardingService_ConnectRequestRecieved(object sender, Event
}
else
{
- if (AppData.OnboardingConfigurePassphrase.Equals(AppData.SampleNetworkPassword, StringComparison.OrdinalIgnoreCase))
+ if (AppData.OnboardingConfigurePassphrase.Equals(ConvertUtf8ToHex(AppData.SampleNetworkPassword), StringComparison.OrdinalIgnoreCase))
{
returnArg.Value1 = (short)ConnectionResultCode.Validated;
returnArg.Value2 = "Connected successfully";
@@ -352,6 +352,20 @@ private async void OnboardingService_ConnectRequestRecieved(object sender, Event
}
}
+ static private string ConvertUtf8ToHex(string inputString)
+ {
+ if (string.IsNullOrEmpty(inputString))
+ {
+ return string.Empty;
+ }
+ else
+ {
+ var tempBuffer = CryptographicBuffer.ConvertStringToBinary(inputString, BinaryStringEncoding.Utf8);
+ var hexString = CryptographicBuffer.EncodeToHexString(tempBuffer);
+ return hexString;
+ }
+ }
+
private async void UpdateStatusAsync(string status, NotifyType statusType)
{
await m_dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
diff --git a/Samples/AllJoyn/ProducerExperiences/js/js/scenario2.js b/Samples/AllJoyn/ProducerExperiences/js/js/scenario2.js
index c48c8ca1f3..55fcd59b27 100644
--- a/Samples/AllJoyn/ProducerExperiences/js/js/scenario2.js
+++ b/Samples/AllJoyn/ProducerExperiences/js/js/scenario2.js
@@ -250,15 +250,8 @@
}
function convertUtf8ToHex(str) {
- var result = "";
- var hex;
-
- for (var i = 0; i < str.length; i++) {
- hex = str.charCodeAt(i).toString(16);
- result += hex;
- }
-
- return result;
+ var tempBuffer = Windows.Security.Cryptography.CryptographicBuffer.convertStringToBinary(str, Windows.Security.Cryptography.BinaryStringEncoding.Utf8);
+ return Windows.Security.Cryptography.CryptographicBuffer.encodeToHexString(tempBuffer);
}
function reportStatus(message) {
diff --git a/Samples/CameraStreamCorrelation/README.md b/Samples/CameraStreamCorrelation/README.md
new file mode 100644
index 0000000000..1b4ebce21f
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/README.md
@@ -0,0 +1,88 @@
+
+
+# Camera stream correlation sample
+
+This sample shows how to use spatially correlated color and depth cameras and the depth frames
+to map image pixels from one camera to another using a
+[DepthCorrelatedCoordinateMapper](https://msdn.microsoft.com/library/windows/apps/windows.media.devices.core.depthcorrelatedcoordinatemapper.aspx),
+as well as decoding a vendor-specific media frame layout with a
+[BufferMediaFrame](https://msdn.microsoft.com/library/windows/apps/Windows.Media.Capture.Frames.BufferMediaFrame.aspx).
+
+This sample demonstrates how to:
+
+- Find cameras which support color, depth and pose tracking respectively.
+- Create FrameReaders and read frames from multiple sources concurrently.
+- Map depth camera's pixels onto correlated color camera.
+- Process color and depth frames pixel by pixel to produce a background removal effect.
+- Detect if a vendor-specific camera source exists using a vendor-specific sub type. In this sample, we look for a pose tracking stream.
+- Use a vendor-specific buffer layout to decode a 1D BufferMediaFrame.
+- Overlay skeletal tracking points onto the color camera coordinate system.
+
+### Correlation of multiple capture sources
+
+Use the DepthCorrelatedCoordinateMapper class to map depth space pixels to color
+space pixels.
+
+Use the camera intrinsics of the color camera to project skeletal
+tracking points on top of the color image.
+
+### 1D camera frame with BufferMediaFrame
+
+With the new BufferMediaFrame in the media capture APIs, cameras can also support a 1D media frame format.
+Using the "Perception" major media type and a vendor's custom sub media type, a camera
+in this sample can expose a PoseTrackingFrame and the app can overlay skeletal tracking points.
+
+### Vendor-specific media frame format
+
+This sample also demonstrates how camera vendors can use a WinRT library to wrap a vendor-specific
+camera frame layout in a WinRT class so that apps can easily consume the vendor-specific data
+with the same coding patterns as Windows APIs.
+
+**Note** The Windows universal samples for Windows 10 require Visual Studio 2015 Update 2
+and Windows SDK version 14393 or above to build.
+
+To obtain information about Windows 10 development, go to the [Windows Dev Center](https://dev.windows.com).
+
+## See also
+
+### Samples
+
+[CameraFrames](/Samples/CameraFrames)
+
+### Reference
+
+[Windows.Media.Capture.Frames namespace](https://msdn.microsoft.com/library/windows/apps/windows.media.capture.frames.aspx)
+
+[Windows.Media.Devices.Core.DepthCorrelatedCoordinateMapper](https://msdn.microsoft.com/library/windows/apps/windows.media.devices.core.depthcorrelatedcoordinatemapper.aspx)
+
+[Windows.Media.Devices.Core.CameraIntrinsics](https://msdn.microsoft.com/library/windows/apps/windows.media.devices.core.cameraintrinsics.aspx)
+
+## System requirements
+
+**Client:** Windows 10 build 14393
+
+**Camera:** Correlated color and depth camera (For example a Kinect V2 sensor)
+
+## Build the sample
+
+1. If you download the samples ZIP, be sure to unzip the entire archive, not just the folder with
+ the sample you want to build.
+2. Start Microsoft Visual Studio 2015 and select **File** \> **Open** \> **Project/Solution**.
+3. Starting in the folder where you unzipped the samples, go to the Samples subfolder, then the
+ subfolder for this specific sample, then the subfolder for your preferred language (C++, C#, or
+ JavaScript). Double-click the Visual Studio 2015 Solution (.sln) file.
+4. Press Ctrl+Shift+B, or select **Build** \> **Build Solution**.
+
+## Run the sample
+
+The next steps depend on whether you just want to deploy the sample or you want to both deploy and
+run it.
+
+### Deploying and running the sample
+
+- To debug the sample and then run it, follow the steps listed above to connect your
+ developer-unlocked Microsoft HoloLens, then press F5 or select **Debug** \> **Start Debugging**.
+ To run the sample without debugging, press Ctrl+F5 or select **Debug** \> **Start Without Debugging**.
diff --git a/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.sln b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.sln
new file mode 100644
index 0000000000..cc6961ab85
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.sln
@@ -0,0 +1,54 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 14
+VisualStudioVersion = 14.0.25420.1
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CameraStreamCorrelation", "CameraStreamCorrelation.vcxproj", "{F710B9FD-4E6B-42D7-A99A-6D48888D48B0}"
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "PoseTrackingPreview", "PoseTrackingPreview\PoseTrackingPreview.vcxproj", "{67B5157A-25B1-4EC3-98CF-50E1B9177DB4}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|ARM = Debug|ARM
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|ARM = Release|ARM
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|ARM.ActiveCfg = Debug|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|ARM.Build.0 = Debug|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|ARM.Deploy.0 = Debug|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x64.ActiveCfg = Debug|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x64.Build.0 = Debug|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x64.Deploy.0 = Debug|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x86.ActiveCfg = Debug|Win32
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x86.Build.0 = Debug|Win32
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Debug|x86.Deploy.0 = Debug|Win32
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|ARM.ActiveCfg = Release|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|ARM.Build.0 = Release|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|ARM.Deploy.0 = Release|ARM
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x64.ActiveCfg = Release|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x64.Build.0 = Release|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x64.Deploy.0 = Release|x64
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x86.ActiveCfg = Release|Win32
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x86.Build.0 = Release|Win32
+ {F710B9FD-4E6B-42D7-A99A-6D48888D48B0}.Release|x86.Deploy.0 = Release|Win32
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|ARM.ActiveCfg = Debug|ARM
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|ARM.Build.0 = Debug|ARM
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|x64.ActiveCfg = Debug|x64
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|x64.Build.0 = Debug|x64
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|x86.ActiveCfg = Debug|Win32
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Debug|x86.Build.0 = Debug|Win32
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|ARM.ActiveCfg = Release|ARM
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|ARM.Build.0 = Release|ARM
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|x64.ActiveCfg = Release|x64
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|x64.Build.0 = Release|x64
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|x86.ActiveCfg = Release|Win32
+ {67B5157A-25B1-4EC3-98CF-50E1B9177DB4}.Release|x86.Build.0 = Release|Win32
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj
new file mode 100644
index 0000000000..dde53a744a
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj
@@ -0,0 +1,228 @@
+
+
+
+ {f710b9fd-4e6b-42d7-a99a-6d48888d48b0}
+ SDKTemplate
+ en-US
+ 14.0
+ true
+ Windows Store
+ 10.0
+ 10.0.14393.0
+ 10.0.14393.0
+ CameraStreamCorrelation
+
+
+
+
+ Debug
+ ARM
+
+
+ Debug
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ ARM
+
+
+ Release
+ Win32
+
+
+ Release
+ x64
+
+
+
+ Application
+ true
+ v140
+
+
+ Application
+ true
+ v140
+
+
+ Application
+ true
+ v140
+
+
+ Application
+ false
+ true
+ v140
+
+
+ Application
+ false
+ true
+ v140
+
+
+ Application
+ false
+ true
+ v140
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(VC_IncludePath);$(UniversalCRT_IncludePath);$(WindowsSDK_IncludePath);..\..\..\SharedContent\cpp
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+ /bigobj %(AdditionalOptions)
+ 4453;28204
+ Level4
+
+
+
+
+
+
+
+ ..\..\..\SharedContent\xaml\App.xaml
+
+
+ ..\..\..\SharedContent\cpp\MainPage.xaml
+
+
+
+ Scenario1_CorrelateStreams.xaml
+
+
+
+
+
+ Designer
+
+
+ Designer
+
+
+
+ Styles\Styles.xaml
+
+
+
+
+ Designer
+
+
+
+
+ ..\..\..\SharedContent\xaml\App.xaml
+
+
+ ..\..\..\SharedContent\cpp\MainPage.xaml
+
+
+
+ Create
+ Create
+ Create
+ Create
+ Create
+ Create
+
+
+
+ Scenario1_CorrelateStreams.xaml
+
+
+
+
+
+ Assets\microsoft-sdk.png
+
+
+ Assets\smalltile-sdk.png
+
+
+ Assets\splash-sdk.png
+
+
+ Assets\squaretile-sdk.png
+
+
+ Assets\storelogo-sdk.png
+
+
+ Assets\tile-sdk.png
+
+
+ Assets\windows-sdk.png
+
+
+
+
+ {67b5157a-25b1-4ec3-98cf-50e1b9177db4}
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj.filters b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj.filters
new file mode 100644
index 0000000000..5dcaa8eee1
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/CameraStreamCorrelation.vcxproj.filters
@@ -0,0 +1,65 @@
+
+
+
+
+ 80bfd669-aa83-4537-9611-027cffe0d8af
+ bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png
+
+
+ {c6978fb6-bc64-498d-97c8-f5b53997e54e}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Styles
+
+
+
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/FrameRenderer.cpp b/Samples/CameraStreamCorrelation/cpp/FrameRenderer.cpp
new file mode 100644
index 0000000000..4a10a8816c
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/FrameRenderer.cpp
@@ -0,0 +1,295 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include
+#include "FrameRenderer.h"
+
+using namespace SDKTemplate;
+
+using namespace concurrency;
+using namespace Platform;
+using namespace Microsoft::WRL;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Graphics::Imaging;
+using namespace Windows::Media::Capture::Frames;
+using namespace Windows::Media::Devices::Core;
+using namespace Windows::Perception::Spatial;
+using namespace Windows::UI::Xaml::Controls;
+using namespace Windows::UI::Xaml::Media::Imaging;
+
+#pragma region Low-level operations on reference pointers
+
+// InterlockedExchange for reference pointer types.
+template
+T^ InterlockedExchangeRefPointer(T^* target, U value)
+{
+ static_assert(sizeof(T^) == sizeof(void*), "InterlockedExchangePointer is the wrong size");
+ T^ exchange = value;
+ void** rawExchange = reinterpret_cast(&exchange);
+ void** rawTarget = reinterpret_cast(target);
+ *rawExchange = static_cast(InterlockedExchangePointer(rawTarget, *rawExchange));
+ return exchange;
+}
+
+// Convert a reference pointer to a specific ComPtr.
+template
+Microsoft::WRL::ComPtr AsComPtr(Platform::Object^ object)
+{
+ Microsoft::WRL::ComPtr p;
+ reinterpret_cast(object)->QueryInterface(IID_PPV_ARGS(&p));
+ return p;
+}
+
+#pragma endregion
+
+// Structure used to access colors stored in 8-bit BGRA format.
+struct ColorBGRA
+{
+ byte B, G, R, A;
+};
+
+FrameRenderer::FrameRenderer(Image^ imageElement)
+{
+ m_imageElement = imageElement;
+ m_imageElement->Source = ref new SoftwareBitmapSource();
+}
+
+task FrameRenderer::DrainBackBufferAsync()
+{
+ // Keep draining frames from the backbuffer until the backbuffer is empty.
+ SoftwareBitmap^ latestBitmap = InterlockedExchangeRefPointer(&m_backBuffer, nullptr);
+ if (latestBitmap != nullptr)
+ {
+ if (SoftwareBitmapSource^ imageSource = dynamic_cast(m_imageElement->Source))
+ {
+ return create_task(imageSource->SetBitmapAsync(latestBitmap))
+ .then([this]()
+ {
+ return DrainBackBufferAsync();
+ }, task_continuation_context::use_current());
+ }
+ }
+
+ // To avoid a race condition against ProcessFrame, we cannot let any other
+ // tasks run on the UI thread between point that the InterlockedExchangeRefPointer
+ // reports that there is no more work, and we clear the m_taskRunning flag on
+ // the UI thread.
+ m_taskRunning = false;
+
+ return task_from_result();
+}
+
+void FrameRenderer::ProcessColorFrame(MediaFrameReference^ colorFrame)
+{
+ if (colorFrame == nullptr)
+ {
+ return;
+ }
+
+ SoftwareBitmap^ inputBitmap = colorFrame->VideoMediaFrame->SoftwareBitmap;
+
+ if (inputBitmap == nullptr)
+ {
+ return;
+ }
+
+ // If the input bitmap is in the correct format, copy it and then buffer it for rendering.
+ if ((inputBitmap->BitmapPixelFormat == BitmapPixelFormat::Bgra8) &&
+ (inputBitmap->BitmapAlphaMode == BitmapAlphaMode::Premultiplied))
+ {
+ BufferBitmapForRendering(SoftwareBitmap::Copy(inputBitmap));
+ }
+ // Otherwise, convert the bitmap to the correct format before buffering it for rendering.
+ else
+ {
+ BufferBitmapForRendering(SoftwareBitmap::Convert(inputBitmap, BitmapPixelFormat::Bgra8, BitmapAlphaMode::Premultiplied));
+ }
+}
+
+void FrameRenderer::ProcessDepthAndColorFrames(MediaFrameReference^ colorFrame, MediaFrameReference^ depthFrame)
+{
+ if (colorFrame == nullptr || depthFrame == nullptr)
+ {
+ return;
+ }
+
+ // Create the coordinate mapper used to map depth pixels from depth space to color space.
+ DepthCorrelatedCoordinateMapper^ coordinateMapper = depthFrame->VideoMediaFrame->DepthMediaFrame->TryCreateCoordinateMapper(
+ colorFrame->VideoMediaFrame->CameraIntrinsics, colorFrame->CoordinateSystem);
+
+ if (coordinateMapper == nullptr)
+ {
+ return;
+ }
+
+ // Map the depth image to color space and buffer the result for rendering.
+ SoftwareBitmap^ softwareBitmap = MapDepthToColor(
+ colorFrame->VideoMediaFrame,
+ depthFrame->VideoMediaFrame,
+ colorFrame->VideoMediaFrame->CameraIntrinsics,
+ colorFrame->CoordinateSystem,
+ coordinateMapper);
+
+ if (softwareBitmap)
+ {
+ BufferBitmapForRendering(softwareBitmap);
+ }
+}
+
+void FrameRenderer::BufferBitmapForRendering(SoftwareBitmap^ softwareBitmap)
+{
+ if (softwareBitmap != nullptr)
+ {
+ // Swap the processed frame to _backBuffer, and trigger the UI thread to render it.
+ softwareBitmap = InterlockedExchangeRefPointer(&m_backBuffer, softwareBitmap);
+
+ // UI thread always resets m_backBuffer before using it. Unused bitmap should be disposed.
+ delete softwareBitmap;
+
+ // Changes to the XAML ImageElement must happen in the UI thread, via the CoreDispatcher.
+ m_imageElement->Dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::Normal,
+ ref new Windows::UI::Core::DispatchedHandler([this]()
+ {
+ // Don't let two copies of this task run at the same time.
+ if (m_taskRunning)
+ {
+ return;
+ }
+
+ m_taskRunning = true;
+
+ // Keep draining frames from the backbuffer until the backbuffer is empty.
+ DrainBackBufferAsync();
+ }));
+ }
+}
+
+SoftwareBitmap^ FrameRenderer::MapDepthToColor(
+ VideoMediaFrame^ colorFrame,
+ VideoMediaFrame^ depthFrame,
+ CameraIntrinsics^ colorCameraIntrinsics,
+ SpatialCoordinateSystem^ colorCoordinateSystem,
+ DepthCorrelatedCoordinateMapper^ coordinateMapper)
+{
+ SoftwareBitmap^ inputBitmap = colorFrame->SoftwareBitmap;
+ SoftwareBitmap^ outputBitmap;
+
+ // Copy the color input bitmap so we may overlay the depth bitmap on top of it.
+ if ((inputBitmap->BitmapPixelFormat == BitmapPixelFormat::Bgra8) &&
+ (inputBitmap->BitmapAlphaMode == BitmapAlphaMode::Premultiplied))
+ {
+ outputBitmap = SoftwareBitmap::Copy(inputBitmap);
+ }
+ else
+ {
+ outputBitmap = SoftwareBitmap::Convert(inputBitmap, BitmapPixelFormat::Bgra8, BitmapAlphaMode::Premultiplied);
+ }
+
+ // Create buffers used to access pixels.
+ BitmapBuffer^ depthBuffer = depthFrame->SoftwareBitmap->LockBuffer(BitmapBufferAccessMode::Read);
+ BitmapBuffer^ colorBuffer = colorFrame->SoftwareBitmap->LockBuffer(BitmapBufferAccessMode::Read);
+ BitmapBuffer^ outputBuffer = outputBitmap->LockBuffer(BitmapBufferAccessMode::Write);
+
+ if (depthBuffer == nullptr || colorBuffer == nullptr || outputBuffer == nullptr)
+ {
+ return nullptr;
+ }
+
+ BitmapPlaneDescription colorDesc = colorBuffer->GetPlaneDescription(0);
+ UINT32 colorWidth = static_cast(colorDesc.Width);
+ UINT32 colorHeight = static_cast(colorDesc.Height);
+
+ IMemoryBufferReference^ depthReference = depthBuffer->CreateReference();
+ IMemoryBufferReference^ outputReference = outputBuffer->CreateReference();
+
+ byte* depthBytes = nullptr;
+ UINT32 depthCapacity;
+
+ byte* outputBytes = nullptr;
+ UINT32 outputCapacity;
+
+ AsComPtr(depthReference)->GetBuffer(&depthBytes, &depthCapacity);
+ AsComPtr(outputReference)->GetBuffer(&outputBytes, &outputCapacity);
+
+ if (depthBytes == nullptr || outputBytes == nullptr)
+ {
+ return nullptr;
+ }
+
+ ColorBGRA* outputPixels = reinterpret_cast(outputBytes);
+
+ {
+ // Ensure synchronous read/write access to point buffer cache.
+ std::lock_guard guard(m_pointBufferMutex);
+
+ // If we don't have point arrays, or the ones we have are the wrong dimensions,
+ // then create new ones.
+ Array^ colorSpacePoints = m_colorSpacePoints;
+ if (colorSpacePoints == nullptr ||
+ m_previousBufferWidth != colorWidth ||
+ m_previousBufferHeight != colorHeight)
+ {
+ colorSpacePoints = ref new Array(colorWidth * colorHeight);
+
+ // Prepare array of points we want mapped.
+ for (UINT y = 0; y < colorHeight; y++)
+ {
+ for (UINT x = 0; x < colorWidth; x++)
+ {
+ colorSpacePoints[y * colorWidth + x] = Point(static_cast(x), static_cast(y));
+ }
+ }
+ }
+
+ Array^ depthSpacePoints = m_depthSpacePoints;
+ if (depthSpacePoints == nullptr ||
+ m_previousBufferWidth != colorWidth ||
+ m_previousBufferHeight != colorHeight)
+ {
+ depthSpacePoints = ref new Array(colorWidth * colorHeight);
+ }
+
+ // Save the (possibly updated) values now that they are all known to be good.
+ m_colorSpacePoints = colorSpacePoints;
+ m_depthSpacePoints = depthSpacePoints;
+ m_previousBufferWidth = colorWidth;
+ m_previousBufferHeight = colorHeight;
+
+ // Unproject depth points to color image.
+ coordinateMapper->UnprojectPoints(
+ m_colorSpacePoints, colorCoordinateSystem, m_depthSpacePoints);
+
+ constexpr float depthFadeStart = 1;
+ constexpr float depthFadeEnd = 1.5;
+
+ // Using the depth values we fade the color pixels of the output if they are too far away.
+ for (UINT y = 0; y < colorHeight; y++)
+ {
+ for (UINT x = 0; x < colorWidth; x++)
+ {
+ UINT index = y * colorWidth + x;
+
+ // The z value of each depth space point contains the depth value of the point.
+ // This value is mapped to a fade value. Fading starts at depthFadeStart meters
+ // and is completely black by depthFadeEnd meters.
+ float fadeValue = 1 - max(0, min(((m_depthSpacePoints[index].z - depthFadeStart) / (depthFadeEnd - depthFadeStart)), 1));
+
+ outputPixels[index].R = static_cast(static_cast(outputPixels[index].R) * fadeValue);
+ outputPixels[index].G = static_cast(static_cast(outputPixels[index].G) * fadeValue);
+ outputPixels[index].B = static_cast(static_cast(outputPixels[index].B) * fadeValue);
+ }
+ }
+ }
+
+ return outputBitmap;
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/FrameRenderer.h b/Samples/CameraStreamCorrelation/cpp/FrameRenderer.h
new file mode 100644
index 0000000000..6306f503f0
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/FrameRenderer.h
@@ -0,0 +1,73 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace SDKTemplate
+{
+ // Function type used to map scanline of pixels to an alternate format.
+ typedef std::function TransformScanline;
+
+ class FrameRenderer
+ {
+ public:
+ FrameRenderer(Windows::UI::Xaml::Controls::Image^ image);
+
+ ///
+ /// Buffer and render color frame.
+ ///
+ void ProcessColorFrame(Windows::Media::Capture::Frames::MediaFrameReference^ colorFrame);
+
+ ///
+ /// Buffer and render correlated color and depth frames.
+ ///
+ void ProcessDepthAndColorFrames(
+ Windows::Media::Capture::Frames::MediaFrameReference^ colorFrame,
+ Windows::Media::Capture::Frames::MediaFrameReference^ depthFrame);
+
+ private: // private methods
+ ///
+ /// Perform mapping of depth pixels to color pixels.
+ ///
+ Windows::Graphics::Imaging::SoftwareBitmap^ MapDepthToColor(
+ Windows::Media::Capture::Frames::VideoMediaFrame^ colorFrame,
+ Windows::Media::Capture::Frames::VideoMediaFrame^ depthFrame,
+ Windows::Media::Devices::Core::CameraIntrinsics^ colorCameraIntrinsics,
+ Windows::Perception::Spatial::SpatialCoordinateSystem^ colorCoordinateSystem,
+ Windows::Media::Devices::Core::DepthCorrelatedCoordinateMapper^ coordinateMapper);
+
+ ///
+ /// Buffer processed bitmap and render on UI.
+ ///
+ void BufferBitmapForRendering(Windows::Graphics::Imaging::SoftwareBitmap^ softwareBitmap);
+
+ ///
+ /// Keep presenting the m_backBuffer until there are no more.
+ ///
+ concurrency::task FrameRenderer::DrainBackBufferAsync();
+
+ private: // private data
+ Windows::UI::Xaml::Controls::Image^ m_imageElement;
+ Windows::Graphics::Imaging::SoftwareBitmap^ m_backBuffer;
+
+ Platform::Array^ m_colorSpacePoints;
+ Platform::Array^ m_depthSpacePoints;
+
+ UINT32 m_previousBufferWidth = 0;
+ UINT32 m_previousBufferHeight = 0;
+
+ bool m_taskRunning = false;
+
+ private: // private synchronization
+ std::mutex m_pointBufferMutex;
+
+ };
+} // CameraStreamCorrelation
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/Package.appxmanifest b/Samples/CameraStreamCorrelation/cpp/Package.appxmanifest
new file mode 100644
index 0000000000..666931903d
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/Package.appxmanifest
@@ -0,0 +1,33 @@
+
+
+
+
+
+ Camera Stream Correlation C++/cx Sample
+ Microsoft Corporation
+ Assets\StoreLogo-sdk.png
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/BufferHelper.h b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/BufferHelper.h
new file mode 100644
index 0000000000..3dbf0e29a1
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/BufferHelper.h
@@ -0,0 +1,112 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace WindowsPreview {
+ namespace Media {
+ namespace Capture {
+ namespace Frames {
+
+ // Target API will return a IBuffer providing this partial view of a larger IBuffer
+ // And this class will disappear
+ public ref class BufferView sealed
+ {
+ public:
+ property Windows::Storage::Streams::IBuffer^ Buffer
+ {
+ Windows::Storage::Streams::IBuffer^ get() { return _buffer; }
+ }
+ property uint32_t Offset
+ {
+ uint32_t get() { return _offset; }
+ }
+ property uint32_t Size
+ {
+ uint32_t get() { return _size; }
+ }
+ private:
+ Windows::Storage::Streams::IBuffer^ _buffer;
+ uint32_t _offset;
+ uint32_t _size;
+ internal:
+ BufferView(Windows::Storage::Streams::IBuffer^ buffer, uint32_t offset, uint32_t size)
+ : _buffer(buffer), _offset(offset), _size(size)
+ {}
+ };
+
+ class BufferHelper
+ {
+ public:
+ BufferHelper(Windows::Storage::Streams::IBuffer^ buffer)
+ : _buffer(buffer), _size(buffer->Capacity)
+ {
+ HRESULT hr = (reinterpret_cast(_buffer))->QueryInterface(_bufferByteAccess.ReleaseAndGetAddressOf());
+ if (FAILED(hr))
+ {
+ throw ref new Platform::Exception(hr);
+ }
+
+ hr = _bufferByteAccess->Buffer(&_bytes);
+ if (FAILED(hr))
+ {
+ throw ref new Platform::Exception(hr);
+ }
+ }
+ BufferHelper(BufferView^ bufferView)
+ : _buffer(bufferView->Buffer), _size(bufferView->Size)
+ {
+ HRESULT hr = (reinterpret_cast(_buffer))->QueryInterface(_bufferByteAccess.ReleaseAndGetAddressOf());
+ if (FAILED(hr))
+ {
+ throw ref new Platform::Exception(hr);
+ }
+
+ hr = _bufferByteAccess->Buffer(&_bytes);
+ if (FAILED(hr))
+ {
+ throw ref new Platform::Exception(hr);
+ }
+ _bytes += bufferView->Offset;
+ }
+
+ template
+ const T* GetPtrAt(uint32_t byteOffset) const
+ {
+ if (byteOffset + sizeof(T) > _size)
+ {
+ throw ref new OutOfBoundsException();
+ }
+ return reinterpret_cast(_bytes + byteOffset);
+ }
+
+ template
+ void CheckBufferSize(const parentType* ptr, uint32_t effectiveCount)
+ {
+ auto offset = reinterpret_cast(ptr) - _bytes;
+ auto effectiveSize = sizeof_composedStructure(effectiveCount);
+ if (offset + effectiveSize > _size)
+ {
+ throw ref new OutOfBoundsException();
+ }
+ }
+ private:
+ BYTE* _bytes = nullptr;
+ uint32_t _offset = 0;
+ uint32_t _size = 0;
+ Microsoft::WRL::ComPtr _bufferByteAccess;
+ Windows::Storage::Streams::IBuffer^ _buffer;
+ };
+
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/MFPoseTrackingFrame.h b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/MFPoseTrackingFrame.h
new file mode 100644
index 0000000000..9116fbd800
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/MFPoseTrackingFrame.h
@@ -0,0 +1,116 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+// Namespace and types to parse MediaFoundation perception buffer payload.
+namespace MFPoseTrackingPreview
+{
+
+ enum class TrackingStatus : int32_t {
+ NotTracked = 0,
+ Inferred = 1,
+ Tracked = 2,
+ };
+
+ // When PoseSet field of PoseTrackingEntity is set to PoseSet_BodyTracking, the
+ // Poses of the PoseTrackingEntity follow the order defined by this enum
+ enum class BodyPart : int32_t {
+ SpineBase = 0,
+ SpineMid = 1,
+ Neck = 2,
+ Head = 3,
+ ShoulderLeft = 4,
+ ElbowLeft = 5,
+ WristLeft = 6,
+ HandLeft = 7,
+ ShoulderRight = 8,
+ ElbowRight = 9,
+ WristRight = 10,
+ HandRight = 11,
+ HipLeft = 12,
+ KneeLeft = 13,
+ AnkleLeft = 14,
+ FootLeft = 15,
+ HipRight = 16,
+ KneeRight = 17,
+ AnkleRight = 18,
+ FootRight = 19,
+ SpineShoulder = 20,
+ HandTipLeft = 21,
+ ThumbLeft = 22,
+ HandTipRight = 23,
+ ThumbRight = 24,
+ };
+
+ struct TrackedPose {
+ TrackingStatus PositionStatus;
+ Windows::Foundation::Numerics::float3 Position;
+ TrackingStatus OrientationStatus;
+ Windows::Foundation::Numerics::quaternion Orientation;
+ };
+
+ // The layout of a pose frame buffer is:
+ // 1. PoseTrackingFrameHeader: Common structure for all providers.
+ // 1. CustomDataTypeGuid: Guid corresponding to specific provider (16 bytes).
+ // 2. EntitiesCount: How many entities are in this frame (4 bytes).
+ // 3. EntityOffsets: Offsets of entity data from buffer start (4 * EntitiesCount bytes).
+ //
+ // 2. Provider-specific, per-frame data.
+ //
+ // 3. PoseTrackingEntityData #0: First entity data. Common structure for all providers.
+ // 1. DataSizeInBytes: Size of entire entity (PoseTrackingEntityData + custom data) in bytes (4 bytes).
+ // 2. EntityId: Allows correlation between frames (16 bytes).
+ // 3. PoseSet: Guids for Body (such as Kinect), Handtracking, etc.: defines the meaning of each Pose (16 bytes).
+ // 4. PosesCount: Count of poses in this entity data (4 bytes).
+ // 5. IsTracked: Whether or not this entity is being tracked (1 byte).
+ // 6. Poses: Array of common structure TrackedPose (sizeof(TrackedPose) * PosesCount bytes).
+ // 7. Custom, provider-specific data for this entity (DataSizeInBytes - sizeof(PoseTrackingEntityData) - sizeof(TrackedPose) * (PosesCount - 1) bytes)
+ //
+ // 4. Provider-specific data for entity #0 in this frame.
+ //
+ // 5. PoseTrackingEntityData #1: Second entity data. Common structure for all providers.
+ //
+ // 6. Provider-specific data for entity #1 in this frame.
+
+ struct PoseTrackingFrameHeader
+ {
+ // This Guid allows the consumer of the buffer to verify that
+ // it has been written by the expected provider
+ GUID CustomDataTypeGuid;
+ uint32_t EntitiesCount;
+ uint32_t EntityOffsets[1]; // actual length is EntitiesCount
+ };
+
+ struct PoseTrackingEntityData
+ {
+ uint32_t DataSizeInBytes;
+ GUID EntityId;
+ GUID PoseSet;
+ uint32_t PosesCount;
+ bool IsTracked;
+ TrackedPose Poses[1]; // actual length is PosesCount
+ };
+
+ // Subtype for PoseTracking
+ // {69232056-2ED9-4D0E-89CC-5D2734A56808}
+ DEFINE_GUID(MFPerceptionFormat_PoseTracking, 0x69232056, 0x2ed9, 0x4d0e, 0x89, 0xcc, 0x5d, 0x27, 0x34, 0xa5, 0x68, 0x8);
+
+ // These two constants (used in the PoseSet attribute of PoseTrackingEntityData) define which
+ // body part each TrackedPose corresponds to.
+ // i.e. for body tracking: indexes in Poses correspond to values of the BodyPart enumerated type.
+ // for hand tracking: enumerated type is not yet defined.
+ // {84520B1F-AB61-46DA-AB1D-E01340EF884E}
+ DEFINE_GUID(PoseSet_BodyTracking, 0x84520b1f, 0xab61, 0x46da, 0xab, 0x1d, 0xe0, 0x13, 0x40, 0xef, 0x88, 0x4e);
+ // {F142C82C-3A57-4E7D-8159-98BDBD6CCFE2}
+ DEFINE_GUID(PoseSet_HandTracking, 0xf142c82c, 0x3a57, 0x4e7d, 0x81, 0x59, 0x98, 0xbd, 0xbd, 0x6c, 0xcf, 0xe2);
+
+}
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.cpp b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.cpp
new file mode 100644
index 0000000000..64631d163d
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.cpp
@@ -0,0 +1,188 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include
+#include "PoseTrackingFrame.h"
+
+using namespace Platform;
+using namespace Platform::Collections;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Collections;
+using namespace Windows::Media::Capture::Frames;
+using namespace Windows::Storage::Streams;
+using namespace Microsoft::WRL;
+using namespace WindowsPreview::Media::Capture::Frames;
+
+static const wchar_t *UnknownMediaType= L"Unknown";
+static const wchar_t *PerceptionMediaType= L"Perception";
+
+// Formatted Guid strings are 38 characters long, not including the terminator
+const unsigned int GuidBufferLength = 40;
+static wchar_t PoseTrackingFormat[GuidBufferLength] = { 0 };
+static wchar_t PerceptionTypeGuid[GuidBufferLength] = { 0 };
+
+static bool g_staticInitialize = []()
+{
+ (void)StringFromGUID2(MFPoseTrackingPreview::MFPerceptionFormat_PoseTracking, PoseTrackingFormat, _countof(PoseTrackingFormat));
+ (void)StringFromGUID2(MFMediaType_Perception, PerceptionTypeGuid, _countof(PerceptionTypeGuid));
+ return true;
+}();
+
+// Because we are doing a reinterpret cast between different definitions, let's check that they match
+static_assert(sizeof(MFPoseTrackingPreview::TrackedPose) == sizeof(WindowsPreview::Media::Capture::Frames::TrackedPose), "Error: Mismatched size of TrackedPose");
+static_assert(offsetof(MFPoseTrackingPreview::TrackedPose, PositionStatus) == offsetof(WindowsPreview::Media::Capture::Frames::TrackedPose, PositionStatus), "Error: Mismatched offset of PositionStatus in TrackedPose");
+static_assert(offsetof(MFPoseTrackingPreview::TrackedPose, Position) == offsetof(WindowsPreview::Media::Capture::Frames::TrackedPose, Position), "Error: Mismatched offset of Position in TrackedPose");
+static_assert(offsetof(MFPoseTrackingPreview::TrackedPose, OrientationStatus) == offsetof(WindowsPreview::Media::Capture::Frames::TrackedPose, OrientationStatus), "Error: Mismatched offset of OrientationStatus in TrackedPose");
+static_assert(offsetof(MFPoseTrackingPreview::TrackedPose, Orientation) == offsetof(WindowsPreview::Media::Capture::Frames::TrackedPose, Orientation), "Error: Mismatched offset of Orientation in TrackedPose");
+
+PoseTrackingEntity::PoseTrackingEntity(_In_ Windows::Storage::Streams::IBuffer^ frameBuffer, _In_ unsigned int entityIndex, _In_ const MFPoseTrackingPreview::PoseTrackingEntityData* entityHeader)
+ : _frameBuffer(frameBuffer),
+ _entityIndex(entityIndex),
+ _entityId(entityHeader->EntityId),
+ _poseSetId(entityHeader->PoseSet),
+ _posesCount(entityHeader->PosesCount),
+ _dataSize(entityHeader->DataSizeInBytes),
+ _isTracked(entityHeader->IsTracked != 0)
+{
+}
+
+void PoseTrackingEntity::GetPoses(_Out_ Platform::WriteOnlyArray^ poses)
+{
+ if (poses == nullptr)
+ {
+ throw ref new NullReferenceException(StringReference(L"poses"));
+ }
+ if (poses->Length < _posesCount)
+ {
+ throw ref new InvalidArgumentException(StringReference(L"poses array is too small"));
+ }
+
+ BufferHelper helper(_frameBuffer);
+ auto frameHeader = helper.GetPtrAt(0);
+ auto entityHeader = helper.GetPtrAt(frameHeader->EntityOffsets[_entityIndex]);
+ helper.CheckBufferSize(entityHeader, _posesCount - 1);
+ CopyMemory(poses->Data, entityHeader->Poses, sizeof(TrackedPose) * _posesCount);
+}
+
+void PoseTrackingEntity::GetPoses(_In_ const Platform::Array^ indices, _Out_ Platform::WriteOnlyArray^ poses)
+{
+ if (indices == nullptr)
+ {
+ throw ref new InvalidArgumentException(StringReference(L"indices"));
+ }
+ if (poses == nullptr)
+ {
+ throw ref new NullReferenceException(StringReference(L"poses"));
+ }
+ if (poses->Length < indices->Length)
+ {
+ throw ref new InvalidArgumentException(StringReference(L"poses array is too small"));
+ }
+
+ BufferHelper helper(_frameBuffer);
+ auto frameHeader = helper.GetPtrAt(0);
+ auto entityHeader = helper.GetPtrAt(frameHeader->EntityOffsets[_entityIndex]);
+ helper.CheckBufferSize(entityHeader, _posesCount - 1);
+
+ // The compatibility of the two types has been verified with the static_assert above.
+ auto sourcePoses = reinterpret_cast(entityHeader->Poses);
+
+ for (unsigned int i = 0; i < indices->Length; i++)
+ {
+ auto index = indices[i];
+ if (index >= _posesCount)
+ {
+ std::wstring message(L"Invalid Pose Index: ");
+ message += std::to_wstring(index);
+ throw ref new InvalidArgumentException(ref new String(message.c_str()));
+ }
+ else
+ {
+ poses[i] = sourcePoses[index];
+ }
+ }
+}
+
+BufferView^ PoseTrackingEntity::GetCustomData()
+{
+ BufferHelper bufferHelper(_frameBuffer);
+ auto frameHeader = bufferHelper.GetPtrAt(0);
+ auto entityHeader = bufferHelper.GetPtrAt(frameHeader->EntityOffsets[_entityIndex]);
+ auto entityCommonDataSize = sizeof_composedStructure(entityHeader->PosesCount);
+
+ auto bufferOffset = frameHeader->EntityOffsets[_entityIndex] + entityCommonDataSize;
+ auto bufferLength = entityHeader->DataSizeInBytes - entityCommonDataSize;
+ return ref new BufferView(_frameBuffer, bufferOffset, bufferLength);
+}
+
+PoseTrackingFrame::PoseTrackingFrame(Windows::Media::Capture::Frames::MediaFrameReference^ mediaFrameReference,
+ _In_ REFIID customDataType,
+ _In_ Windows::Storage::Streams::IBuffer^ frameBuffer,
+ _In_ Windows::Foundation::Collections::IVectorView^ entities)
+ : _mediaFrameReference(mediaFrameReference), _customDataType(customDataType), _frameBuffer(frameBuffer), _entities(entities)
+{}
+
+PoseTrackingFrameCreationResult^ PoseTrackingFrame::Create(_In_ Windows::Media::Capture::Frames::MediaFrameReference^ mediaFrameReference)
+{
+ if (mediaFrameReference == nullptr)
+ {
+ throw ref new InvalidArgumentException(StringReference(L"mediaFrameReference"));
+ }
+
+ // UnknownMediaType may be returned as the MajorType.
+ if ((0 != _wcsicmp(mediaFrameReference->Format->MajorType->Data(), UnknownMediaType))
+ && (0 != _wcsicmp(mediaFrameReference->Format->MajorType->Data(), PerceptionTypeGuid))
+ && (0 != _wcsicmp(mediaFrameReference->Format->MajorType->Data(), PerceptionMediaType))
+ )
+ {
+ return ref new PoseTrackingFrameCreationResult(nullptr, PoseTrackingFrameCreationStatus::NotAPerceptionMediaType);
+ }
+
+
+ if (0 != _wcsicmp(mediaFrameReference->Format->Subtype->Data(), PoseTrackingFormat))
+ {
+ return ref new PoseTrackingFrameCreationResult(nullptr, PoseTrackingFrameCreationStatus::NotAPoseTrackingFrame);
+ }
+
+ auto bmf = mediaFrameReference->BufferMediaFrame;
+ if (bmf == nullptr)
+ {
+ return ref new PoseTrackingFrameCreationResult(nullptr, PoseTrackingFrameCreationStatus::FrameBufferReleased);
+ }
+ auto buffer = bmf->Buffer;
+ if (buffer == nullptr)
+ {
+ return ref new PoseTrackingFrameCreationResult(nullptr, PoseTrackingFrameCreationStatus::FrameBufferReleased);
+ }
+
+ BufferHelper bufferHelper(buffer);
+ auto frameHeader = bufferHelper.GetPtrAt(0);
+ auto entities = ref new Vector(frameHeader->EntitiesCount);
+
+ for (unsigned int index = 0; index < frameHeader->EntitiesCount; index++)
+ {
+ auto entityHeader = bufferHelper.GetPtrAt(frameHeader->EntityOffsets[index]);
+ entities->SetAt(index, ref new PoseTrackingEntity(buffer, index, entityHeader));
+ }
+
+ return ref new PoseTrackingFrameCreationResult(ref new PoseTrackingFrame(mediaFrameReference, frameHeader->CustomDataTypeGuid, buffer, entities->GetView()), PoseTrackingFrameCreationStatus::Success);
+}
+
+BufferView^ PoseTrackingFrame::GetCustomData()
+{
+ BufferHelper bufferHelper(_frameBuffer);
+ auto frameHeader = bufferHelper.GetPtrAt(0);
+
+ auto bufferOffset = sizeof_composedStructure(frameHeader->EntitiesCount);
+ auto bufferLength = frameHeader->EntityOffsets[0] - bufferOffset;
+ return ref new BufferView(_frameBuffer, bufferOffset, bufferLength);
+}
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.h b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.h
new file mode 100644
index 0000000000..1438275bf3
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingFrame.h
@@ -0,0 +1,207 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+#include "MFPoseTrackingFrame.h"
+
+namespace WindowsPreview {
+ namespace Media {
+ namespace Capture {
+ namespace Frames {
+
+ public enum class TrackingStatus : int32_t {
+ NotTracked = static_cast(MFPoseTrackingPreview::TrackingStatus::NotTracked),
+ Inferred = static_cast(MFPoseTrackingPreview::TrackingStatus::Inferred),
+ Tracked = static_cast(MFPoseTrackingPreview::TrackingStatus::Tracked)
+ };
+
+ public value struct TrackedPose {
+ TrackingStatus PositionStatus;
+ Windows::Foundation::Numerics::float3 Position;
+ TrackingStatus OrientationStatus;
+ Windows::Foundation::Numerics::quaternion Orientation;
+ };
+
+ // When PoseSet field of PoseTrackingEntity is set to PoseSet_BodyTracking, the
+ // Poses of the PoseTrackingEntity follow the order defined by this enum
+ public enum class BodyPart : int32_t {
+ SpineBase = static_cast(MFPoseTrackingPreview::BodyPart::SpineBase),
+ SpineMid = static_cast(MFPoseTrackingPreview::BodyPart::SpineMid),
+ Neck = static_cast(MFPoseTrackingPreview::BodyPart::Neck),
+ Head = static_cast(MFPoseTrackingPreview::BodyPart::Head),
+ ShoulderLeft = static_cast(MFPoseTrackingPreview::BodyPart::ShoulderLeft),
+ ElbowLeft = static_cast(MFPoseTrackingPreview::BodyPart::ElbowLeft),
+ WristLeft = static_cast(MFPoseTrackingPreview::BodyPart::WristLeft),
+ HandLeft = static_cast(MFPoseTrackingPreview::BodyPart::HandLeft),
+ ShoulderRight = static_cast(MFPoseTrackingPreview::BodyPart::ShoulderRight),
+ ElbowRight = static_cast(MFPoseTrackingPreview::BodyPart::ElbowRight),
+ WristRight = static_cast(MFPoseTrackingPreview::BodyPart::WristRight),
+ HandRight = static_cast(MFPoseTrackingPreview::BodyPart::HandRight),
+ HipLeft = static_cast(MFPoseTrackingPreview::BodyPart::HipLeft),
+ KneeLeft = static_cast(MFPoseTrackingPreview::BodyPart::KneeLeft),
+ AnkleLeft = static_cast(MFPoseTrackingPreview::BodyPart::AnkleLeft),
+ FootLeft = static_cast(MFPoseTrackingPreview::BodyPart::FootLeft),
+ HipRight = static_cast(MFPoseTrackingPreview::BodyPart::HipRight),
+ KneeRight = static_cast(MFPoseTrackingPreview::BodyPart::KneeRight),
+ AnkleRight = static_cast(MFPoseTrackingPreview::BodyPart::AnkleRight),
+ FootRight = static_cast(MFPoseTrackingPreview::BodyPart::FootRight),
+ SpineShoulder = static_cast(MFPoseTrackingPreview::BodyPart::SpineShoulder),
+ HandTipLeft = static_cast(MFPoseTrackingPreview::BodyPart::HandTipLeft),
+ ThumbLeft = static_cast(MFPoseTrackingPreview::BodyPart::ThumbLeft),
+ HandTipRight = static_cast(MFPoseTrackingPreview::BodyPart::HandTipRight),
+ ThumbRight = static_cast(MFPoseTrackingPreview::BodyPart::ThumbRight)
+ };
+
+ public ref class KnownPoseTrackingPointSets sealed
+ {
+ public:
+ static property Platform::Guid BodyTrackingPointSet
+ {
+ Platform::Guid get() { return MFPoseTrackingPreview::PoseSet_BodyTracking; }
+ }
+
+ static property Platform::Guid HandTrackingPointSet
+ {
+ Platform::Guid get() { return MFPoseTrackingPreview::PoseSet_HandTracking; }
+ }
+
+ };
+
+ public ref class PoseTrackingEntity sealed
+ {
+ public:
+ property Platform::Guid EntityId
+ {
+ Platform::Guid get() { return _entityId; }
+ }
+ property Platform::Guid PoseSetId
+ {
+ Platform::Guid get() { return _poseSetId; }
+ }
+ property unsigned int PosesCount
+ {
+ unsigned int get() { return _posesCount; }
+ }
+ property bool IsTracked
+ {
+ bool get() { return _isTracked; }
+ }
+
+ // Fills the poses array parameter with the Poses contained in the entity
+ // The array must be large enough, i.e. contain at least PosesCount elements
+ // Exactly PosesCount elements are written; the remaining of the array is unchanged
+ void GetPoses(_Out_ Platform::WriteOnlyArray^ poses);
+
+ // Fills the poses array parameter with the Poses contained in the entity
+ // and at the positions indicated by the indices parameter
+ // poses array must be at least as large as indices array
+ // Exactly indices->Length elements are written; the rest of the array is unchanged
+ void GetPoses(_In_ const Platform::Array^ indices, _Out_ Platform::WriteOnlyArray^ poses);
+
+ // Retrieves the location of Provider specific entity data in the frame buffer
+ BufferView^ GetCustomData();
+
+ internal:
+ PoseTrackingEntity(_In_ Windows::Storage::Streams::IBuffer^ frameBuffer, _In_ unsigned int entityIndex, _In_ const MFPoseTrackingPreview::PoseTrackingEntityData* entityHeader);
+
+ private:
+ // Cached values for frequently access data
+ Platform::Guid _entityId;
+ Platform::Guid _poseSetId;
+ unsigned int _posesCount;
+ size_t _dataSize;
+ bool _isTracked;
+
+ // Access to the full buffer
+ Windows::Storage::Streams::IBuffer^ _frameBuffer;
+ unsigned int _entityIndex;
+ };
+
+ ref class PoseTrackingFrame;
+
+ public enum class PoseTrackingFrameCreationStatus
+ {
+ Success, // The buffer could be correctly accessed and interpreted as a Pose Tracking Frame
+ NotAPerceptionMediaType, // The Media Type associated to the MediaFrameReference was not Perception
+ NotAPoseTrackingFrame, // The Media Subtype associated to the MediaFrameReference was not Pose Tracking
+ FrameBufferReleased, // The Buffer associated to the MediaFrameReference has already been released
+ InvalidFormat // There was a mismatch between the expected and actual values of some internal fields of the buffer
+ };
+
+ public ref class PoseTrackingFrameCreationResult sealed
+ {
+ public:
+ property PoseTrackingFrame^ Frame
+ {
+ PoseTrackingFrame^ get() { return _frame; }
+ }
+ property PoseTrackingFrameCreationStatus Status
+ {
+ PoseTrackingFrameCreationStatus get() { return _status; }
+ }
+ internal:
+ PoseTrackingFrameCreationResult(_In_opt_ PoseTrackingFrame^ frame, _In_ PoseTrackingFrameCreationStatus status)
+ : _frame(frame), _status(status)
+ {
+ }
+ private:
+ PoseTrackingFrame^ _frame;
+ PoseTrackingFrameCreationStatus _status;
+ };
+
+ public ref class PoseTrackingFrame sealed
+ {
+ public:
+ static property Platform::Guid PoseTrackingSubtype
+ {
+ Platform::Guid get()
+ {
+ return MFPoseTrackingPreview::MFPerceptionFormat_PoseTracking;
+ }
+ }
+
+ property Windows::Media::Capture::Frames::MediaFrameReference^ MediaFrameReference
+ {
+ Windows::Media::Capture::Frames::MediaFrameReference^ get() { return _mediaFrameReference; }
+ }
+
+ property Windows::Foundation::Collections::IVectorView^ Entities
+ {
+ Windows::Foundation::Collections::IVectorView^ get() { return _entities; }
+ }
+
+ property Platform::Guid CustomDataType
+ {
+ Platform::Guid get() { return _customDataType; }
+ }
+
+ // Creates a PoseTrackingFrame from the MediaFrameReference
+ // If the MediaFrameReference does not contain Perception / PoseTracking data, the returned value is nullptr
+ static PoseTrackingFrameCreationResult^ Create(_In_ Windows::Media::Capture::Frames::MediaFrameReference^ mediaFrameReference);
+
+ // Retrieves the location of Provider specific frame data in the frame buffer
+ BufferView^ GetCustomData();
+
+ private:
+ Windows::Media::Capture::Frames::MediaFrameReference^ _mediaFrameReference;
+ Windows::Storage::Streams::IBuffer^ _frameBuffer;
+ Windows::Foundation::Collections::IVectorView^ _entities;
+ Platform::Guid _customDataType;
+
+ PoseTrackingFrame(Windows::Media::Capture::Frames::MediaFrameReference^ mediaFrameReference,
+ _In_ REFIID customDataType,
+ _In_ Windows::Storage::Streams::IBuffer^ frameBuffer,
+ _In_ Windows::Foundation::Collections::IVectorView^ entities);
+ };
+ }
+ }
+ }
+}
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj
new file mode 100644
index 0000000000..730d0606ab
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj
@@ -0,0 +1,259 @@
+
+
+
+
+ Debug
+ ARM
+
+
+ Debug
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ ARM
+
+
+ Release
+ Win32
+
+
+ Release
+ x64
+
+
+
+ {67b5157a-25b1-4ec3-98cf-50e1b9177db4}
+ WindowsRuntimeComponent
+ PoseTrackingPreview
+ WindowsPreview.Media.Capture.Frames
+ en-US
+ 14.0
+ true
+ Windows Store
+ 10.0
+ 10.0.14393.0
+ 10.0.14393.0
+
+
+
+ DynamicLibrary
+ true
+ v140
+
+
+ DynamicLibrary
+ true
+ v140
+
+
+ DynamicLibrary
+ true
+ v140
+
+
+ DynamicLibrary
+ false
+ true
+ v140
+
+
+ DynamicLibrary
+ false
+ true
+ v140
+
+
+ DynamicLibrary
+ false
+ true
+ v140
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+ false
+ WindowsPreview.Media.Capture.Frames
+
+
+
+ Use
+ _WINRT_DLL;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+ Use
+ _WINRT_DLL;NDEBUG;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+ Use
+ _WINRT_DLL;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+ Use
+ _WINRT_DLL;NDEBUG;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+ Use
+ _WINRT_DLL;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+ Use
+ _WINRT_DLL;NDEBUG;%(PreprocessorDefinitions)
+ pch.h
+ $(IntDir)pch.pch
+ $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories)
+ /bigobj %(AdditionalOptions)
+ 28204
+ $(ProjectDir);$(GeneratedFilesDir);$(IntDir);%(AdditionalIncludeDirectories)
+ Level4
+
+
+ Console
+ false
+ $(OutDir)$(TargetName)$(TargetExt)
+ WindowsApp.lib;%(AdditionalDependencies);mfuuid.lib;
+
+
+
+
+
+
+
+
+
+
+ Create
+ Create
+ Create
+ Create
+ Create
+ Create
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj.filters b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj.filters
new file mode 100644
index 0000000000..5de0c47b56
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/PoseTrackingPreview.vcxproj.filters
@@ -0,0 +1,19 @@
+
+
+
+
+ a0eefb2e-85df-4d93-885a-8177d35028f1
+ rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tga;tiff;tif;png;wav;mfcribbon-ms
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.cpp b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.cpp
new file mode 100644
index 0000000000..bcb5590be1
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.cpp
@@ -0,0 +1 @@
+#include "pch.h"
diff --git a/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.h b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.h
new file mode 100644
index 0000000000..f83b7ca2f8
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/PoseTrackingPreview/pch.h
@@ -0,0 +1,32 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+
+#include
+#include "BufferHelper.h"
+
+// Because this function is only used to check the size of an IBuffer - which uses a capacity of type int, not size_t -
+// we return a uint32_t here.
+template
+uint32_t sizeof_composedStructure(uint32_t effectiveCount)
+{
+ return static_cast(sizeof(parentType) + (static_cast(effectiveCount) - static_cast(declaredCount)) * sizeof(repeatedType));
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.cpp b/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.cpp
new file mode 100644
index 0000000000..53271229bf
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.cpp
@@ -0,0 +1,21 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "MainPage.xaml.h"
+#include "SampleConfiguration.h"
+
+using namespace SDKTemplate;
+
+Platform::Array^ MainPage::scenariosInner = ref new Platform::Array
+{
+ { "Correlate multiple streams", "SDKTemplate.Scenario1_CorrelateStreams" }
+};
diff --git a/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.h b/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.h
new file mode 100644
index 0000000000..45c9041acd
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/SampleConfiguration.h
@@ -0,0 +1,47 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+#include "pch.h"
+
+namespace SDKTemplate
+{
+ value struct Scenario;
+
+ partial ref class MainPage
+ {
+ internal:
+ static property Platform::String^ FEATURE_NAME
+ {
+ Platform::String^ get()
+ {
+ return "Camera Stream Correlation C++/cx Sample";
+ }
+ }
+
+ static property Platform::Array^ scenarios
+ {
+ Platform::Array^ get()
+ {
+ return scenariosInner;
+ }
+ }
+
+ private:
+ static Platform::Array^ scenariosInner;
+ };
+
+ public value struct Scenario
+ {
+ Platform::String^ Title;
+ Platform::String^ ClassName;
+ };
+} // SDKTemplate
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml
new file mode 100644
index 0000000000..035befc378
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml
@@ -0,0 +1,76 @@
+
+
+
+
+
+
+
+ Correlate and display frames from depth and custom stream sources.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.cpp b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.cpp
new file mode 100644
index 0000000000..ad0c552bcd
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.cpp
@@ -0,0 +1,457 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include
+#include "Scenario1_CorrelateStreams.xaml.h"
+#include "FrameRenderer.h"
+
+using namespace SDKTemplate;
+
+using namespace concurrency;
+using namespace Platform;
+using namespace Platform::Collections;
+using namespace Windows::Media::Devices::Core;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Collections;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Graphics::Imaging;
+using namespace Windows::Media::Capture;
+using namespace Windows::Media::Capture::Frames;
+using namespace Windows::Perception::Spatial;
+using namespace Windows::UI::Xaml::Media::Imaging;
+
+using namespace WindowsPreview::Media::Capture::Frames;
+
+// Used to determine whether a source has a Perception major type.
+static String^ PerceptionMediaType = L"Perception";
+
+// Returns the values from a std::map as a std::vector.
+template
+static inline std::vector values(std::map const& inputMap)
+{
+ std::vector outputVector(inputMap.size());
+ std::transform(inputMap.begin(), inputMap.end(), outputVector.begin(), [](auto const& pair)
+ {
+ return pair.second;
+ });
+ return outputVector;
+}
+
+Scenario1_CorrelateStreams::Scenario1_CorrelateStreams() : rootPage(MainPage::Current)
+{
+ InitializeComponent();
+
+ m_logger = ref new SimpleLogger(outputTextBlock);
+
+ m_correlatedFrameRenderer = std::make_unique(previewImage);
+}
+
+void Scenario1_CorrelateStreams::OnNavigatedTo(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e)
+{
+ // Start streaming from the first available source group.
+ PickNextMediaSourceAsync();
+}
+
+void Scenario1_CorrelateStreams::OnNavigatedFrom(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e)
+{
+ CleanupMediaCaptureAsync();
+}
+
+void Scenario1_CorrelateStreams::NextButton_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
+{
+ PickNextMediaSourceAsync();
+}
+
+void Scenario1_CorrelateStreams::ToggleDepth_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
+{
+ m_frameSources[MediaFrameSourceKind::Depth].enabled = !m_frameSources[MediaFrameSourceKind::Depth].enabled;
+ UpdateUI();
+}
+
+void Scenario1_CorrelateStreams::ToggleSkeletons_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
+{
+ m_frameSources[MediaFrameSourceKind::Custom].enabled = !m_frameSources[MediaFrameSourceKind::Custom].enabled;
+ UpdateUI();
+}
+
+void Scenario1_CorrelateStreams::UpdateUI()
+{
+ ToggleDepth->IsEnabled = m_frameSources[MediaFrameSourceKind::Depth].sourceInfo != nullptr;
+ ToggleSkeletons->IsEnabled = m_frameSources[MediaFrameSourceKind::Custom].sourceInfo != nullptr;
+
+ DepthMessage->Text = m_frameSources[MediaFrameSourceKind::Depth].sourceInfo != nullptr
+ ? "Depth overlay " + (m_frameSources[MediaFrameSourceKind::Depth].enabled ? "enabled" : "disabled")
+ : "Depth source not found";
+ SkeletalMessage->Text = m_frameSources[MediaFrameSourceKind::Depth].sourceInfo != nullptr
+ ? "Skeletal overlay " + (m_frameSources[MediaFrameSourceKind::Custom].enabled ? "enabled" : "disabled")
+ : "Skeletal tracking source not found";
+}
+
+task Scenario1_CorrelateStreams::PickNextMediaSourceAsync()
+{
+ NextButton->IsEnabled = false;
+ return PickNextMediaSourceWorkerAsync()
+ .then([this]()
+ {
+ NextButton->IsEnabled = true;
+ UpdateUI();
+ }, task_continuation_context::use_current());
+}
+
+task Scenario1_CorrelateStreams::PickNextMediaSourceWorkerAsync()
+{
+ return CleanupMediaCaptureAsync().then([this]()
+ {
+ return create_task(MediaFrameSourceGroup::FindAllAsync());
+ }).then([this](IVectorView^ allGroups)
+ {
+ std::vector eligableGroups;
+ for (auto const& group : allGroups)
+ {
+ auto sourceInfos = group->SourceInfos;
+
+ // Keep this group if it at least supports color, as the other sources must be correlated with the color source.
+ if (group != nullptr && std::any_of(begin(sourceInfos), end(sourceInfos),
+ [](MediaFrameSourceInfo^ sourceInfo) { return sourceInfo != nullptr && sourceInfo->SourceKind == MediaFrameSourceKind::Color; }))
+ {
+ eligableGroups.push_back(group);
+ }
+ }
+
+ if (eligableGroups.size() == 0)
+ {
+ m_logger->Log("No valid source groups found");
+ return task_from_result();
+ }
+
+ // Pick next group in the array after each time the Next button is clicked.
+ m_selectedSourceGroupIndex = (m_selectedSourceGroupIndex + 1) % eligableGroups.size();
+
+ m_logger->Log("Found " + eligableGroups.size().ToString() + " groups and " +
+ "selecting index [" + m_selectedSourceGroupIndex.ToString() + "] : " +
+ eligableGroups[m_selectedSourceGroupIndex]->DisplayName);
+
+ MediaFrameSourceGroup^ selectedGroup = eligableGroups[m_selectedSourceGroupIndex];
+
+ // Initialize MediaCapture with selected group.
+ return TryInitializeMediaCaptureAsync(selectedGroup)
+ .then([this, selectedGroup](bool initialized)
+ {
+ if (!initialized)
+ {
+ return CleanupMediaCaptureAsync();
+ }
+
+ // Try to find color, depth and skeletal sources on this source group.
+ auto sourceInfos = selectedGroup->SourceInfos;
+
+ auto colorSourceInfo = std::find_if(begin(sourceInfos), end(sourceInfos), [](MediaFrameSourceInfo^ sourceInfo)
+ {
+ return sourceInfo->SourceKind == MediaFrameSourceKind::Color;
+ });
+
+ auto depthSourceInfo = std::find_if(begin(sourceInfos), end(sourceInfos), [](MediaFrameSourceInfo^ sourceInfo)
+ {
+ return sourceInfo->SourceKind == MediaFrameSourceKind::Depth;
+ });
+
+ // In order to determine whether or not the source info is a skeletal source, its major type must
+ // be "Perception" and its subtype must be a Guid matching MFPoseTrackingPreview::MFPerceptionFormat_PoseTracking.
+    // We must use the initialized media capture object to look up the format and determine the major type and subtype
+ // of the source.
+ auto skeletalSourceInfo = std::find_if(begin(sourceInfos), end(sourceInfos), [this](MediaFrameSourceInfo^ sourceInfo)
+ {
+ if (m_mediaCapture->FrameSources->HasKey(sourceInfo->Id))
+ {
+ MediaFrameFormat^ format = m_mediaCapture->FrameSources->Lookup(sourceInfo->Id)->CurrentFormat;
+ GUID subTypeGuid;
+ if (SUCCEEDED(IIDFromString(format->Subtype->Data(), &subTypeGuid)))
+ {
+ return sourceInfo->SourceKind == MediaFrameSourceKind::Custom
+ && format->MajorType == PerceptionMediaType
+ && Guid(subTypeGuid).Equals(PoseTrackingFrame::PoseTrackingSubtype);
+ }
+ }
+
+ return false;
+ });
+
+ // Reset our frame sources data
+ m_frameSources[MediaFrameSourceKind::Color] = FrameSourceState();
+ m_frameSources[MediaFrameSourceKind::Depth] = FrameSourceState();
+ m_frameSources[MediaFrameSourceKind::Custom] = FrameSourceState();
+
+ // Store the source info object if a source group was found.
+ m_frameSources[MediaFrameSourceKind::Color].sourceInfo = colorSourceInfo != end(sourceInfos) ? *colorSourceInfo : nullptr;
+ m_frameSources[MediaFrameSourceKind::Depth].sourceInfo = depthSourceInfo != end(sourceInfos) ? *depthSourceInfo : nullptr;
+ m_frameSources[MediaFrameSourceKind::Custom].sourceInfo = skeletalSourceInfo != end(sourceInfos) ? *skeletalSourceInfo : nullptr;
+
+ // Enable color always.
+ m_frameSources[MediaFrameSourceKind::Color].enabled = true;
+
+ // Enable depth if depth is available.
+ m_frameSources[MediaFrameSourceKind::Depth].enabled = m_frameSources[MediaFrameSourceKind::Depth].sourceInfo != nullptr;
+
+ // Create readers for found sources.
+ std::vector> createReadersTasks;
+
+ if (m_frameSources[MediaFrameSourceKind::Color].sourceInfo)
+ {
+ createReadersTasks.push_back(CreateReaderAsync(m_frameSources[MediaFrameSourceKind::Color].sourceInfo));
+ }
+
+ if (m_frameSources[MediaFrameSourceKind::Depth].sourceInfo)
+ {
+ createReadersTasks.push_back(CreateReaderAsync(m_frameSources[MediaFrameSourceKind::Depth].sourceInfo));
+ }
+
+ if (m_frameSources[MediaFrameSourceKind::Custom].sourceInfo)
+ {
+ createReadersTasks.push_back(CreateReaderAsync(m_frameSources[MediaFrameSourceKind::Custom].sourceInfo));
+ }
+
+ // The when_all method will execute all tasks in parallel, and call the continuation when all tasks have completed.
+ // This async method can be called even if no readers are present. In that case the continuation will be called immediately.
+ return when_all(begin(createReadersTasks), end(createReadersTasks));
+ });
+ }, task_continuation_context::get_current_winrt_context());
+}
+
+task Scenario1_CorrelateStreams::CreateReaderAsync(MediaFrameSourceInfo^ info)
+{
+    // Access the initialized frame source by looking up the Id of the source.
+    // Verify that the Id is present, because it may have left the group while we were
+ // busy deciding which group to use.
+ if (!m_mediaCapture->FrameSources->HasKey(info->Id))
+ {
+ m_logger->Log("Unable to start " + info->SourceKind.ToString() + " reader: Frame source not found");
+ return task_from_result();
+ }
+
+ return create_task(m_mediaCapture->CreateFrameReaderAsync(m_mediaCapture->FrameSources->Lookup(info->Id)))
+ .then([this, info](MediaFrameReader^ frameReader)
+ {
+ m_frameSources[info->SourceKind].frameArrivedEventToken = frameReader->FrameArrived +=
+ ref new TypedEventHandler(this, &Scenario1_CorrelateStreams::FrameReader_FrameArrived);
+
+ m_logger->Log(info->SourceKind.ToString() + " reader created");
+
+ // Keep track of created reader and event handler so it can be stopped later.
+ m_frameSources[info->SourceKind].reader = frameReader;
+ return create_task(frameReader->StartAsync());
+ }).then([this, info](MediaFrameReaderStartStatus status)
+ {
+ if (status != MediaFrameReaderStartStatus::Success)
+ {
+ m_logger->Log("Unable to start " + info->SourceKind.ToString() + " reader. Error: " + status.ToString());
+ }
+ });
+}
+
+task Scenario1_CorrelateStreams::TryInitializeMediaCaptureAsync(MediaFrameSourceGroup^ group)
+{
+ if (m_mediaCapture != nullptr)
+ {
+ // Already initialized.
+ return task_from_result(true);
+ }
+
+ // Initialize mediacapture with the source group.
+ m_mediaCapture = ref new MediaCapture();
+
+ auto settings = ref new MediaCaptureInitializationSettings();
+
+ // Select the source we will be reading from.
+ settings->SourceGroup = group;
+
+ // This media capture can share streaming with other apps.
+ settings->SharingMode = MediaCaptureSharingMode::SharedReadOnly;
+
+ // Only stream video and don't initialize audio capture devices.
+ settings->StreamingCaptureMode = StreamingCaptureMode::Video;
+
+ // Set to CPU to ensure frames always contain CPU SoftwareBitmap images,
+ // instead of preferring GPU D3DSurface images.
+ settings->MemoryPreference = MediaCaptureMemoryPreference::Cpu;
+
+ // Only stream video and don't initialize audio capture devices.
+ settings->StreamingCaptureMode = StreamingCaptureMode::Video;
+
+ // Initialize MediaCapture with the specified group.
+ // This must occur on the UI thread because some device families
+ // (such as Xbox) will prompt the user to grant consent for the
+ // app to access cameras.
+ // This can raise an exception if the source no longer exists,
+ // or if the source could not be initialized.
+ return create_task(m_mediaCapture->InitializeAsync(settings))
+ .then([this](task initializeMediaCaptureTask)
+ {
+ try
+ {
+ // Get the result of the initialization. This call will throw if initialization failed
+ // This pattern is documented at https://msdn.microsoft.com/en-us/library/dd997692.aspx
+ initializeMediaCaptureTask.get();
+ m_logger->Log("MediaCapture is successfully initialized in shared mode.");
+ return true;
+ }
+ catch (Exception^ exception)
+ {
+ m_logger->Log("Failed to initialize media capture: " + exception->Message);
+ return false;
+ }
+ });
+}
+
+task Scenario1_CorrelateStreams::CleanupMediaCaptureAsync()
+{
+ task cleanupTask = task_from_result();
+
+ if (m_mediaCapture != nullptr)
+ {
+ for (FrameSourceState frameSourceState : values(m_frameSources))
+ {
+ if (frameSourceState.reader)
+ {
+ frameSourceState.reader->FrameArrived -= frameSourceState.frameArrivedEventToken;
+ cleanupTask = cleanupTask && create_task(frameSourceState.reader->StopAsync());
+ }
+
+ frameSourceState.enabled = false;
+ frameSourceState.sourceInfo = nullptr;
+ frameSourceState.reader = nullptr;
+ frameSourceState.latestFrame = nullptr;
+ }
+
+ m_mediaCapture = nullptr;
+ }
+ return cleanupTask;
+}
+
+void Scenario1_CorrelateStreams::FrameReader_FrameArrived(MediaFrameReader^ sender, MediaFrameArrivedEventArgs^ args)
+{
+ // TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
+ // This can return null if there is no such frame, or if the reader is not in the
+ // "Started" state. The latter can occur if a FrameArrived event was in flight
+ // when the reader was stopped.
+ if (MediaFrameReference^ candidateFrame = sender->TryAcquireLatestFrame())
+ {
+ // Since multiple sources will be receiving frames, we must synchronize access to m_frameSources.
+ auto lock = m_frameLock.LockExclusive();
+
+ // Buffer frame for later usage.
+ m_frameSources[candidateFrame->SourceKind].latestFrame = candidateFrame;
+
+ auto frameSourceObjects = values(m_frameSources);
+ bool allFramesBuffered = std::none_of(frameSourceObjects.begin(), frameSourceObjects.end(),
+ [](FrameSourceState const& frameSourceState)
+ {
+ return frameSourceState.enabled && frameSourceState.latestFrame == nullptr;
+ });
+
+ // If we have frames from currently enabled sources, render to UI.
+ if (allFramesBuffered)
+ {
+ bool colorEnabled = m_frameSources[MediaFrameSourceKind::Color].enabled;
+ bool depthEnabled = m_frameSources[MediaFrameSourceKind::Depth].enabled;
+ bool skeletalEnabled = m_frameSources[MediaFrameSourceKind::Custom].enabled;
+
+ MediaFrameReference^ colorFrame = m_frameSources[MediaFrameSourceKind::Color].latestFrame;
+ MediaFrameReference^ depthFrame = m_frameSources[MediaFrameSourceKind::Depth].latestFrame;
+ MediaFrameReference^ skeletalFrame = m_frameSources[MediaFrameSourceKind::Custom].latestFrame;
+
+ // If depth and color enabled, correlate and output
+ if (colorEnabled && depthEnabled)
+ {
+ m_correlatedFrameRenderer->ProcessDepthAndColorFrames(colorFrame, depthFrame);
+ }
+ // Render only color if enabled and available
+ else if (colorEnabled)
+ {
+ m_correlatedFrameRenderer->ProcessColorFrame(colorFrame);
+ }
+
+ // Clear the skeleton shapes if we are not rendering a skeletal frame.
+ Dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::Normal,
+ ref new Windows::UI::Core::DispatchedHandler([this]()
+ {
+ skeletalFrameRenderer->Clear();
+ }));
+
+ // Render skeletal data if enabled and color and depth frames are available.
+ // The skeletal rendering requires the color and depth coordinate systems and the
+ // color frame's camera intrinsics to map the resulting skeleton shapes into color image space.
+ if (skeletalEnabled && colorFrame != nullptr && depthFrame != nullptr)
+ {
+ if (PoseTrackingFrameCreationResult^ result = PoseTrackingFrame::Create(skeletalFrame))
+ {
+ if (result->Status == PoseTrackingFrameCreationStatus::Success)
+ {
+ // We must map the coordinates produced by the skeletal tracker from depth space to color space.
+ CameraIntrinsics^ colorIntrinsics = colorFrame->VideoMediaFrame->CameraIntrinsics;
+ SpatialCoordinateSystem^ colorCoordinateSystem = colorFrame->CoordinateSystem;
+ SpatialCoordinateSystem^ depthCoordinateSystem = depthFrame->CoordinateSystem;
+
+ Dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::Normal,
+ ref new Windows::UI::Core::DispatchedHandler([this, result, colorIntrinsics, colorCoordinateSystem, depthCoordinateSystem]()
+ {
+                        // We must also scale up the coordinates to the size of the color image canvas in order
+ // to map the bones onto people in the frame correctly.
+ float widthScale = static_cast(previewImage->ActualWidth) / static_cast(colorIntrinsics->ImageWidth);
+ float heightScale = static_cast(previewImage->ActualHeight) / static_cast(colorIntrinsics->ImageHeight);
+
+ if (IBox^ boxedDepthToColorTransform = depthCoordinateSystem->TryGetTransformTo(colorCoordinateSystem))
+ {
+ float4x4 depthToColorTransform = boxedDepthToColorTransform->Value;
+
+ skeletalFrameRenderer->Render(result->Frame, ref new CoordinateTransformationMethod(
+ [colorIntrinsics, depthToColorTransform, widthScale, heightScale](float3 point)
+ {
+ // Transform the point from depth to color space.
+ Point transformedPoint = colorIntrinsics->ProjectOntoFrame(transform(point, depthToColorTransform));
+
+ // Scale up the coordinates to match the color image.
+ transformedPoint.X *= widthScale;
+ transformedPoint.Y *= heightScale;
+
+ // Return the result to the SkeletalFrameRenderer.
+ return transformedPoint;
+ }));
+ }
+ }));
+ }
+ else
+ {
+ m_logger->Log("Failed to convert pose tracking frame: " + result->Status.ToString());
+ }
+ }
+ }
+
+ // clear buffered frames if used
+ if (colorEnabled)
+ {
+ m_frameSources[MediaFrameSourceKind::Color].latestFrame = nullptr;
+ }
+ if (depthEnabled)
+ {
+ m_frameSources[MediaFrameSourceKind::Depth].latestFrame = nullptr;
+ }
+ if (skeletalEnabled)
+ {
+ m_frameSources[MediaFrameSourceKind::Custom].latestFrame = nullptr;
+ }
+ }
+ }
+ else
+ {
+ m_logger->Log("Unable to acquire frame");
+ }
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.h b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.h
new file mode 100644
index 0000000000..1502720b9d
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/Scenario1_CorrelateStreams.xaml.h
@@ -0,0 +1,117 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Scenario1_CorrelateStreams.g.h"
+#include "MainPage.xaml.h"
+#include "SkeletalFrameRenderer.h"
+#include "SimpleLogger.h"
+#include "FrameRenderer.h"
+#include
+#include
+
+namespace SDKTemplate
+{
+ // This structure stores information related to a frame source.
+ struct FrameSourceState
+ {
+ bool enabled = false; // Whether or not this source is enabled for rendering.
+
+ Windows::Media::Capture::Frames::MediaFrameSourceInfo^ sourceInfo = nullptr; // The source info associated with this source.
+ Windows::Media::Capture::Frames::MediaFrameReference^ latestFrame = nullptr; // The latest frame from this source.
+ Windows::Media::Capture::Frames::MediaFrameReader^ reader = nullptr; // The reader we are using to read this source.
+
+ Windows::Foundation::EventRegistrationToken frameArrivedEventToken;
+ };
+
+ [Windows::Foundation::Metadata::WebHostHidden]
+ public ref class Scenario1_CorrelateStreams sealed
+ {
+ public:
+ Scenario1_CorrelateStreams();
+
+ protected:
+ ///
+ /// Called when user navigates to this Scenario.
+ /// Immediately start streaming from first available source group.
+ ///
+ virtual void OnNavigatedTo(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e) override;
+
+ ///
+ /// Called when user navigates away from this Scenario.
+ /// Stops streaming and disposes of all objects.
+ ///
+ virtual void OnNavigatedFrom(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e) override;
+
+ private:
+ void NextButton_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
+ void ToggleDepth_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
+ void ToggleSkeletons_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
+
+ private: // Private methods
+ ///
+ /// Enable and disable the stream toggle buttons and set their output message if they are disabled.
+ ///
+ void UpdateUI();
+
+ ///
+ /// Pick the next source group and start streaming from it.
+ ///
+ concurrency::task PickNextMediaSourceAsync();
+
+ ///
+ /// Switch to the next eligible media source.
+ ///
+ concurrency::task PickNextMediaSourceWorkerAsync();
+
+ ///
+ /// Creates a reader for the frame source described by the MediaFrameSourceInfo.
+ /// On frame arrived will render the output to the supplied software bitmap source.
+ ///
+ concurrency::task CreateReaderAsync(Windows::Media::Capture::Frames::MediaFrameSourceInfo^ sourceInfo);
+
+ ///
+ /// Initialize the media capture object.
+ /// Must be called from the UI thread.
+ ///
+ concurrency::task TryInitializeMediaCaptureAsync(Windows::Media::Capture::Frames::MediaFrameSourceGroup^ group);
+
+ ///
+ /// Unregisters FrameArrived event handlers, stops and disposes frame readers
+ /// and disposes the MediaCapture object.
+ ///
+ concurrency::task CleanupMediaCaptureAsync();
+
+ ///
+ /// Handler for frames which arrive from the MediaFrameReader.
+ /// Buffers the required frames for rendering and renders based on which sources are enabled and available.
+ ///
+ void FrameReader_FrameArrived(
+ Windows::Media::Capture::Frames::MediaFrameReader^ sender,
+ Windows::Media::Capture::Frames::MediaFrameArrivedEventArgs^ args);
+
+ private: // Private data.
+ SDKTemplate::MainPage^ rootPage;
+
+ UINT32 m_selectedSourceGroupIndex = 0;
+
+ Platform::Agile m_mediaCapture;
+
+ Microsoft::WRL::Wrappers::SRWLock m_frameLock;
+
+ std::map m_frameSources;
+
+ std::unique_ptr m_correlatedFrameRenderer;
+
+ SDKTemplate::SimpleLogger^ m_logger;
+ };
+} // SDKTemplate
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/SimpleLogger.h b/Samples/CameraStreamCorrelation/cpp/SimpleLogger.h
new file mode 100644
index 0000000000..b238379a3a
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/SimpleLogger.h
@@ -0,0 +1,48 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace SDKTemplate
+{
+ private ref class SimpleLogger sealed
+ {
+ public:
+ SimpleLogger(Windows::UI::Xaml::Controls::TextBlock^ textBlock)
+ {
+ m_textBlock = textBlock;
+ m_dispatcher = textBlock->Dispatcher;
+ }
+
+ void Log(Platform::String^ message)
+ {
+ auto calendar = ref new Windows::Globalization::Calendar();
+ auto formatter = ref new Windows::Globalization::DateTimeFormatting::DateTimeFormatter("hour minute second");
+
+ LONG messageNumber = InterlockedIncrement(&m_messageCount);
+ Platform::String^ newMessage = "[" + messageNumber.ToString() + "] " +
+ formatter->Format(calendar->GetDateTime()) + " : " + message;
+
+ m_dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::Low,
+ ref new Windows::UI::Core::DispatchedHandler(
+ [this, newMessage]()
+ {
+ m_textBlock->Text = newMessage + "\r\n" + m_textBlock->Text;
+ }));
+ }
+
+ private:
+ LONG m_messageCount = 0;
+ Windows::UI::Xaml::Controls::TextBlock^ m_textBlock;
+ Windows::UI::Core::CoreDispatcher^ m_dispatcher;
+ };
+
+} // SDKTemplate
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.cpp b/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.cpp
new file mode 100644
index 0000000000..56f97e2e2c
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.cpp
@@ -0,0 +1,165 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "SkeletalFrameRenderer.h"
+#include
+
+using namespace SDKTemplate;
+
+using namespace Platform;
+using namespace Windows::Foundation;
+using namespace Windows::UI;
+using namespace Windows::UI::Xaml::Media;
+using namespace Windows::UI::Xaml::Shapes;
+using namespace WindowsPreview::Media::Capture::Frames;
+
+// The Templated Control item template is documented at http://go.microsoft.com/fwlink/?LinkId=234235
+
+// Skeleton colors.
+static std::array colors = {
+ ColorHelper::FromArgb(0xFF, 0x7F, 0x00, 0x00),
+ ColorHelper::FromArgb(0xFF, 0xFF, 0x00, 0x00),
+ ColorHelper::FromArgb(0xFF, 0xFF, 0x7F, 0x00),
+ ColorHelper::FromArgb(0xFF, 0xFF, 0xFF, 0x00),
+ ColorHelper::FromArgb(0xFF, 0x7F, 0xFF, 0x7F),
+ ColorHelper::FromArgb(0xFF, 0x00, 0xFF, 0xFF),
+ ColorHelper::FromArgb(0xFF, 0x00, 0x7F, 0xFF),
+ ColorHelper::FromArgb(0xFF, 0x00, 0x00, 0xFF),
+ ColorHelper::FromArgb(0xFF, 0x00, 0x00, 0x7F)
+};
+
+// Returns line between two body parts transformed using supplied transformation function.
+static Line^ LineTo(
+ Array^ poses,
+ BodyPart fromBodyPart,
+ BodyPart toBodyPart,
+ CoordinateTransformationMethod^ positionTransformation)
+{
+ TrackedPose fromTrackedBodyPart = poses[static_cast(fromBodyPart)];
+ TrackedPose toTrackedBodypart = poses[static_cast(toBodyPart)];
+
+ // Only render tracked bones.
+ if (fromTrackedBodyPart.PositionStatus == TrackingStatus::NotTracked ||
+ fromTrackedBodyPart.PositionStatus == TrackingStatus::Inferred ||
+ toTrackedBodypart.PositionStatus == TrackingStatus::NotTracked ||
+ toTrackedBodypart.PositionStatus == TrackingStatus::Inferred)
+ {
+ return nullptr;
+ }
+
+ Point fromPosition = positionTransformation(fromTrackedBodyPart.Position);
+ Point toPosition = positionTransformation(toTrackedBodypart.Position);
+
+ Line^ line = ref new Line();
+
+ line->X1 = fromPosition.X;
+ line->Y1 = fromPosition.Y;
+ line->X2 = toPosition.X;
+ line->Y2 = toPosition.Y;
+
+ return line;
+}
+
+static std::vector RenderSkeletonShapes(
+ PoseTrackingEntity^ poseEntity,
+ Brush^ shapeColor,
+ CoordinateTransformationMethod^ positionTransformation)
+{
+ if (poseEntity == nullptr || !poseEntity->IsTracked)
+ {
+ return std::vector();
+ }
+
+ Array^ poses = ref new Array(poseEntity->PosesCount);
+ poseEntity->GetPoses(poses);
+
+ // Create line shapes for all bones in the skeleton
+ std::vector shapes{
+ LineTo(poses, BodyPart::Head, BodyPart::Neck, positionTransformation),
+ LineTo(poses, BodyPart::Neck, BodyPart::SpineShoulder, positionTransformation),
+ LineTo(poses, BodyPart::SpineShoulder,BodyPart::ShoulderLeft, positionTransformation),
+ LineTo(poses, BodyPart::SpineShoulder,BodyPart::ShoulderRight, positionTransformation),
+ LineTo(poses, BodyPart::ShoulderLeft, BodyPart::ElbowLeft, positionTransformation),
+ LineTo(poses, BodyPart::ShoulderRight,BodyPart::ElbowRight, positionTransformation),
+ LineTo(poses, BodyPart::ElbowLeft, BodyPart::WristLeft, positionTransformation),
+ LineTo(poses, BodyPart::ElbowRight, BodyPart::WristRight, positionTransformation),
+ LineTo(poses, BodyPart::WristLeft, BodyPart::HandLeft, positionTransformation),
+ LineTo(poses, BodyPart::WristRight, BodyPart::HandRight, positionTransformation),
+ LineTo(poses, BodyPart::HandLeft, BodyPart::HandTipLeft, positionTransformation),
+ LineTo(poses, BodyPart::HandRight, BodyPart::HandTipRight, positionTransformation),
+ LineTo(poses, BodyPart::HandLeft, BodyPart::ThumbLeft, positionTransformation),
+ LineTo(poses, BodyPart::HandRight, BodyPart::ThumbRight, positionTransformation),
+ LineTo(poses, BodyPart::SpineShoulder,BodyPart::SpineMid, positionTransformation),
+ LineTo(poses, BodyPart::SpineMid, BodyPart::SpineBase, positionTransformation),
+ LineTo(poses, BodyPart::SpineBase, BodyPart::HipLeft, positionTransformation),
+ LineTo(poses, BodyPart::SpineBase, BodyPart::HipRight, positionTransformation),
+ LineTo(poses, BodyPart::HipLeft, BodyPart::KneeLeft, positionTransformation),
+ LineTo(poses, BodyPart::HipRight, BodyPart::KneeRight, positionTransformation),
+ LineTo(poses, BodyPart::KneeLeft, BodyPart::AnkleLeft, positionTransformation),
+ LineTo(poses, BodyPart::KneeRight, BodyPart::AnkleRight, positionTransformation),
+ LineTo(poses, BodyPart::AnkleLeft, BodyPart::FootLeft, positionTransformation),
+ LineTo(poses, BodyPart::AnkleRight, BodyPart::FootRight, positionTransformation)
+ };
+
+ // Apply shape color style.
+ for (Shape^ const& boneShape : shapes)
+ {
+ if (boneShape)
+ {
+ boneShape->StrokeThickness = 5;
+ boneShape->Stroke = shapeColor;
+ }
+ }
+
+ return shapes;
+}
+
+SkeletalFrameRenderer::SkeletalFrameRenderer() :
+ Canvas()
+{
+ for(auto const& color : colors)
+ {
+ m_brushes.push_back(ref new SolidColorBrush(color));
+ }
+}
+
+void SkeletalFrameRenderer::Clear()
+{
+ Children->Clear();
+}
+
+void SkeletalFrameRenderer::Render(PoseTrackingFrame^ frame, CoordinateTransformationMethod^ positionTransformation)
+{
+ Clear();
+
+ for (UINT32 i = 0; i < frame->Entities->Size; i++)
+ {
+ PoseTrackingEntity^ poseEntity = frame->Entities->GetAt(i);
+
+ if (poseEntity == nullptr ||
+ !poseEntity->IsTracked ||
+ !poseEntity->PoseSetId.Equals(KnownPoseTrackingPointSets::BodyTrackingPointSet))
+ {
+ continue;
+ }
+
+ Brush^ lineColor = m_brushes[i % m_brushes.size()];
+
+ for (UIElement^ const& bone : RenderSkeletonShapes(poseEntity, lineColor, positionTransformation))
+ {
+ if (bone)
+ {
+ Children->Append(bone);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.h b/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.h
new file mode 100644
index 0000000000..0709e91b27
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/SkeletalFrameRenderer.h
@@ -0,0 +1,43 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace SDKTemplate
+{
+ // Delegate used to map skeletal points from depth space to required display space.
+ public delegate Windows::Foundation::Point CoordinateTransformationMethod(Windows::Foundation::Numerics::float3);
+
+ public ref class SkeletalFrameRenderer sealed : public Windows::UI::Xaml::Controls::Canvas
+ {
+ public:
+ SkeletalFrameRenderer();
+
+ ///
+ /// Clears all rendered shapes on this canvas.
+ ///
+ void Clear();
+
+ ///
+ /// Renders a pose tracking frame to this canvas.
+ ///
+ void Render(WindowsPreview::Media::Capture::Frames::PoseTrackingFrame^ frame, CoordinateTransformationMethod^ transformation);
+
+ private:
+ std::vector m_brushes;
+
+ static Windows::UI::Xaml::Shapes::Line^ LineTo(
+ Platform::Array^ poses,
+ WindowsPreview::Media::Capture::Frames::BodyPart fromBodyPart,
+ WindowsPreview::Media::Capture::Frames::BodyPart toBodyPart,
+ CoordinateTransformationMethod^ positionTransformation);
+ };
+} // SDKTemplate
\ No newline at end of file
diff --git a/Samples/CameraStreamCorrelation/cpp/pch.cpp b/Samples/CameraStreamCorrelation/cpp/pch.cpp
new file mode 100644
index 0000000000..e5b0b06e30
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/pch.cpp
@@ -0,0 +1,16 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+//
+// Include the standard header and generate the precompiled header.
+//
+
+#include "pch.h"
diff --git a/Samples/CameraStreamCorrelation/cpp/pch.h b/Samples/CameraStreamCorrelation/cpp/pch.h
new file mode 100644
index 0000000000..416d4f5ed3
--- /dev/null
+++ b/Samples/CameraStreamCorrelation/cpp/pch.h
@@ -0,0 +1,23 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+//
+// Header for standard system include files.
+//
+
+#pragma once
+
+#include <collection.h>
+#include <ppltasks.h>
+#include <agile.h>
+#include <concrt.h>
+
+#include "App.xaml.h"
\ No newline at end of file
diff --git a/Samples/SystemMediaTransportControls/cs/Scenario1.xaml.cs b/Samples/SystemMediaTransportControls/cs/Scenario1.xaml.cs
index 8bfece8d60..38b6da0b22 100644
--- a/Samples/SystemMediaTransportControls/cs/Scenario1.xaml.cs
+++ b/Samples/SystemMediaTransportControls/cs/Scenario1.xaml.cs
@@ -210,19 +210,18 @@ await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
switch (systemMediaControls.SoundLevel)
{
case SoundLevel.Full:
+ case SoundLevel.Low:
+ // If we had paused due to system mute, then resume on unmute.
if (pausedDueToMute)
{
- // If we previously paused due to being muted, resume.
mediaPlayer.Play();
+ pausedDueToMute = false;
}
break;
- case SoundLevel.Low:
- // We're being ducked, take no action.
- break;
case SoundLevel.Muted:
+ // We've been muted by the system. Pause playback to release resources.
if (mediaPlayer != null && mediaPlayer.PlaybackSession.PlaybackState == MediaPlaybackState.Playing)
{
- // We've been muted by the system, pause to save our playback position.
mediaPlayer.Pause();
pausedDueToMute = true;
}
diff --git a/Samples/SystemMediaTransportControls/js/js/scenario1.js b/Samples/SystemMediaTransportControls/js/js/scenario1.js
index 4ebc7880bf..72c37b5182 100644
--- a/Samples/SystemMediaTransportControls/js/js/scenario1.js
+++ b/Samples/SystemMediaTransportControls/js/js/scenario1.js
@@ -328,25 +328,23 @@
if (eventIn.property == Windows.Media.SystemMediaTransportControlsProperty.soundLevel) {
switch (systemMediaControls.soundLevel) {
case Windows.Media.SoundLevel.muted:
- // We;ve been muted by the system, pause to save out state if we're currently playing.
- if (player != null && player.playerState == playerState.playing) {
- WinJS.log && WinJS.log("Play pressed", "sample", "status");
+ // We've been muted by the system. Pause playback to release resources.
+ if (player != null && !player.paused && !player.ended) {
+ WinJS.log && WinJS.log("Paused due to system mute", "sample", "status");
player.pause();
pausedDueToMute = true;
}
break;
case Windows.Media.SoundLevel.full:
- // We've just been unmuted, if we paused when we muted resume.
+ case Windows.Media.SoundLevel.low:
+ // If we had paused due to system mute, then resume on unmute.
if (pausedDueToMute) {
- WinJS.log && WinJS.log("Pause pressed", "sample", "status");
+ WinJS.log && WinJS.log("Unpause due to system unmute", "sample", "status");
player.play();
+ pausedDueToMute = false;
}
break;
-
- case Windows.Media.SoundLevel.low:
- // We're being ducked, take no action.
- break;
}
}
}
@@ -555,13 +553,6 @@
var mediaFile = playlist[currentItemIndex];
player.src = URL.createObjectURL(mediaFile, { oneTimeOnly: true });
- // for sample purpose only: the setActive() as a side effect will temporarily
- // fades in the build-in transport controls of the <video> element as visual
- // feedback, in case the mediaFile being loaded/played is audio-only. Not
- // necessary with <audio> elements as their build-in transport controls
- // (when enabled) are always visible.
- player.setActive();
-
updateSystemMediaControlsDisplayAsync(mediaFile).done(function (done) {
// no further actions needed
}, function (error) {