From 1438cdea81bf31cf7aabe0dde639868a8db4cdd4 Mon Sep 17 00:00:00 2001
From: Hyblocker
Date: Thu, 23 Nov 2023 22:43:07 +0100
Subject: [PATCH 1/2] Fix incorrect stream accessors for debug streams.

Tested with Vive Facial tracker. Untested with eye tracking stream.
---
 VRCFaceTracking/Views/SettingsPage.xaml.cs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/VRCFaceTracking/Views/SettingsPage.xaml.cs b/VRCFaceTracking/Views/SettingsPage.xaml.cs
index 8a69f2e5..e4dd680f 100644
--- a/VRCFaceTracking/Views/SettingsPage.xaml.cs
+++ b/VRCFaceTracking/Views/SettingsPage.xaml.cs
@@ -67,7 +67,7 @@ public SettingsPage()
             _upperStream = UpperImageSource.PixelBuffer.AsStream();
         }
 
-        if (upperSize is { x: > 0, y: > 0 })
+        if (lowerSize is { x: > 0, y: > 0 })
         {
             LowerImageSource = new WriteableBitmap(lowerSize.x, lowerSize.y);
             _lowerStream = LowerImageSource.PixelBuffer.AsStream();
@@ -82,7 +82,7 @@ public SettingsPage()
     private async void OnTrackingDataUpdated()
     {
         var upperData = UnifiedTracking.EyeImageData.ImageData;
-        if (upperData != null && _lowerStream.CanWrite)
+        if (upperData != null && _upperStream.CanWrite)
         {
             _upperStream.Position = 0;
             await _upperStream.WriteAsync(upperData, 0, upperData.Length);
@@ -91,7 +91,7 @@ private async void OnTrackingDataUpdated()
         }
 
         var lowerData = UnifiedTracking.LipImageData.ImageData;
-        if (lowerData != null && _upperStream.CanWrite)
+        if (lowerData != null && _lowerStream.CanWrite)
         {
             _lowerStream.Position = 0;
             await _lowerStream.WriteAsync(lowerData, 0, lowerData.Length);

From 4b2d2cea29c4a2a55cda86d43bfc3d9019c7bc94 Mon Sep 17 00:00:00 2001
From: Hyblocker
Date: Mon, 27 Nov 2023 18:37:44 +0100
Subject: [PATCH 2/2] Make the settings page less prone to crashing given invalid streams

---
 VRCFaceTracking/Views/SettingsPage.xaml.cs | 107 ++++++++++++++-------
 1 file changed, 73 insertions(+), 34 deletions(-)

diff --git a/VRCFaceTracking/Views/SettingsPage.xaml.cs b/VRCFaceTracking/Views/SettingsPage.xaml.cs
index e4dd680f..50b2ea7d 100644
--- a/VRCFaceTracking/Views/SettingsPage.xaml.cs
+++ b/VRCFaceTracking/Views/SettingsPage.xaml.cs
@@ -5,6 +5,7 @@
 using VRCFaceTracking.ViewModels;
 using Windows.System;
 using Microsoft.UI.Xaml.Media.Imaging;
+using VrcftImage = VRCFaceTracking.Core.Types.Image;
 
 namespace VRCFaceTracking.Views;
 
@@ -38,18 +39,12 @@
     public RiskySettingsViewModel RiskySettingsViewModel
     {
         get;
     }
-
-    public WriteableBitmap UpperImageSource
-    {
-        get;
-    }
-
-    public WriteableBitmap LowerImageSource
-    {
-        get;
-    }
-    private readonly Stream _upperStream, _lowerStream;
+    public WriteableBitmap UpperImageSource => _upperImageStream;
+    public WriteableBitmap LowerImageSource => _lowerImageStream;
+
+    private WriteableBitmap _upperImageStream, _lowerImageStream;
+    private Stream _upperStream, _lowerStream;
 
     public SettingsPage()
     {
@@ -58,45 +53,89 @@ public SettingsPage()
         CalibrationSettings = App.GetService();
         RiskySettingsViewModel = App.GetService();
 
-        var upperSize = UnifiedTracking.EyeImageData.ImageSize;
-        var lowerSize = UnifiedTracking.LipImageData.ImageSize;
-
-        if (upperSize is { x: > 0, y: > 0 })
-        {
-            UpperImageSource = new WriteableBitmap(upperSize.x, upperSize.y);
-            _upperStream = UpperImageSource.PixelBuffer.AsStream();
-        }
+        // Initialize hardware debug streams for upper and lower face tracking
+        InitializeHardwareDebugStream(UnifiedTracking.EyeImageData, ref _upperImageStream, ref _upperStream);
+        InitializeHardwareDebugStream(UnifiedTracking.LipImageData, ref _lowerImageStream, ref _lowerStream);
 
-        if (lowerSize is { x: > 0, y: > 0 })
-        {
-            LowerImageSource = new WriteableBitmap(lowerSize.x, lowerSize.y);
-            _lowerStream = LowerImageSource.PixelBuffer.AsStream();
-        }
-
         Loaded += OnPageLoaded;
         UnifiedTracking.OnUnifiedDataUpdated += _ => DispatcherQueue.TryEnqueue(OnTrackingDataUpdated);
         InitializeComponent();
     }
 
+    private void InitializeHardwareDebugStream(VrcftImage image, ref WriteableBitmap bitmap, ref Stream targetStream)
+    {
+        var imageSize = image.ImageSize;
+
+        if ( imageSize is { x: > 0, y: > 0 } )
+        {
+            bitmap = new WriteableBitmap(imageSize.x, imageSize.y);
+            targetStream = bitmap.PixelBuffer.AsStream();
+        }
+    }
+
     private async void OnTrackingDataUpdated()
     {
+        // Handle eye tracking
+
         var upperData = UnifiedTracking.EyeImageData.ImageData;
-        if (upperData != null && _upperStream.CanWrite)
+        if ( upperData != null )
         {
-            _upperStream.Position = 0;
-            await _upperStream.WriteAsync(upperData, 0, upperData.Length);
+            // Handle device connected
+            if ( _upperStream == null )
+            {
+                InitializeHardwareDebugStream(UnifiedTracking.EyeImageData, ref _upperImageStream, ref _upperStream);
+            }
+            // Handle device is valid and is providing data
+            if ( _upperStream.CanWrite )
+            {
+                _upperStream.Position = 0;
+                await _upperStream.WriteAsync(upperData, 0, upperData.Length);
 
-            UpperImageSource.Invalidate();
+                _upperImageStream.Invalidate();
+            }
         }
-
+        else
+        {
+            // Handle device getting unplugged / destroyed / disabled
+            // Device is connected
+            if ( _upperStream != null || _upperImageStream != null )
+            {
+                await _upperStream.DisposeAsync();
+                _upperImageStream = null;
+                _upperStream = null;
+            }
+        }
+
+        // Handle lip tracking
+
         var lowerData = UnifiedTracking.LipImageData.ImageData;
-        if (lowerData != null && _lowerStream.CanWrite)
+        if ( lowerData != null )
        {
-            _lowerStream.Position = 0;
-            await _lowerStream.WriteAsync(lowerData, 0, lowerData.Length);
+            // Handle device connected
+            if ( _lowerStream == null )
+            {
+                InitializeHardwareDebugStream(UnifiedTracking.LipImageData, ref _lowerImageStream, ref _lowerStream);
+            }
+            // Handle device is valid and is providing data
+            if ( _lowerStream.CanWrite )
+            {
+                _lowerStream.Position = 0;
+                await _lowerStream.WriteAsync(lowerData, 0, lowerData.Length);
 
-            LowerImageSource.Invalidate();
+                _lowerImageStream.Invalidate();
+            }
+        }
+        else
+        {
+            // Handle device getting unplugged / destroyed / disabled
+            // Device is connected
+            if ( _lowerStream != null || _lowerImageStream != null )
+            {
+                await _lowerStream.DisposeAsync();
+                _lowerImageStream = null;
+                _lowerStream = null;
+            }
        }
     }
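
For reference, the standalone C# sketch below distills the guard pattern the second commit applies to each debug stream: lazily (re)create the backing stream when image data first arrives, only write while the stream is writable, and dispose and null the pair when the data source disappears. This is an illustration, not code from the patch; the DebugImageStream class, the OnFrame entry point, and the use of MemoryStream in place of WriteableBitmap.PixelBuffer.AsStream() are assumptions made for the example.

// Illustrative sketch only -- names (DebugImageStream, OnFrame) are hypothetical,
// and MemoryStream stands in for the WriteableBitmap pixel-buffer stream used by
// the real SettingsPage.
using System;
using System.IO;

public sealed class DebugImageStream : IDisposable
{
    private MemoryStream _stream;   // null while no device is providing frames

    // Called once per tracking update; frame is null when the device is
    // unplugged, destroyed, or disabled.
    public void OnFrame(byte[] frame, int width, int height)
    {
        if (frame == null)
        {
            // Device went away: tear the stream down so a later frame re-creates it.
            _stream?.Dispose();
            _stream = null;
            return;
        }

        // Device (re)connected: lazily create the backing stream, analogous to
        // InitializeHardwareDebugStream in the patch.
        _stream ??= new MemoryStream(new byte[width * height * 4]);

        if (_stream.CanWrite)
        {
            _stream.Position = 0;
            _stream.Write(frame, 0, Math.Min(frame.Length, (int)_stream.Length));
            // The real page would invalidate its WriteableBitmap here so the UI repaints.
        }
    }

    public void Dispose() => _stream?.Dispose();
}

Keeping creation, writing, and teardown behind the same null checks is what lets the sketch (and, by the same logic, the patched page) tolerate a device that is absent at startup or that disappears mid-session, instead of dereferencing a stream that was never created.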