Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix incomplete code for handling debug camera streams #181

Merged
merged 2 commits into from
Feb 12, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
107 changes: 73 additions & 34 deletions VRCFaceTracking/Views/SettingsPage.xaml.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
using VRCFaceTracking.ViewModels;
using Windows.System;
using Microsoft.UI.Xaml.Media.Imaging;
using VrcftImage = VRCFaceTracking.Core.Types.Image;

namespace VRCFaceTracking.Views;

Expand Down Expand Up @@ -38,18 +39,12 @@ public RiskySettingsViewModel RiskySettingsViewModel
{
get;
}

// Bitmap sources bound by the XAML debug views. Expression-bodied so the UI
// always sees the current (possibly re-created) bitmap after reconnects.
public WriteableBitmap UpperImageSource => _upperImageStream;
public WriteableBitmap LowerImageSource => _lowerImageStream;

// Backing bitmaps and their pixel-buffer streams. Null until the device
// reports a validly-sized image; reset to null when the device disconnects.
private WriteableBitmap _upperImageStream, _lowerImageStream;
private Stream _upperStream, _lowerStream;

public SettingsPage()
{
Expand All @@ -58,45 +53,89 @@ public SettingsPage()
CalibrationSettings = App.GetService<UnifiedTrackingMutator>();
RiskySettingsViewModel = App.GetService<RiskySettingsViewModel>();

var upperSize = UnifiedTracking.EyeImageData.ImageSize;
var lowerSize = UnifiedTracking.LipImageData.ImageSize;

if (upperSize is { x: > 0, y: > 0 })
{
UpperImageSource = new WriteableBitmap(upperSize.x, upperSize.y);
_upperStream = UpperImageSource.PixelBuffer.AsStream();
}
// Initialize hardware debug streams for upper and lower face tracking
InitializeHardwareDebugStream(UnifiedTracking.EyeImageData, ref _upperImageStream, ref _upperStream);
InitializeHardwareDebugStream(UnifiedTracking.LipImageData, ref _lowerImageStream, ref _lowerStream);

if (upperSize is { x: > 0, y: > 0 })
{
LowerImageSource = new WriteableBitmap(lowerSize.x, lowerSize.y);
_lowerStream = LowerImageSource.PixelBuffer.AsStream();
}

Loaded += OnPageLoaded;

UnifiedTracking.OnUnifiedDataUpdated += _ => DispatcherQueue.TryEnqueue(OnTrackingDataUpdated);
InitializeComponent();
}

// Creates a WriteableBitmap sized to the given hardware image and opens a
// writable stream over its pixel buffer. When the image reports no valid
// size (a dimension <= 0), both out-parameters are left untouched.
private void InitializeHardwareDebugStream(VrcftImage image, ref WriteableBitmap bitmap, ref Stream targetStream)
{
    var size = image.ImageSize;

    // Guard clause: nothing to initialize until the device reports a real size.
    if (size is not { x: > 0, y: > 0 })
    {
        return;
    }

    bitmap = new WriteableBitmap(size.x, size.y);
    targetStream = bitmap.PixelBuffer.AsStream();
}

// Pushes the latest eye (upper) and lip (lower) debug camera frames into
// their WriteableBitmaps, creating the bitmaps lazily when a device starts
// providing data and tearing them down when it stops.
// NOTE: async void is acceptable here only because this is an event-callback
// entry point, marshalled onto the UI thread via DispatcherQueue.TryEnqueue.
private async void OnTrackingDataUpdated()
{
    // ---- Eye (upper) debug image ----
    var upperData = UnifiedTracking.EyeImageData.ImageData;
    if (upperData != null)
    {
        // Device (re)connected: lazily create the bitmap/stream on first frame.
        if (_upperStream == null)
        {
            InitializeHardwareDebugStream(UnifiedTracking.EyeImageData, ref _upperImageStream, ref _upperStream);
        }

        // Null check guards against a failed init (zero-sized image) —
        // otherwise _upperStream.CanWrite would throw NullReferenceException.
        if (_upperStream != null && _upperStream.CanWrite)
        {
            _upperStream.Position = 0;
            await _upperStream.WriteAsync(upperData, 0, upperData.Length);

            _upperImageStream.Invalidate();
        }
    }
    else
    {
        // Device got unplugged / destroyed / disabled: release the stream and
        // bitmap. Dispose only what exists — the stream may be null even when
        // the bitmap is not (or vice versa).
        if (_upperStream != null)
        {
            await _upperStream.DisposeAsync();
        }
        _upperImageStream = null;
        _upperStream = null;
    }

    // ---- Lip (lower) debug image ----
    var lowerData = UnifiedTracking.LipImageData.ImageData;
    if (lowerData != null)
    {
        // Device (re)connected: lazily create the bitmap/stream on first frame.
        if (_lowerStream == null)
        {
            InitializeHardwareDebugStream(UnifiedTracking.LipImageData, ref _lowerImageStream, ref _lowerStream);
        }

        // Same null guard as above for a failed initialization.
        if (_lowerStream != null && _lowerStream.CanWrite)
        {
            _lowerStream.Position = 0;
            await _lowerStream.WriteAsync(lowerData, 0, lowerData.Length);

            _lowerImageStream.Invalidate();
        }
    }
    else
    {
        // Device got unplugged / destroyed / disabled: release the stream and
        // bitmap, disposing only what actually exists.
        if (_lowerStream != null)
        {
            await _lowerStream.DisposeAsync();
        }
        _lowerImageStream = null;
        _lowerStream = null;
    }
}

Expand Down