iOS NativeCamera session start/stop synchronization #1137

Merged
2 changes: 2 additions & 0 deletions Plugins/NativeCamera/Source/Apple/NativeCameraImpl.h
@@ -34,5 +34,7 @@ namespace Babylon::Plugins
         bool m_overrideCameraTexture{};

         CameraDimensions m_cameraDimensions{};
+
+        arcana::background_dispatcher<32> m_cameraSessionDispatcher{};
     };
 }
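
The new m_cameraSessionDispatcher member is the serialization point for the .mm changes below: Open() and Close() both queue their AVCaptureSession transitions on it, so the transitions run off the calling thread but in submission order. A minimal sketch of that pattern, assuming the arcana include paths used elsewhere in this repo; StartSession/StopSession are hypothetical stand-ins for [avCaptureSession startRunning] and [avCaptureSession stopRunning]:

    // Sketch only: work queued through one arcana::background_dispatcher is
    // executed sequentially on a background thread, so a later "stop" request
    // cannot overtake an earlier "start" request.
    #include <arcana/threading/dispatcher.h> // assumed include path
    #include <arcana/threading/task.h>       // assumed include path
    #include <cstdio>

    namespace
    {
        arcana::background_dispatcher<32> g_cameraSessionDispatcher{};

        // Hypothetical stand-ins for the AVCaptureSession start/stop calls.
        void StartSession() { std::puts("session started"); }
        void StopSession() { std::puts("session stopped"); }
    }

    void Open()
    {
        arcana::make_task(g_cameraSessionDispatcher, arcana::cancellation::none(), []()
        {
            StartSession();
        });
    }

    void Close()
    {
        arcana::make_task(g_cameraSessionDispatcher, arcana::cancellation::none(), []()
        {
            StopSession();
        });
    }

With both transitions funneled through the same dispatcher, a rapid open/close/open sequence can no longer stop a session that has just been restarted, which is the race the old ad-hoc dispatch_async calls allowed.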
162 changes: 95 additions & 67 deletions Plugins/NativeCamera/Source/Apple/NativeCameraImpl.mm
@@ -137,19 +137,6 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
     {
         ~ImplData()
         {
-            if (currentCommandBuffer != nil) {
-                [currentCommandBuffer waitUntilCompleted];
-            }
-
-            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-                [avCaptureSession stopRunning];
-            });
-
-            if (textureCache)
-            {
-                CVMetalTextureCacheFlush(textureCache, 0);
-                CFRelease(textureCache);
-            }
         }

         CameraTextureDelegate* cameraTextureDelegate{};
@@ -160,6 +147,8 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         id<MTLDevice> metalDevice{};
         id<MTLCommandQueue> commandQueue{};
         id<MTLCommandBuffer> currentCommandBuffer{};
+        bool isInitialized{false};
+        bool refreshBgfxHandle{true};
     };

     Camera::Impl::Impl(Napi::Env env, bool overrideCameraTexture)
         : m_deviceContext{nullptr}
@@ -175,8 +164,7 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],

     arcana::task<Camera::Impl::CameraDimensions, std::exception_ptr> Camera::Impl::Open(uint32_t maxWidth, uint32_t maxHeight, bool frontCamera)
     {
-        m_implData->commandQueue = (__bridge id<MTLCommandQueue>)bgfx::getInternalData()->commandQueue;
-        m_implData->metalDevice = (__bridge id<MTLDevice>)bgfx::getInternalData()->context;
+        NSError *error{nil};

         if (maxWidth == 0 || maxWidth > std::numeric_limits<int32_t>::max()) {
             maxWidth = std::numeric_limits<int32_t>::max();
@@ -185,18 +173,39 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
             maxHeight = std::numeric_limits<int32_t>::max();
         }

-        if (!m_deviceContext)
-        {
+        // This is the first time the camera has been opened, perform some one time setup.
+        if (!m_implData->isInitialized) {
+            m_implData->commandQueue = (__bridge id<MTLCommandQueue>)bgfx::getInternalData()->commandQueue;
+            m_implData->metalDevice = (__bridge id<MTLDevice>)bgfx::getInternalData()->context;
             m_deviceContext = &Graphics::DeviceContext::GetFromJavaScript(m_env);
-        }

-        __block arcana::task_completion_source<Camera::Impl::CameraDimensions, std::exception_ptr> taskCompletionSource{};
+            // Compile shaders used for converting camera output to RGBA.
+            id<MTLLibrary> lib = CompileShader(m_implData->metalDevice, shaderSource);
+            id<MTLFunction> vertexFunction = [lib newFunctionWithName:@"vertexShader"];
+            id<MTLFunction> fragmentFunction = [lib newFunctionWithName:@"fragmentShader"];

+            // Create a pipeline state for converting the camera output to RGBA.
+            MTLRenderPipelineDescriptor *pipelineStateDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
+            pipelineStateDescriptor.label = @"Native Camera YCbCr to RGBA Pipeline";
+            pipelineStateDescriptor.vertexFunction = vertexFunction;
+            pipelineStateDescriptor.fragmentFunction = fragmentFunction;
+            pipelineStateDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatRGBA8Unorm;
+            m_implData->cameraPipelineState = [m_implData->metalDevice newRenderPipelineStateWithDescriptor:pipelineStateDescriptor error:&error];

+            if (!m_implData->cameraPipelineState) {
+                return arcana::task_from_error<CameraDimensions>(std::make_exception_ptr(std::runtime_error{
+                    std::string("Failed to create camera pipeline state: ") + [error.localizedDescription cStringUsingEncoding:NSASCIIStringEncoding]}));
+            }

-        dispatch_async(dispatch_get_main_queue(), ^{
+            m_implData->isInitialized = true;
+        } else {
+            // Always refresh the bgfx handle to point to textureRGBA on re-open.
+            m_implData->refreshBgfxHandle = true;
+        }

+        // Construct the camera texture delegate, which is responsible for handling updates for device orientation and the capture session.
         CVMetalTextureCacheCreate(nullptr, nullptr, m_implData->metalDevice, nullptr, &m_implData->textureCache);
         m_implData->cameraTextureDelegate = [[CameraTextureDelegate alloc]init:m_implData->textureCache];
-        m_implData->avCaptureSession = [[AVCaptureSession alloc] init];
-        m_implData->textureRGBA = nil;

 #if (TARGET_OS_IPHONE)
         // Loop over all available camera configurations to find a config that most closely matches the constraints.
@@ -282,21 +291,16 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         // If no matching device, throw an error with the message "ConstraintError" which matches the behavior in the browser.
         if (bestDevice == nullptr)
         {
-            taskCompletionSource.complete(arcana::make_unexpected(
-                std::make_exception_ptr(std::runtime_error{"ConstraintError: Unable to match constraints to a supported camera configuration."})));
-            return;
+            return arcana::task_from_error<CameraDimensions>(std::make_exception_ptr(std::runtime_error{"ConstraintError: Unable to match constraints to a supported camera configuration."}));
         }

         // Lock camera device and set up camera format. If there a problem initialising the camera it will give an error.
-        NSError *error{nil};
         [bestDevice lockForConfiguration:&error];
         if (error != nil)
         {
-            taskCompletionSource.complete(arcana::make_unexpected(std::make_exception_ptr(std::runtime_error{"Failed to lock camera"})));
-            return;
+            return arcana::task_from_error<CameraDimensions>(std::make_exception_ptr(std::runtime_error{"Failed to lock camera"}));
         }

-        [m_implData->avCaptureSession setSessionPreset:AVCaptureSessionPresetInputPriority];
         [bestDevice setActiveFormat:bestFormat];
         AVCaptureDeviceInput *input{[AVCaptureDeviceInput deviceInputWithDevice:bestDevice error:&error]};
         [bestDevice unlockForConfiguration];
@@ -308,17 +312,14 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         UNUSED(maxWidth);
         UNUSED(maxHeight);
         UNUSED(frontCamera);
-        NSError *error{nil};
         AVCaptureDevice* captureDevice{[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]};
         AVCaptureDeviceInput *input{[AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error]};
         CMVideoFormatDescriptionRef videoFormatRef{static_cast<CMVideoFormatDescriptionRef>(captureDevice.activeFormat.formatDescription)};
         CMVideoDimensions dimensions{CMVideoFormatDescriptionGetDimensions(videoFormatRef)};
         uint32_t devicePixelFormat{static_cast<uint32_t>(CMFormatDescriptionGetMediaSubType(videoFormatRef))};
         if (!isPixelFormatSupported(devicePixelFormat))
         {
-            taskCompletionSource.complete(arcana::make_unexpected(
-                std::make_exception_ptr(std::runtime_error{"ConstraintError: Unable to match constraints to a supported camera configuration."})));
-            return;
+            return arcana::task_from_error<CameraDimensions>(std::make_exception_ptr(std::runtime_error{"ConstraintError: Unable to match constraints to a supported camera configuration."}));
         }
 #endif

@@ -334,49 +335,42 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         // Check for failed initialisation.
         if (!input)
         {
-            taskCompletionSource.complete(arcana::make_unexpected(std::make_exception_ptr(std::runtime_error{"Error Getting Camera Input"})));
-            return;
+            return arcana::task_from_error<CameraDimensions>(std::make_exception_ptr(std::runtime_error{"Error Getting Camera Input"}));
         }

+        // Kick off camera session on a background thread.
+        return arcana::make_task(m_cameraSessionDispatcher, arcana::cancellation::none(), [implObj = shared_from_this(), input, devicePixelFormat, cameraDimensions]() mutable {
+            if (implObj->m_implData->avCaptureSession == nil) {
+                implObj->m_implData->avCaptureSession = [[AVCaptureSession alloc] init];
+            } else {
+                for (AVCaptureInput* input in [implObj->m_implData->avCaptureSession inputs]) {
+                    [implObj->m_implData->avCaptureSession removeInput: input];
+                }
+
+                for (AVCaptureOutput* output in [implObj->m_implData->avCaptureSession outputs]) {
+                    [implObj->m_implData->avCaptureSession removeOutput: output];
+                }
+            }
+
+#if (TARGET_OS_IPHONE)
+            [implObj->m_implData->avCaptureSession setSessionPreset:AVCaptureSessionPresetInputPriority];
+#endif
+
             // Add camera input source to the capture session.
-            [m_implData->avCaptureSession addInput:input];
+            [implObj->m_implData->avCaptureSession addInput:input];

-            // Create the camera buffer.
+            // Create the camera buffer, and set up camera texture delegate to capture frames.
             dispatch_queue_t sampleBufferQueue{dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL)};
             AVCaptureVideoDataOutput* dataOutput{[[AVCaptureVideoDataOutput alloc] init]};
             [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
             [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(devicePixelFormat)}];
-            [dataOutput setSampleBufferDelegate:m_implData->cameraTextureDelegate queue:sampleBufferQueue];
+            [dataOutput setSampleBufferDelegate:implObj->m_implData->cameraTextureDelegate queue:sampleBufferQueue];
+            [implObj->m_implData->avCaptureSession addOutput:dataOutput];

             // Actually start the camera session.
-            [m_implData->avCaptureSession addOutput:dataOutput];
-            [m_implData->avCaptureSession commitConfiguration];
-            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-                [m_implData->avCaptureSession startRunning];
-            });
+            [implObj->m_implData->avCaptureSession startRunning];
+            return cameraDimensions;
+        });

-        // Create a pipeline state for converting the camera output to RGBA.
-        id<MTLLibrary> lib = CompileShader(m_implData->metalDevice, shaderSource);
-        id<MTLFunction> vertexFunction = [lib newFunctionWithName:@"vertexShader"];
-        id<MTLFunction> fragmentFunction = [lib newFunctionWithName:@"fragmentShader"];
-
-        MTLRenderPipelineDescriptor *pipelineStateDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
-        pipelineStateDescriptor.label = @"Native Camera YCbCr to RGBA Pipeline";
-        pipelineStateDescriptor.vertexFunction = vertexFunction;
-        pipelineStateDescriptor.fragmentFunction = fragmentFunction;
-        pipelineStateDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatRGBA8Unorm;
-        m_implData->cameraPipelineState = [m_implData->metalDevice newRenderPipelineStateWithDescriptor:pipelineStateDescriptor error:&error];
-
-        if (!m_implData->cameraPipelineState) {
-            taskCompletionSource.complete(arcana::make_unexpected(std::make_exception_ptr(std::runtime_error{
-                std::string("Failed to create camera pipeline state: ") + [error.localizedDescription cStringUsingEncoding:NSASCIIStringEncoding]})));
-            return;
-        }
-
-        taskCompletionSource.complete(cameraDimensions);
-    });
-
-    return taskCompletionSource.as_task();
     }

     void Camera::Impl::SetTextureOverride(void* /*texturePtr*/)
@@ -417,6 +411,11 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
             bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_implData->textureRGBA));
             m_cameraDimensions.width = static_cast<uint32_t>(width);
             m_cameraDimensions.height = static_cast<uint32_t>(height);
+            m_implData->refreshBgfxHandle = false;
+        } else if (m_implData->refreshBgfxHandle) {
+            // On texture re-use across sessions set the bgfx texture handle.
+            bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_implData->textureRGBA));
+            m_implData->refreshBgfxHandle = false;
         }

         if (textureY != nil && textureCbCr != nil && m_implData->textureRGBA != nil)
@@ -469,9 +468,28 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],

     void Camera::Impl::Close()
     {
+        // Stop collecting frames, release camera texture delegate.
         [m_implData->cameraTextureDelegate reset];
-        m_implData.reset();
-        m_implData = std::make_unique<ImplData>();
+        m_implData->cameraTextureDelegate = nil;
+
+        // Complete any running command buffers before destroying the cache.
+        if (m_implData->currentCommandBuffer != nil) {
+            [m_implData->currentCommandBuffer waitUntilCompleted];
+        }
+
+        // Free the texture cache.
+        if (m_implData->textureCache)
+        {
+            CVMetalTextureCacheFlush(m_implData->textureCache, 0);
+            CFRelease(m_implData->textureCache);
+            m_implData->textureCache = nil;
+        }
+
+        if (m_implData->avCaptureSession != nil) {
+            arcana::make_task(m_cameraSessionDispatcher, arcana::cancellation::none(), [implObj = shared_from_this()](){
+                [implObj->m_implData->avCaptureSession stopRunning];
+            });
+        }
     }
 }

@@ -523,6 +541,7 @@ - (id)init:(CVMetalTextureCacheRef)textureCache

 - (void) reset {
     @synchronized (self) {
+        [self cleanupTextures];
         self->textureCache = nil;
     }
 }
@@ -595,6 +614,11 @@ - (void)captureOutput:(AVCaptureOutput *)__unused captureOutput didOutputSampleB
     CVMetalTextureRef textureCbCr = [self getCameraTexture:pixelBuffer plane:1];

     @synchronized(self) {
+        // It's possible that the texture cache has been invalidated, in which case we should skip assignment.
+        if (self->textureCache == nil) {
+            return;
+        }
+
         [self cleanupTextures];
         cameraTextureY = textureY;
         cameraTextureCbCr = textureCbCr;
@@ -645,7 +669,11 @@ -(void)cleanupTextures {
 }

 -(void)dealloc {
-    [self cleanupTextures];
+#if (TARGET_OS_IPHONE)
+    [[NSNotificationCenter defaultCenter]removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
+#endif
+
+    [self reset];
 }

 @end
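
Since Close() now keeps ImplData alive instead of recreating it, the RGBA output texture can survive across camera sessions, and the new refreshBgfxHandle flag tells UpdateCameraTexture to point the bgfx handle back at that surviving texture exactly once after each re-open. A rough sketch of that decision, with the Metal texture reduced to a plain pointer; bgfx::overrideInternal is the same call used in the hunk above, everything else is a simplified stand-in:

    #include <bgfx/bgfx.h>

    struct CameraState
    {
        void* textureRGBA{nullptr};    // stands in for the id<MTLTexture> held by ImplData
        bool refreshBgfxHandle{true};  // set back to true each time the camera is re-opened
    };

    void BindCameraTexture(CameraState& state, bgfx::TextureHandle handle, void* latestTexture)
    {
        if (state.textureRGBA != latestTexture)
        {
            // A new RGBA texture was created: adopt it and point the bgfx handle at it.
            state.textureRGBA = latestTexture;
            bgfx::overrideInternal(handle, reinterpret_cast<uintptr_t>(state.textureRGBA));
            state.refreshBgfxHandle = false;
        }
        else if (state.refreshBgfxHandle)
        {
            // Same texture re-used across sessions: rebind the handle once, then stop.
            bgfx::overrideInternal(handle, reinterpret_cast<uintptr_t>(state.textureRGBA));
            state.refreshBgfxHandle = false;
        }
    }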
17 changes: 14 additions & 3 deletions Plugins/NativeEngine/Source/NativeEngine.cpp
@@ -1409,16 +1409,27 @@ namespace Babylon
                     }

                     return textureBuffer;
-                }).then(m_runtimeScheduler, *m_cancellationSource, [bufferRef{Napi::Persistent(buffer)}, bufferOffset, deferred](std::vector<uint8_t> textureBuffer) {
+                }).then(m_runtimeScheduler, *m_cancellationSource, [this, bufferRef{Napi::Persistent(buffer)}, bufferOffset, deferred, tempTexture, sourceTextureHandle](std::vector<uint8_t> textureBuffer) mutable {
                     // Double check the destination buffer length. This is redundant with prior checks, but we'll be extra sure before the memcpy.
                     assert(bufferRef.Value().ByteLength() - bufferOffset >= textureBuffer.size());

                     // Copy the pixel data into the JS ArrayBuffer.
                     uint8_t* buffer{static_cast<uint8_t*>(bufferRef.Value().Data())};
                     std::memcpy(buffer + bufferOffset, textureBuffer.data(), textureBuffer.size());

+                    // Dispose of the texture handle before resolving the promise.
+                    // TODO: Handle properly handle stale handles after BGFX shutdown
+                    if (tempTexture && !m_cancellationSource->cancelled())
+                    {
+                        bgfx::destroy(sourceTextureHandle);
+                        tempTexture = false;
+                    }
+
                     deferred.Resolve(bufferRef.Value());
-                }).then(m_runtimeScheduler, arcana::cancellation::none(), [env, deferred, tempTexture, sourceTextureHandle](const arcana::expected<void, std::exception_ptr>& result) {
-                    if (tempTexture)
+                }).then(m_runtimeScheduler, arcana::cancellation::none(), [this, env, deferred, tempTexture, sourceTextureHandle](const arcana::expected<void, std::exception_ptr>& result) {
+                    // Dispose of the texture handle if not yet disposed.
+                    // TODO: Handle properly handle stale handles after BGFX shutdown
+                    if (tempTexture && !m_cancellationSource->cancelled())
                     {
                         bgfx::destroy(sourceTextureHandle);
                     }
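
On the NativeEngine side, the temporary readback texture is now destroyed in the success continuation before the promise resolves, and the trailing continuation only destroys it if that has not already happened; both paths skip the destroy once the cancellation source has fired. A minimal sketch of that guarded dispose, assuming only the bgfx calls shown in the diff; the bool flag plays the role of tempTexture (the PR captures it per lambda rather than by reference):

    #include <bgfx/bgfx.h>

    // Destroy the temporary readback texture at most once, and not at all after
    // cancellation (e.g. around engine shutdown).
    void DisposeTempTextureOnce(bool& tempTexture, bgfx::TextureHandle handle, bool cancelled)
    {
        if (tempTexture && !cancelled)
        {
            bgfx::destroy(handle);
            tempTexture = false;
        }
    }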