camera: Reenabled macOS/iOS support, with rewritten CoreMedia implementation.

main
Ryan C. Gordon 2024-02-06 01:19:12 -05:00
parent f8fa08d2b1
commit 99d1337de2
8 changed files with 344 additions and 456 deletions

View File

@ -66,8 +66,8 @@ typedef struct SDL_CameraSpec
Uint32 format; /**< Frame pixel format (SDL_PixelFormatEnum) */
int width; /**< Frame width */
int height; /**< Frame height */
int interval_numerator; /**< Frame rate numerator ((den / num) == fps) */
int interval_denominator; /**< Frame rate denominator ((den / num) == fps) */
int interval_numerator; /**< Frame rate numerator ((den / num) == fps, (num / den) == duration) */
int interval_denominator; /**< Frame rate denominator ((den / num) == fps, (num / den) == duration) */
} SDL_CameraSpec;
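(For reference, a minimal sketch of the interval arithmetic described in the comments above; the values are hypothetical.)
// Hypothetical: a 30fps camera reports its frame interval as 1/30.
SDL_CameraSpec spec;
spec.interval_numerator = 1;
spec.interval_denominator = 30;
const double fps = (double) spec.interval_denominator / (double) spec.interval_numerator;            // 30.0
const double duration = (double) spec.interval_numerator / (double) spec.interval_denominator;       // ~0.033 seconds per frame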
/**

View File

@ -246,8 +246,6 @@
#cmakedefine USE_POSIX_SPAWN @USE_POSIX_SPAWN@
#cmakedefine HAVE_COREMEDIA
/* SDL internal assertion support */
#if @SDL_DEFAULT_ASSERT_LEVEL_CONFIGURED@
#cmakedefine SDL_DEFAULT_ASSERT_LEVEL @SDL_DEFAULT_ASSERT_LEVEL@

View File

@ -198,8 +198,6 @@
#define SDL_VIDEO_METAL 1
#endif
#define HAVE_COREMEDIA 1
/* Enable system power support */
#define SDL_POWER_UIKIT 1
@ -213,6 +211,10 @@
#define SDL_FILESYSTEM_COCOA 1
/* enable camera support */
#ifndef SDL_PLATFORM_TVOS
#define SDL_CAMERA_DRIVER_COREMEDIA 1
#endif
#define SDL_CAMERA_DRIVER_DUMMY 1
#endif /* SDL_build_config_ios_h_ */

View File

@ -261,8 +261,6 @@
#endif
#endif
#define HAVE_COREMEDIA 1
/* Enable system power support */
#define SDL_POWER_MACOSX 1

View File

@ -73,6 +73,12 @@ const char *SDL_GetCurrentCameraDriver(void)
return camera_driver.name;
}
char *SDL_GetCameraThreadName(SDL_CameraDevice *device, char *buf, size_t buflen)
{
(void)SDL_snprintf(buf, buflen, "SDLCamera%d", (int) device->instance_id);
return buf;
}
int SDL_AddCameraFormat(CameraFormatAddData *data, Uint32 fmt, int w, int h, int interval_numerator, int interval_denominator)
{
SDL_assert(data != NULL);
@ -683,7 +689,7 @@ SDL_bool SDL_CameraThreadIterate(SDL_CameraDevice *device)
failed = SDL_TRUE;
}
// we can let go of the lock once we've tried to grab a frame of video and maybe moved the output frame from the empty to the filled list.
// we can let go of the lock once we've tried to grab a frame of video and maybe moved the output frame off the empty list.
// this lets us chew up the CPU for conversion and scaling without blocking other threads.
SDL_UnlockMutex(device->lock);
@ -988,7 +994,7 @@ SDL_Camera *SDL_OpenCameraDevice(SDL_CameraDeviceID instance_id, const SDL_Camer
// Start the camera thread if necessary
if (!camera_driver.impl.ProvidesOwnCallbackThread) {
char threadname[64];
SDL_snprintf(threadname, sizeof (threadname), "SDLCamera%d", (int) instance_id);
SDL_GetCameraThreadName(device, threadname, sizeof (threadname));
device->thread = SDL_CreateThreadInternal(CameraThread, threadname, 0, device);
if (!device->thread) {
ClosePhysicalCameraDevice(device);

View File

@ -28,10 +28,7 @@
#define DEBUG_CAMERA 0
// !!! FIXME: update these drivers!
#ifdef SDL_CAMERA_DRIVER_COREMEDIA
#undef SDL_CAMERA_DRIVER_COREMEDIA
#endif
// !!! FIXME: update this driver!
#ifdef SDL_CAMERA_DRIVER_ANDROID
#undef SDL_CAMERA_DRIVER_ANDROID
#endif
@ -53,6 +50,9 @@ extern SDL_CameraDevice *SDL_FindPhysicalCameraDeviceByCallback(SDL_bool (*callb
// Backends should call this when the user has approved/denied access to a camera.
extern void SDL_CameraDevicePermissionOutcome(SDL_CameraDevice *device, SDL_bool approved);
// Backends can call this to get a standardized name for a thread to power a specific camera device.
extern char *SDL_GetCameraThreadName(SDL_CameraDevice *device, char *buf, size_t buflen);
// These functions are the heart of the camera threads. Backends can call them directly if they aren't using the SDL-provided thread.
extern void SDL_CameraThreadSetup(SDL_CameraDevice *device);
extern SDL_bool SDL_CameraThreadIterate(SDL_CameraDevice *device);
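(A rough sketch, not part of this patch: a backend that owns its capture callback drives these entry points itself instead of letting SDL spawn a thread. The CoreMedia delegate later in this commit does exactly this with SDL_CameraThreadIterate.)
// Hypothetical backend callback: pump one iteration per captured frame.
static void MyBackend_OnFrame(SDL_CameraDevice *device)
{
    SDL_CameraThreadIterate(device);  // converts/queues the frame under the device lock.
}
Backends that do this set ProvidesOwnCallbackThread, so SDL_OpenCameraDevice skips creating an SDL camera thread for them.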

View File

@ -26,17 +26,6 @@
#include "../SDL_camera_c.h"
#include "../../thread/SDL_systhread.h"
#if defined(HAVE_COREMEDIA) && defined(SDL_PLATFORM_MACOS) && (__MAC_OS_X_VERSION_MAX_ALLOWED < 101500)
// AVCaptureDeviceTypeBuiltInWideAngleCamera requires macOS SDK 10.15
#undef HAVE_COREMEDIA
#endif
#ifdef SDL_PLATFORM_TVOS
#undef HAVE_COREMEDIA
#endif
#ifdef HAVE_COREMEDIA
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
@ -50,537 +39,434 @@
* MACOSX:
* Add to the Code Sign Entitlement file:
* <key>com.apple.security.device.camera</key> <true/>
*
*
* IOS:
*
* - Need to link with: CoreMedia and CoreVideo
* - Add #define SDL_CAMERA 1
* to SDL_build_config_ios.h
*/
@class MySampleBufferDelegate;
struct SDL_PrivateCameraData
static Uint32 CoreMediaFormatToSDL(FourCharCode fmt)
{
dispatch_queue_t queue;
MySampleBufferDelegate *delegate;
AVCaptureSession *session;
CMSimpleQueueRef frame_queue;
};
static NSString *fourcc_to_nstring(Uint32 code)
{
Uint8 buf[4];
*(Uint32 *)buf = code;
return [NSString stringWithFormat:@"%c%c%c%c", buf[3], buf[2], buf[1], buf[0]];
}
static NSArray<AVCaptureDevice *> *DiscoverCameraDevices()
{
NSArray *deviceType = @[AVCaptureDeviceTypeBuiltInWideAngleCamera];
AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:deviceType
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
if ([devices count] > 0) {
return devices;
} else {
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (captureDevice == nil) {
return devices;
} else {
NSArray<AVCaptureDevice *> *default_device = @[ captureDevice ];
return default_device;
}
switch (fmt) {
#define CASE(x, y) case x: return y
// the 16LE ones should use 16BE if we're on a big-endian system like PowerPC,
// but at present there is no big-endian Apple platform that has CoreMedia.
CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_RGB555);
CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551);
CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565);
CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24);
CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32);
CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32);
CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_YUY2);
CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_UYVY);
#undef CASE
default:
#if DEBUG_CAMERA
SDL_Log("CAMERA: Unknown format FourCharCode '%d'", (int) fmt);
#endif
break;
}
return devices;
}
static AVCaptureDevice *GetCameraDeviceByName(const char *dev_name)
{
NSArray<AVCaptureDevice *> *devices = DiscoverCameraDevices();
for (AVCaptureDevice *device in devices) {
char buf[1024];
NSString *cameraID = [device localizedName];
const char *str = [cameraID UTF8String];
SDL_snprintf(buf, sizeof (buf) - 1, "%s", str);
if (SDL_strcmp(buf, dev_name) == 0) {
return device;
}
}
return nil;
}
static Uint32 nsfourcc_to_sdlformat(NSString *nsfourcc)
{
const char *str = [nsfourcc UTF8String];
/* FIXME
* on IOS this mode gives 2 planes, and it's NV12
* on macos, 1 plane/ YVYU
*/
#ifdef SDL_PLATFORM_MACOS
if (SDL_strcmp("420v", str) == 0) return SDL_PIXELFORMAT_YVYU;
#else
if (SDL_strcmp("420v", str) == 0) return SDL_PIXELFORMAT_NV12;
#endif
if (SDL_strcmp("yuvs", str) == 0) return SDL_PIXELFORMAT_UYVY;
if (SDL_strcmp("420f", str) == 0) return SDL_PIXELFORMAT_UNKNOWN;
#if DEBUG_CAMERA
SDL_Log("CAMERA: Unknown format '%s'", str);
#endif
return SDL_PIXELFORMAT_UNKNOWN;
}
static NSString *sdlformat_to_nsfourcc(Uint32 fmt)
{
const char *str = "";
NSString *result;
@class SDLCaptureVideoDataOutputSampleBufferDelegate;
#ifdef SDL_PLATFORM_MACOS
if (fmt == SDL_PIXELFORMAT_YVYU) str = "420v";
#else
if (fmt == SDL_PIXELFORMAT_NV12) str = "420v";
#endif
if (fmt == SDL_PIXELFORMAT_UYVY) str = "yuvs";
return [[NSString alloc] initWithUTF8String: str];
}
@interface MySampleBufferDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
@property struct SDL_PrivateCameraData *hidden;
- (void) set: (struct SDL_PrivateCameraData *) val;
// just a simple wrapper to help ARC manage memory...
@interface SDLPrivateCameraData : NSObject
@property(nonatomic, retain) AVCaptureSession *session;
@property(nonatomic, retain) SDLCaptureVideoDataOutputSampleBufferDelegate *delegate;
@property(nonatomic, assign) CMSampleBufferRef current_sample;
@end
@implementation MySampleBufferDelegate
- (void) set: (struct SDL_PrivateCameraData *) val {
_hidden = val;
}
- (void) captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *) connection {
CFRetain(sampleBuffer);
CMSimpleQueueEnqueue(_hidden->frame_queue, sampleBuffer);
}
- (void)captureOutput:(AVCaptureOutput *)output
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
#if DEBUG_CAMERA
SDL_Log("CAMERA: Drop frame..");
#endif
}
@implementation SDLPrivateCameraData
@end
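(For clarity, a sketch of the ARC bridging pattern this wrapper enables; the same CFBridgingRetain/CFBridgingRelease calls appear verbatim in OpenDevice and CloseDevice below.)
// Hand an ARC-managed object to the C side as a retained, opaque pointer...
SDLPrivateCameraData *hidden = [[SDLPrivateCameraData alloc] init];
device->hidden = (struct SDL_PrivateCameraData *) CFBridgingRetain(hidden);
// ...and transfer ownership back to ARC when closing, letting it release the object.
SDLPrivateCameraData *reclaimed = (SDLPrivateCameraData *) CFBridgingRelease(device->hidden);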
static int COREMEDIA_OpenDevice(SDL_CameraDevice *_this)
static SDL_bool CheckCameraPermissions(SDL_CameraDevice *device)
{
_this->hidden = (struct SDL_PrivateCameraData *) SDL_calloc(1, sizeof (struct SDL_PrivateCameraData));
if (_this->hidden == NULL) {
return -1;
if (device->permission == 0) { // still expecting a permission result.
if (@available(macOS 14, *)) {
const AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status != AVAuthorizationStatusNotDetermined) { // NotDetermined == still waiting for an answer from the user.
SDL_CameraDevicePermissionOutcome(device, (status == AVAuthorizationStatusAuthorized) ? SDL_TRUE : SDL_FALSE);
}
} else {
SDL_CameraDevicePermissionOutcome(device, SDL_TRUE); // always allowed (or just unqueryable...?) on older macOS.
}
}
return 0;
return (device->permission > 0);
}
static void COREMEDIA_CloseDevice(SDL_CameraDevice *_this)
// this delegate just receives new video frames on a Grand Central Dispatch queue, and fires off the
// main device thread iterate function directly to consume it.
@interface SDLCaptureVideoDataOutputSampleBufferDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
@property SDL_CameraDevice *device;
-(id) init:(SDL_CameraDevice *) dev;
-(void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
@end
@implementation SDLCaptureVideoDataOutputSampleBufferDelegate
-(id) init:(SDL_CameraDevice *) dev {
if ( self = [super init] ) {
_device = dev;
}
return self;
}
- (void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
SDL_CameraDevice *device = self.device;
if (!device || !device->hidden) {
return; // oh well.
}
if (!CheckCameraPermissions(device)) {
return; // nothing to do right now, dump what is probably a completely black frame.
}
SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
hidden.current_sample = sampleBuffer;
SDL_CameraThreadIterate(device);
hidden.current_sample = NULL;
}
- (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
#if DEBUG_CAMERA
SDL_Log("CAMERA: Drop frame.");
#endif
}
@end
static int COREMEDIA_WaitDevice(SDL_CameraDevice *device)
{
if (!_this) {
return;
}
if (_this->hidden) {
AVCaptureSession *session = _this->hidden->session;
if (session) {
AVCaptureInput *input;
AVCaptureVideoDataOutput *output;
input = [session.inputs objectAtIndex:0];
[session removeInput:input];
output = (AVCaptureVideoDataOutput*)[session.outputs objectAtIndex:0];
[session removeOutput:output];
// TODO more cleanup ?
}
if (_this->hidden->frame_queue) {
CFRelease(_this->hidden->frame_queue);
}
SDL_free(_this->hidden);
_this->hidden = NULL;
}
return 0; // this isn't used at the moment, since we run our own thread via Grand Central Dispatch.
}
static int COREMEDIA_InitDevice(SDL_CameraDevice *_this)
static int COREMEDIA_AcquireFrame(SDL_CameraDevice *device, SDL_Surface *frame, Uint64 *timestampNS)
{
// !!! FIXME: autorelease pool?
NSString *fmt = sdlformat_to_nsfourcc(_this->spec.format);
int w = _this->spec.width;
int h = _this->spec.height;
int retval = 1;
SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
CMSampleBufferRef sample_buffer = hidden.current_sample;
hidden.current_sample = NULL;
SDL_assert(sample_buffer != NULL); // should only have been called from our delegate with a new frame.
NSError *error = nil;
AVCaptureDevice *device = nil;
AVCaptureDeviceInput *input = nil;
AVCaptureVideoDataOutput *output = nil;
AVCaptureDeviceFormat *spec_format = nil;
#ifdef SDL_PLATFORM_MACOS
if (@available(macOS 10.15, *)) {
// good.
CMSampleTimingInfo timinginfo;
if (CMSampleBufferGetSampleTimingInfo(sample_buffer, 0, &timinginfo) == noErr) {
*timestampNS = (Uint64) (CMTimeGetSeconds(timinginfo.presentationTimeStamp) * ((Float64) SDL_NS_PER_SECOND));
} else {
return -1;
}
#endif
device = GetCameraDeviceByName(_this->dev_name);
if (!device) {
goto error;
SDL_assert(!"this shouldn't happen, I think.");
*timestampNS = 0;
}
_this->hidden->session = [[AVCaptureSession alloc] init];
if (_this->hidden->session == nil) {
goto error;
CVImageBufferRef image = CMSampleBufferGetImageBuffer(sample_buffer); // does not retain `image` (and we don't want it to).
const int numPlanes = (int) CVPixelBufferGetPlaneCount(image);
const int planar = (int) CVPixelBufferIsPlanar(image);
#if DEBUG_CAMERA
const int w = (int) CVPixelBufferGetWidth(image);
const int h = (int) CVPixelBufferGetHeight(image);
const int sz = (int) CVPixelBufferGetDataSize(image);
const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
SDL_Log("CAMERA: buffer planar=%d numPlanes=%d %d x %d sz=%d pitch=%d", planar, numPlanes, w, h, sz, pitch);
#endif
// !!! FIXME: this currently copies the data to the surface (see the FIXME about non-contiguous planar surfaces below), but in theory we could just keep this locked until ReleaseFrame...
CVPixelBufferLockBaseAddress(image, 0);
if ((planar == 0) && (numPlanes == 0)) {
const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
const size_t buflen = pitch * frame->h;
frame->pixels = SDL_aligned_alloc(SDL_SIMDGetAlignment(), buflen);
if (frame->pixels == NULL) {
retval = -1;
} else {
frame->pitch = pitch;
SDL_memcpy(frame->pixels, CVPixelBufferGetBaseAddress(image), buflen);
}
} else {
// !!! FIXME: we have an open issue in SDL3 to allow SDL_Surface to support non-contiguous planar data, but we don't have it yet.
size_t buflen = 0;
for (int i = 0; (i < numPlanes) && (i < 3); i++) {
buflen += CVPixelBufferGetBytesPerRowOfPlane(image, i);
}
buflen *= frame->h;
frame->pixels = SDL_aligned_alloc(SDL_SIMDGetAlignment(), buflen);
if (frame->pixels == NULL) {
retval = -1;
} else {
Uint8 *dst = frame->pixels;
frame->pitch = (int) CVPixelBufferGetBytesPerRowOfPlane(image, 0); // this is what SDL3 currently expects, probably incorrectly.
for (int i = 0; (i < numPlanes) && (i < 3); i++) {
const void *src = CVPixelBufferGetBaseAddressOfPlane(image, i);
const size_t pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i);
SDL_memcpy(dst, src, pitch * frame->h);
dst += pitch * frame->h;
}
}
}
[_this->hidden->session setSessionPreset:AVCaptureSessionPresetHigh];
CVPixelBufferUnlockBaseAddress(image, 0);
return retval;
}
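(A worked example of the planar copy path above, using hypothetical sizes for a 640x480 two-plane sample, e.g. an NV12-style buffer.)
const int h = 480;
const size_t pitch0 = 640;  // plane 0 (Y), bytes per row
const size_t pitch1 = 640;  // plane 1 (interleaved CbCr), bytes per row
const size_t buflen = (pitch0 + pitch1) * h;  // 614400 bytes
// Note this over-sizes the buffer when plane 1 only has h/2 rows, as in
// NV12; the FIXMEs above acknowledge this packing is a stopgap until
// SDL_Surface can represent non-contiguous planes.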
static void COREMEDIA_ReleaseFrame(SDL_CameraDevice *device, SDL_Surface *frame)
{
// AcquireFrame copied the sample into an aligned allocation (see the FIXME there), so just free our copy here.
SDL_aligned_free(frame->pixels);
}
static void COREMEDIA_CloseDevice(SDL_CameraDevice *device)
{
if (device && device->hidden) {
SDLPrivateCameraData *hidden = (SDLPrivateCameraData *) CFBridgingRelease(device->hidden);
device->hidden = NULL;
AVCaptureSession *session = hidden.session;
if (session) {
hidden.session = nil;
[session stopRunning];
[session removeInput:[session.inputs objectAtIndex:0]];
[session removeOutput:(AVCaptureVideoDataOutput*)[session.outputs objectAtIndex:0]];
session = nil;
}
hidden.delegate = NULL;
hidden.current_sample = NULL;
}
}
static int COREMEDIA_OpenDevice(SDL_CameraDevice *device, const SDL_CameraSpec *spec)
{
AVCaptureDevice *avdevice = (__bridge AVCaptureDevice *) device->handle;
// Pick format that matches the spec
NSArray<AVCaptureDeviceFormat *> *formats = [device formats];
const Uint32 sdlfmt = spec->format;
const int w = spec->width;
const int h = spec->height;
const int rate = spec->interval_denominator;
AVCaptureDeviceFormat *spec_format = nil;
NSArray<AVCaptureDeviceFormat *> *formats = [avdevice formats];
for (AVCaptureDeviceFormat *format in formats) {
CMFormatDescriptionRef formatDescription = [format formatDescription];
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription);
NSString *str = fourcc_to_nstring(mediaSubType);
if ([str isEqualToString:fmt]) {
CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription);
if (dim.width == w && dim.height == h) {
spec_format = format;
break;
}
if (CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription)) != sdlfmt) {
continue;
}
const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription);
if ( ((int) dim.width != w) || (((int) dim.height) != h) ) {
continue;
}
for (AVFrameRateRange *framerate in format.videoSupportedFrameRateRanges) {
if ((rate == (int) SDL_ceil((double) framerate.minFrameRate)) || (rate == (int) SDL_floor((double) framerate.maxFrameRate))) {
spec_format = format;
break;
}
}
if (spec_format != nil) {
break;
}
}
if (spec_format == nil) {
return SDL_SetError("format not found");
}
// Set format
if ([device lockForConfiguration:NULL] == YES) {
device.activeFormat = spec_format;
[device unlockForConfiguration];
} else {
return SDL_SetError("camera spec format not available");
} else if (![avdevice lockForConfiguration:NULL]) {
return SDL_SetError("Cannot lockForConfiguration");
}
// Input
input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
avdevice.activeFormat = spec_format;
[avdevice unlockForConfiguration];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if (session == nil) {
return SDL_SetError("Failed to allocate/init AVCaptureSession");
}
session.sessionPreset = AVCaptureSessionPresetHigh;
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:avdevice error:&error];
if (!input) {
return SDL_SetError("Cannot create AVCaptureDeviceInput");
}
// Output
output = [[AVCaptureVideoDataOutput alloc] init];
#ifdef SDL_PLATFORM_MACOS
// FIXME: this now fails on iOS ... but not setting anything works...
// Specify the pixel format
output.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_422YpCbCr8]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
#endif
_this->hidden->delegate = [[MySampleBufferDelegate alloc] init];
[_this->hidden->delegate set:_this->hidden];
CMSimpleQueueCreate(kCFAllocatorDefault, 30 /* buffers */, &_this->hidden->frame_queue);
if (_this->hidden->frame_queue == nil) {
return SDL_SetError("CMSimpleQueueCreate() failed");
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
if (!output) {
return SDL_SetError("Cannot create AVCaptureVideoDataOutput");
}
_this->hidden->queue = dispatch_queue_create("my_queue", NULL);
[output setSampleBufferDelegate:_this->hidden->delegate queue:_this->hidden->queue];
char threadname[64];
SDL_GetCameraThreadName(device, threadname, sizeof (threadname));
dispatch_queue_t queue = dispatch_queue_create(threadname, NULL);
//dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
if (!queue) {
return SDL_SetError("dispatch_queue_create() failed");
}
if ([_this->hidden->session canAddInput:input] ){
[_this->hidden->session addInput:input];
} else {
SDLCaptureVideoDataOutputSampleBufferDelegate *delegate = [[SDLCaptureVideoDataOutputSampleBufferDelegate alloc] init:device];
if (delegate == nil) {
return SDL_SetError("Cannot create SDLCaptureVideoDataOutputSampleBufferDelegate");
}
[output setSampleBufferDelegate:delegate queue:queue];
if (![session canAddInput:input]) {
return SDL_SetError("Cannot add AVCaptureDeviceInput");
}
[session addInput:input];
if ([_this->hidden->session canAddOutput:output] ){
[_this->hidden->session addOutput:output];
} else {
if (![session canAddOutput:output]) {
return SDL_SetError("Cannot add AVCaptureVideoDataOutput");
}
[session addOutput:output];
[_this->hidden->session commitConfiguration];
[session commitConfiguration];
return 0;
}
static int COREMEDIA_GetDeviceSpec(SDL_CameraDevice *_this, SDL_CameraSpec *spec)
{
// !!! FIXME: make sure higher level checks spec != NULL
if (spec) {
SDL_copyp(spec, &_this->spec);
return 0;
SDLPrivateCameraData *hidden = [[SDLPrivateCameraData alloc] init];
if (hidden == nil) {
return SDL_SetError("Cannot create SDLPrivateCameraData");
}
return -1;
}
static int COREMEDIA_StartCamera(SDL_CameraDevice *_this)
{
[_this->hidden->session startRunning];
hidden.session = session;
hidden.delegate = delegate;
hidden.current_sample = NULL;
device->hidden = (struct SDL_PrivateCameraData *)CFBridgingRetain(hidden);
[session startRunning]; // !!! FIXME: docs say this can block while camera warms up and shouldn't be done on main thread. Maybe push through `queue`?
CheckCameraPermissions(device); // check right away, in case the process is already granted permission.
return 0;
}
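(For context, a hedged sketch of the application-side call that ultimately reaches this backend's OpenDevice; the instance_id comes from device enumeration, and the format/size/rate are hypothetical but must match one of the specs gathered by GatherCameraSpecs below.)
static SDL_Camera *OpenMyCamera(SDL_CameraDeviceID instance_id)
{
    SDL_CameraSpec spec;
    SDL_zero(spec);
    spec.format = SDL_PIXELFORMAT_YUY2;   // hypothetical choice; must be a supported format
    spec.width = 1280;
    spec.height = 720;
    spec.interval_numerator = 1;          // 30fps == interval 1/30
    spec.interval_denominator = 30;
    return SDL_OpenCameraDevice(instance_id, &spec);
}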
static int COREMEDIA_StopCamera(SDL_CameraDevice *_this)
static void COREMEDIA_FreeDeviceHandle(SDL_CameraDevice *device)
{
[_this->hidden->session stopRunning];
return 0;
if (device && device->handle) {
CFBridgingRelease(device->handle);
}
}
static int COREMEDIA_AcquireFrame(SDL_CameraDevice *_this, SDL_CameraFrame *frame)
static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_data)
{
if (CMSimpleQueueGetCount(_this->hidden->frame_queue) > 0) {
CMSampleBufferRef sampleBuffer = (CMSampleBufferRef)CMSimpleQueueDequeue(_this->hidden->frame_queue);
frame->internal = (void *) sampleBuffer;
frame->timestampNS = SDL_GetTicksNS();
SDL_zerop(add_data);
CVImageBufferRef image = CMSampleBufferGetImageBuffer(sampleBuffer);
const int numPlanes = CVPixelBufferGetPlaneCount(image);
const int planar = CVPixelBufferIsPlanar(image);
for (AVCaptureDeviceFormat *fmt in device.formats) {
if (CMFormatDescriptionGetMediaType(fmt.formatDescription) != kCMMediaType_Video) {
continue;
}
#if DEBUG_CAMERA
const int w = CVPixelBufferGetWidth(image);
const int h = CVPixelBufferGetHeight(image);
const int sz = CVPixelBufferGetDataSize(image);
const int pitch = CVPixelBufferGetBytesPerRow(image);
SDL_Log("CAMERA: buffer planar=%d count:%d %d x %d sz=%d pitch=%d", planar, numPlanes, w, h, sz, pitch);
#endif
const Uint32 sdlfmt = CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription));
if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) {
continue;
}
CVPixelBufferLockBaseAddress(image, 0);
const CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(fmt.formatDescription);
const int w = (int) dims.width;
const int h = (int) dims.height;
for (AVFrameRateRange *framerate in fmt.videoSupportedFrameRateRanges) {
int rate;
if ((planar == 0) && (numPlanes == 0)) {
frame->pitch[0] = CVPixelBufferGetBytesPerRow(image);
frame->data[0] = CVPixelBufferGetBaseAddress(image);
frame->num_planes = 1;
} else {
for (int i = 0; (i < numPlanes) && (i < 3); i++) {
frame->num_planes += 1;
frame->data[i] = CVPixelBufferGetBaseAddressOfPlane(image, i);
frame->pitch[i] = CVPixelBufferGetBytesPerRowOfPlane(image, i);
rate = (int) SDL_ceil((double) framerate.minFrameRate);
if (rate) {
SDL_AddCameraFormat(add_data, sdlfmt, w, h, 1, rate);
}
}
// Unlocked when frame is released
} else {
// no frame
SDL_Delay(20); // TODO fix some delay
}
return 0;
}
static int COREMEDIA_ReleaseFrame(SDL_CameraDevice *_this, SDL_CameraFrame *frame)
{
if (frame->internal) {
CMSampleBufferRef sampleBuffer = (CMSampleBufferRef) frame->internal;
CVImageBufferRef image = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferUnlockBaseAddress(image, 0);
CFRelease(sampleBuffer);
}
return 0;
}
static int COREMEDIA_GetNumFormats(SDL_CameraDevice *_this)
{
AVCaptureDevice *device = GetCameraDeviceByName(_this->dev_name);
if (device) {
// LIST FORMATS
NSMutableOrderedSet<NSString *> *array_formats = [NSMutableOrderedSet new];
NSArray<AVCaptureDeviceFormat *> *formats = [device formats];
for (AVCaptureDeviceFormat *format in formats) {
// NSLog(@"%@", formats);
CMFormatDescriptionRef formatDescription = [format formatDescription];
//NSLog(@"%@", formatDescription);
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription);
NSString *str = fourcc_to_nstring(mediaSubType);
[array_formats addObject:str];
}
return [array_formats count];
}
return 0;
}
static int COREMEDIA_GetFormat(SDL_CameraDevice *_this, int index, Uint32 *format)
{
AVCaptureDevice *device = GetCameraDeviceByName(_this->dev_name);
if (device) {
// LIST FORMATS
NSMutableOrderedSet<NSString *> *array_formats = [NSMutableOrderedSet new];
NSArray<AVCaptureDeviceFormat *> *formats = [device formats];
NSString *str;
for (AVCaptureDeviceFormat *f in formats) {
FourCharCode mediaSubType;
CMFormatDescriptionRef formatDescription;
formatDescription = [f formatDescription];
mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription);
str = fourcc_to_nstring(mediaSubType);
[array_formats addObject:str];
}
str = array_formats[index];
*format = nsfourcc_to_sdlformat(str);
return 0;
}
return -1;
}
static int COREMEDIA_GetNumFrameSizes(SDL_CameraDevice *_this, Uint32 format)
{
AVCaptureDevice *device = GetCameraDeviceByName(_this->dev_name);
if (device) {
NSString *fmt = sdlformat_to_nsfourcc(format);
int count = 0;
NSArray<AVCaptureDeviceFormat *> *formats = [device formats];
for (AVCaptureDeviceFormat *f in formats) {
CMFormatDescriptionRef formatDescription = [f formatDescription];
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription);
NSString *str = fourcc_to_nstring(mediaSubType);
if ([str isEqualToString:fmt]) {
count++;
}
}
return count;
}
return 0;
}
static int COREMEDIA_GetFrameSize(SDL_CameraDevice *_this, Uint32 format, int index, int *width, int *height)
{
AVCaptureDevice *device = GetCameraDeviceByName(_this->dev_name);
if (device) {
NSString *fmt = sdlformat_to_nsfourcc(format);
int count = 0;
NSArray<AVCaptureDeviceFormat *> *formats = [device formats];
for (AVCaptureDeviceFormat *f in formats) {
CMFormatDescriptionRef formatDescription = [f formatDescription];
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription);
NSString *str = fourcc_to_nstring(mediaSubType);
if ([str isEqualToString:fmt]) {
if (index == count) {
CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription);
*width = dim.width;
*height = dim.height;
return 0;
}
count++;
rate = (int) SDL_floor((double) framerate.maxFrameRate);
if (rate) {
SDL_AddCameraFormat(add_data, sdlfmt, w, h, 1, rate);
}
}
}
return -1;
}
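(A worked example of the frame-rate handling in GatherCameraSpecs above, for a hypothetical device: a format whose videoSupportedFrameRateRanges entry reports min 1.0 / max 30.0 produces one spec entry per endpoint.)
// ceil(minFrameRate) == 1, floor(maxFrameRate) == 30:
SDL_AddCameraFormat(add_data, sdlfmt, 1280, 720, 1, 1);   // 1 fps
SDL_AddCameraFormat(add_data, sdlfmt, 1280, 720, 1, 30);  // 30 fps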
static int COREMEDIA_GetDeviceName(SDL_CameraDeviceID instance_id, char *buf, int size)
static SDL_bool FindCoreMediaCameraDeviceByUniqueID(SDL_CameraDevice *device, void *userdata)
{
int index = instance_id - 1;
NSArray<AVCaptureDevice *> *devices = DiscoverCameraDevices();
if (index < [devices count]) {
AVCaptureDevice *device = devices[index];
NSString *cameraID = [device localizedName];
const char *str = [cameraID UTF8String];
SDL_snprintf(buf, size, "%s", str);
return 0;
}
return -1;
NSString *uniqueid = (__bridge NSString *) userdata;
AVCaptureDevice *avdev = (__bridge AVCaptureDevice *) device->handle;
return ([uniqueid isEqualToString:avdev.uniqueID]) ? SDL_TRUE : SDL_FALSE;
}
static int GetNumCameraDevices(void)
static void MaybeAddDevice(AVCaptureDevice *device)
{
NSArray<AVCaptureDevice *> *devices = DiscoverCameraDevices();
return [devices count];
}
static SDL_CameraDeviceID *COREMEDIA_GetDevices(int *count)
{
// hard-coded list of IDs
const int num = GetNumCameraDevices();
SDL_CameraDeviceID *retval = (SDL_CameraDeviceID *)SDL_calloc(num + 1, sizeof (*retval));
if (retval == NULL) {
*count = 0;
return NULL;
if (!device.connected) {
return; // not connected.
} else if (![device hasMediaType:AVMediaTypeVideo]) {
return; // not a camera.
} else if (SDL_FindPhysicalCameraDeviceByCallback(FindCoreMediaCameraDeviceByUniqueID, (__bridge void *) device.uniqueID)) {
return; // already have this one.
}
for (int i = 0; i < num; i++) {
retval[i] = i + 1;
CameraFormatAddData add_data;
GatherCameraSpecs(device, &add_data);
if (add_data.num_specs > 0) {
SDL_AddCameraDevice(device.localizedName.UTF8String, add_data.num_specs, add_data.specs, (void *) CFBridgingRetain(device));
}
retval[num] = 0;
*count = num;
return retval;
SDL_free(add_data.specs);
}
static void COREMEDIA_DetectDevices(void)
{
NSArray<AVCaptureDevice *> *devices = nil;
if (@available(macOS 10.15, iOS 13, *)) {
// kind of annoying that there isn't a "give me anything that looks like a camera" option,
// so this list will need to be updated when Apple decides to add
// AVCaptureDeviceTypeBuiltInQuadrupleCamera some day.
NSArray *device_types = @[
#ifdef SDL_PLATFORM_IOS
AVCaptureDeviceTypeBuiltInTelephotoCamera,
AVCaptureDeviceTypeBuiltInDualCamera,
AVCaptureDeviceTypeBuiltInDualWideCamera,
AVCaptureDeviceTypeBuiltInTripleCamera,
AVCaptureDeviceTypeBuiltInUltraWideCamera,
#else
AVCaptureDeviceTypeExternalUnknown,
#endif
AVCaptureDeviceTypeBuiltInWideAngleCamera
];
AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:device_types
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
devices = discoverySession.devices;
// !!! FIXME: this can use Key Value Observation to get hotplug events.
} else {
// this is deprecated but works back to macOS 10.7; 10.15 added AVCaptureDeviceDiscoverySession as a replacement.
devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
// !!! FIXME: this can use AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification with NSNotificationCenter to get hotplug events.
}
for (AVCaptureDevice *device in devices) {
MaybeAddDevice(device);
}
}
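(The two FIXMEs above note where hotplug support would go; as a hedged sketch of the pre-10.15 notification route, using real AVFoundation notification names but wiring that this patch does not include:)
// Sketch: react to camera hotplug via NSNotificationCenter.
[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureDeviceWasConnectedNotification
                                                  object:nil
                                                   queue:nil
                                              usingBlock:^(NSNotification *note) {
    MaybeAddDevice((AVCaptureDevice *) note.object);
}];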
static void COREMEDIA_Deinitialize(void)
{
// !!! FIXME: disable hotplug.
}
static SDL_bool COREMEDIA_Init(SDL_CameraDriverImpl *impl)
{
#ifndef HAVE_COREMEDIA
return SDL_FALSE;
#else
impl->DetectDevices = COREMEDIA_DetectDevices;
impl->OpenDevice = COREMEDIA_OpenDevice;
impl->CloseDevice = COREMEDIA_CloseDevice;
impl->InitDevice = COREMEDIA_InitDevice;
impl->GetDeviceSpec = COREMEDIA_GetDeviceSpec;
impl->StartCamera = COREMEDIA_StartCamera;
impl->StopCamera = COREMEDIA_StopCamera;
impl->WaitDevice = COREMEDIA_WaitDevice;
impl->AcquireFrame = COREMEDIA_AcquireFrame;
impl->ReleaseFrame = COREMEDIA_ReleaseFrame;
impl->GetNumFormats = COREMEDIA_GetNumFormats;
impl->GetFormat = COREMEDIA_GetFormat;
impl->GetNumFrameSizes = COREMEDIA_GetNumFrameSizes;
impl->GetFrameSize = COREMEDIA_GetFrameSize;
impl->GetDeviceName = COREMEDIA_GetDeviceName;
impl->GetDevices = COREMEDIA_GetDevices;
impl->FreeDeviceHandle = COREMEDIA_FreeDeviceHandle;
impl->Deinitialize = COREMEDIA_Deinitialize;
impl->ProvidesOwnCallbackThread = SDL_TRUE;
return SDL_TRUE;
#endif
}
CameraBootStrap COREMEDIA_bootstrap = {
"coremedia", "SDL Apple CoreMedia camera driver", COREMEDIA_Init, SDL_FALSE
};
#endif // HAVE_COREMEDIA
#endif // SDL_CAMERA_COREMEDIA
#endif // SDL_CAMERA_DRIVER_COREMEDIA

View File

@ -501,8 +501,6 @@ static int MEDIAFOUNDATION_OpenDevice(SDL_CameraDevice *device, const SDL_Camera
//PROPVARIANT var;
HRESULT ret;
SDL_Log("MEDIAFOUNDATION spec format: %s", SDL_GetPixelFormatName(spec->format));
#if 0
IMFStreamDescriptor *streamdesc = NULL;
IMFPresentationDescriptor *presentdesc = NULL;