Added support for creating an SDL texture from a CVPixelBufferRef

main
Sam Lantinga 2024-02-06 13:54:05 -08:00
parent c79462dc50
commit 2039c46d2c
6 changed files with 211 additions and 235 deletions

include/SDL3/SDL_render.h

@@ -503,6 +503,10 @@ extern DECLSPEC SDL_Texture *SDLCALL SDL_CreateTextureFromSurface(SDL_Renderer *
 *   associated with the V plane of a YUV texture, if you want to wrap an
 *   existing texture.
 *
 * With the metal renderer:
 *
 * - `SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER`: the CVPixelBufferRef associated with the texture, if you want to create a texture from an existing pixel buffer.
 *
 * With the opengl renderer:
 *
 * - `SDL_PROP_TEXTURE_CREATE_OPENGL_TEXTURE_NUMBER`: the GLuint texture
@@ -560,6 +564,7 @@ extern DECLSPEC SDL_Texture *SDLCALL SDL_CreateTextureWithProperties(SDL_Renderer *
#define SDL_PROP_TEXTURE_CREATE_D3D12_TEXTURE_POINTER "d3d12.texture"
#define SDL_PROP_TEXTURE_CREATE_D3D12_TEXTURE_U_POINTER "d3d12.texture_u"
#define SDL_PROP_TEXTURE_CREATE_D3D12_TEXTURE_V_POINTER "d3d12.texture_v"
#define SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER "metal.pixelbuffer"
#define SDL_PROP_TEXTURE_CREATE_OPENGL_TEXTURE_NUMBER "opengl.texture"
#define SDL_PROP_TEXTURE_CREATE_OPENGL_TEXTURE_UV_NUMBER "opengl.texture_uv"
#define SDL_PROP_TEXTURE_CREATE_OPENGL_TEXTURE_U_NUMBER "opengl.texture_u"
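As an aside, here is a minimal sketch of how an application might use the new property. The helper name and the NV12/BT.709 choices are illustrative assumptions rather than part of this commit, and the Metal renderer change below requires the pixel buffer to be IOSurface-backed:

/* Illustrative sketch: wrap an existing, IOSurface-backed NV12 CVPixelBufferRef in an SDL texture */
#include <SDL3/SDL.h>
#include <CoreVideo/CoreVideo.h>

static SDL_Texture *CreateTextureFromPixelBuffer(SDL_Renderer *renderer, CVPixelBufferRef pixelbuffer)
{
    SDL_PropertiesID props = SDL_CreateProperties();
    SDL_Texture *texture;

    /* The format and colorspace must match the pixel buffer contents; NV12/BT.709 limited range assumed here */
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, SDL_COLORSPACE_BT709_LIMITED);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, SDL_PIXELFORMAT_NV12);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, SDL_TEXTUREACCESS_STATIC);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, (Sint64)CVPixelBufferGetWidthOfPlane(pixelbuffer, 0));
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, (Sint64)CVPixelBufferGetHeightOfPlane(pixelbuffer, 0));
    SDL_SetProperty(props, SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER, pixelbuffer);

    texture = SDL_CreateTextureWithProperties(renderer, props);
    SDL_DestroyProperties(props);
    return texture; /* NULL on failure, see SDL_GetError() */
}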

src/render/metal/SDL_render_metal.m

@@ -26,6 +26,7 @@
#include "../../video/SDL_pixels_c.h"

#include <Availability.h>
#import <CoreVideo/CoreVideo.h>
#import <Metal/Metal.h>
#import <QuartzCore/CAMetalLayer.h>
@@ -544,6 +545,91 @@ static SDL_bool METAL_SupportsBlendMode(SDL_Renderer *renderer, SDL_BlendMode blendMode)
    return SDL_TRUE;
}
/* Return the constants buffer offset of the BT.601 YCbCr -> RGB conversion matrix */
static size_t GetBT601ConversionMatrix(SDL_Colorspace colorspace)
{
    switch (SDL_COLORSPACERANGE(colorspace)) {
    case SDL_COLOR_RANGE_LIMITED:
    case SDL_COLOR_RANGE_UNKNOWN:
        return CONSTANTS_OFFSET_DECODE_BT601_LIMITED;
    case SDL_COLOR_RANGE_FULL:
        return CONSTANTS_OFFSET_DECODE_BT601_FULL;
    default:
        break;
    }
    return 0;
}

/* Return the constants buffer offset of the BT.709 YCbCr -> RGB conversion matrix */
static size_t GetBT709ConversionMatrix(SDL_Colorspace colorspace)
{
    switch (SDL_COLORSPACERANGE(colorspace)) {
    case SDL_COLOR_RANGE_LIMITED:
    case SDL_COLOR_RANGE_UNKNOWN:
        return CONSTANTS_OFFSET_DECODE_BT709_LIMITED;
    case SDL_COLOR_RANGE_FULL:
        return CONSTANTS_OFFSET_DECODE_BT709_FULL;
    default:
        break;
    }
    return 0;
}

/* BT.2020 conversion constants aren't available yet; returning 0 makes the caller
   treat the colorspace as unsupported */
static size_t GetBT2020ConversionMatrix(SDL_Colorspace colorspace)
{
    switch (SDL_COLORSPACERANGE(colorspace)) {
    case SDL_COLOR_RANGE_LIMITED:
    case SDL_COLOR_RANGE_UNKNOWN:
        return 0;
    case SDL_COLOR_RANGE_FULL:
        return 0;
    default:
        break;
    }
    return 0;
}

/* Return the constants buffer offset of the YCbCr -> RGB conversion matrix for this
   colorspace, or 0 if the colorspace isn't supported */
static size_t GetYCbCRtoRGBConversionMatrix(SDL_Colorspace colorspace, int w, int h, int bits_per_pixel)
{
    const int YUV_SD_THRESHOLD = 576;

    switch (SDL_COLORSPACEMATRIX(colorspace)) {
    case SDL_MATRIX_COEFFICIENTS_BT601:
        return GetBT601ConversionMatrix(colorspace);

    case SDL_MATRIX_COEFFICIENTS_BT709:
        return GetBT709ConversionMatrix(colorspace);

    /* FIXME: Are these the same? */
    case SDL_MATRIX_COEFFICIENTS_BT2020_NCL:
    case SDL_MATRIX_COEFFICIENTS_BT2020_CL:
        return GetBT2020ConversionMatrix(colorspace);

    case SDL_MATRIX_COEFFICIENTS_UNSPECIFIED:
        /* Guess the matrix from the content: 8-bit SD-height video is assumed to be
           BT.601, 8-bit HD video BT.709, and 10/16-bit video BT.2020 */
        switch (bits_per_pixel) {
        case 8:
            if (h <= YUV_SD_THRESHOLD) {
                return GetBT601ConversionMatrix(colorspace);
            } else {
                return GetBT709ConversionMatrix(colorspace);
            }
        case 10:
        case 16:
            return GetBT2020ConversionMatrix(colorspace);
        default:
            break;
        }
        break;

    default:
        break;
    }
    return 0;
}
static int METAL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_PropertiesID create_props)
{
    @autoreleasepool {
@@ -553,6 +639,16 @@ static int METAL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_PropertiesID create_props)
        id<MTLTexture> mtltexture, mtltextureUv;
        BOOL yuv, nv12;
        METAL_TextureData *texturedata;
        CVPixelBufferRef pixelbuffer = nil;
        IOSurfaceRef surface = nil;

        pixelbuffer = SDL_GetProperty(create_props, SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER, nil);
        if (pixelbuffer) {
            surface = CVPixelBufferGetIOSurface(pixelbuffer);
            if (!surface) {
                return SDL_SetError("CVPixelBufferGetIOSurface() failed");
            }
        }

        switch (texture->format) {
        case SDL_PIXELFORMAT_ABGR8888:
@@ -599,7 +695,11 @@ static int METAL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_PropertiesID create_props)
            }
        }

        if (surface) {
            mtltexture = [data.mtldevice newTextureWithDescriptor:mtltexdesc iosurface:surface plane:0];
        } else {
            mtltexture = [data.mtldevice newTextureWithDescriptor:mtltexdesc];
        }
        if (mtltexture == nil) {
            return SDL_SetError("Texture allocation failed");
        }
@@ -622,7 +722,11 @@ static int METAL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_PropertiesID create_props)
        }

        if (yuv || nv12) {
            if (surface) {
                mtltextureUv = [data.mtldevice newTextureWithDescriptor:mtltexdesc iosurface:surface plane:1];
            } else {
                mtltextureUv = [data.mtldevice newTextureWithDescriptor:mtltexdesc];
            }
            if (mtltextureUv == nil) {
                return SDL_SetError("Texture allocation failed");
            }
@@ -653,21 +757,9 @@ static int METAL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_PropertiesID create_props)
        }
#if SDL_HAVE_YUV
        if (yuv || nv12) {
-            size_t offset = 0;
-            if (SDL_ISCOLORSPACE_YUV_BT601(texture->colorspace)) {
-                if (SDL_ISCOLORSPACE_LIMITED_RANGE(texture->colorspace)) {
-                    offset = CONSTANTS_OFFSET_DECODE_BT601_LIMITED;
-                } else {
-                    offset = CONSTANTS_OFFSET_DECODE_BT601_FULL;
-                }
-            } else if (SDL_ISCOLORSPACE_YUV_BT709(texture->colorspace)) {
-                if (SDL_ISCOLORSPACE_LIMITED_RANGE(texture->colorspace)) {
-                    offset = CONSTANTS_OFFSET_DECODE_BT709_LIMITED;
-                } else {
-                    offset = CONSTANTS_OFFSET_DECODE_BT709_FULL;
-                }
-            } else {
-                offset = 0;
-            }
+            size_t offset = GetYCbCRtoRGBConversionMatrix(texture->colorspace, texture->w, texture->h, 8);
+            if (offset == 0) {
+                return SDL_SetError("Unsupported YUV colorspace");
+            }
            texturedata.conversionBufferOffset = offset;
        }
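As a side note on what these constant-buffer offsets select: for BT.601 limited range, the decode is the same transform the (now removed) testffmpeg Metal shader near the end of this diff performs by hand. With Y, Cb, Cr sampled as 0..1 values, approximately:

    R = 1.1644 * (Y - 16/255) + 1.596  * (Cr - 128/255)
    G = 1.1644 * (Y - 16/255) - 0.3918 * (Cb - 128/255) - 0.813 * (Cr - 128/255)
    B = 1.1644 * (Y - 16/255) + 2.0172 * (Cb - 128/255)

BT.709 and BT.2020 use the same structure with different coefficients; full-range variants drop the 16/255 luma offset and the 255/219 luma scale.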

test/CMakeLists.txt

@@ -230,8 +230,7 @@ endif()
if(FFmpeg_FOUND AND LIBAVUTIL_AVFRAME_HAS_CH_LAYOUT)
    add_sdl_test_executable(testffmpeg NO_C90 SOURCES testffmpeg.c ${icon_bmp_header})
    if(APPLE)
-        target_sources(testffmpeg PRIVATE testffmpeg_videotoolbox.m)
-        target_link_options(testffmpeg PRIVATE "-Wl,-framework,CoreFoundation" "-Wl,-framework,CoreVideo" "-Wl,-framework,Metal")
+        target_link_options(testffmpeg PRIVATE "-Wl,-framework,CoreVideo")
    endif()
    if(HAVE_OPENGLES_V2)
        message(DEBUG "Enabling EGL support in testffmpeg")

test/testffmpeg.c

@@ -47,7 +47,7 @@
#endif

#ifdef SDL_PLATFORM_APPLE
-#include "testffmpeg_videotoolbox.h"
+#include <CoreVideo/CoreVideo.h>
#endif

#ifdef SDL_PLATFORM_WIN32
@@ -85,9 +85,6 @@ static SDL_bool has_EGL_EXT_image_dma_buf_import;
static PFNGLACTIVETEXTUREARBPROC glActiveTextureARBFunc;
static PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOESFunc;
#endif
-#ifdef SDL_PLATFORM_APPLE
-static SDL_bool has_videotoolbox_output;
-#endif
#ifdef SDL_PLATFORM_WIN32
static ID3D11Device *d3d11_device;
static ID3D11DeviceContext *d3d11_context;
@@ -100,8 +97,8 @@ struct SwsContextContainer
static const char *SWS_CONTEXT_CONTAINER_PROPERTY = "SWS_CONTEXT_CONTAINER";
static int done;

-/* This function isn't Windows specific, but we haven't hooked up HDR video support on other platforms yet */
-#ifdef SDL_PLATFORM_WIN32
+/* This function isn't platform specific, but we haven't hooked up HDR video support on other platforms yet */
+#if defined(SDL_PLATFORM_WIN32) || defined(SDL_PLATFORM_APPLE)
static void GetDisplayHDRProperties(SDL_bool *HDR_display, float *SDR_white_level)
{
    SDL_PropertiesID props;
@@ -125,7 +122,7 @@ static void GetDisplayHDRProperties(SDL_bool *HDR_display, float *SDR_white_level)
        *HDR_display = SDL_TRUE;
        *SDR_white_level = SDL_GetFloatProperty(props, SDL_PROP_DISPLAY_SDR_WHITE_LEVEL_FLOAT, DEFAULT_SDR_WHITE_LEVEL);
    }
-#endif /* SDL_PLATFORM_WIN32 */
+#endif /* SDL_PLATFORM_WIN32 || SDL_PLATFORM_APPLE */

static SDL_bool CreateWindowAndRenderer(Uint32 window_flags, const char *driver)
{
@@ -195,10 +192,6 @@ static SDL_bool CreateWindowAndRenderer(Uint32 window_flags, const char *driver)
    }
#endif /* HAVE_EGL */

-#ifdef SDL_PLATFORM_APPLE
-    has_videotoolbox_output = SetupVideoToolboxOutput(renderer);
-#endif
-
#ifdef SDL_PLATFORM_WIN32
    d3d11_device = (ID3D11Device *)SDL_GetProperty(SDL_GetRendererProperties(renderer), SDL_PROP_RENDERER_D3D11_DEVICE_POINTER, NULL);
    if (d3d11_device) {
@@ -315,7 +308,7 @@ static SDL_bool SupportedPixelFormat(enum AVPixelFormat format)
        return SDL_TRUE;
    }
#ifdef SDL_PLATFORM_APPLE
-    if (has_videotoolbox_output && format == AV_PIX_FMT_VIDEOTOOLBOX) {
+    if (format == AV_PIX_FMT_VIDEOTOOLBOX) {
        return SDL_TRUE;
    }
#endif
@@ -458,6 +451,9 @@ static SDL_Colorspace GetFrameColorspace(AVFrame *frame)
    SDL_Colorspace colorspace = SDL_COLORSPACE_SRGB;

    if (frame && frame->colorspace != AVCOL_SPC_RGB) {
#ifdef DEBUG_COLORSPACE
        SDL_Log("Frame colorspace: range: %d, primaries: %d, trc: %d, colorspace: %d, chroma_location: %d\n", frame->color_range, frame->color_primaries, frame->color_trc, frame->colorspace, frame->chroma_location);
#endif
        colorspace = SDL_DEFINE_COLORSPACE(SDL_COLOR_TYPE_YCBCR,
                                           frame->color_range,
                                           frame->color_primaries,
@@ -674,10 +670,6 @@ static SDL_bool GetTextureForD3D11Frame(AVFrame *frame, SDL_Texture **texture)
    D3D11_TEXTURE2D_DESC desc;
    SDL_zero(desc);
    ID3D11Texture2D_GetDesc(pTexture, &desc);
-    if (desc.Format != DXGI_FORMAT_NV12 && desc.Format != DXGI_FORMAT_P010 && desc.Format != DXGI_FORMAT_P016) {
-        SDL_SetError("Unsupported texture format, expected DXGI_FORMAT_NV12, got %d", desc.Format);
-        return SDL_FALSE;
-    }

    if (*texture) {
        SDL_QueryTexture(*texture, NULL, NULL, &texture_width, &texture_height);
@@ -686,6 +678,24 @@ static SDL_bool GetTextureForD3D11Frame(AVFrame *frame, SDL_Texture **texture)
    float SDR_white_level, video_white_level;
    SDL_bool HDR_display = SDL_FALSE;
    SDL_bool HDR_video = SDL_FALSE;
    Uint32 format;

    switch (desc.Format) {
    case DXGI_FORMAT_NV12:
        format = SDL_PIXELFORMAT_NV12;
        break;
    case DXGI_FORMAT_P010:
        format = SDL_PIXELFORMAT_P010;
        HDR_video = SDL_TRUE;
        break;
    case DXGI_FORMAT_P016:
        format = SDL_PIXELFORMAT_P016;
        HDR_video = SDL_TRUE;
        break;
    default:
        SDL_SetError("Unsupported texture format %d", desc.Format);
        return SDL_FALSE;
    }

    if (*texture) {
        SDL_DestroyTexture(*texture);
@@ -695,23 +705,7 @@ static SDL_bool GetTextureForD3D11Frame(AVFrame *frame, SDL_Texture **texture)
        SDL_PropertiesID props = SDL_CreateProperties();
        SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, GetFrameColorspace(frame));
-        switch (desc.Format) {
-        case DXGI_FORMAT_NV12:
-            SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, SDL_PIXELFORMAT_NV12);
-            break;
-        case DXGI_FORMAT_P010:
-            SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, SDL_PIXELFORMAT_P010);
-            HDR_video = SDL_TRUE;
-            break;
-        case DXGI_FORMAT_P016:
-            SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, SDL_PIXELFORMAT_P016);
-            HDR_video = SDL_TRUE;
-            break;
-        default:
-            /* This should be checked above */
-            SDL_assert(!"Unknown pixel format");
-            break;
-        }
+        SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, format);
        SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, SDL_TEXTUREACCESS_STATIC);
        SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, desc.Width);
        SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, desc.Height);
@@ -746,6 +740,71 @@ static SDL_bool GetTextureForD3D11Frame(AVFrame *frame, SDL_Texture **texture)
#endif
}
static SDL_bool GetTextureForVideoToolboxFrame(AVFrame *frame, SDL_Texture **texture)
{
#ifdef SDL_PLATFORM_APPLE
    int texture_width = 0, texture_height = 0;
    CVPixelBufferRef pPixelBuffer = (CVPixelBufferRef)frame->data[3];
    OSType nPixelBufferType = CVPixelBufferGetPixelFormatType(pPixelBuffer);
    size_t nPixelBufferWidth = CVPixelBufferGetWidthOfPlane(pPixelBuffer, 0);
    size_t nPixelBufferHeight = CVPixelBufferGetHeightOfPlane(pPixelBuffer, 0);
    SDL_PropertiesID props;
    Uint32 format;
    float SDR_white_level, video_white_level;
    SDL_bool HDR_display = SDL_FALSE;
    SDL_bool HDR_video = SDL_FALSE;

    switch (nPixelBufferType) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
    case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
        format = SDL_PIXELFORMAT_NV12;
        break;
    default:
        SDL_SetError("Unsupported texture format %c%c%c%c",
                     (char)((nPixelBufferType >> 24) & 0xFF),
                     (char)((nPixelBufferType >> 16) & 0xFF),
                     (char)((nPixelBufferType >> 8) & 0xFF),
                     (char)((nPixelBufferType >> 0) & 0xFF));
        return SDL_FALSE;
    }

    if (*texture) {
        /* Free the previous texture now that we're about to render a new one */
        /* FIXME: We can actually keep a cache of textures that map to pixel buffers */
        SDL_DestroyTexture(*texture);
    }

    GetDisplayHDRProperties(&HDR_display, &SDR_white_level);

    props = SDL_CreateProperties();
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, GetFrameColorspace(frame));
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, format);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, SDL_TEXTUREACCESS_STATIC);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, nPixelBufferWidth);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, nPixelBufferHeight);
    SDL_SetProperty(props, SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER, pPixelBuffer);
    *texture = SDL_CreateTextureWithProperties(renderer, props);
    SDL_DestroyProperties(props);
    if (!*texture) {
        return SDL_FALSE;
    }

    if (HDR_video != HDR_display) {
        if (HDR_display) {
            video_white_level = SDR_DISPLAY_WHITE_LEVEL;
        } else {
            video_white_level = DEFAULT_HDR_WHITE_LEVEL;
        }
        SDL_SetRenderColorScale(renderer, SDR_white_level / video_white_level);
    } else {
        SDL_SetRenderColorScale(renderer, 1.0f);
    }
    return SDL_TRUE;
#else
    return SDL_FALSE;
#endif
}
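The FIXME above suggests keeping a cache of textures keyed by pixel buffer instead of destroying and recreating one every frame. A hypothetical sketch of that idea follows, relying on the includes already in testffmpeg.c; the names, the fixed-size array, and the eviction policy are illustrative only, not part of the commit:

/* Hypothetical texture cache keyed by CVPixelBufferRef (illustrative only) */
#define TEXTURE_CACHE_SIZE 8

typedef struct CachedTexture
{
    CVPixelBufferRef pixelbuffer; /* retained key */
    SDL_Texture *texture;
} CachedTexture;

static CachedTexture texture_cache[TEXTURE_CACHE_SIZE];
static int texture_cache_next;

static SDL_Texture *LookupCachedTexture(CVPixelBufferRef pixelbuffer)
{
    int i;
    for (i = 0; i < TEXTURE_CACHE_SIZE; ++i) {
        if (texture_cache[i].pixelbuffer == pixelbuffer) {
            return texture_cache[i].texture;
        }
    }
    return NULL;
}

static void CacheTexture(CVPixelBufferRef pixelbuffer, SDL_Texture *texture)
{
    CachedTexture *slot = &texture_cache[texture_cache_next];
    texture_cache_next = (texture_cache_next + 1) % TEXTURE_CACHE_SIZE;

    /* Evict whatever previously occupied this slot */
    if (slot->texture) {
        SDL_DestroyTexture(slot->texture);
        CVPixelBufferRelease(slot->pixelbuffer);
    }
    slot->pixelbuffer = CVPixelBufferRetain(pixelbuffer);
    slot->texture = texture;
}

A real implementation would also need to flush the cache when the decoder recycles its buffer pool or when the renderer is destroyed.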
static SDL_bool GetTextureForFrame(AVFrame *frame, SDL_Texture **texture)
{
    switch (frame->format) {
@@ -755,6 +814,8 @@ static SDL_bool GetTextureForFrame(AVFrame *frame, SDL_Texture **texture)
        return GetTextureForDRMFrame(frame, texture);
    case AV_PIX_FMT_D3D11:
        return GetTextureForD3D11Frame(frame, texture);
    case AV_PIX_FMT_VIDEOTOOLBOX:
        return GetTextureForVideoToolboxFrame(frame, texture);
    default:
        return GetTextureForMemoryFrame(frame, texture);
    }
@@ -762,7 +823,7 @@ static SDL_bool GetTextureForFrame(AVFrame *frame, SDL_Texture **texture)
static void DisplayVideoTexture(AVFrame *frame)
{
-#if 1 /* This data doesn't seem to be valid in any of the videos I've tried */
+#if 0 /* This data doesn't seem to be valid in any of the videos I've tried */
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
    if (sd) {
        AVMasteringDisplayMetadata *mdm = (AVMasteringDisplayMetadata *)sd->data;
@@ -789,25 +850,9 @@ static void DisplayVideoTexture(AVFrame *frame)
    }
}
-static void DisplayVideoToolbox(AVFrame *frame)
-{
-#ifdef SDL_PLATFORM_APPLE
-    SDL_Rect viewport;
-
-    SDL_GetRenderViewport(renderer, &viewport);
-    DisplayVideoToolboxFrame(renderer, frame->data[3], 0, 0, frame->width, frame->height, viewport.x, viewport.y, viewport.w, viewport.h);
-#endif
-}
-
static void DisplayVideoFrame(AVFrame *frame)
{
-    switch (frame->format) {
-    case AV_PIX_FMT_VIDEOTOOLBOX:
-        DisplayVideoToolbox(frame);
-        break;
-    default:
-        DisplayVideoTexture(frame);
-        break;
-    }
+    DisplayVideoTexture(frame);
}
static void HandleVideoFrame(AVFrame *frame, double pts)
@@ -1230,9 +1275,6 @@ int main(int argc, char *argv[])
    }
    return_code = 0;
quit:
-#ifdef SDL_PLATFORM_APPLE
-    CleanupVideoToolboxOutput();
-#endif
#ifdef SDL_PLATFORM_WIN32
    if (d3d11_context) {
        ID3D11DeviceContext_Release(d3d11_device);

test/testffmpeg_videotoolbox.h (deleted)

@@ -1,15 +0,0 @@
/*
  Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely.
*/

extern SDL_bool SetupVideoToolboxOutput(SDL_Renderer *renderer);
extern SDL_bool DisplayVideoToolboxFrame(SDL_Renderer *renderer, void *buffer, int srcX, int srcY, int srcW, int srcH, int dstX, int dstY, int dstW, int dstH);
extern void CleanupVideoToolboxOutput(void);

test/testffmpeg_videotoolbox.m (deleted)

@@ -1,147 +0,0 @@
/*
  Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely.
*/
#include <SDL3/SDL.h>

#include "testffmpeg_videotoolbox.h"

#include <CoreVideo/CoreVideo.h>
#include <Metal/Metal.h>
#include <QuartzCore/CAMetalLayer.h>
#include <simd/simd.h>
// Metal BT.601 to RGB conversion shader
static NSString *drawMetalShaderSource =
@" using namespace metal;\n"
"\n"
" struct Vertex\n"
" {\n"
" float4 position [[position]];\n"
" float2 texCoords;\n"
" };\n"
"\n"
" constexpr sampler s(coord::normalized, address::clamp_to_edge, filter::linear);\n"
"\n"
" vertex Vertex draw_vs(constant Vertex *vertices [[ buffer(0) ]], uint vid [[ vertex_id ]])\n"
" {\n"
" return vertices[ vid ];\n"
" }\n"
"\n"
" fragment float4 draw_ps_bt601(Vertex in [[ stage_in ]],\n"
" texture2d<float> textureY [[ texture(0) ]],\n"
" texture2d<float> textureUV [[ texture(1) ]])\n"
" {\n"
" float3 yuv = float3(textureY.sample(s, in.texCoords).r, textureUV.sample(s, in.texCoords).rg);\n"
" float3 rgb;\n"
" yuv += float3(-0.0627451017, -0.501960814, -0.501960814);\n"
" rgb.r = dot(yuv, float3(1.1644, 0.000, 1.596));\n"
" rgb.g = dot(yuv, float3(1.1644, -0.3918, -0.813));\n"
" rgb.b = dot(yuv, float3(1.1644, 2.0172, 0.000));\n"
" return float4(rgb, 1.0);\n"
" }\n"
;
// keep this structure aligned with the preceding drawMetalShaderSource's struct Vertex
typedef struct Vertex
{
    vector_float4 position;
    vector_float2 texCoord;
} Vertex;

static void SetVertex(Vertex *vertex, float x, float y, float s, float t)
{
    vertex->position[0] = x;
    vertex->position[1] = y;
    vertex->position[2] = 0.0f;
    vertex->position[3] = 1.0f;
    vertex->texCoord[0] = s;
    vertex->texCoord[1] = t;
}

static CAMetalLayer *metal_layer;
static id<MTLLibrary> library;
static id<MTLRenderPipelineState> video_pipeline;
SDL_bool SetupVideoToolboxOutput(SDL_Renderer *renderer)
{ @autoreleasepool {
    NSError *error;

    // Create the metal view
    metal_layer = (CAMetalLayer *)SDL_GetRenderMetalLayer(renderer);
    if (!metal_layer) {
        return SDL_FALSE;
    }

    // FIXME: Handle other colorspaces besides BT.601
    library = [metal_layer.device newLibraryWithSource:drawMetalShaderSource options:nil error:&error];

    MTLRenderPipelineDescriptor *videoPipelineDescriptor = [[MTLRenderPipelineDescriptor new] autorelease];
    videoPipelineDescriptor.vertexFunction = [library newFunctionWithName:@"draw_vs"];
    videoPipelineDescriptor.fragmentFunction = [library newFunctionWithName:@"draw_ps_bt601"];
    videoPipelineDescriptor.colorAttachments[0].pixelFormat = metal_layer.pixelFormat;

    video_pipeline = [metal_layer.device newRenderPipelineStateWithDescriptor:videoPipelineDescriptor error:nil];
    if (!video_pipeline) {
        SDL_SetError("Couldn't create video pipeline");
        return SDL_FALSE;
    }
    return SDL_TRUE;
}}
SDL_bool DisplayVideoToolboxFrame(SDL_Renderer *renderer, void *buffer, int srcX, int srcY, int srcW, int srcH, int dstX, int dstY, int dstW, int dstH)
{ @autoreleasepool {
    CVPixelBufferRef pPixelBuffer = (CVPixelBufferRef)buffer;
    size_t nPixelBufferWidth = CVPixelBufferGetWidthOfPlane(pPixelBuffer, 0);
    size_t nPixelBufferHeight = CVPixelBufferGetHeightOfPlane(pPixelBuffer, 0);
    id<MTLTexture> videoFrameTextureY = nil;
    id<MTLTexture> videoFrameTextureUV = nil;
    IOSurfaceRef pSurface = CVPixelBufferGetIOSurface(pPixelBuffer);

    MTLTextureDescriptor *textureDescriptorY = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm width:nPixelBufferWidth height:nPixelBufferHeight mipmapped:NO];
    MTLTextureDescriptor *textureDescriptorUV = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm width:CVPixelBufferGetWidthOfPlane(pPixelBuffer, 1) height:CVPixelBufferGetHeightOfPlane(pPixelBuffer, 1) mipmapped:NO];

    videoFrameTextureY = [[metal_layer.device newTextureWithDescriptor:textureDescriptorY iosurface:pSurface plane:0] autorelease];
    videoFrameTextureUV = [[metal_layer.device newTextureWithDescriptor:textureDescriptorUV iosurface:pSurface plane:1] autorelease];

    float flMinSrcX = (srcX + 0.5f) / nPixelBufferWidth;
    float flMaxSrcX = (srcX + srcW + 0.5f) / nPixelBufferWidth;
    float flMinSrcY = (srcY + 0.5f) / nPixelBufferHeight;
    float flMaxSrcY = (srcY + srcH + 0.5f) / nPixelBufferHeight;

    int nOutputWidth, nOutputHeight;
    nOutputWidth = metal_layer.drawableSize.width;
    nOutputHeight = metal_layer.drawableSize.height;

    float flMinDstX = 2.0f * ((dstX + 0.5f) / nOutputWidth) - 1.0f;
    float flMaxDstX = 2.0f * ((dstX + dstW + 0.5f) / nOutputWidth) - 1.0f;
    float flMinDstY = 2.0f * ((nOutputHeight - dstY - 0.5f) / nOutputHeight) - 1.0f;
    float flMaxDstY = 2.0f * ((nOutputHeight - (dstY + dstH) - 0.5f) / nOutputHeight) - 1.0f;

    Vertex arrVerts[4];
    SetVertex(&arrVerts[0], flMinDstX, flMaxDstY, flMinSrcX, flMaxSrcY);
    SetVertex(&arrVerts[1], flMinDstX, flMinDstY, flMinSrcX, flMinSrcY);
    SetVertex(&arrVerts[2], flMaxDstX, flMaxDstY, flMaxSrcX, flMaxSrcY);
    SetVertex(&arrVerts[3], flMaxDstX, flMinDstY, flMaxSrcX, flMinSrcY);

    id<MTLRenderCommandEncoder> renderEncoder = (id<MTLRenderCommandEncoder>)SDL_GetRenderMetalCommandEncoder(renderer);
    [renderEncoder setRenderPipelineState:video_pipeline];
    [renderEncoder setFragmentTexture:videoFrameTextureY atIndex:0];
    [renderEncoder setFragmentTexture:videoFrameTextureUV atIndex:1];
    [renderEncoder setVertexBytes:arrVerts length:sizeof(arrVerts) atIndex:0];
    [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:SDL_arraysize(arrVerts)];

    return SDL_TRUE;
}}
void CleanupVideoToolboxOutput()
{
}