mirror of https://github.com/obsproject/obs-studio.git synced 2024-09-20 13:08:50 +02:00

mac-virtualcam: Remove unused CMSampleBuffer utility functions

This change removes the unused CMSampleBuffer utility functions left
over from the previous implementation. Since we now construct the
CMSampleBuffer directly from an IOSurface, no custom construction
logic is needed anymore; that work is performed by the OBS plugin.
Fabian Mastenbroek 2022-05-26 17:10:28 +02:00
parent 202eb8f513
commit 5edabfe7c1
No known key found for this signature in database
GPG Key ID: 405FC6F81F0A7B85
2 changed files with 1 addition and 170 deletions
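For context, the removed helpers below built each frame by copying pixel bytes into a freshly allocated CVPixelBuffer, or by wrapping an NSData in a CMBlockBuffer. The replacement path wraps the shared IOSurface directly. The following is a minimal, illustrative sketch of what such an IOSurface-backed path can look like; it is not the plugin's actual code, and the helper name sampleBufferFromIOSurface is hypothetical:

#import <CoreMedia/CoreMedia.h>
#import <CoreMediaIO/CMIOSampleBuffer.h>
#import <CoreVideo/CoreVideo.h>
#import <IOSurface/IOSurface.h>

// Hypothetical sketch: wrap an existing IOSurface in a CVPixelBuffer and hand
// it to CoreMediaIO, so no pixel data has to be copied or described by hand.
static OSStatus sampleBufferFromIOSurface(IOSurfaceRef surface,
                                          CMSampleTimingInfo timingInfo,
                                          UInt64 sequenceNumber,
                                          CMSampleBufferRef *sampleBuffer)
{
        // Create a pixel buffer backed by the IOSurface (no copy of pixel data)
        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn ret = CVPixelBufferCreateWithIOSurface(
                kCFAllocatorDefault, surface, NULL, &pixelBuffer);
        if (ret != kCVReturnSuccess)
                return ret;

        // Derive the video format description from the pixel buffer itself
        CMFormatDescriptionRef format = NULL;
        OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(
                kCFAllocatorDefault, pixelBuffer, &format);
        if (err != noErr) {
                CVPixelBufferRelease(pixelBuffer);
                return err;
        }

        // Wrap everything in a CMIO sample buffer, as the removed helpers did
        err = CMIOSampleBufferCreateForImageBuffer(
                kCFAllocatorDefault, pixelBuffer, format, &timingInfo,
                sequenceNumber, 0 /* discontinuity flags */, sampleBuffer);

        CFRelease(format);
        CVPixelBufferRelease(pixelBuffer);
        return err;
}

Because the pixel data already lives in an IOSurface, no per-frame memcpy and no hand-built format description are required, which is why the copy-based helpers below could be deleted.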

View File

@@ -7,16 +7,6 @@
#include <CoreMediaIO/CMIOSampleBuffer.h>

OSStatus CMSampleBufferCreateFromData(NSSize size,
                                      CMSampleTimingInfo timingInfo,
                                      UInt64 sequenceNumber, NSData *data,
                                      CMSampleBufferRef *sampleBuffer);

OSStatus CMSampleBufferCreateFromDataNoCopy(NSSize size,
                                            CMSampleTimingInfo timingInfo,
                                            UInt64 sequenceNumber, NSData *data,
                                            CMSampleBufferRef *sampleBuffer);

CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
                                                  uint32_t fpsNumerator,
                                                  uint32_t fpsDenominator);

View File

@@ -7,165 +7,6 @@
#import "CMSampleBufferUtils.h"
#include "Logging.h"
/*!
CMSampleBufferCreateFromData
Creates a CMSampleBuffer by copying bytes from NSData into a CVPixelBuffer.
*/
OSStatus CMSampleBufferCreateFromData(NSSize size,
                                      CMSampleTimingInfo timingInfo,
                                      UInt64 sequenceNumber, NSData *data,
                                      CMSampleBufferRef *sampleBuffer)
{
        OSStatus err = noErr;

        // Create an empty pixel buffer
        CVPixelBufferRef pixelBuffer;
        err = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                  kCVPixelFormatType_422YpCbCr8, nil,
                                  &pixelBuffer);
        if (err != noErr) {
                DLog(@"CVPixelBufferCreate err %d", err);
                return err;
        }

        // Generate the video format description from that pixel buffer
        CMFormatDescriptionRef format;
        err = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer,
                                                           &format);
        if (err != noErr) {
                DLog(@"CMVideoFormatDescriptionCreateForImageBuffer err %d",
                     err);
                return err;
        }

        // Copy memory into the pixel buffer
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        uint8_t *dest =
                (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
        uint8_t *src = (uint8_t *)data.bytes;

        size_t destBytesPerRow =
                CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
        size_t srcBytesPerRow = size.width * 2;

        // Sometimes CVPixelBufferCreate will create a pixelbuffer that's a different
        // size than necessary to hold the frame (probably for some optimization reason).
        // If that is the case this will do a row-by-row copy into the buffer.
        if (destBytesPerRow == srcBytesPerRow) {
                memcpy(dest, src, data.length);
        } else {
                for (int line = 0; line < size.height; line++) {
                        memcpy(dest, src, srcBytesPerRow);
                        src += srcBytesPerRow;
                        dest += destBytesPerRow;
                }
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

        err = CMIOSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                   pixelBuffer, format,
                                                   &timingInfo, sequenceNumber,
                                                   0, sampleBuffer);
        CFRelease(format);
        CFRelease(pixelBuffer);

        if (err != noErr) {
                DLog(@"CMIOSampleBufferCreateForImageBuffer err %d", err);
                return err;
        }

        return noErr;
}
static void releaseNSData(void *o, void *, size_t)
{
        NSData *data = (__bridge_transfer NSData *)o;
        data = nil; // Assuming ARC is enabled
}

// From https://stackoverflow.com/questions/26158253/how-to-create-a-cmblockbufferref-from-nsdata
OSStatus createReadonlyBlockBuffer(CMBlockBufferRef *result, NSData *data)
{
        CMBlockBufferCustomBlockSource blockSource = {
                .version = kCMBlockBufferCustomBlockSourceVersion,
                .AllocateBlock = NULL,
                .FreeBlock = &releaseNSData,
                .refCon = (__bridge_retained void *)data,
        };
        return CMBlockBufferCreateWithMemoryBlock(NULL, (void *)data.bytes,
                                                  data.length, NULL,
                                                  &blockSource, 0, data.length,
                                                  0, result);
}
/*!
 CMSampleBufferCreateFromDataNoCopy

 Creates a CMSampleBuffer by using the bytes directly from NSData (without copying them).
 Seems to mostly work but does not work at full resolution in OBS for some reason (which prevents loopback testing).
 */
OSStatus CMSampleBufferCreateFromDataNoCopy(NSSize size,
                                            CMSampleTimingInfo timingInfo,
                                            UInt64 sequenceNumber, NSData *data,
                                            CMSampleBufferRef *sampleBuffer)
{
        OSStatus err = noErr;

        CMBlockBufferRef dataBuffer;
        createReadonlyBlockBuffer(&dataBuffer, data);

        // Magic format properties snagged from https://github.com/lvsti/CoreMediaIO-DAL-Example/blob/0392cbf27ed33425a1a5bd9f495b2ccec8f20501/Sources/Extras/CoreMediaIO/DeviceAbstractionLayer/Devices/Sample/PlugIn/CMIO_DP_Sample_Stream.cpp#L830
        NSDictionary *extensions = @{
                @"com.apple.cmio.format_extension.video.only_has_i_frames": @YES,
                (__bridge NSString *)kCMFormatDescriptionExtension_FieldCount: @1,
                (__bridge NSString *)kCMFormatDescriptionExtension_ColorPrimaries:
                        (__bridge NSString *)kCMFormatDescriptionColorPrimaries_SMPTE_C,
                (__bridge NSString *)kCMFormatDescriptionExtension_TransferFunction:
                        (__bridge NSString *)kCMFormatDescriptionTransferFunction_ITU_R_709_2,
                (__bridge NSString *)kCMFormatDescriptionExtension_YCbCrMatrix:
                        (__bridge NSString *)kCMFormatDescriptionYCbCrMatrix_ITU_R_601_4,
                (__bridge NSString *)kCMFormatDescriptionExtension_BytesPerRow: @(size.width * 2),
                (__bridge NSString *)kCMFormatDescriptionExtension_FormatName:
                        @"Component Video - CCIR-601 uyvy",
                (__bridge NSString *)kCMFormatDescriptionExtension_Version: @2,
        };

        CMFormatDescriptionRef format;
        err = CMVideoFormatDescriptionCreate(
                NULL, kCMVideoCodecType_422YpCbCr8, size.width, size.height,
                (__bridge CFDictionaryRef)extensions, &format);
        if (err != noErr) {
                DLog(@"CMVideoFormatDescriptionCreate err %d", err);
                return err;
        }

        size_t dataSize = data.length;
        err = CMIOSampleBufferCreate(kCFAllocatorDefault, dataBuffer, format, 1,
                                     1, &timingInfo, 1, &dataSize,
                                     sequenceNumber, 0, sampleBuffer);
        CFRelease(format);
        CFRelease(dataBuffer);

        if (err != noErr) {
                DLog(@"CMIOSampleBufferCreate err %d", err);
                return err;
        }

        return noErr;
}
CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
                                                  uint32_t fpsNumerator,
                                                  uint32_t fpsDenominator)
@@ -175,7 +16,7 @@ CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
        // timestamps and scales like mach_absolute_time() and NSEC_PER_SEC will work for display, but will error out
        // when trying to record.
        //
        // 600 is a commmon default in Apple's docs https://developer.apple.com/documentation/avfoundation/avmutablemovie/1390622-timescale
        // 600 is a common default in Apple's docs https://developer.apple.com/documentation/avfoundation/avmutablemovie/1390622-timescale
        CMTimeScale scale = 600;
        CMSampleTimingInfo timing;
        timing.duration =