Mirror of https://github.com/obsproject/obs-studio.git (synced 2024-09-20 13:08:50 +02:00)

Merge pull request #6573 from fabianishere/feat/mac-virtualcam

mac-virtualcam: Avoid transcoding where possible
Patrick Heyer 2022-06-12 15:52:05 +02:00 committed by GitHub
commit 1f72dad245
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 283 additions and 298 deletions
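The commit replaces the NSData-based frame copies between the OBS output and the DAL plug-in with IOSurface-backed CVPixelBuffers that are shared across the process boundary as Mach ports. As orientation before the per-file diffs, here is a minimal sketch of the sending side of that idea; it is not part of the commit and the helper name is hypothetical, but the CoreVideo/IOSurface calls are the ones the diff uses.

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
#import <IOSurface/IOSurface.h>

// Hypothetical helper, not from the diff: wrap the IOSurface behind a
// pixel buffer in a Mach port so another process can map the same memory.
static NSPort *PortForPixelBuffer(CVPixelBufferRef frame)
{
	// Non-NULL only for IOSurface-backed buffers, e.g. ones allocated
	// from a pool created with kCVPixelBufferIOSurfacePropertiesKey.
	IOSurfaceRef surface = CVPixelBufferGetIOSurface(frame);
	if (!surface)
		return nil;
	// The receiver recovers the surface with IOSurfaceLookupFromMachPort()
	// and wraps it via CVPixelBufferCreateWithIOSurface(); no pixel data
	// is copied in either direction.
	return [NSMachPort portWithMachPort:IOSurfaceCreateMachPort(surface)];
}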

View File

@@ -7,16 +7,6 @@
#include <CoreMediaIO/CMIOSampleBuffer.h>
OSStatus CMSampleBufferCreateFromData(NSSize size,
CMSampleTimingInfo timingInfo,
UInt64 sequenceNumber, NSData *data,
CMSampleBufferRef *sampleBuffer);
OSStatus CMSampleBufferCreateFromDataNoCopy(NSSize size,
CMSampleTimingInfo timingInfo,
UInt64 sequenceNumber, NSData *data,
CMSampleBufferRef *sampleBuffer);
CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
uint32_t fpsNumerator,
uint32_t fpsDenominator);

View File

@@ -7,165 +7,6 @@
#import "CMSampleBufferUtils.h"
#include "Logging.h"
/*!
CMSampleBufferCreateFromData
Creates a CMSampleBuffer by copying bytes from NSData into a CVPixelBuffer.
*/
OSStatus CMSampleBufferCreateFromData(NSSize size,
CMSampleTimingInfo timingInfo,
UInt64 sequenceNumber, NSData *data,
CMSampleBufferRef *sampleBuffer)
{
OSStatus err = noErr;
// Create an empty pixel buffer
CVPixelBufferRef pixelBuffer;
err = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
kCVPixelFormatType_422YpCbCr8, nil,
&pixelBuffer);
if (err != noErr) {
DLog(@"CVPixelBufferCreate err %d", err);
return err;
}
// Generate the video format description from that pixel buffer
CMFormatDescriptionRef format;
err = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer,
&format);
if (err != noErr) {
DLog(@"CMVideoFormatDescriptionCreateForImageBuffer err %d",
err);
return err;
}
// Copy memory into the pixel buffer
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *dest =
(uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
uint8_t *src = (uint8_t *)data.bytes;
size_t destBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
size_t srcBytesPerRow = size.width * 2;
// Sometimes CVPixelBufferCreate will create a pixelbuffer that's a different
// size than necessary to hold the frame (probably for some optimization reason).
// If that is the case this will do a row-by-row copy into the buffer.
if (destBytesPerRow == srcBytesPerRow) {
memcpy(dest, src, data.length);
} else {
for (int line = 0; line < size.height; line++) {
memcpy(dest, src, srcBytesPerRow);
src += srcBytesPerRow;
dest += destBytesPerRow;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
err = CMIOSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
pixelBuffer, format,
&timingInfo, sequenceNumber,
0, sampleBuffer);
CFRelease(format);
CFRelease(pixelBuffer);
if (err != noErr) {
DLog(@"CMIOSampleBufferCreateForImageBuffer err %d", err);
return err;
}
return noErr;
}
static void releaseNSData(void *o, void *, size_t)
{
NSData *data = (__bridge_transfer NSData *)o;
data = nil; // Assuming ARC is enabled
}
// From https://stackoverflow.com/questions/26158253/how-to-create-a-cmblockbufferref-from-nsdata
OSStatus createReadonlyBlockBuffer(CMBlockBufferRef *result, NSData *data)
{
CMBlockBufferCustomBlockSource blockSource = {
.version = kCMBlockBufferCustomBlockSourceVersion,
.AllocateBlock = NULL,
.FreeBlock = &releaseNSData,
.refCon = (__bridge_retained void *)data,
};
return CMBlockBufferCreateWithMemoryBlock(NULL, (void *)data.bytes,
data.length, NULL,
&blockSource, 0, data.length,
0, result);
}
/*!
CMSampleBufferCreateFromDataNoCopy
Creates a CMSampleBuffer by using the bytes directly from NSData (without copying them).
Seems to mostly work but does not work at full resolution in OBS for some reason (which prevents loopback testing).
*/
OSStatus CMSampleBufferCreateFromDataNoCopy(NSSize size,
CMSampleTimingInfo timingInfo,
UInt64 sequenceNumber, NSData *data,
CMSampleBufferRef *sampleBuffer)
{
OSStatus err = noErr;
CMBlockBufferRef dataBuffer;
createReadonlyBlockBuffer(&dataBuffer, data);
// Magic format properties snagged from https://github.com/lvsti/CoreMediaIO-DAL-Example/blob/0392cbf27ed33425a1a5bd9f495b2ccec8f20501/Sources/Extras/CoreMediaIO/DeviceAbstractionLayer/Devices/Sample/PlugIn/CMIO_DP_Sample_Stream.cpp#L830
NSDictionary *extensions = @{
@"com.apple.cmio.format_extension.video.only_has_i_frames":
@YES,
(__bridge NSString *)
kCMFormatDescriptionExtension_FieldCount: @1,
(__bridge NSString *)
kCMFormatDescriptionExtension_ColorPrimaries:
(__bridge NSString *)
kCMFormatDescriptionColorPrimaries_SMPTE_C,
(__bridge NSString *)
kCMFormatDescriptionExtension_TransferFunction: (
__bridge NSString *)
kCMFormatDescriptionTransferFunction_ITU_R_709_2,
(__bridge NSString *)
kCMFormatDescriptionExtension_YCbCrMatrix: (__bridge NSString *)
kCMFormatDescriptionYCbCrMatrix_ITU_R_601_4,
(__bridge NSString *)
kCMFormatDescriptionExtension_BytesPerRow: @(size.width * 2),
(__bridge NSString *)kCMFormatDescriptionExtension_FormatName:
@"Component Video - CCIR-601 uyvy",
(__bridge NSString *)kCMFormatDescriptionExtension_Version: @2,
};
CMFormatDescriptionRef format;
err = CMVideoFormatDescriptionCreate(
NULL, kCMVideoCodecType_422YpCbCr8, size.width, size.height,
(__bridge CFDictionaryRef)extensions, &format);
if (err != noErr) {
DLog(@"CMVideoFormatDescriptionCreate err %d", err);
return err;
}
size_t dataSize = data.length;
err = CMIOSampleBufferCreate(kCFAllocatorDefault, dataBuffer, format, 1,
1, &timingInfo, 1, &dataSize,
sequenceNumber, 0, sampleBuffer);
CFRelease(format);
CFRelease(dataBuffer);
if (err != noErr) {
DLog(@"CMIOSampleBufferCreate err %d", err);
return err;
}
return noErr;
}
CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
uint32_t fpsNumerator,
uint32_t fpsDenominator)
@@ -175,7 +16,7 @@ CMSampleTimingInfo CMSampleTimingInfoForTimestamp(uint64_t timestampNanos,
// timestamps and scales like mach_absolute_time() and NSEC_PER_SEC will work for display, but will error out
// when trying to record.
//
// 600 is a commmon default in Apple's docs https://developer.apple.com/documentation/avfoundation/avmutablemovie/1390622-timescale
// 600 is a common default in Apple's docs https://developer.apple.com/documentation/avfoundation/avmutablemovie/1390622-timescale
CMTimeScale scale = 600;
CMSampleTimingInfo timing;
timing.duration =
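The hunk above is cut off by the diff view. As a hedged sketch of what the surviving function computes (not the commit's exact code; rounding details may differ), the nanosecond timestamp and per-frame duration are rescaled into the 600-tick timebase described in the comment:

#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>

// Sketch only: one plausible completion of the truncated function above.
static CMSampleTimingInfo TimingInfoForNanos(uint64_t timestampNanos,
					     uint32_t fpsNumerator,
					     uint32_t fpsDenominator)
{
	CMTimeScale scale = 600;
	CMSampleTimingInfo timing;
	// One frame lasts fpsDenominator / fpsNumerator seconds, expressed
	// here in 600ths of a second.
	timing.duration = CMTimeMake(
		(int64_t)scale * fpsDenominator / fpsNumerator, scale);
	// Rescale the nanosecond timestamp into the same timebase.
	timing.presentationTimeStamp = CMTimeMake(
		(int64_t)(timestampNanos * scale / NSEC_PER_SEC), scale);
	timing.decodeTimeStamp = kCMTimeInvalid;
	return timing;
}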

View File

@@ -1,15 +1,12 @@
# Build DAL plugin universal to ensure compatibility with Rosetta-translated
# apps on arm64 hosts
set(CMAKE_OSX_ARCHITECTURES "x86_64;arm64")
project(mac-dal-plugin)
find_library(COCOA Cocoa)
find_library(COREMEDIA CoreMedia)
find_library(COREMEDIAIO CoreMediaIO)
find_library(COREVIDEO CoreVideo)
find_library(IOSURFACE IOSurface)
mark_as_advanced(COCOA COREMEDIA COREMEDIAIO COREVIDEO)
mark_as_advanced(COCOA COREMEDIA COREMEDIAIO COREVIDEO IOSURFACE)
add_library(mac-dal-plugin MODULE)
add_library(OBS::mac-dal-plugin ALIAS mac-dal-plugin)
@@ -41,8 +38,9 @@ target_include_directories(
target_compile_options(mac-dal-plugin PRIVATE -fobjc-arc -fobjc-weak)
target_link_libraries(mac-dal-plugin PRIVATE ${COCOA} ${COREMEDIA}
${COREMEDIAIO} ${COREVIDEO})
target_link_libraries(
mac-dal-plugin PRIVATE ${COCOA} ${COREMEDIA} ${COREMEDIAIO} ${COREVIDEO}
${IOSURFACE})
set(MACOSX_PLUGIN_BUNDLE_TYPE "BNDL")
target_sources(mac-dal-plugin PRIVATE placeholder.png)
@@ -58,6 +56,10 @@ set_target_properties(
FOLDER "plugins"
VERSION "0"
SOVERSION "0"
# Force the DAL plugin to be built for arm64e as well. Note that
# we cannot build OBS for arm64e, since its libraries are not
# built for this architecture at the moment.
OSX_ARCHITECTURES "x86_64;arm64;arm64e"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/../../"
MACOSX_BUNDLE_INFO_PLIST
"${CMAKE_SOURCE_DIR}/cmake/bundle/macOS/Virtualcam-Info.plist.in")

View File

@@ -6,16 +6,16 @@
//
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@protocol MachClientDelegate
- (void)receivedFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData;
- (void)receivedPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
- (void)receivedStop;
@end
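A minimal adopter of the revised protocol might look like the following; the class is hypothetical and assumes the MachClientDelegate declaration above is in scope, but it shows what changes for clients: they now receive a ready-made CVPixelBuffer instead of raw NSData bytes.

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>

// Hypothetical delegate implementation, not part of the commit.
@interface FrameReceiver : NSObject <MachClientDelegate>
@end

@implementation FrameReceiver
- (void)receivedPixelBuffer:(CVPixelBufferRef)frame
		  timestamp:(uint64_t)timestamp
	       fpsNumerator:(uint32_t)fpsNumerator
	     fpsDenominator:(uint32_t)fpsDenominator
{
	NSLog(@"frame %zux%zu at %llu (%u/%u fps)",
	      CVPixelBufferGetWidth(frame), CVPixelBufferGetHeight(frame),
	      timestamp, fpsNumerator, fpsDenominator);
}

- (void)receivedStop
{
	NSLog(@"stream stopped");
}
@end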

View File

@@ -101,29 +101,37 @@
break;
case MachMsgIdFrame:
VLog(@"Received frame message");
if (components.count >= 6) {
CGFloat width;
[components[0] getBytes:&width length:sizeof(width)];
CGFloat height;
[components[1] getBytes:&height length:sizeof(height)];
if (components.count >= 4) {
NSMachPort *framePort = (NSMachPort *)components[0];
IOSurfaceRef surface = IOSurfaceLookupFromMachPort(
[framePort machPort]);
CVPixelBufferRef frame;
CVPixelBufferCreateWithIOSurface(kCFAllocatorDefault,
surface, NULL, &frame);
uint64_t timestamp;
[components[2] getBytes:&timestamp
[components[1] getBytes:&timestamp
length:sizeof(timestamp)];
VLog(@"Received frame data: %fx%f (%llu)", width,
height, timestamp);
NSData *frameData = components[3];
VLog(@"Received frame data: %zux%zu (%llu)",
CVPixelBufferGetWidth(frame),
CVPixelBufferGetHeight(frame), timestamp);
uint32_t fpsNumerator;
[components[4] getBytes:&fpsNumerator
[components[2] getBytes:&fpsNumerator
length:sizeof(fpsNumerator)];
uint32_t fpsDenominator;
[components[5] getBytes:&fpsDenominator
[components[3] getBytes:&fpsDenominator
length:sizeof(fpsDenominator)];
[self.delegate
receivedFrameWithSize:NSMakeSize(width, height)
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator
frameData:frameData];
[self.delegate receivedPixelBuffer:frame
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator];
CVPixelBufferRelease(frame);
CFRelease(surface);
}
break;
case MachMsgIdStop:
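The receiving path above boils down to a small amount of IOSurface plumbing. A hypothetical helper (not from the diff) makes the lookup and the ownership handoff explicit:

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
#import <IOSurface/IOSurface.h>

// Sketch of the client-side reconstruction shown above.
static CVPixelBufferRef PixelBufferFromPort(NSMachPort *framePort)
{
	// Recover the surface shared by the server process.
	IOSurfaceRef surface =
		IOSurfaceLookupFromMachPort([framePort machPort]);
	if (!surface)
		return NULL;
	CVPixelBufferRef frame = NULL;
	// Wrap, don't copy: the pixel buffer aliases the shared memory.
	CVReturn err = CVPixelBufferCreateWithIOSurface(kCFAllocatorDefault,
							surface, NULL, &frame);
	// The pixel buffer retains the surface, so drop the lookup reference.
	CFRelease(surface);
	return err == kCVReturnSuccess ? frame : NULL; // caller must release
}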

View File

@@ -203,19 +203,21 @@ typedef enum {
#pragma mark - MachClientDelegate
- (void)receivedFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData
- (void)receivedPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
size_t width = CVPixelBufferGetWidth(frame);
size_t height = CVPixelBufferGetHeight(frame);
dispatch_sync(_stateQueue, ^{
if (_state == PlugInStateWaitingForServer) {
NSUserDefaults *defaults =
[NSUserDefaults standardUserDefaults];
[defaults setInteger:size.width
[defaults setInteger:(long)width
forKey:kTestCardWidthKey];
[defaults setInteger:size.height
[defaults setInteger:(long)height
forKey:kTestCardHeightKey];
[defaults setDouble:(double)fpsNumerator /
(double)fpsDenominator
@@ -234,11 +236,10 @@ typedef enum {
dispatch_time(DISPATCH_TIME_NOW, 5.0 * NSEC_PER_SEC),
5.0 * NSEC_PER_SEC, (1ull * NSEC_PER_SEC) / 10);
[self.stream queueFrameWithSize:size
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator
frameData:frameData];
[self.stream queuePixelBuffer:frame
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator];
}
- (void)receivedStop

View File

@@ -18,6 +18,7 @@
// along with obs-mac-virtualcam. If not, see <http://www.gnu.org/licenses/>.
#import "OBSDALObjectStore.h"
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@@ -35,11 +36,10 @@ NS_ASSUME_NONNULL_BEGIN
- (void)stopServingDefaultFrames;
- (void)queueFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData;
- (void)queuePixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
@end

View File

@@ -349,11 +349,10 @@
}
}
- (void)queueFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData
- (void)queuePixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
if (CMSimpleQueueGetFullness(self.queue) >= 1.0) {
DLog(@"Queue is full, bailing out");
@@ -374,9 +373,34 @@
self.sequenceNumber = CMIOGetNextSequenceNumber(self.sequenceNumber);
CMSampleBufferRef sampleBuffer;
CMSampleBufferCreateFromData(size, timingInfo, self.sequenceNumber,
frameData, &sampleBuffer);
CMSimpleQueueEnqueue(self.queue, sampleBuffer);
// Generate the video format description from that pixel buffer
CMVideoFormatDescriptionRef format;
err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
frame, &format);
if (err != noErr) {
DLog(@"CMVideoFormatDescriptionCreateForImageBuffer err %d",
err);
return;
}
err = CMIOSampleBufferCreateForImageBuffer(
kCFAllocatorDefault, frame, format, &timingInfo,
self.sequenceNumber, kCMIOSampleBufferNoDiscontinuities,
&sampleBuffer);
CFRelease(format);
if (err != noErr) {
DLog(@"CMIOSampleBufferCreateForImageBuffer err %d", err);
return;
}
err = CMSimpleQueueEnqueue(self.queue, sampleBuffer);
if (err != noErr) {
DLog(@"CMSimpleQueueEnqueue err %d", err);
return;
}
// Inform the clients that the queue has been altered
if (self.alteredProc != NULL) {
@@ -389,7 +413,8 @@
{
CMVideoFormatDescriptionRef formatDescription;
OSStatus err = CMVideoFormatDescriptionCreate(
kCFAllocatorDefault, kCMVideoCodecType_422YpCbCr8,
kCFAllocatorDefault,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
self.testCardSize.width, self.testCardSize.height, NULL,
&formatDescription);
if (err != noErr) {
@@ -551,7 +576,14 @@
- (BOOL)isPropertySettableWithAddress:(CMIOObjectPropertyAddress)address
{
return false;
switch (address.mSelector) {
case kCMIOStreamPropertyFormatDescription:
case kCMIOStreamPropertyFrameRate:
// Suppress error logs complaining about the application not being able to set the desired format or frame rate.
return true;
default:
return false;
}
}
- (void)setPropertyDataWithAddress:(CMIOObjectPropertyAddress)address
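Condensing the queueing hunk earlier in this file into one hypothetical helper (a sketch, not the commit's code) shows the full path a frame takes into the DAL stream: derive a format description from the pixel buffer, wrap it in a CMIO sample buffer, and enqueue it.

#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreMediaIO/CMIOSampleBuffer.h>

// Sketch of the enqueue path above; error logging omitted for brevity.
static void EnqueuePixelBuffer(CMSimpleQueueRef queue, CVPixelBufferRef frame,
			       CMSampleTimingInfo timing, UInt64 sequenceNumber)
{
	CMVideoFormatDescriptionRef format = NULL;
	if (CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
							 frame,
							 &format) != noErr)
		return;
	CMSampleBufferRef sampleBuffer = NULL;
	OSStatus err = CMIOSampleBufferCreateForImageBuffer(
		kCFAllocatorDefault, frame, format, &timing, sequenceNumber,
		kCMIOSampleBufferNoDiscontinuities, &sampleBuffer);
	CFRelease(format);
	if (err == noErr)
		CMSimpleQueueEnqueue(queue, sampleBuffer); // queue owns it now
}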

View File

@@ -1,8 +1,10 @@
project(mac-virtualcam)
find_library(APPKIT AppKit)
find_library(COREVIDEO CoreVideo)
find_library(IOSURFACE IOSurface)
mark_as_advanced(APPKIT)
mark_as_advanced(APPKIT COREVIDEO IOSURFACE)
add_library(mac-virtualcam MODULE)
add_library(OBS::virtualcam ALIAS mac-virtualcam)
@@ -15,8 +17,9 @@ target_include_directories(
mac-virtualcam
PRIVATE "$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>/../common")
target_link_libraries(mac-virtualcam PRIVATE OBS::libobs OBS::frontend-api
${APPKIT})
target_link_libraries(
mac-virtualcam PRIVATE OBS::libobs OBS::frontend-api ${APPKIT} ${COREVIDEO}
${IOSURFACE})
target_compile_features(mac-virtualcam PRIVATE cxx_deleted_functions
cxx_rvalue_references cxx_std_17)

View File

@@ -6,6 +6,7 @@
//
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@@ -16,11 +17,10 @@ NS_ASSUME_NONNULL_BEGIN
/*!
Will eventually be used for sending frames to all connected clients
*/
- (void)sendFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameBytes:(uint8_t *)frameBytes;
- (void)sendPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
- (void)stop;

View File

@@ -7,6 +7,7 @@
#import "OBSDALMachServer.h"
#include <obs-module.h>
#include <CoreVideo/CoreVideo.h>
#include "MachProtocol.h"
#include "Defines.h"
@@ -100,19 +101,24 @@
receivePort:nil
components:components];
message.msgid = msgId;
if (![message
if (![port isValid] ||
![message
sendBeforeDate:
[NSDate dateWithTimeIntervalSinceNow:
1.0]]) {
blog(LOG_DEBUG,
"failed to send message to %d, removing it from the clients!",
((NSMachPort *)port).machPort);
[port invalidate];
[removedPorts addObject:port];
}
} @catch (NSException *exception) {
blog(LOG_DEBUG,
"failed to send message (exception) to %d, removing it from the clients!",
((NSMachPort *)port).machPort);
[port invalidate];
[removedPorts addObject:port];
}
}
@@ -121,23 +127,16 @@
[self.clientPorts minusSet:removedPorts];
}
- (void)sendFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameBytes:(uint8_t *)frameBytes
- (void)sendPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
if ([self.clientPorts count] <= 0) {
return;
}
@autoreleasepool {
CGFloat width = size.width;
NSData *widthData = [NSData dataWithBytes:&width
length:sizeof(width)];
CGFloat height = size.height;
NSData *heightData = [NSData dataWithBytes:&height
length:sizeof(height)];
NSData *timestampData = [NSData
dataWithBytes:&timestamp
length:sizeof(timestamp)];
@@ -148,19 +147,20 @@
dataWithBytes:&fpsDenominator
length:sizeof(fpsDenominator)];
// NOTE: I'm not totally sure about the safety of dataWithBytesNoCopy in this context.
// Seems like there could potentially be an issue if the frameBuffer went away before the
// mach message finished sending. But it seems to be working and avoids a memory copy. Alternately
// we could do something like
// NSData *frameData = [NSData dataWithBytes:(void *)frameBytes length:size.width * size.height * 2];
NSData *frameData = [NSData
dataWithBytesNoCopy:(void *)frameBytes
length:size.width * size.height * 2
freeWhenDone:NO];
NSPort *framePort = [NSMachPort
portWithMachPort:IOSurfaceCreateMachPort(
CVPixelBufferGetIOSurface(
frame))];
if (!framePort) {
blog(LOG_ERROR,
"unable to allocate mach port for pixel buffer");
return;
}
[self sendMessageToClientsWithMsgId:MachMsgIdFrame
components:@[
widthData, heightData,
timestampData, frameData,
framePort, timestampData,
fpsNumeratorData,
fpsDenominatorData
]];

View File

@@ -1,5 +1,4 @@
#include <obs-module.h>
#include <AppKit/AppKit.h>
#include "OBSDALMachServer.h"
#include "Defines.h"
@@ -10,9 +9,12 @@ MODULE_EXPORT const char *obs_module_description(void)
return "macOS virtual webcam output";
}
obs_output_t *outputRef;
obs_video_info videoInfo;
static OBSDALMachServer *sMachServer;
struct virtualcam_data {
obs_output_t *output;
obs_video_info videoInfo;
CVPixelBufferPoolRef pool;
OBSDALMachServer *machServer;
};
static bool check_dal_plugin()
{
@@ -105,37 +107,52 @@ static bool check_dal_plugin()
return true;
}
FourCharCode convert_video_format_to_mac(enum video_format format)
{
switch (format) {
case VIDEO_FORMAT_I420:
return kCVPixelFormatType_420YpCbCr8Planar;
case VIDEO_FORMAT_NV12:
return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
case VIDEO_FORMAT_UYVY:
return kCVPixelFormatType_422YpCbCr8;
default:
// Zero indicates that the format is not supported on macOS
// Note that some formats do have an associated constant, but
// constructing such formats fails with kCVReturnInvalidPixelFormat.
return 0;
}
}
static const char *virtualcam_output_get_name(void *type_data)
{
(void)type_data;
return obs_module_text("Plugin_Name");
}
// This is a dummy pointer so we have something to return from virtualcam_output_create
static void *data = &data;
static void *virtualcam_output_create(obs_data_t *settings,
obs_output_t *output)
{
UNUSED_PARAMETER(settings);
outputRef = output;
struct virtualcam_data *vcam =
(struct virtualcam_data *)bzalloc(sizeof(*vcam));
blog(LOG_DEBUG, "output_create");
sMachServer = [[OBSDALMachServer alloc] init];
return data;
vcam->output = output;
vcam->machServer = [[OBSDALMachServer alloc] init];
return vcam;
}
static void virtualcam_output_destroy(void *data)
{
UNUSED_PARAMETER(data);
blog(LOG_DEBUG, "output_destroy");
sMachServer = nil;
struct virtualcam_data *vcam = (struct virtualcam_data *)data;
vcam->machServer = nil;
}
static bool virtualcam_output_start(void *data)
{
UNUSED_PARAMETER(data);
struct virtualcam_data *vcam = (struct virtualcam_data *)data;
bool hasDalPlugin = check_dal_plugin();
@@ -143,18 +160,45 @@ static bool virtualcam_output_start(void *data)
return false;
}
blog(LOG_DEBUG, "output_start");
obs_get_video_info(&vcam->videoInfo);
[sMachServer run];
FourCharCode video_format =
convert_video_format_to_mac(vcam->videoInfo.output_format);
obs_get_video_info(&videoInfo);
if (!video_format) {
// Selected output format is not supported natively by CoreVideo, CPU conversion necessary
blog(LOG_WARNING,
"Selected output format (%s) not supported by CoreVideo, enabling CPU transcoding...",
get_video_format_name(vcam->videoInfo.output_format));
struct video_scale_info conversion = {};
conversion.format = VIDEO_FORMAT_UYVY;
conversion.width = videoInfo.output_width;
conversion.height = videoInfo.output_height;
obs_output_set_video_conversion(outputRef, &conversion);
if (!obs_output_begin_data_capture(outputRef, 0)) {
struct video_scale_info conversion = {};
conversion.format = VIDEO_FORMAT_NV12;
conversion.width = vcam->videoInfo.output_width;
conversion.height = vcam->videoInfo.output_height;
obs_output_set_video_conversion(vcam->output, &conversion);
video_format = convert_video_format_to_mac(conversion.format);
}
NSDictionary *pAttr = @{};
NSDictionary *pbAttr = @{
(id)kCVPixelBufferPixelFormatTypeKey: @(video_format),
(id)kCVPixelBufferWidthKey: @(vcam->videoInfo.output_width),
(id)kCVPixelBufferHeightKey: @(vcam->videoInfo.output_height),
(id)kCVPixelBufferIOSurfacePropertiesKey: @{}
};
CVReturn status = CVPixelBufferPoolCreate(
kCFAllocatorDefault, (__bridge CFDictionaryRef)pAttr,
(__bridge CFDictionaryRef)pbAttr, &vcam->pool);
if (status != kCVReturnSuccess) {
blog(LOG_ERROR,
"unable to allocate pixel buffer pool (error %d)", status);
return false;
}
[vcam->machServer run];
if (!obs_output_begin_data_capture(vcam->output, 0)) {
return false;
}
@@ -163,33 +207,97 @@ static bool virtualcam_output_start(void *data)
static void virtualcam_output_stop(void *data, uint64_t ts)
{
UNUSED_PARAMETER(data);
UNUSED_PARAMETER(ts);
blog(LOG_DEBUG, "output_stop");
obs_output_end_data_capture(outputRef);
[sMachServer stop];
struct virtualcam_data *vcam = (struct virtualcam_data *)data;
obs_output_end_data_capture(vcam->output);
[vcam->machServer stop];
CVPixelBufferPoolRelease(vcam->pool);
}
static void virtualcam_output_raw_video(void *data, struct video_data *frame)
{
UNUSED_PARAMETER(data);
struct virtualcam_data *vcam = (struct virtualcam_data *)data;
uint8_t *outData = frame->data[0];
if (frame->linesize[0] != (videoInfo.output_width * 2)) {
blog(LOG_ERROR,
"unexpected frame->linesize (expected:%d actual:%d)",
(videoInfo.output_width * 2), frame->linesize[0]);
CVPixelBufferRef frameRef = nil;
CVReturn status = CVPixelBufferPoolCreatePixelBuffer(
kCFAllocatorDefault, vcam->pool, &frameRef);
if (status != kCVReturnSuccess) {
blog(LOG_ERROR, "unable to allocate pixel buffer (error %d)",
status);
return;
}
CGFloat width = videoInfo.output_width;
CGFloat height = videoInfo.output_height;
// Copy all planes into pixel buffer
size_t planeCount = CVPixelBufferGetPlaneCount(frameRef);
CVPixelBufferLockBaseAddress(frameRef, 0);
[sMachServer sendFrameWithSize:NSMakeSize(width, height)
timestamp:frame->timestamp
fpsNumerator:videoInfo.fps_num
fpsDenominator:videoInfo.fps_den
frameBytes:outData];
if (planeCount == 0) {
uint8_t *src = frame->data[0];
uint8_t *dst = (uint8_t *)CVPixelBufferGetBaseAddress(frameRef);
size_t destBytesPerRow = CVPixelBufferGetBytesPerRow(frameRef);
size_t srcBytesPerRow = frame->linesize[0];
size_t height = CVPixelBufferGetHeight(frameRef);
// Sometimes CVPixelBufferCreate will create a pixel buffer that's a different
// size than necessary to hold the frame (probably for some optimization reason).
// If that is the case this will do a row-by-row copy into the buffer.
if (destBytesPerRow == srcBytesPerRow) {
memcpy(dst, src, destBytesPerRow * height);
} else {
for (int line = 0; (size_t)line < height; line++) {
memcpy(dst, src, srcBytesPerRow);
src += srcBytesPerRow;
dst += destBytesPerRow;
}
}
} else {
for (size_t plane = 0; plane < planeCount; plane++) {
uint8_t *src = frame->data[plane];
if (!src) {
blog(LOG_WARNING,
"Video data from OBS contains less planes than CVPixelBuffer");
break;
}
uint8_t *dst =
(uint8_t *)CVPixelBufferGetBaseAddressOfPlane(
frameRef, plane);
size_t destBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(frameRef,
plane);
size_t srcBytesPerRow = frame->linesize[plane];
size_t height =
CVPixelBufferGetHeightOfPlane(frameRef, plane);
if (destBytesPerRow == srcBytesPerRow) {
memcpy(dst, src, destBytesPerRow * height);
} else {
for (int line = 0; (size_t)line < height;
line++) {
memcpy(dst, src, srcBytesPerRow);
src += srcBytesPerRow;
dst += destBytesPerRow;
}
}
}
}
CVPixelBufferUnlockBaseAddress(frameRef, 0);
// Share pixel buffer with clients
[vcam->machServer sendPixelBuffer:frameRef
timestamp:frame->timestamp
fpsNumerator:vcam->videoInfo.fps_num
fpsDenominator:vcam->videoInfo.fps_den];
CVPixelBufferRelease(frameRef);
}
struct obs_output_info virtualcam_output_info = {
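To summarize the start-up logic spread across the hunks above, here is a hypothetical condensation reusing names from the diff (a sketch, not a drop-in replacement): map the OBS output format to a native CoreVideo pixel format, fall back to NV12 with OBS doing the CPU conversion when there is no native match, then allocate an IOSurface-capable pixel buffer pool in that format.

// Sketch only; mirrors the behavior of virtualcam_output_start above.
static bool create_pixel_buffer_pool(struct virtualcam_data *vcam)
{
	FourCharCode format =
		convert_video_format_to_mac(vcam->videoInfo.output_format);
	if (!format) {
		// No native CoreVideo equivalent: ask OBS to convert to NV12
		// on the CPU before frames reach the output.
		struct video_scale_info conversion = {};
		conversion.format = VIDEO_FORMAT_NV12;
		conversion.width = vcam->videoInfo.output_width;
		conversion.height = vcam->videoInfo.output_height;
		obs_output_set_video_conversion(vcam->output, &conversion);
		format = convert_video_format_to_mac(conversion.format);
	}
	// The empty IOSurface properties dictionary requests IOSurface
	// backing, which the Mach-port frame sharing depends on.
	NSDictionary *pbAttr = @{
		(id)kCVPixelBufferPixelFormatTypeKey: @(format),
		(id)kCVPixelBufferWidthKey: @(vcam->videoInfo.output_width),
		(id)kCVPixelBufferHeightKey: @(vcam->videoInfo.output_height),
		(id)kCVPixelBufferIOSurfacePropertiesKey: @{},
	};
	return CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
				       (__bridge CFDictionaryRef)pbAttr,
				       &vcam->pool) == kCVReturnSuccess;
}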