/**
* @file src/platform/macos/av_video.m
* @brief Definitions for video capture on macOS.
*/
#import "av_video.h"

@implementation AVVideo

// XXX: Currently, this function only returns the screen IDs as names,
// which is not very helpful to the user. The API to retrieve names
// was deprecated with 10.9+.
// However, there is a solution with little external code that can be used:
// https://stackoverflow.com/questions/20025868/cgdisplayioserviceport-is-deprecated-in-os-x-10-9-how-to-replace
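// Returns one dictionary per active display, containing the display's
// CGDirectDisplayID ("id"), its numeric ID rendered as a string ("name"),
// and the localized screen name ("displayName").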
+ (NSArray<NSDictionary *> *)displayNames {
  CGDirectDisplayID displays[kMaxDisplays];
  uint32_t count;
  if (CGGetActiveDisplayList(kMaxDisplays, displays, &count) != kCGErrorSuccess) {
    return [NSArray array];
  }

  NSMutableArray *result = [NSMutableArray array];

  for (uint32_t i = 0; i < count; i++) {
    // getDisplayName: may return nil (e.g. when no NSScreen matches the
    // display ID); fall back to the numeric name so the dictionary literal
    // does not throw on a nil value.
    NSString *name = [NSString stringWithFormat:@"%u", displays[i]];
    NSString *displayName = [self getDisplayName:displays[i]];

    [result addObject:@{
      @"id": [NSNumber numberWithUnsignedInt:displays[i]],
      @"name": name,
      @"displayName": displayName != nil ? displayName : name,
    }];
  }

  return [NSArray arrayWithArray:result];
}

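// Looks up the NSScreen whose NSScreenNumber matches the given display ID
// and returns its localized name, or nil if no matching screen is found.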
+ (NSString *)getDisplayName:(CGDirectDisplayID)displayID {
  for (NSScreen *screen in [NSScreen screens]) {
    // Compare by value: pointer equality between NSNumber objects is not reliable.
    NSNumber *screenNumber = screen.deviceDescription[@"NSScreenNumber"];
    if ([screenNumber unsignedIntValue] == displayID) {
      return screen.localizedName;
    }
  }
  return nil;
}

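// Creates a capture session for the given display: reads the current display
// mode to size the frames, attaches an AVCaptureScreenInput with the
// requested frame rate, and starts the session running.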
- (id)initWithDisplay:(CGDirectDisplayID)displayID frameRate:(int)frameRate {
  self = [super init];

  CGDisplayModeRef mode = CGDisplayCopyDisplayMode(displayID);

  self.displayID = displayID;
  self.pixelFormat = kCVPixelFormatType_32BGRA;
  self.frameWidth = (int) CGDisplayModeGetPixelWidth(mode);
  self.frameHeight = (int) CGDisplayModeGetPixelHeight(mode);
  self.minFrameDuration = CMTimeMake(1, frameRate);
  self.session = [[AVCaptureSession alloc] init];
  self.videoOutputs = [[NSMapTable alloc] init];
  self.captureCallbacks = [[NSMapTable alloc] init];
  self.captureSignals = [[NSMapTable alloc] init];

  CFRelease(mode);

  AVCaptureScreenInput *screenInput = [[AVCaptureScreenInput alloc] initWithDisplayID:self.displayID];

  [screenInput setMinFrameDuration:self.minFrameDuration];

  if ([self.session canAddInput:screenInput]) {
    [self.session addInput:screenInput];
  }
  else {
    [screenInput release];
    return nil;
  }

  [self.session startRunning];

  return self;
}

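// Manual reference counting: release the map tables created in the
// initializer and stop the capture session before tearing the object down.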
- (void)dealloc {
  [self.videoOutputs release];
  [self.captureCallbacks release];
  [self.captureSignals release];
  [self.session stopRunning];
  [super dealloc];
}

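// Overrides the output dimensions used for video outputs created by later
// calls to capture:; outputs that already exist are not affected.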
- (void)setFrameWidth:(int)frameWidth frameHeight:(int)frameHeight {
  self.frameWidth = frameWidth;
  self.frameHeight = frameHeight;
}

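// Adds a new video data output that delivers frames to frameCallback on a
// dedicated serial queue. Returns a semaphore that is signaled once the
// callback asks to stop (by returning false), or nil if the output could not
// be added to the session.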
- (dispatch_semaphore_t)capture:(FrameCallbackBlock)frameCallback {
  @synchronized(self) {
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];

    [videoOutput setVideoSettings:@{
      (NSString *) kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:self.pixelFormat],
      (NSString *) kCVPixelBufferWidthKey: [NSNumber numberWithInt:self.frameWidth],
      (NSString *) kCVPixelBufferHeightKey: [NSNumber numberWithInt:self.frameHeight],
      (NSString *) AVVideoScalingModeKey: AVVideoScalingModeResizeAspect,
    }];

    // The relative priority passed here must be <= 0; otherwise
    // dispatch_queue_attr_make_with_qos_class() returns NULL and the QoS
    // request is silently dropped.
    dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL,
                                                                        QOS_CLASS_USER_INITIATED,
                                                                        0);
    dispatch_queue_t recordingQueue = dispatch_queue_create("videoCaptureQueue", qos);

    [videoOutput setSampleBufferDelegate:self queue:recordingQueue];

    // Reconfigure the session while it is not running.
    [self.session stopRunning];
    if ([self.session canAddOutput:videoOutput]) {
      [self.session addOutput:videoOutput];
    }
    else {
      [videoOutput release];
      return nil;
    }

    AVCaptureConnection *videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
    dispatch_semaphore_t signal = dispatch_semaphore_create(0);

    // Track the output, callback, and semaphore by connection so the delegate
    // callback can find and tear them down later.
    [self.videoOutputs setObject:videoOutput forKey:videoConnection];
    [self.captureCallbacks setObject:frameCallback forKey:videoConnection];
    [self.captureSignals setObject:signal forKey:videoConnection];

    [self.session startRunning];

    return signal;
  }
}

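// AVCaptureVideoDataOutputSampleBufferDelegate callback: forwards each sample
// buffer to the callback registered for this connection and tears the output
// down when the callback returns false.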
- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  fromConnection:(AVCaptureConnection *)connection {
  FrameCallbackBlock callback = [self.captureCallbacks objectForKey:connection];

  if (callback != nil) {
    if (!callback(sampleBuffer)) {
      // The callback requested to stop: remove this output from the session
      // and wake the waiter blocked on the semaphore returned by capture:.
      @synchronized(self) {
        [self.session stopRunning];
        [self.captureCallbacks removeObjectForKey:connection];
        [self.session removeOutput:[self.videoOutputs objectForKey:connection]];
        [self.videoOutputs removeObjectForKey:connection];
        dispatch_semaphore_signal([self.captureSignals objectForKey:connection]);
        [self.captureSignals removeObjectForKey:connection];
        [self.session startRunning];
      }
    }
  }
}

@end