//
//  main.m
//  vsyncanalysis
//
//  Created by Alexandre Janniaux on 27/10/2021.
//

// NOTE(review): the arguments of the #include/#import directives below were
// lost (all angle-bracketed text was stripped from this file); they are
// reconstructed from the symbols used — confirm against the original project.
#include <TargetConditionals.h>
#include <assert.h>
#include <math.h>    /* cos(), fabs() */
#include <stdlib.h>  /* getenv() */
#include <string.h>  /* strcmp() */

#if TARGET_OS_OSX
#import <Cocoa/Cocoa.h>
#else
#import <UIKit/UIKit.h>
#endif

#import "AppDelegate.h"

//
//  ViewController.m
//  vsyncanalysis
//
//  Created by Alexandre Janniaux on 27/10/2021.
//

//#define USE_OPENGL_ES

#ifdef USE_OPENGL_ES
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES3/gl.h>
#import <QuartzCore/CAEAGLLayer.h>
#endif

#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>

/* Pick the platform's base view class. */
#if TARGET_OS_IPHONE
#define VIEW_TYPE UIView
#else
#define VIEW_TYPE NSView
#endif

/*
@interface GLWindow : UIView
{
    CAEAGLLayer *_layer;
}
@end
*/

/* A view whose backing layer is an AVSampleBufferDisplayLayer, so frames can
 * be enqueued on it directly and picked up by Picture-in-Picture. */
@interface PIPWindow : VIEW_TYPE
@end

@implementation PIPWindow : VIEW_TYPE

/* iOS: class used by UIView to create the backing layer. */
+ (Class)layerClass
{
    return [AVSampleBufferDisplayLayer class];
}

/* macOS: invoked when wantsLayer is set to YES on the view. */
- (CALayer *)makeBackingLayer
{
    return [[AVSampleBufferDisplayLayer alloc] init];
}

@end

#if 0
@implementation GLWindow
+ (Class)layerClass
{
    return [CAEAGLLayer class];
}
@end
#endif

#if TARGET_OS_IPHONE
#define RESPONDER UIResponder
#define APPLICATIONDELEGATE UIApplicationDelegate
#else
#define RESPONDER NSResponder
#define APPLICATIONDELEGATE NSApplicationDelegate
#endif

// NOTE(review): the protocol conformance list was presumably stripped with the
// other angle-bracketed text (APPLICATIONDELEGATE is defined above but was
// never used); restored here so `_pipCtrl.delegate = self` and
// `playbackDelegate:self` type-check — confirm against the original.
@interface AppDelegate : RESPONDER <APPLICATIONDELEGATE,
                                    AVPictureInPictureControllerDelegate,
                                    AVPictureInPictureSampleBufferPlaybackDelegate>
{
    PIPWindow *_pip;
#ifdef USE_OPENGL_ES
    //GLWindow *_view;
    EAGLContext *_context;
    CAEAGLLayer *_eagllayer;
    /* GL objects are only touched in USE_OPENGL_ES paths; declared here so
     * GLuint (defined by the GL headers) is not referenced without them. */
    GLuint _framebuffer;
    GLuint _renderbuffer;
#endif
#if TARGET_OS_OSX
    NSWindow *_window;
    NSGestureRecognizer *_tap;
#else
    CADisplayLink *_displayLink;
    UIButton *_pipButton;
    UIWindow *_window;
    UITapGestureRecognizer *_tap;
#endif
    AVPictureInPictureController *_pipCtrl;
    AVPictureInPictureControllerContentSource *_pipSource;
    AVSampleBufferDisplayLayer *_pipLayer;
    CFTimeInterval _lastRender;           /* timestamp of the previous render */
    const char *_renderMode, *_dropMode;  /* RENDER_MODE / DROP_MODE env vars */
    long long unsigned _frameNo;          /* frames rendered so far */
}

/* macOS only: programmatic setup, driven manually from main(). */
- (void)setup;
@end

@implementation AppDelegate

/* Target of the tap gesture (and of the #if 0'd button): start PiP. */
- (void)enablePip:(id)sender
{
    NSLog(@"ENABLE PIP");
    //AVPictureInPictureControllerContentSource *source = [[AVPictureInPictureControllerContentSource alloc] initWithSampleBufferDisplayLayer:_pipLayer playbackDelegate:self];
    [_pipCtrl startPictureInPicture];
    //[_pipCtrl setContentSource:source];
}

#if TARGET_OS_OSX
-
(void)applicationDidFinishLaunching:(NSNotification *)notification {}

/* macOS: window/PiP construction is driven manually from main(). */
- (void)setup {
#else
/* iOS: same construction, driven by the UIKit launch callback. */
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    /* The playback audio-session category is required for PiP on iOS. */
    AVAudioSession * audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
#endif

    /* Create the application window. */
#if TARGET_OS_OSX
    NSRect frame = NSMakeRect(0, 0, 200, 200);
    NSWindow* window = [[NSWindow alloc] initWithContentRect:frame
                                                   styleMask:NSWindowStyleMaskResizable
                                                     backing:NSBackingStoreBuffered
                                                       defer:NO];
    [window setBackgroundColor:[NSColor blueColor]];
    _window = window;
#else
    _window = [[UIWindow alloc] initWithFrame:UIScreen.mainScreen.bounds];
    _window.rootViewController = [[UIViewController alloc] init];
    _window.backgroundColor = [UIColor blackColor];
#endif

    _pipLayer = [[AVSampleBufferDisplayLayer alloc] init];
    _pipLayer.frame = _window.frame;

    _pip = [[PIPWindow alloc] initWithFrame:_window.frame];
#if TARGET_OS_IPHONE
    /* Let taps reach the window's gesture recognizer. This property is
     * UIView-only — it does not exist on NSView, so it must be guarded
     * (the original set it unconditionally, breaking the macOS build). */
    _pip.userInteractionEnabled = NO;
#else
    /* Force layer-backing so -makeBackingLayer is invoked. */
    _pip.wantsLayer = YES;
#endif

    assert([AVPictureInPictureController isPictureInPictureSupported]);
    //assert([[_pip layer] isKindOfClass:[AVSampleBufferDisplayLayer class]]);

    AVSampleBufferDisplayLayer *pipLayer = _pip.layer; //_pipLayer;// [_pip layer];
    assert(pipLayer);
    _pipSource = [[AVPictureInPictureControllerContentSource alloc]
                  initWithSampleBufferDisplayLayer:pipLayer playbackDelegate:self];
    _pipCtrl = [[AVPictureInPictureController alloc] initWithContentSource:_pipSource];
    _pipCtrl.delegate = self;
    assert(_pipCtrl);

#if 0
    _pipButton = [UIButton buttonWithType:UIButtonTypeSystem];
    _pipButton.frame = CGRectMake(0, 0, 50, 50);
    _pipButton.backgroundColor = [UIColor whiteColor];
    [_pipButton setTitle:@"PIP" forState:UIControlStateNormal];
    UIImage* startImage = [AVPictureInPictureController pictureInPictureButtonStartImage];
    UIImage* stopImage = [AVPictureInPictureController pictureInPictureButtonStopImage];
    [_pipButton setImage:startImage forState:UIControlStateNormal];
    [_pipButton setImage:stopImage forState:UIControlStateSelected];
    [_pipButton addTarget:self action:@selector(enablePip:) forControlEvents:UIControlEventTouchUpInside];
    //[_pipButton setUserInteractionEnabled:YES];
    //[_pipButton setEnabled:YES];
    [_window addSubview:_pipButton];
    [_window bringSubviewToFront:_pipButton];
#endif

#if TARGET_OS_IPHONE
    /* Tap anywhere in the window to start PiP. */
    _tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(enablePip:)];
    [_window addGestureRecognizer:_tap];
    [_tap setEnabled:YES];
    //[_pip.layer addSublayer:_pipLayer];
#ifdef USE_OPENGL_ES
    /* FIXME(review): _eagllayer is still nil here — it is only created further
     * down — so this call does nothing useful; reorder if GL is re-enabled. */
    [_pipLayer addSublayer:_eagllayer];
#endif
    [_window addSubview:_pip];
    [_window makeKeyAndVisible];
#else
    [_window setContentView:_pip];
    [_window makeKeyAndOrderFront:NSApp];
#endif

#ifdef USE_OPENGL_ES
    /* GL ES setup: a framebuffer whose color attachment is backed by the
     * CAEAGLLayer's renderbuffer storage. */
    _eagllayer = [[CAEAGLLayer alloc] init];
    _eagllayer.frame = _pip.frame;
    _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
    EAGLContext *previous = [EAGLContext currentContext];
    [EAGLContext setCurrentContext:_context];
    glGenRenderbuffers(1, &_renderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
    glGenFramebuffers(1, &_framebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer);
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:_eagllayer];
    assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
    [EAGLContext setCurrentContext:previous];
#endif

    _lastRender = CACurrentMediaTime();
    _frameNo = 0;

    /* RENDER_MODE selects the pacing strategy:
     *   "vsync" (default) — render from a CADisplayLink (iOS only);
     *   "loop"            — re-dispatch render on the main queue forever. */
    _renderMode = getenv("RENDER_MODE");
    if (_renderMode == NULL)
        _renderMode = "vsync";
    /* DROP_MODE is captured but its consumer is not visible in this file. */
    _dropMode = getenv("DROP_MODE");
    if (_dropMode == NULL)
        _dropMode = "once";

    if (!strcmp(_renderMode, "loop")) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self periodicRender];
        });
    } else if (!strcmp(_renderMode, "vsync")) {
#if TARGET_OS_IPHONE
        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(onVsync:)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes]; #else #endif } //assert([_pipCtrl isPictureInPicturePossible]); [_pipCtrl addObserver:self forKeyPath:@"isPictureInPicturePossible" options:NSKeyValueObservingOptionInitial context:(__bridge void *)self]; [_pipCtrl addObserver:self forKeyPath:@"isPictureInPictureActive" options:NSKeyValueObservingOptionNew context:(__bridge void *)self]; #if TARGET_OS_IPHONE return YES; #endif } - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { NSLog(@"Can start PiP: %d", _pipCtrl.isPictureInPicturePossible); NSLog(@"Is active PiP: %d", _pipCtrl.isPictureInPictureActive); NSLog(@"Is suspended PiP: %d", _pipCtrl.isPictureInPictureSuspended); } - (void)periodicRender { [self render]; dispatch_async(dispatch_get_main_queue(), ^{ [self periodicRender]; }); } #if TARGET_OS_IPHONE - (void)onVsync:(CADisplayLink *)sender { [self render]; } #endif - (void)render { CFTimeInterval begin_context = CACurrentMediaTime(); #ifdef USE_OPENGL_ES EAGLContext *previous = [EAGLContext currentContext]; [EAGLContext setCurrentContext:_context]; CFTimeInterval begin_clear = CACurrentMediaTime(); glClearColor(cos(begin_context), 0.f, 0.f, 1.f); glClear(GL_COLOR_BUFFER_BIT); CFTimeInterval begin_present = CACurrentMediaTime(); [_context presentRenderbuffer:GL_RENDERBUFFER]; CFTimeInterval begin_release = CACurrentMediaTime(); [EAGLContext setCurrentContext:previous]; CFTimeInterval end_render = CACurrentMediaTime(); if (begin_present - begin_clear > 5. / 1000 || true) { NSLog(@"RENDER: total=%.3lf makeCurrent=%.3lf clear=%.3lf present=%.3lf release=%.3lf", 1000. * (end_render - begin_context), 1000. * (begin_clear - begin_context), 1000. * (begin_present - begin_clear), 1000. * (begin_release - begin_present), 1000. 
* (end_render - begin_release)); NSLog(@"RENDER: FPS=%.3lf", 1.f / (begin_context - _lastRender)); } _lastRender = begin_context; _frameNo++; #endif CVPixelBufferRef pixelBuffer=NULL; CVPixelBufferCreate(kCFAllocatorDefault, 1280, 720, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer); CMSampleTimingInfo info = { kCMTimeIndefinite, kCMTimeZero, kCMTimeInvalid}; CVPixelBufferLockBaseAddress(pixelBuffer, 0); size_t width = CVPixelBufferGetWidth(pixelBuffer); size_t height = CVPixelBufferGetHeight(pixelBuffer); UInt32* buffer = (UInt32*)CVPixelBufferGetBaseAddress(pixelBuffer); for ( unsigned long i = 0; i < width * height; i++ ) { uint32_t value = (uint32_t)(0xff * fabs(cos(begin_context))) << 16; buffer[i] = CFSwapInt32HostToBig(0x00000fff + value); } CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); CMFormatDescriptionRef formatDesc=NULL; CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); CMSampleBufferRef sampleBuffer=NULL; CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDesc, &info, &sampleBuffer); CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true); CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); //AVPictureInPictureControllerContentSource *source = _pipCtrl.contentSource; //AVSampleBufferDisplayLayer *pipLayer = _pipCtrl.pictureInPictureActive ? 
//_pipLayer : _pip.layer; //assert([pipLayer status] != AVQueuedSampleBufferRenderingStatusFailed); AVSampleBufferDisplayLayer *pipLayer = _pip.layer; [pipLayer enqueueSampleBuffer:sampleBuffer]; [_pipLayer enqueueSampleBuffer:sampleBuffer]; [pipLayer setNeedsDisplay]; [_pipCtrl invalidatePlaybackState]; CFRelease(sampleBuffer); CFRelease(formatDesc); CFRelease(pixelBuffer); } - (void)pictureInPictureController:(nonnull AVPictureInPictureController *)pictureInPictureController didTransitionToRenderSize:(CMVideoDimensions)newRenderSize { NSLog(@"RESIZED TO %fx%f", newRenderSize.width, newRenderSize.height); } - (void)pictureInPictureController:(nonnull AVPictureInPictureController *)pictureInPictureController setPlaying:(BOOL)playing { NSLog(@"PLAYING STATE: %d", playing); } - (void)pictureInPictureController:(nonnull AVPictureInPictureController *)pictureInPictureController skipByInterval:(CMTime)skipInterval completionHandler:(nonnull void (^)(void))completionHandler { } - (BOOL)pictureInPictureControllerIsPlaybackPaused:(nonnull AVPictureInPictureController *)pictureInPictureController { //NSLog(@"IS PLAYBACK PAUSED"); return NO; } - (CMTimeRange)pictureInPictureControllerTimeRangeForPlayback:(nonnull AVPictureInPictureController *)pictureInPictureController { return CMTimeRangeMake(kCMTimePositiveInfinity, kCMTimeZero); } - (void)pictureInPictureControllerPlaybackPaused:(nonnull AVPictureInPictureController *)controller { // NSLog(@"PLAYBACK PAUSED"); } - (CMTimeRange)pictureInPictureControllerPlaybackTimeRange:(nonnull AVPictureInPictureController *)pictureInPictureController { return CMTimeRangeMake(CMTimeMake(CACurrentMediaTime()*1000000, 1000000), CMTimeMake(100000000, 1000000)); } - (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController failedToStartPictureInPictureWithError:(NSError *)error { } - (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController { } @end 
/*
 * Entry point. On iOS, hand control to UIKit, which instantiates the
 * delegate class by name. On macOS, build the application object manually
 * and drive -setup ourselves (applicationDidFinishLaunching: is empty on
 * that platform).
 */
int main(int argc, char * argv[]) {
    NSString * appDelegateClassName;
    @autoreleasepool {
        // Setup code that might create autoreleased objects goes here.
        appDelegateClassName = NSStringFromClass([AppDelegate class]);
    }
#if TARGET_OS_IPHONE
    return UIApplicationMain(argc, argv, nil, appDelegateClassName);
#else
    /* NSApp is nil until the shared application object exists, so the
     * original [NSApp setDelegate:]/[NSApp run] messaged nil (no-ops) and
     * then fell through to NSApplicationMain, which ran without the
     * delegate's setup. Create the application explicitly and run it once. */
    NSApplication *app = [NSApplication sharedApplication];
    AppDelegate *appDelegate = [[AppDelegate alloc] init];
    [appDelegate setup];
    [app setDelegate:appDelegate];
    [app run]; /* blocks until the application terminates */
    return 0;
#endif
}