How to set the Portrait effect on/off in live camera view in AVFoundation in Swift?

I am using AVFoundation for a live camera view. I can get the device from the current video input (of type AVCaptureDeviceInput) like this:

let device = videoInput.device

The device's active format has an isPortraitEffectSupported property. How can I turn the Portrait effect on and off in the live camera view?
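
For reference, this is a minimal check of that flag (assuming device comes from the snippet above, and iOS 15 / macOS 12 or later):

if device.activeFormat.isPortraitEffectSupported {
    // The Portrait effect can run on this device/format combination.
    print("The active format supports the Portrait effect")
}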

I set up the camera like this:

    private var videoInput: AVCaptureDeviceInput!
    private let session = AVCaptureSession()
    private(set) var isSessionRunning = false
    private var renderingEnabled = true
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private let photoOutput = AVCapturePhotoOutput()
    private(set) var cameraPosition: AVCaptureDevice.Position = .front

    func configureSession() {
        
        sessionQueue.async { [weak self] in
            
            guard let strongSelf = self else { return }
            
            if strongSelf.setupResult != .success {
                return
            }
            
            let defaultVideoDevice: AVCaptureDevice? = strongSelf.videoDeviceDiscoverySession.devices.first(where: {$0.position == strongSelf.cameraPosition})
            
            guard let videoDevice = defaultVideoDevice else {
                print("Could not find any video device")
                strongSelf.setupResult = .configurationFailed
                return
            }
            
            do {
                
                strongSelf.videoInput = try AVCaptureDeviceInput(device: videoDevice)
                
            } catch {
                print("Could not create video device input: \(error)")
                strongSelf.setupResult = .configurationFailed
                return
            }
            
            strongSelf.session.beginConfiguration()
            
            strongSelf.session.sessionPreset = AVCaptureSession.Preset.photo
            
            
            // Add a video input.
            guard strongSelf.session.canAddInput(strongSelf.videoInput) else {
                print("Could not add video device input to the session")
                strongSelf.setupResult = .configurationFailed
                strongSelf.session.commitConfiguration()
                return
            }
            strongSelf.session.addInput(strongSelf.videoInput)
            
            // Add a video data output
            if strongSelf.session.canAddOutput(strongSelf.videoDataOutput) {
                strongSelf.session.addOutput(strongSelf.videoDataOutput)
                strongSelf.videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
                strongSelf.videoDataOutput.setSampleBufferDelegate(strongSelf, queue: strongSelf.dataOutputQueue)
                
            } else {
                print("Could not add video data output to the session")
                strongSelf.setupResult = .configurationFailed
                strongSelf.session.commitConfiguration()
                return
            }
            
            // Add photo output
            if strongSelf.session.canAddOutput(strongSelf.photoOutput) {
                strongSelf.session.addOutput(strongSelf.photoOutput)
                
                strongSelf.photoOutput.isHighResolutionCaptureEnabled = true
                
                
            } else {
                print("Could not add photo output to the session")
                strongSelf.setupResult = .configurationFailed
                strongSelf.session.commitConfiguration()
                return
            }
            
            strongSelf.session.commitConfiguration()
            
        }
    }
    
    func prepareSession(completion: @escaping (SessionSetupResult) -> Void) {
        
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else { return }
            switch strongSelf.setupResult {
            case .success:
                strongSelf.addObservers()
                
                
                if strongSelf.photoOutput.isDepthDataDeliverySupported {
                    strongSelf.photoOutput.isDepthDataDeliveryEnabled = true
                }
                
                if let photoOrientation = AVCaptureVideoOrientation(interfaceOrientation: interfaceOrientation) {
                    if let unwrappedPhotoOutputConnection = strongSelf.photoOutput.connection(with: .video) {
                        unwrappedPhotoOutputConnection.videoOrientation = photoOrientation
                    }
                }
                
                strongSelf.dataOutputQueue.async {
                    strongSelf.renderingEnabled = true
                }
                
                strongSelf.session.startRunning()
                strongSelf.isSessionRunning = strongSelf.session.isRunning
                
                strongSelf.mainQueue.async {
                    strongSelf.previewView.videoPreviewLayer.session = strongSelf.session
                }
                
                completion(strongSelf.setupResult)
            default:
                completion(strongSelf.setupResult)
                
            }
        }
    }

Then I set isPortraitEffectsMatteDeliveryEnabled like this:

    func setPortraitEffectActive(_ state: Bool) {
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else { return }
            if strongSelf.photoOutput.isPortraitEffectsMatteDeliverySupported {
                strongSelf.photoOutput.isPortraitEffectsMatteDeliveryEnabled = state
            }
        }
    }

However, I don't see any Portrait Effect in the live camera view! Any ideas why?

Hello,

As noted in this video (https://developer.apple.com/videos/play/wwdc2021/10047/?time=1353):

The Portrait effect is always under user control through Control Center only.

In other words, your app has no way to programmatically activate or deactivate the Portrait effect.
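
What your app can do is observe whether the user has the effect turned on. Here is a minimal sketch of key-value observing that state (assuming iOS 15 / macOS 12 or later; isPortraitEffectActive is documented as key-value observable):

import AVFoundation

// Keep a strong reference for as long as you want updates.
var portraitEffectObservation: NSKeyValueObservation?

func observePortraitEffect(on device: AVCaptureDevice) {
    // Fires when the user toggles the effect in Control Center.
    portraitEffectObservation = device.observe(\.isPortraitEffectActive, options: [.initial, .new]) { device, _ in
        print("Portrait effect is now \(device.isPortraitEffectActive ? "on" : "off")")
    }
}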

You said:

Then I set isPortraitEffectsMatteDeliveryEnabled like this

The "Portrait Effects Matte" is distinctly a different feature from the Portrait effect, enabling it delivers a portrait effects matte image to your app when you capture a photo, whereas the Portrait effect is an effect that is applied to the video stream, and then your app is delivered the resulting video stream.


I also noticed the following in your code snippet:

strongSelf.videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]

Though it is unrelated to your question, I strongly recommend that you read over TN3121 (Selecting a pixel format for an AVCaptureVideoDataOutput) to determine whether your app truly needs to be using that pixel format.
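
For example, availableVideoPixelFormatTypes lists formats from most to least efficient for the current configuration, so a sketch of letting the output run in its native format is:

// Query after the output has been added to the session;
// the first entry is the most efficient format.
if let nativeFormat = videoDataOutput.availableVideoPixelFormatTypes.first {
    videoDataOutput.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: nativeFormat
    ]
}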

Not having these settings under application control is extremely problematic. I am seeing both a 7-8x slowdown compared to my own AI image-processing code, which has no need for portrait or other effects, and frequent crashes such as this one:

  * thread #1, queue = 'com.apple.main-thread', stop reason = EXC_BAD_ACCESS (code=2, address=0x18e8d9e600)
    * frame #0: 0x00000001f37dba44 AGXMetalG14X`AGX::TextureGen4<(AGXTextureMemoryLayout)3, AGX::G14X::Encoders, AGX::G14X::Classes>::TextureGen4(AGX::G14X::Device*, bool, AGXHardwareTextureMemoryOrder, MTLTextureType, AGX::TextureFormat const*, MTLPixelFormat, unsigned long, MTLStorageMode, AGXTextureCompressionSettings, eAGXColorSpaceConversion, eAGXTextureRotation, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned long, unsigned int, MTLCPUCacheMode, __IOSurface*, unsigned int, unsigned int, __IOSurface*, unsigned int, unsigned int, bool, bool, bool, unsigned long long) + 2108
      frame #1: 0x00000001f37dc844 AGXMetalG14X`AGX::TextureGen4<(AGXTextureMemoryLayout)3, AGX::G14X::Encoders, AGX::G14X::Classes>::TextureGen4(AGX::G14X::Device*, bool, AGXHardwareTextureMemoryOrder, MTLTextureType, AGX::TextureFormat const*, MTLPixelFormat, unsigned long, MTLStorageMode, AGXTextureCompressionSettings, eAGXColorSpaceConversion, eAGXTextureRotation, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned long, unsigned int, MTLCPUCacheMode, __IOSurface*, unsigned int, unsigned int, __IOSurface*, unsigned int, unsigned int, bool, bool, bool, unsigned long long) + 5692
      frame #2: 0x00000001f37df1b4 AGXMetalG14X`-[AGXG14XFamilyTexture initImplWithDevice:Descriptor:iosurface:plane:slice:buffer:bytesPerRow:allowNPOT:sparsePageSize:isCompressedIOSurface:isHeapBacked:] + 1176
      frame #3: 0x00000001f37faf7c AGXMetalG14X`-[AGXTexture initWithDevice:desc:iosurface:plane:slice:] + 216
      frame #4: 0x00000001919672f0 QuartzCore`allocate_drawable_texture(id<MTLDevice>, __IOSurface*, unsigned int, unsigned int, MTLPixelFormat, unsigned long long, bool, NSString*, unsigned long) + 176
      frame #5: 0x0000000191722338 QuartzCore`get_unused_drawable(_CAMetalLayerPrivate*, bool, bool) + 620
      frame #6: 0x0000000191968378 QuartzCore`CAMetalLayerPrivateNextDrawableLocked(CAMetalLayer*, CAMetalDrawable**, unsigned long*) + 1652
      frame #7: 0x0000000191721f94 QuartzCore`-[CAMetalLayer nextDrawable] + 128
      frame #8: 0x000000019b234350 MetalKit`-[MTKView currentDrawable] + 184
      frame #9: 0x000000019b233f7c MetalKit`-[MTKView currentRenderPassDescriptor] + 40
      frame #10: 0x0000000100010948 vertigodemo`MetalViewImpl::Render(this=0x000000012f3cb310) at metalview.mm:701:28
      frame #11: 0x00000001000107c4 vertigodemo`-[MetalMTKView draw](self=0x0000000138058c00, _cmd="draw") at metalview.mm:369:11
      frame #12: 0x000000019b233aa0 MetalKit`__23-[MTKView __initCommon]_block_invoke + 40
      frame #13: 0x00000001893ac910 libdispatch.dylib`_dispatch_client_callout + 20
      frame #14: 0x00000001893afdc8 libdispatch.dylib`_dispatch_continuation_pop + 600
      frame #15: 0x00000001893c3be4 libdispatch.dylib`_dispatch_source_latch_and_call + 420
      frame #16: 0x00000001893c27b4 libdispatch.dylib`_dispatch_source_invoke + 832
      frame #17: 0x00000001893baeb8 libdispatch.dylib`_dispatch_main_queue_drain + 744
      frame #18: 0x00000001893babc0 libdispatch.dylib`_dispatch_main_queue_callback_4CF + 44
      frame #19: 0x000000018967cea4 CoreFoundation`__CFRUNLOOP_IS_SERVICING_THE_MAIN_DISPATCH_QUEUE__ + 16
      frame #20: 0x000000018963a760 CoreFoundation`__CFRunLoopRun + 1996
      frame #21: 0x000000018963993c CoreFoundation`CFRunLoopRunSpecific + 608
      frame #22: 0x0000000193c02448 HIToolbox`RunCurrentEventLoopInMode + 292
      frame #23: 0x0000000193c02284 HIToolbox`ReceiveNextEventCommon + 648
      frame #24: 0x0000000193c01fdc HIToolbox`_BlockUntilNextEventMatchingListInModeWithFilter + 76
      frame #25: 0x000000018ce18ed0 AppKit`_DPSNextEvent + 660
      frame #26: 0x000000018d603eec AppKit`-[NSApplication(NSEventRouting) _nextEventMatchingEventMask:untilDate:inMode:dequeue:] + 716
      frame #27: 0x000000018ce0c37c AppKit`-[NSApplication run] + 476
      frame #28: 0x000000010000f990 vertigodemo`main(argc=3, argv=0x000000016fdff760) at macos-main.mm:48:5
      frame #29: 0x00000001891dd0e0 dyld`start + 2360