func onVideoFrameReceived()

in AmazonChimeSDKDemo/AmazonChimeSDKDemo/utils/MetalVideoProcessor.swift [58:111]


    /// Receives a camera frame, applies a Gaussian blur to both NV12 planes on
    /// the GPU, and forwards the processed frame to all downstream sinks.
    ///
    /// Frames that are not NV12, or that fail any allocation/texture step, are
    /// dropped silently rather than crashing the capture pipeline.
    /// - Parameter frame: The incoming frame; must wrap a `VideoFramePixelBuffer`
    ///   in one of the bi-planar 4:2:0 (NV12) pixel formats.
    func onVideoFrameReceived(frame: VideoFrame) {
        // Drop frames whose buffer is not a CVPixelBuffer-backed frame.
        guard let pixelBuffer = frame.buffer as? VideoFramePixelBuffer else { return }
        let inputBuffer = pixelBuffer.pixelBuffer

        // For simplicity, we only support NV12 frames. Check this *before*
        // touching the buffer pool so unsupported frames do no wasted work.
        let pixelFormat = CVPixelBufferGetPixelFormatType(inputBuffer)
        guard pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            || pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange else {
            return
        }

        // (Re)build the output buffer pool whenever the frame dimensions change.
        if bufferPool == nil || frame.width != bufferPoolWidth || frame.height != bufferPoolHeight {
            updateBufferPool(newWidth: frame.width, newHeight: frame.height)
        }
        guard let pool = bufferPool else { return }

        var outputBuffer: CVPixelBuffer?
        let poolStatus = CVPixelBufferPoolCreatePixelBuffer(nil, pool, &outputBuffer)
        // Pool allocation can fail (e.g. exhaustion); drop the frame instead of crashing.
        guard poolStatus == kCVReturnSuccess, let validOutputBuffer = outputBuffer else { return }

        // Create Metal textures over the luma (plane 0) and chroma (plane 1)
        // planes of both the input and output pixel buffers. Texture creation
        // can fail transiently, so each result is checked.
        guard let inputLumaTexture = createTexutreFromBuffer(pixelBuffer: inputBuffer,
                                                             plane: 0,
                                                             format: MTLPixelFormat.r8Unorm),
              let inputChromaTexture = createTexutreFromBuffer(pixelBuffer: inputBuffer,
                                                               plane: 1,
                                                               format: MTLPixelFormat.rg8Unorm),
              let outputLumaTexture = createTexutreFromBuffer(pixelBuffer: validOutputBuffer,
                                                              plane: 0,
                                                              format: MTLPixelFormat.r8Unorm),
              let outputChromaTexture = createTexutreFromBuffer(pixelBuffer: validOutputBuffer,
                                                                plane: 1,
                                                                format: MTLPixelFormat.rg8Unorm) else {
            return
        }

        // For simplicity, we just use a MetalPerformanceShader here on each of the planes
        // but using a custom shader should be similarly straightforward
        guard let commandBuffer = commandQueue.makeCommandBuffer() else { return }
        let gaussianBlur = MPSImageGaussianBlur(device: device, sigma: 16)
        gaussianBlur.edgeMode = MPSImageEdgeMode.clamp
        gaussianBlur.encode(commandBuffer: commandBuffer,
                            sourceTexture: inputLumaTexture,
                            destinationTexture: outputLumaTexture)
        gaussianBlur.encode(commandBuffer: commandBuffer,
                            sourceTexture: inputChromaTexture,
                            destinationTexture: outputChromaTexture)
        commandBuffer.commit()
        // Block until the GPU finishes so the output buffer is fully written
        // before it is handed to the sinks.
        commandBuffer.waitUntilCompleted()

        // Forward the blurred frame downstream, preserving timestamp and rotation.
        let processedFrame = VideoFrame(timestampNs: frame.timestampNs,
                                        rotation: frame.rotation,
                                        buffer: VideoFramePixelBuffer(pixelBuffer: validOutputBuffer))
        for sink in sinks {
            (sink as? VideoSink)?.onVideoFrameReceived(frame: processedFrame)
        }
    }