// Re-wrap a processed frame: create a BGRA pixel buffer matching the source
// frame's dimensions, render the filtered CIImage into it, then package it as a
// new CMSampleBuffer carrying the original frame's timing info.
guard let sourceFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
    // The original force-unwrapped here; keep the trap but make it diagnosable.
    fatalError("Sample buffer has no format description")
}
let dimensions = CMVideoFormatDescriptionGetDimensions(sourceFormatDescription)

// Destination buffer for the rendered pixels.
var outputPixelBuffer: CVPixelBuffer? = nil
let createStatus = CVPixelBufferCreate(kCFAllocatorSystemDefault,
                                       Int(dimensions.width),
                                       Int(dimensions.height),
                                       kCVPixelFormatType_32BGRA,
                                       nil,
                                       &outputPixelBuffer)
guard createStatus == kCVReturnSuccess, let pixelBuffer = outputPixelBuffer else {
    fatalError("CVPixelBufferCreate failed with status \(createStatus)")
}

// Keep the buffer locked while rendering; the matching unlock runs on every exit path.
// NOTE(review): CIContext.render(_:to:) does not document a need for the base address
// to be locked, but the lock is kept to preserve the original behavior — confirm before removing.
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }

// Render the processed image into the destination buffer using the shared context
// (creating a CIContext per frame is expensive, so it is held by ContextHolder).
let ciContext = ContextHolder.shared.context
ciContext.render(newImage, to: pixelBuffer)

// Build a format description for the new image buffer.
// NOTE(review): the positional C-style CM creation calls below are deprecated in
// recent SDKs in favor of the labeled variants (allocator:imageBuffer:...); kept
// here to match the file's SDK level.
var videoFormatDescription: CMVideoFormatDescription? = nil
let formatStatus = CMVideoFormatDescriptionCreateForImageBuffer(nil, pixelBuffer, &videoFormatDescription)
guard formatStatus == 0, let newFormatDescription = videoFormatDescription else {
    fatalError("CMVideoFormatDescriptionCreateForImageBuffer failed with status \(formatStatus)")
}

// Create the new sample buffer, carrying over the captured frame's timing info.
// CMSampleBufferCreateForImageBuffer takes the timing struct by pointer, hence the var copy.
var sampleTimingInfo = self.sampleTimingInfo
var newSampleBuffer: CMSampleBuffer? = nil
let sampleStatus = CMSampleBufferCreateForImageBuffer(nil, pixelBuffer, true, nil, nil, newFormatDescription, &sampleTimingInfo, &newSampleBuffer)
assert(sampleStatus == 0 && newSampleBuffer != nil,
       "CMSampleBufferCreateForImageBuffer failed with status \(sampleStatus)")