Created April 25, 2025 05:59
import AVFoundation
import CoreGraphics
import CoreImage
import Foundation

class CaptureManager: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    override init() {
        super.init()
        startCaptureSession()
    }

    deinit {
        stopCaptureSession()
    }

    func setCameraCallback(callback: @escaping (CGImage) -> Void) {
        cameraCallback = callback
    }

    // AVCaptureVideoDataOutputSampleBufferDelegate: called on mediaQueue for every captured frame
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if output == videoOutput {
            // Keep track of frames
            cameraInputFrameCount += 1
            NSLog("Camera capture frame \(cameraInputFrameCount)")

            if let image = cgImageFromSampleBuffer(sampleBuffer) {
                DispatchQueue.main.async {
                    self.cameraCallback(image)
                }
            }
        }
    }

    private let mediaQueue = DispatchQueue(label: "media-queue")

    // Camera and microphone capture
    private var captureSession: AVCaptureSession?
    private var cameraInputFrameCount = 0
    private var videoOutput: AVCaptureOutput?
    private var audioOutput: AVCaptureOutput?
    private var cameraCallback: (CGImage) -> Void = { _ in }

    private func startCaptureSession() {
        let captureSession = AVCaptureSession()
        captureSession.beginConfiguration()

        // Video
        if
            let videoDevice = AVCaptureDevice.default(for: .video),
            let videoInput = try? AVCaptureDeviceInput(device: videoDevice)
        {
            if captureSession.canAddInput(videoInput) {
                captureSession.addInput(videoInput)

                let videoOutput = AVCaptureVideoDataOutput()
                videoOutput.setSampleBufferDelegate(self, queue: mediaQueue)
                videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]

                if captureSession.canAddOutput(videoOutput) {
                    captureSession.addOutput(videoOutput)
                    self.videoOutput = videoOutput
                }
            }
        }

        captureSession.commitConfiguration()
        captureSession.startRunning()

        self.captureSession = captureSession
    }

    private func stopCaptureSession() {
        captureSession?.stopRunning()
        captureSession = nil
    }

    private func cgImageFromSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> CGImage? {
        // Get the CVImageBuffer (pixel buffer) from the sample buffer
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            print("Failed to get image buffer from sample buffer")
            return nil
        }

        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        defer {
            CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly)
        }

        // Create a CIImage from the pixel buffer
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)

        // Create a CIContext to render the CIImage to a CGImage
        let context = CIContext()

        // Convert CIImage to CGImage
        let cgImage = context.createCGImage(ciImage, from: ciImage.extent)

        return cgImage
    }
}
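Below is a minimal usage sketch, assuming the class is dropped into an app target that has NSCameraUsageDescription set in Info.plist (required for camera access). The CameraPreviewModel wrapper and its print-based callback are illustrative assumptions, not part of the gist.

import CoreGraphics

// Hypothetical caller: creates the manager (which starts capture in init),
// registers a callback, and stops capture when the manager is deallocated.
final class CameraPreviewModel {
    private let captureManager = CaptureManager()

    func start() {
        captureManager.setCameraCallback { (image: CGImage) in
            // Delivered on the main queue; hand the frame to UI code here.
            print("Got frame \(image.width)x\(image.height)")
        }
    }
}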