//
//  FrameManager.swift
//
import AVFoundation
import CoreGraphics
import UIKit

/// Receives live video frames and captured still photos from `CameraManager`
/// and publishes them for SwiftUI views to observe.
///
/// Marked `final`: the initializer is `private`, so the class cannot be
/// subclassed anyway; `final` makes that explicit and enables static dispatch.
final class FrameManager: NSObject, ObservableObject {

    /// Shared singleton instance; registers itself as the camera's video
    /// delegate on creation.
    static let shared = FrameManager()

    /// Reusable Core Image context (context creation is expensive, so it is
    /// kept for the lifetime of the manager).
    let context = CIContext()

    /// Most recent video frame delivered by the capture session.
    /// Always updated on the main queue.
    @Published var current: CVPixelBuffer?

    /// Most recent processed still photo. Always updated on the main queue.
    @Published var image: UIImage?

    /// Serial queue on which video sample buffers are delivered.
    /// `.workItem` autorelease frequency drains the pool after each buffer,
    /// keeping memory pressure low during continuous capture.
    let videoOutputQueue = DispatchQueue(
        label: "de.christophhagen.videoout",
        qos: .userInitiated,
        attributes: [],
        autoreleaseFrequency: .workItem)

    /// Private: access the manager only through `shared`.
    private override init() {
        super.init()
        CameraManager.shared.setVideoDelegate(self, queue: videoOutputQueue)
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension FrameManager: AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Forwards each incoming video frame to the published `current`
    /// property, hopping to the main queue for the UI-facing update.
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        // Frames without a pixel buffer carry nothing to display.
        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
        DispatchQueue.main.async {
            self.current = pixelBuffer
        }
    }
}
// MARK: - AVCapturePhotoCaptureDelegate
extension FrameManager: AVCapturePhotoCaptureDelegate {

    /// Called when a still photo has finished processing; converts it and
    /// publishes the result on the main queue (nil on any failure).
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        let image = convert(photo, error: error)
        DispatchQueue.main.async {
            self.image = image
        }
    }

    /// Converts a captured photo into a cropped, circle-masked `UIImage`.
    /// - Parameters:
    ///   - photo: The photo delivered by the capture session.
    ///   - error: Capture error, if any; conversion is skipped when non-nil.
    /// - Returns: The masked image, or `nil` if an error occurred, no image
    ///   data was captured, or masking failed.
    private func convert(_ photo: AVCapturePhoto, error: Error?) -> UIImage? {
        // Bind the error instead of `guard error == nil` + force-unwrap:
        // same log output, no `!`.
        if let error = error {
            log("PhotoCaptureHandler: \(error)")
            return nil
        }

        guard let cgImage = photo.cgImageRepresentation() else {
            log("PhotoCaptureHandler: No image captured")
            return nil
        }

        // NOTE(review): `.right` assumes the device was held in portrait
        // when the photo was taken — confirm against the capture setup.
        let image = UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .right)
        guard let masked = image.crop(factor: CameraView.circleCropFactor).circleMasked else {
            log("Could not mask image")
            return nil
        }
        return masked
    }
}