//
//  Classifier.swift
//  Caps-iOS/Caps/Data
//
import Foundation
import Vision
import CoreML
import UIKit
/// Recognises cap categories in images using a CoreML classification model
/// driven through the Vision framework.
class Classifier: Logger {

    /// Key under which the classifier model version is stored in `UserDefaults`.
    static let userDefaultsKey = "classifier.version"

    /// The Vision wrapper around the CoreML classification model.
    let model: VNCoreMLModel

    /// Create a classifier for the given model.
    /// - Parameter model: The Vision-wrapped CoreML model used for all requests.
    init(model: VNCoreMLModel) {
        self.model = model
    }

    /**
     Classify an image
     - Parameter image: The image to classify
     - Parameter completion: The callback with the match results.
       Called with `nil` when classification fails for any reason.
     - Parameter matches: A dictionary with a map from cap id to classifier match
     - Note: This method should not be scheduled on the main thread.
     */
    func recognize(image: CGImage, completion: @escaping (_ matches: [Int: Float]?) -> Void) {
        let image = CIImage(cgImage: image)
        let handler = VNImageRequestHandler(ciImage: image, orientation: .up)
        let request = VNCoreMLRequest(model: model) { request, error in
            let matches = self.process(request: request, error: error)
            completion(matches)
        }
        // Center-crop to the model's expected input aspect ratio.
        request.imageCropAndScaleOption = .centerCrop
        do {
            try handler.perform([request])
        } catch {
            self.error("Failed to perform classification: \(error)")
            // Fix: the original never invoked the callback on a perform failure,
            // leaving the caller waiting forever. Report the failure explicitly.
            completion(nil)
        }
    }

    /// Convert a finished Vision request into a map from cap id to confidence.
    /// - Parameter request: The completed classification request.
    /// - Parameter error: The error reported by Vision, if any.
    /// - Returns: A dictionary mapping cap id to confidence, or `nil` on failure.
    private func process(request: VNRequest, error: Error?) -> [Int : Float]? {
        if let e = error {
            self.error("Unable to classify image: \(e.localizedDescription)")
            return nil
        }
        guard let result = request.results as? [VNClassificationObservation] else {
            self.error("Invalid classifier result: \(String(describing: request.results))")
            return nil
        }
        // Fix: the original force-unwrapped `Int($1.identifier)!`, crashing the app
        // if the model ever ships a non-numeric class label. Skip and log instead.
        var matches: [Int: Float] = [:]
        for observation in result {
            guard let capId = Int(observation.identifier) else {
                self.error("Ignoring non-numeric class identifier '\(observation.identifier)'")
                continue
            }
            matches[capId] = observation.confidence
        }
        log("Classified image with \(matches.count) classes")
        return matches
    }
}