//
// Classifier.swift
// CapFinder
//
// Created by User on 12.02.18.
// Copyright © 2018 User. All rights reserved.
//
import Foundation
import Vision
import CoreML
import UIKit

/// Recognise categories in images
class Classifier: Logger {

    /// UserDefaults key under which the classifier version is stored
    static let userDefaultsKey = "classifier.version"

    /// The Vision-wrapped Core ML model used for classification
    let model: VNCoreMLModel

    init(model: VNCoreMLModel) {
        self.model = model
    }

    /**
     Classify an image
     - Parameter image: The image to classify
     - Parameter completion: Called with a mapping from class identifiers to confidences, or `nil` if classification fails
     - Note: This method should not be scheduled on the main thread.
     */
    func recognize(image: UIImage, completion: @escaping (_ matches: [Int: Float]?) -> Void) {
        guard let ciImage = CIImage(image: image) else {
            error("Unable to create CIImage")
            completion(nil)
            return
        }
        // Pass the image orientation so Vision interprets the pixel data correctly
        let orientation = CGImagePropertyOrientation(image.imageOrientation)
        let handler = VNImageRequestHandler(ciImage: ciImage, orientation: orientation)
        let request = VNCoreMLRequest(model: model) { request, error in
            let matches = self.process(request: request, error: error)
            completion(matches)
        }
        request.imageCropAndScaleOption = .centerCrop
        do {
            try handler.perform([request])
        } catch {
            self.error("Failed to perform classification: \(error)")
            // Report the failure instead of silently dropping the completion handler
            completion(nil)
        }
    }

    private func process(request: VNRequest, error: Error?) -> [Int: Float]? {
        if let e = error {
            self.error("Unable to classify image: \(e.localizedDescription)")
            return nil
        }
        guard let result = request.results as? [VNClassificationObservation] else {
            self.error("Invalid classifier result: \(String(describing: request.results))")
            return nil
        }
        // Class identifiers are numeric strings; skip any that cannot be parsed instead of force-unwrapping
        let matches = result.reduce(into: [Int: Float]()) { dict, observation in
            guard let id = Int(observation.identifier) else { return }
            dict[id] = observation.confidence
        }
        log("Classified image with \(matches.count) classes")
        DispatchQueue.global(qos: .background).async {
            app.database.update(recognizedCaps: Set(matches.keys))
        }
        return matches
    }
}
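
// The conversion from UIImage.Orientation to CGImagePropertyOrientation used in
// recognize(image:completion:) is not provided by the SDK, so the project must
// define it somewhere. A minimal sketch, assuming the name-for-name mapping used
// in Apple's image classification sample code:
extension CGImagePropertyOrientation {
    init(_ orientation: UIImage.Orientation) {
        switch orientation {
        case .up: self = .up
        case .upMirrored: self = .upMirrored
        case .down: self = .down
        case .downMirrored: self = .downMirrored
        case .left: self = .left
        case .leftMirrored: self = .leftMirrored
        case .right: self = .right
        case .rightMirrored: self = .rightMirrored
        @unknown default: self = .up
        }
    }
}

// Sketch of intended usage, assuming a bundled Core ML model class named
// `CapModel` (hypothetical; the actual model name is project-specific):
//
//     let visionModel = try VNCoreMLModel(for: CapModel().model)
//     let classifier = Classifier(model: visionModel)
//     DispatchQueue.global(qos: .userInitiated).async {
//         classifier.recognize(image: image) { matches in
//             // matches maps class identifiers to confidences, or is nil on failure
//         }
//     }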