CoreML - Label presented as random string

Hello everybody,

I used Google Cloud Platform to create a Machine learning model to perform computer vision. I downloaded the CoreML model from the cloud platform website and followed the instructions in the Google Tutorial for iOS model deployment.

This is my code currently.

Code Block
class Classification {
    
    private lazy var classificationRequest: VNCoreMLRequest = {
        do {
            let model = try VNCoreMLModel(for: AutoML().model)
            let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
                if let classifications = request.results as? [VNClassificationObservation] {
                    print(classifications.first ?? "No classification!")
                }
            
            })
            
            request.imageCropAndScaleOption = .scaleFit
            return request
        }
        catch {
            fatalError("Error! Can't use Model.")
        }
    }()
    
    func classifyImage(receivedImage: UIImage) {
        
        let orientation = CGImagePropertyOrientation(rawValue: UInt32(receivedImage.imageOrientation.rawValue))
        
        if let image = CIImage(image: receivedImage) {
            DispatchQueue.global(qos: .userInitiated).async {
                
                let handler = VNImageRequestHandler(ciImage: image, orientation: orientation!)
                do {
                    try handler.perform([self.classificationRequest])
                }
                catch {
                    fatalError("Error classifying image!")
                }
            }
        }
    }


My code executes and I receive this:

<VNClassificationObservation: 0x600002091d40> A7DBD70C-541C-4112-84A4-C6B4ED2EB7E2 requestRevision=1 confidence=0.332127 "CICAgICAwPmveRIJQWdsYWlzX2lv"

I receive a confidence value but I don't receive a label string.

Is there any step I am not taking?

Along with the model there is also a dict.txt file. Is there anything I need to do with that file that I am not currently doing?

Thank you!

Replies

The labels are stored in your mlmodel file. If you open the mlmodel in Xcode 12, it will display what those labels are. My guess is that instead of actual labels, your mlmodel contains "CICAgICAwPmveRIJQWdsYWlzX2lv" and so on. (Those identifiers look Base64-encoded: decoding that one yields bytes containing the string "Aglais_io", so the dict.txt file you received most likely maps these encoded IDs to the real label names.)

If that is the case, you can make a dictionary in the app that maps "CICAgICAwPmveRIJQWdsYWlzX2lv" and so on to the real labels, or you can replace these labels inside the mlmodel file by editing it using coremltools. (My e-book Core ML Survival Guide has a chapter on how to replace the labels in the model.)
Hello,

Thank you for your help, but I don't know which label "CICAgICAwPmveRIJQWdsYWlzX2lv" corresponds to. I am dealing with more than 100 different labels, and the subject matter is quite specialized: I am identifying different species of butterflies.

The Google Cloud platform gives a dict.txt file.

This is the content of the Swift wrapper that Xcode generated for my Core ML model. Do you see anything that I could change?

Code Block
//
// AutoML.swift
//
// This file was automatically generated and should not be edited.
//
import CoreML
/// Model Prediction Input Type
///
/// Auto-generated wrapper (see file header: "should not be edited") that exposes a
/// single pixel buffer to Core ML through the `MLFeatureProvider` protocol.
@available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
class AutoMLInput : MLFeatureProvider {
    /// image0 as color (kCVPixelFormatType_32BGRA) image buffer, 224 pixels wide by 224 pixels high
    var image0: CVPixelBuffer
    // The only feature this provider supplies is "image0".
    var featureNames: Set<String> {
        get {
            return ["image0"]
        }
    }
    
    /// Returns the pixel buffer wrapped as an `MLFeatureValue` when asked for
    /// "image0"; `nil` for any other feature name.
    func featureValue(for featureName: String) -> MLFeatureValue? {
        if (featureName == "image0") {
            return MLFeatureValue(pixelBuffer: image0)
        }
        return nil
    }
    
    /// Creates the input from a pixel buffer; callers are responsible for supplying
    /// a 224x224 BGRA buffer matching the declared format above.
    init(image0: CVPixelBuffer) {
        self.image0 = image0
    }
}
/// Model Prediction Output Type
///
/// Auto-generated wrapper around the feature provider Core ML returns from a
/// prediction. Exposes the model's two outputs ("scores0" and "classLabel") as
/// typed, lazily-extracted properties.
@available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
class AutoMLOutput : MLFeatureProvider {
    /// Source provided by CoreML
    private let provider : MLFeatureProvider
    /// scores0 as dictionary of strings to doubles
    // Lazily pulled from `provider`. The force unwrap/cast are deliberate in this
    // generated code: Core ML guarantees the declared output features exist.
    lazy var scores0: [String : Double] = {
        [unowned self] in return self.provider.featureValue(for: "scores0")!.dictionaryValue as! [String : Double]
    }()
    /// classLabel as string value
    // NOTE: for this AutoML export the label strings appear to be opaque encoded
    // IDs rather than human-readable names — see the discussion above.
    lazy var classLabel: String = {
        [unowned self] in return self.provider.featureValue(for: "classLabel")!.stringValue
    }()
    // Forwards the underlying provider's feature names unchanged.
    var featureNames: Set<String> {
        return self.provider.featureNames
    }
    
    /// Forwards feature lookup straight to the wrapped provider.
    func featureValue(for featureName: String) -> MLFeatureValue? {
        return self.provider.featureValue(for: featureName)
    }
    /// Builds an output from already-extracted values (used for manual construction).
    /// `try!` is safe here by construction: the dictionary literal always satisfies
    /// MLDictionaryFeatureProvider's requirements.
    init(scores0: [String : Double], classLabel: String) {
        self.provider = try! MLDictionaryFeatureProvider(dictionary: ["scores0" : MLFeatureValue(dictionary: scores0 as [AnyHashable : NSNumber]), "classLabel" : MLFeatureValue(string: classLabel)])
    }
    /// Wraps the provider Core ML hands back from `MLModel.prediction`.
    init(features: MLFeatureProvider) {
        self.provider = features
    }
}
/// Class for model loading and prediction
///
/// Auto-generated facade over the compiled `.mlmodelc` bundled with the app.
/// Provides typed convenience wrappers around `MLModel.prediction`.
@available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
class AutoML {
    var model: MLModel
/// URL of model assuming it was installed in the same bundle as this class
    // Force unwrap is intentional in generated code: a missing compiled model is a
    // packaging error that should fail fast.
    class var urlOfModelInThisBundle : URL {
        let bundle = Bundle(for: AutoML.self)
        return bundle.url(forResource: "AutoML", withExtension:"mlmodelc")!
    }
    /**
        Construct a model with explicit path to mlmodelc file
        - parameters:
           - url: the file url of the model
           - throws: an NSError object that describes the problem
    */
    init(contentsOf url: URL) throws {
        self.model = try MLModel(contentsOf: url)
    }
    /// Construct a model that automatically loads the model from the app's bundle
    /// (`try!` crashes if the bundled model fails to load — treated as programmer error).
    convenience init() {
        try! self.init(contentsOf: type(of:self).urlOfModelInThisBundle)
    }
    /**
        Construct a model with configuration
        - parameters:
           - configuration: the desired model configuration
           - throws: an NSError object that describes the problem
    */
    @available(macOS 10.14, iOS 12.0, tvOS 12.0, watchOS 5.0, *)
    convenience init(configuration: MLModelConfiguration) throws {
        try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
    }
    /**
        Construct a model with explicit path to mlmodelc file and configuration
        - parameters:
           - url: the file url of the model
           - configuration: the desired model configuration
           - throws: an NSError object that describes the problem
    */
    @available(macOS 10.14, iOS 12.0, tvOS 12.0, watchOS 5.0, *)
    init(contentsOf url: URL, configuration: MLModelConfiguration) throws {
        self.model = try MLModel(contentsOf: url, configuration: configuration)
    }
    /**
        Make a prediction using the structured interface
        - parameters:
           - input: the input to the prediction as AutoMLInput
        - throws: an NSError object that describes the problem
        - returns: the result of the prediction as AutoMLOutput
    */
    func prediction(input: AutoMLInput) throws -> AutoMLOutput {
        return try self.prediction(input: input, options: MLPredictionOptions())
    }
    /**
        Make a prediction using the structured interface
        - parameters:
           - input: the input to the prediction as AutoMLInput
           - options: prediction options 
        - throws: an NSError object that describes the problem
        - returns: the result of the prediction as AutoMLOutput
    */
    func prediction(input: AutoMLInput, options: MLPredictionOptions) throws -> AutoMLOutput {
        let outFeatures = try model.prediction(from: input, options:options)
        return AutoMLOutput(features: outFeatures)
    }
    /**
        Make a prediction using the convenience interface
        - parameters:
            - image0 as color (kCVPixelFormatType_32BGRA) image buffer, 224 pixels wide by 224 pixels high
        - throws: an NSError object that describes the problem
        - returns: the result of the prediction as AutoMLOutput
    */
    func prediction(image0: CVPixelBuffer) throws -> AutoMLOutput {
        let input_ = AutoMLInput(image0: image0)
        return try self.prediction(input: input_)
    }
    /**
        Make a batch prediction using the structured interface
        - parameters:
           - inputs: the inputs to the prediction as [AutoMLInput]
           - options: prediction options 
        - throws: an NSError object that describes the problem
        - returns: the result of the prediction as [AutoMLOutput]
    */
    @available(macOS 10.14, iOS 12.0, tvOS 12.0, watchOS 5.0, *)
    func predictions(inputs: [AutoMLInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [AutoMLOutput] {
        // Wrap the typed inputs in a batch provider, run one batched prediction,
        // then rewrap each per-item provider in the typed output class.
        let batchIn = MLArrayBatchProvider(array: inputs)
        let batchOut = try model.predictions(from: batchIn, options: options)
        var results : [AutoMLOutput] = []
        results.reserveCapacity(inputs.count)
        for i in 0..<batchOut.count {
            let outProvider = batchOut.features(at: i)
            let result =  AutoMLOutput(features: outProvider)
            results.append(result)
        }
        return results
    }
}


Thank you!
I replied on Stack Overflow, as you asked there too.