AI
Below is a simple Swift example that demonstrates how to use Core ML, together with the Vision framework, for image classification in an iOS app. The code assumes you have a Core ML model named YourModelName that can classify images; replace YourModelName with the actual name of your machine learning model.
First, make sure you have added the Core ML model file to your Xcode project; Xcode automatically generates a Swift class with the same name as the model, which the code below uses.
Here’s the entire Swift code for a simple view controller that uses the model to classify an image:
import UIKit
import CoreML
import Vision

class ViewController: UIViewController {

    // ImageView to display the selected image
    @IBOutlet weak var imageView: UIImageView!

    // Label to display the classification result
    @IBOutlet weak var resultLabel: UILabel!

    // Vision wrapper around the Core ML model (loaded once, lazily)
    lazy var model: VNCoreMLModel = {
        do {
            let config = MLModelConfiguration()
            let model = try VNCoreMLModel(for: YourModelName(configuration: config).model)
            return model
        } catch {
            fatalError("Failed to load Vision ML model: \(error)")
        }
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        resultLabel.text = "Choose an image to start classification"
    }

    // Classify the given image; the request runs on a background queue
    // so the main thread stays responsive.
    func classifyImage(_ image: UIImage) {
        guard let cgImage = image.cgImage else {
            resultLabel.text = "Failed to get CGImage"
            return
        }
        let request = VNCoreMLRequest(model: model) { [weak self] request, error in
            self?.processClassifications(for: request, error: error)
        }
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            do {
                try handler.perform([request])
            } catch {
                // UI updates must happen on the main thread.
                DispatchQueue.main.async {
                    self?.resultLabel.text = "Failed to perform classification: \(error.localizedDescription)"
                }
            }
        }
    }

    // Update the UI with classification results (always on the main thread)
    func processClassifications(for request: VNRequest, error: Error?) {
        DispatchQueue.main.async {
            if let classifications = request.results as? [VNClassificationObservation] {
                if classifications.isEmpty {
                    self.resultLabel.text = "Nothing recognized."
                } else {
                    // Show the two most confident labels
                    let topClassifications = classifications.prefix(2)
                    let descriptions = topClassifications.map { classification in
                        String(format: "%.2f", classification.confidence * 100) + "% - " + classification.identifier
                    }
                    self.resultLabel.text = "Classification:\n" + descriptions.joined(separator: "\n")
                }
            } else if let error = error {
                self.resultLabel.text = "Classification failed: \(error.localizedDescription)"
            } else {
                self.resultLabel.text = "Unknown error"
            }
        }
    }

    // Present the system photo picker
    @IBAction func selectImage(_ sender: Any) {
        let imagePickerController = UIImagePickerController()
        imagePickerController.delegate = self
        imagePickerController.sourceType = .photoLibrary
        present(imagePickerController, animated: true)
    }
}

extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    // Delegate method called when the user picks an image
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
        picker.dismiss(animated: true)
        guard let image = info[.originalImage] as? UIImage else { return }
        imageView.image = image
        classifyImage(image)
    }
}
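For a quick smoke test without going through the photo picker, you could classify a bundled image as soon as the view appears. This is just an illustrative snippet to add inside ViewController; the asset name "TestImage" is a placeholder for an image in your asset catalog:

    // Quick smoke test: classify a bundled image on launch.
    // "TestImage" is a placeholder asset name.
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        if let testImage = UIImage(named: "TestImage") {
            classifyImage(testImage)
        }
    }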
Please ensure that you have connected the imageView and resultLabel outlets in your storyboard or xib file, wired the selectImage action to a button, and set the class of your view controller to ViewController.
Also, remember to declare the appropriate usage descriptions in your app’s Info.plist file if you are accessing the user’s photo library.
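The system picker used above generally does not require photo library permission on modern iOS versions, since it runs out of process. If you do read the library directly through the Photos framework, though, you need an NSPhotoLibraryUsageDescription entry in Info.plist and should check authorization at runtime. Here is a minimal sketch (the helper name requestPhotoLibraryAccess is my own, and it uses the iOS 14+ API):

import Photos

// Request read access to the photo library before using the Photos framework directly.
// Requires an NSPhotoLibraryUsageDescription entry in Info.plist.
func requestPhotoLibraryAccess(completion: @escaping (Bool) -> Void) {
    PHPhotoLibrary.requestAuthorization(for: .readWrite) { status in
        // The callback may arrive on a background thread, so hop back to main.
        DispatchQueue.main.async {
            completion(status == .authorized || status == .limited)
        }
    }
}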
Finally, double-check that YourModelName matches the name of your Core ML model, and that the model is compatible with the Core ML version and deployment target your app uses.
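If you prefer not to depend on the Xcode-generated class, you can also load the compiled model from the app bundle directly. A minimal sketch, assuming the compiled model ships as YourModelName.mlmodelc (the resource name is a placeholder; Xcode compiles .mlmodel files to the .mlmodelc format at build time):

import CoreML
import Vision

// Load the compiled Core ML model from the bundle without the generated class.
func loadVisionModel() throws -> VNCoreMLModel {
    guard let url = Bundle.main.url(forResource: "YourModelName", withExtension: "mlmodelc") else {
        throw NSError(domain: "ModelLoading", code: 1,
                      userInfo: [NSLocalizedDescriptionKey: "Model not found in app bundle"])
    }
    let mlModel = try MLModel(contentsOf: url)
    return try VNCoreMLModel(for: mlModel)
}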