Hi, I am creating an app that recognises and extracts text from an image.
The line of code `request.recognitionLevel = .accurate` can instead be set to `request.recognitionLevel = .fast`.
I would like to build a toggle that switches the value of `request.recognitionLevel` between fast and accurate, but I am unsure how to do this.
Any help would be greatly appreciated.
Many thanks for your time.
My code is shown below:
import SwiftUI
import UIKit
import Vision
/// Lets the user pick a photo from the library, displays it, and extracts any
/// text it contains with the Vision framework, writing the result into `textView`.
class ViewController2: UIViewController {
    @IBOutlet weak var imageView: UIImageView!
    @IBOutlet weak var textView: UITextView!
    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!
    @IBOutlet weak var button: UIButton!
    // FIX: viewDidLoad styled `shareButton` but the outlet was never declared,
    // which is a compile error. Connect this outlet to the Share button in the storyboard.
    @IBOutlet weak var shareButton: UIButton!

    /// Recognition level applied to each OCR request. Defaults to `.accurate`
    /// (the original behaviour). Flip it — e.g. from `recognitionLevelToggled(_:)`
    /// below — to switch between `.fast` and `.accurate`.
    var recognitionLevel: VNRecognizeTextRequest.RecognitionLevel = .accurate

    // Text-recognition request; rebuilt with a fresh completion handler per scan.
    var request = VNRecognizeTextRequest(completionHandler: nil)

    override func viewDidLoad() {
        super.viewDidLoad()
        // Style the 'Select Photo' button.
        button.backgroundColor = .systemCyan
        button.setTitle("Select Photo", for: .normal)
        button.setTitleColor(.white, for: .normal)
        button.layer.cornerRadius = 25
        button.layer.borderWidth = 20
        button.layer.borderColor = UIColor.systemCyan.cgColor
        // Style the 'Share' button.
        shareButton.layer.cornerRadius = 25
        shareButton.layer.borderWidth = 10
        shareButton.layer.borderColor = UIColor.systemCyan.cgColor
        stopAnimating() // Hide the activity indicator until a scan starts.
    }

    /// Shows and spins the activity indicator.
    private func startAnimating() {
        activityIndicator.startAnimating()
    }

    /// Stops and hides the activity indicator.
    private func stopAnimating() {
        activityIndicator.stopAnimating()
    }

    /// Toggle between fast and accurate recognition. Wire a `UISwitch`'s
    /// "Value Changed" event to this action: switch ON = fast, OFF = accurate.
    /// The new level takes effect on the next call to `VisionOCR(image:)`.
    @IBAction func recognitionLevelToggled(_ sender: UISwitch) {
        recognitionLevel = sender.isOn ? .fast : .accurate
    }

    /// Forwards the 'Select Photo' button tap to the picker-presenting helper.
    @IBAction func selectPhotoButton(_ sender: Any) {
        SelectPhotoButtonPressed()
    }

    /// Presents the system photo-library picker when the library is available.
    private func SelectPhotoButtonPressed() {
        if UIImagePickerController.isSourceTypeAvailable(.photoLibrary) {
            let imageSelector = UIImagePickerController() // Apple's photo-library picker UI
            imageSelector.sourceType = .photoLibrary
            imageSelector.delegate = self // Dismissal handled in the delegate extension
            self.present(imageSelector, animated: true, completion: nil)
        }
    }

    /// Runs Vision text recognition on `image` and displays the recognised text.
    /// Work is performed on a background queue; UI updates hop back to the main queue.
    private func VisionOCR(image: UIImage?) {
        var textString = "" // Accumulates one line per recognised observation
        request = VNRecognizeTextRequest(completionHandler: { (request, error) in
            // Results arrive on the request itself once recognition finishes.
            guard let results = request.results as? [VNRecognizedTextObservation] else {
                fatalError("Received Invalid Observation") // FIX: corrected "Recieved" typo
            }
            for visionResult in results {
                // topCandidates(1) yields the single best reading for this observation.
                guard let recognisedText = visionResult.topCandidates(1).first else {
                    print("No text")
                    continue
                }
                textString += "\n\(recognisedText.string)"
            }
            // FIX: update the UI once after the loop instead of dispatching to the
            // main queue for every observation; the final textView contents are identical.
            DispatchQueue.main.async {
                self.stopAnimating()
                self.textView.text = textString
            }
        })
        // Minimum text height relative to image height below which text is ignored (1/32).
        request.minimumTextHeight = 0.03125
        // Use the toggleable property instead of a hard-coded level.
        request.recognitionLevel = recognitionLevel
        // FIX: "en_UK" is not a valid identifier — Vision expects hyphenated BCP-47
        // tags, and British English is "en-GB".
        request.recognitionLanguages = ["en-GB", "en-US", "fr-FR", "it-IT", "de-DE", "es-ES", "pt-BR", "zh-Hans", "zh-Hant", "yue-Hans", "yue-Hant", "ko-KR", "ja-JP", "ru-RU", "uk-UA"]
        request.usesLanguageCorrection = true // Applies language-model correction
        let requests = [request]
        // Run recognition off the main thread; userInitiated = user is waiting on it.
        DispatchQueue.global(qos: .userInitiated).async {
            guard let img = image?.cgImage else { fatalError("Missing image to scan") }
            let requestHandler = VNImageRequestHandler(cgImage: img, options: [:])
            // NOTE(review): try? silently discards recognition errors; consider
            // surfacing them to the user instead.
            try? requestHandler.perform(requests)
        }
    }
}
// MARK: - UIImagePickerControllerDelegate / UINavigationControllerDelegate
extension ViewController2: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    /// Invoked when the user picks a photo: dismisses the picker, shows the
    /// activity indicator, clears the previous result, displays the chosen
    /// image, and kicks off text recognition on it.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
        picker.dismiss(animated: true, completion: nil)
        startAnimating()
        textView.text = ""
        let pickedImage = info[.originalImage] as? UIImage
        imageView.image = pickedImage
        VisionOCR(image: pickedImage)
    }
}
/// SwiftUI wrapper that hosts the "selectPhoto" scene from Main.storyboard.
public struct storyboardview2: UIViewControllerRepresentable {
    public func makeUIViewController(context content: Context) -> UIViewController {
        // Load and instantiate the controller with storyboard ID "selectPhoto".
        return UIStoryboard(name: "Main", bundle: .main)
            .instantiateViewController(identifier: "selectPhoto")
    }

    public func updateUIViewController(_ uiViewController: UIViewController, context: Context) {
        // Nothing to push back into the controller on SwiftUI state changes.
    }
}