I have a class called MLVision which handles the text recognition.
/// Wraps Firebase ML Vision cloud text recognition and tokenises the
/// recognised text into an array of lowercased words.
class MLVision {
    lazy var vision = Vision.vision()
    var textRecognizer: VisionTextRecognizer!
    /// Words produced by `separate(lineText:)` for the current recognition run.
    var textArray: [String] = []
    var lineText: String = ""

    /// Runs cloud text recognition on `image` and passes every recognised
    /// word (trimmed, lowercased, punctuation-stripped) to `completion`.
    ///
    /// NOTE(review): on a recognition error the completion is never invoked,
    /// so the caller cannot tell "no text" apart from "still running".
    /// - Parameters:
    ///   - image: The image to run OCR on.
    ///   - completion: Called on success with all recognised words, in order.
    func startRecognising(image: UIImage, completion: @escaping (_ result: [String]) -> ()) {
        // Bug fix: clear words from any previous run so repeated calls
        // do not accumulate results from earlier images.
        textArray.removeAll()

        let visionImage = VisionImage(image: image)
        let options = VisionCloudTextRecognizerOptions()
        options.languageHints = ["en", "hi"]   // English + Hindi
        textRecognizer = vision.cloudTextRecognizer(options: options)
        // `[weak self]` avoids keeping this object alive just because a
        // cloud request is still in flight.
        textRecognizer.process(visionImage, completion: { [weak self] (features, error) in
            guard let self = self else { return }
            guard error == nil, let features = features else {
                print("No text Found!!")
                return
            }
            for block in features.blocks {
                for line in block.lines {
                    self.lineText = line.text
                    self.separate(lineText: self.lineText)
                }
            }
            completion(self.textArray)
        })
    }

    /// Splits `lineText` on common punctuation/separator characters, trims
    /// whitespace, lowercases each token, and appends non-empty tokens to
    /// `textArray`.
    func separate(lineText: String) {
        let separators = CharacterSet(charactersIn: ":)(,•/·][")
        let words = lineText.components(separatedBy: separators)
        for word in words {
            let token = word.trimmingCharacters(in: .whitespacesAndNewlines).lowercased()
            if !token.isEmpty {
                textArray.append(token)
            }
        }
    }
}
And this is the ViewController where I call the MLVision class.
/// Recognition helper owned by this view controller.
let mlVision = MLVision()

override func viewDidLoad() {
    super.viewDidLoad()
    // Kick off OCR as soon as the view loads, then search the ingredient
    // list with the words that come back.
    // NOTE(review): `croppedImage!` force-unwraps — crashes if the image
    // was never set before this view loads.
    mlVision.startRecognising(image: croppedImage!) { words in
        self.textArray = words
        self.searchIngredients(textArray: self.textArray)
    }
}
So basically my app recognises text, then separates it and passes it back in an array.
I want to show an alert when I scan an image that contains no text.
I know that the
guard error == nil, let features = features else {return}
handles it but I want to check it from my ViewController. Is there a way to do that?
If I got your question right, you want to handle errors from the recognition in the scope where you started it. If so, you can modify your code as shown below using `Result`.
/// Failures surfaced by `MLVision.startRecognising(image:completion:)`.
/// Add further cases here as new failure modes appear.
enum RecognitionError: Error {
    // Raised when the recogniser returns no result or an underlying error.
    case textNotFound
}
/// Runs cloud text recognition on `image` and reports the outcome through
/// `completion` as a `Result`, so the caller can react to failures (e.g.
/// present an alert) instead of the error being swallowed inside this class.
/// - Parameters:
///   - image: The image to run OCR on.
///   - completion: `.success` with all recognised words, or
///     `.failure(.textNotFound)` when recognition produced no result.
func startRecognising(image: UIImage,
                      completion: @escaping (_ result: Result<[String], RecognitionError>) -> ()) {
    // Bug fix: clear words from any previous run so repeated calls
    // do not accumulate results from earlier images.
    textArray.removeAll()

    let visionImage = VisionImage(image: image)
    let options = VisionCloudTextRecognizerOptions()
    options.languageHints = ["en", "hi"]   // English + Hindi
    textRecognizer = vision.cloudTextRecognizer(options: options)
    // `[weak self]` avoids keeping this object alive just because a
    // cloud request is still in flight.
    textRecognizer.process(visionImage, completion: { [weak self] (features, error) in
        guard let self = self else { return }
        guard error == nil, let features = features else {
            // Propagate the failure to the caller; no need to print here
            // since the caller now receives and handles the error.
            completion(.failure(.textNotFound))
            return
        }
        for block in features.blocks {
            for line in block.lines {
                self.lineText = line.text
                self.separate(lineText: self.lineText)
            }
        }
        completion(.success(self.textArray))
    })
}
And handle it in your ViewController:
override func viewDidLoad() {
    super.viewDidLoad()
    mlVision.startRecognising(image: croppedImage!) { result in
        switch result {
        case .success(let array):
            // Bug fix: assign the unwrapped `array`, not `result` itself —
            // `result` is a Result<[String], RecognitionError> and does not
            // type-check as [String].
            self.textArray = array
            self.searchIngredients(textArray: self.textArray)
        case .failure(let error):
            // This is the place to present the "no text found" alert
            // instead of just logging.
            print(error)
        }
    }
}