in performance/SwiftUIPerformanceQuickstart/PerformanceExample/Shared/Process.swift [281:340]
/// Generates an attention-based saliency map for the current `image`,
/// scales it up to the source image's dimensions, and publishes it via
/// `updateSaliencyMap(to:)`.
///
/// The Vision request's duration is recorded in a custom performance trace
/// named "Saliency_Map". Progress and failures are reported through
/// `updateStatus(to:)` with the `.saliencyMap` process tag.
func generateSaliencyMap() {
    guard let uiImage = image, let ciImage = CIImage(image: uiImage) else {
        print("Could not convert image into correct format.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    let handler = VNImageRequestHandler(ciImage: ciImage, options: [:])
    let request = VNGenerateAttentionBasedSaliencyImageRequest()
    updateStatus(to: .running(.saliencyMap))
    let trace = makeTrace(called: "Saliency_Map")
    trace?.start()
    do {
        // Time only the Vision request itself. Stop the trace on both the
        // success and error paths so a thrown error doesn't leave it running.
        try handler.perform([request])
        trace?.stop()
    } catch {
        trace?.stop()
        // Surface the actual Vision error instead of silently discarding it
        // (the previous `try?` lost it and fell through to a generic message).
        print("Failed to perform saliency request: \(error)")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    #if swift(>=5.5)
    // Newer SDKs type `results` as `[VNSaliencyImageObservation]?`, so no cast
    // is needed; older SDKs (else branch) return `[VNObservation]?`.
    guard let observation = request.results?.first else {
        print("Failed to generate saliency map.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    #else
    guard let observation = request.results?.first as? VNSaliencyImageObservation else {
        print("Failed to generate saliency map.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    #endif
    // Upscale the low-resolution saliency buffer to match the source image.
    // NOTE(review): the aspect-ratio math assumes the saliency pixel buffer is
    // square, which holds for attention-based saliency — confirm if the
    // request type ever changes.
    let inputImage = CIImage(cvPixelBuffer: observation.pixelBuffer)
    let scale = Double(ciImage.extent.height) / Double(inputImage.extent.height)
    let aspectRatio = Double(ciImage.extent.width) / Double(ciImage.extent.height)
    guard let scaleFilter = CIFilter(name: "CILanczosScaleTransform") else {
        print("Failed to create scaling filter.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    scaleFilter.setValue(inputImage, forKey: kCIInputImageKey)
    scaleFilter.setValue(scale, forKey: kCIInputScaleKey)
    scaleFilter.setValue(aspectRatio, forKey: kCIInputAspectRatioKey)
    guard let scaledImage = scaleFilter.outputImage else {
        print("Failed to scale saliency map.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    // Render through the shared CIContext (single guard replaces the old
    // two-step optional; behavior is unchanged).
    guard let saliencyMap = context.createCGImage(scaledImage, from: scaledImage.extent) else {
        print("Failed to convert saliency map to correct format.")
        updateStatus(to: .failure(.saliencyMap))
        return
    }
    updateSaliencyMap(to: UIImage(cgImage: saliencyMap))
    updateStatus(to: .success(.saliencyMap))
}