in SemanticSegmentationSample/Common/SemanticMapToImage.swift [59:83]
/// Encodes a compute pass that colorizes a class-index semantic map into a BGRA texture.
/// - Parameters:
///   - commandBuffer: Command buffer to encode the compute pass into; the caller is
///     responsible for committing it.
///   - semanticMap: 2-D shaped array of per-pixel class indices.
///     NOTE(review): `shape[0]` is used as texture *width* and `shape[1]` as *height* —
///     confirm this matches the model's output layout ([height, width] is also common).
///   - numClasses: Number of segmentation classes; forwarded to the kernel at buffer index 0.
/// - Returns: The colorized output texture, or `nil` if the encoder or texture could not be created.
func encodeComputePipeline(commandBuffer: MTLCommandBuffer, semanticMap: MLShapedArray<Int32>, numClasses: Int) -> MTLTexture? {
    let (width, height) = (semanticMap.shape[0], semanticMap.shape[1])
    // Create the destination texture BEFORE opening the encoder: returning early after
    // makeComputeCommandEncoder() without endEncoding() would leave an open encoder on
    // the command buffer and invalidate any further encoding on it.
    guard let outputTexture = makeTexture(width: width, height: height, pixelFormat: .bgra8Unorm) else { return nil }
    guard let commandEncoder = commandBuffer.makeComputeCommandEncoder() else { return nil }
    commandEncoder.setComputePipelineState(pipelineState)
    commandEncoder.setTexture(sourceTexture(semanticMap), index: 0)
    commandEncoder.setTexture(outputTexture, index: 1)
    // The kernel expects a 32-bit class count. Convert explicitly so the variable's byte
    // width matches the `MemoryLayout<Int32>.size` length passed to setBytes — copying
    // 4 bytes out of a native 8-byte Int only works by accident on little-endian.
    var classCount = Int32(numClasses)
    commandEncoder.setBytes(&classCount, length: MemoryLayout<Int32>.size, index: 0)
    // One thread per output pixel; size the threadgroup from the pipeline's SIMD width.
    let w = pipelineState.threadExecutionWidth
    let h = pipelineState.maxTotalThreadsPerThreadgroup / w
    let threadsPerThreadgroup = MTLSizeMake(w, h, 1)
    let threadsPerGrid = MTLSize(width: outputTexture.width,
                                 height: outputTexture.height,
                                 depth: 1)
    // dispatchThreads handles grids that are not a multiple of the threadgroup size
    // (requires a device with non-uniform threadgroup size support).
    commandEncoder.dispatchThreads(threadsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
    commandEncoder.endEncoding()
    return outputTexture
}