I'm converting an app from OpenGL to Metal, using MetalKit. The original app allowed the user to print the OpenGL-generated screen being viewed (I've also done this with Core Graphics). I'd like to do the same in Metal. I thought it would be easy to find a solution or at least an approach to the problem, but I'm stuck. Has anyone solved this, or does anyone have any inklings about what I should be looking for?
I'm a seasoned Mac programmer (would prefer an Objective-C solution but I do a bit of work with everything else) and am an advanced novice at Metal.
I assume you want to save the image from an MTKView, but this function should work for any texture. Also, don't forget to set the view's framebufferOnly property to NO; otherwise the drawable's texture cannot be read back on the CPU.
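For reference, a minimal sketch of the view setup (the method name configureMetalView:device: and the _commandQueue ivar are my assumptions; adapt to wherever you configure your MTKView):

- (void)configureMetalView:(MTKView *)view device:(id<MTLDevice>)device
{
    view.device = device;
    view.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
    // Allow the drawable's texture to be read back with -getBytes:...
    view.framebufferOnly = NO;
    _commandQueue = [device newCommandQueue];
}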
Property (or instance variable) used to request a screenshot on the next frame:

BOOL takeScreenshot = YES;

/// Called whenever the view needs to render a frame
- (void)drawInMTKView:(nonnull MTKView *)view
{
    // Create a new command buffer for each render pass to the current drawable
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    commandBuffer.label = @"MyCommand";

    // Obtain a renderPassDescriptor generated from the view's drawable textures
    MTLRenderPassDescriptor *renderPassDescriptor = view.currentRenderPassDescriptor;
    id<MTLTexture> currentSwapChainTexture = view.currentDrawable.texture;

    // Your render code (encode, end encoding, present the drawable)...

    // Register the handler before committing; it runs once the GPU has finished the frame
    [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> cb)
    {
        if (takeScreenshot)
        {
            SaveTexture(currentSwapChainTexture);
            takeScreenshot = NO;
        }
    }];

    // Finalize rendering here & push the command buffer to the GPU
    [commandBuffer commit];

    // CPU <-> GPU synchronization: block until the frame (and the read-back) is done
    if (takeScreenshot) [commandBuffer waitUntilCompleted];
}
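One caveat: on macOS with a discrete GPU the drawable's texture may use managed storage, in which case the GPU's writes aren't automatically visible to the CPU. If -getBytes: returns blank or stale data, encode a blit that synchronizes the texture before the completed handler reads it. A rough sketch, assuming it goes inside drawInMTKView: after your render encoder's endEncoding and before commit (the blitEncoder name and the runtime guard are mine):

if (takeScreenshot && currentSwapChainTexture.storageMode == MTLStorageModeManaged)
{
    id<MTLBlitCommandEncoder> blitEncoder = [commandBuffer blitCommandEncoder];
    // Make the GPU's writes to the texture visible to the CPU before -getBytes:
    [blitEncoder synchronizeResource:currentSwapChainTexture];
    [blitEncoder endEncoding];
}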
Save texture function:
void SaveTexture(id<MTLTexture> texture)
{
    // Requires <Accelerate/Accelerate.h> (vImage) and <Photos/Photos.h>.
    int width = (int)texture.width;
    int height = (int)texture.height;
    int bytesPerPixel = 4;
    int bytesPerRow = width * bytesPerPixel;
    int bytesCount = width * height * bytesPerPixel;
    int bitsPerComponent = 8;

    // Read the raw BGRA pixels back from the texture.
    void *imageBytes = malloc(bytesCount);
    void *destBytes = malloc(bytesCount);
    MTLRegion mtlRegion = MTLRegionMake2D(0, 0, width, height);
    [texture getBytes:imageBytes bytesPerRow:bytesPerRow fromRegion:mtlRegion mipmapLevel:0];

    vImage_Buffer src;
    src.data = imageBytes;
    src.width = width;
    src.height = height;
    src.rowBytes = bytesPerRow;

    vImage_Buffer dest;
    dest.data = destBytes;
    dest.width = width;
    dest.height = height;
    dest.rowBytes = bytesPerRow;

    // BGRA -> RGBA (swap the red and blue channels)
    const uint8_t map[4] = {2, 1, 0, 3};
    vImagePermuteChannels_ARGB8888(&src, &dest, map, kvImageNoFlags);

    CGColorSpaceRef cgColorSpaceRef = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3); // kCGColorSpaceSRGB for sRGB
    CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
    CGContextRef context = CGBitmapContextCreate(destBytes, width, height, bitsPerComponent, bytesPerRow, cgColorSpaceRef, bitmapInfo);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    // Your NSImage
    NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];

    // Save to Photos (requires photo library authorization)
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^
    {
        [PHAssetCreationRequest creationRequestForAssetFromImage:image];
    }
    completionHandler:^(BOOL success, NSError *error)
    {
        if (success) printf("Success\n");
    }];

    free(imageBytes);
    free(destBytes);
    CGColorSpaceRelease(cgColorSpaceRef);
    CGContextRelease(context);
    CGImageRelease(cgImage);
}
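Since your original goal was printing rather than saving to Photos: once you have the NSImage, you can hand it to AppKit's printing machinery instead of (or in addition to) the PHPhotoLibrary call, or write the CGImage to disk with CGImageDestination if you just want a file. A minimal printing sketch (PrintImage is a hypothetical helper name, not part of any framework):

void PrintImage(NSImage *image)
{
    // Wrap the image in a view sized to the image and run a standard print operation.
    // Must run on the main thread; dispatch there from the completed handler if needed.
    NSImageView *imageView = [[NSImageView alloc] initWithFrame:
        NSMakeRect(0, 0, image.size.width, image.size.height)];
    imageView.image = image;

    NSPrintOperation *op = [NSPrintOperation printOperationWithView:imageView];
    [op runOperation]; // or -runOperationModalForWindow:delegate:didRunSelector:contextInfo:
}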