Metal framework on macOS

Date: 2016-12-28 03:36:38

Tags: macos metal

I am creating a simple texture display that essentially renders video frames in BGRA format through Metal. I followed the same steps described in the Metal WWDC sessions, but I run into a problem when creating the render command encoder. My code is:

id <MTLDevice> device = MTLCreateSystemDefaultDevice();
id<MTLCommandQueue> commandQueue = [device newCommandQueue];

id<MTLLibrary> library = [device newDefaultLibrary];

// Create the render pipeline descriptor.
MTLRenderPipelineDescriptor* renderPipelineDesc = [MTLRenderPipelineDescriptor new];
renderPipelineDesc.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;
renderPipelineDesc.vertexFunction = [library newFunctionWithName:@"basic_vertex"];
renderPipelineDesc.fragmentFunction = [library newFunctionWithName:@"basic_fragment"];

NSError* error = nil;
id<MTLRenderPipelineState> renderPipelineState =
    [device newRenderPipelineStateWithDescriptor:renderPipelineDesc
                                           error:&error];

id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];

MTLRenderPassDescriptor* renderPassDesc = [MTLRenderPassDescriptor renderPassDescriptor];

id<CAMetalDrawable> drawable = [_metalLayer nextDrawable];

MTLRenderPassColorAttachmentDescriptor* colorAttachmentDesc = [MTLRenderPassColorAttachmentDescriptor new];
colorAttachmentDesc.texture = drawable.texture;
colorAttachmentDesc.loadAction = MTLLoadActionLoad;
colorAttachmentDesc.storeAction = MTLStoreActionStore;
colorAttachmentDesc.clearColor = MTLClearColorMake(0, 0, 0, 1);

[renderPassDesc.colorAttachments setObject:colorAttachmentDesc atIndexedSubscript:0];

[inTexture replaceRegion:region
             mipmapLevel:0
               withBytes:imageBytes
             bytesPerRow:CVPixelBufferGetBytesPerRow(_image)];

id<MTLRenderCommandEncoder> renderCmdEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDesc];

[renderCmdEncoder setRenderPipelineState:_renderPipelineState];
[renderCmdEncoder endEncoding];

This code crashes with "no render targets found" at the line

id<MTLRenderCommandEncoder> renderCmdEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDesc];

I cannot figure out where and how to set the render target.

2 answers:

Answer 0 (score: 0):

This will work perfectly; let me know if you need help implementing it:

// ViewController.h
@import UIKit;
@import AVFoundation;
@import CoreMedia;
#import <MetalKit/MetalKit.h>
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>

@interface ViewController : UIViewController <MTKViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>  {
    NSString *_displayName;
    NSString *serviceType;
}

// SessionContainer is not defined in this listing; commented out so the
// header compiles as-is.
// @property (retain, nonatomic) SessionContainer *session;
@property (retain, nonatomic) AVCaptureSession *avSession;

@end

#import "ViewController.h"

@interface ViewController () {
    MTKView *_metalView;

    id<MTLDevice> _device;
    id<MTLCommandQueue> _commandQueue;
    id<MTLTexture> _texture;

    CVMetalTextureCacheRef _textureCache;
}

@property (strong, nonatomic) AVCaptureDevice *videoDevice;
@property (nonatomic) dispatch_queue_t sessionQueue;

@end

@implementation ViewController

- (void)viewDidLoad {
    NSLog(@"%s", __PRETTY_FUNCTION__);
    [super viewDidLoad];

    _device = MTLCreateSystemDefaultDevice();
    _metalView = [[MTKView alloc] initWithFrame:self.view.bounds];
    [_metalView setContentMode:UIViewContentModeScaleAspectFit];
    _metalView.device = _device;
    _metalView.delegate = self;
    _metalView.clearColor = MTLClearColorMake(1, 1, 1, 1);
    _metalView.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
    // framebufferOnly must be NO so that the MPS filter in drawInMTKView:
    // can write into the drawable's texture.
    _metalView.framebufferOnly = NO;
    _metalView.autoResizeDrawable = NO;

    // Create the command queue once here, rather than on every frame.
    _commandQueue = [_device newCommandQueue];

    CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache);

    [self.view addSubview:_metalView];

    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );

    if ([self setupCamera]) {
        [_avSession startRunning];
    }
}

- (BOOL)setupCamera {
    NSLog(@"%s", __PRETTY_FUNCTION__);
    @try {
        NSError *error;

        _avSession = [[AVCaptureSession alloc] init];
        [_avSession beginConfiguration];
        [_avSession setSessionPreset:AVCaptureSessionPreset640x480];

        // Connect to the default video capture device.
        self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        if (self.videoDevice == nil) return FALSE;

        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error];
        [_avSession addInput:input];

        dispatch_queue_t sampleBufferQueue = dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL);

        // Deliver frames as 32-bit BGRA to match the Metal texture format.
        AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
        [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
        [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
        [dataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];

        [_avSession addOutput:dataOutput];
        [_avSession commitConfiguration];
    } @catch (NSException *exception) {
        NSLog(@"%s - %@", __PRETTY_FUNCTION__, exception.description);
        return FALSE;
    }
    // A return inside @finally would override the @catch return value,
    // so the success path returns here instead.
    return TRUE;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    {
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);

        // Wrap the pixel buffer in a Metal texture via the texture cache.
        CVMetalTextureRef texture = NULL;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &texture);
        if (status == kCVReturnSuccess)
        {
            _metalView.drawableSize = CGSizeMake(width, height);
            // Keep a reference to the texture for the next draw pass.
            _texture = CVMetalTextureGetTexture(texture);
            CFRelease(texture);
        }
        }
    }
}

- (void)drawInMTKView:(MTKView *)view {
    // creating command encoder
    if (_texture) {
        id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
        id<MTLTexture> drawingTexture = view.currentDrawable.texture;

        // set up and encode the filter
        MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:_device sigma:5];

        [filter encodeToCommandBuffer:commandBuffer sourceTexture:_texture destinationTexture:drawingTexture];

        // committing the drawing
        [commandBuffer presentDrawable:view.currentDrawable];
        [commandBuffer commit];
        _texture = nil;
    }
}

- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {

}

@end

Answer 1 (score: 0):

You should try one of the following:

1. Instead of creating a new render pass descriptor, use the current render pass descriptor object from the MTKView object. This render pass descriptor is already configured, so you don't need to set anything up yourself. Sample code given below -

renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0)
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].storeAction = .store

renderPassDescriptor.depthAttachment.clearDepth = 1.0
renderPassDescriptor.depthAttachment.loadAction = .clear
renderPassDescriptor.depthAttachment.storeAction = .dontCare
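
For reference, here is the same approach in Objective-C, to match the question's code: a minimal sketch of an MTKViewDelegate draw method that relies on the view's pre-configured descriptor. The _commandQueue and _renderPipelineState names are assumptions for objects created elsewhere.

// A minimal sketch (assumed names: _commandQueue, _renderPipelineState).
// The view's currentRenderPassDescriptor already has the drawable's
// texture attached as the render target.
- (void)drawInMTKView:(MTKView *)view {
    MTLRenderPassDescriptor *renderPassDesc = view.currentRenderPassDescriptor;
    id<CAMetalDrawable> drawable = view.currentDrawable;
    if (renderPassDesc == nil || drawable == nil) {
        return; // no drawable is available for this frame
    }

    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    id<MTLRenderCommandEncoder> encoder =
        [commandBuffer renderCommandEncoderWithDescriptor:renderPassDesc];
    [encoder setRenderPipelineState:_renderPipelineState];
    // ... issue draw calls here ...
    [encoder endEncoding];

    [commandBuffer presentDrawable:drawable];
    [commandBuffer commit];
}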

2. You are creating a new render pass descriptor and setting its color attachment from the drawable's texture. Alternatively, you can create a new texture object and set that texture's usage to render target. Whatever you render will then go into this new texture, but it will not be displayed on screen; to display its content, you must copy the texture's contents into the drawable's texture and then present the drawable.

Below is the code for making a render target -

let view = self.view as! MTKView
let textDesc = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .bgra8Unorm,
                                                        width: Int(view.frame.width),
                                                        height: Int(view.frame.height),
                                                        mipmapped: false)
textDesc.depth = 1
// The usage must include renderTarget for the texture to be usable as a
// render pass color attachment.
textDesc.usage = [MTLTextureUsage.renderTarget, MTLTextureUsage.shaderRead]
textDesc.storageMode = .private
mainPassFrameBuffer = device.makeTexture(descriptor: textDesc)
renderPassDescriptor.colorAttachments[0].texture = mainPassFrameBuffer
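
And a minimal Objective-C sketch of the copy-and-present step described in point 2. The commandQueue, metalLayer, and mainPassFrameBuffer names are assumptions; the offscreen texture must match the drawable's size and pixel format, and the layer's framebufferOnly is assumed to be NO so its texture is a valid blit destination.

// Copy the offscreen render target into the drawable's texture and present.
id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];
id<CAMetalDrawable> drawable = [metalLayer nextDrawable];

id<MTLBlitCommandEncoder> blitEncoder = [commandBuffer blitCommandEncoder];
[blitEncoder copyFromTexture:mainPassFrameBuffer
                 sourceSlice:0
                 sourceLevel:0
                sourceOrigin:MTLOriginMake(0, 0, 0)
                  sourceSize:MTLSizeMake(mainPassFrameBuffer.width,
                                         mainPassFrameBuffer.height, 1)
                   toTexture:drawable.texture
            destinationSlice:0
            destinationLevel:0
           destinationOrigin:MTLOriginMake(0, 0, 0)];
[blitEncoder endEncoding];

[commandBuffer presentDrawable:drawable];
[commandBuffer commit];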
}