Crash when sending an ARFrame CVPixelBuffer as a byte array over the network with GStreamer

Date: 2018-05-19 21:26:42

Tags: ios objective-c video-streaming gstreamer arkit

I want to send ARFrame pixel buffer data over the network; my setup is listed below. With this setup, if I try to send the frames, the app crashes inside GStreamer's C code after a few frames, but if I instead send the camera's AVCaptureVideoDataOutput pixel buffers, the stream works fine. I have set the AVCaptureSession's pixel format type to

kCVPixelFormatType_420YpCbCr8BiPlanarFullRange

so it matches the format that ARFrame provides. Please help; I have not been able to find a solution. I apologize if my English is poor or if I have left something out, please ask me.
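
For context, requesting that pixel format from an AVCaptureVideoDataOutput typically looks like the minimal sketch below (the videoOutput and captureSession names are placeholders, not code from the question; assumes AVFoundation is imported):

    // Ask the capture output for the same bi-planar YCbCr format that ARFrame.capturedImage uses
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoOutput.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    [videoOutput setSampleBufferDelegate:self
                                   queue:dispatch_queue_create("video.capture", DISPATCH_QUEUE_SERIAL)];
    if ([captureSession canAddOutput:videoOutput]) {
        [captureSession addOutput:videoOutput];
    }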

My setup

  1. Get the ARFrame's pixelBuffer from ARKit's didUpdateFrame delegate (a sketch of this delegate is shown after the code below)
  2. Use a VTCompressionSession
  3. Encode it to H.264
    - (void)SendFrames:(CVPixelBufferRef)pixelBuffer :(NSTimeInterval)timeStamp
    {
      size_t width = CVPixelBufferGetWidth(pixelBuffer);
      size_t height = CVPixelBufferGetHeight(pixelBuffer);
    
      // Lazily create the compression session when the first frame arrives
      if(session == NULL)
      {
          [self initEncoder:width height:height];
      }
    
      // Every frame is submitted with a zero presentation timestamp; the timeStamp argument is not used
      CMTime presentationTimeStamp = CMTimeMake(0, 1);
    
      OSStatus statusCode = VTCompressionSessionEncodeFrame(session, pixelBuffer, presentationTimeStamp, kCMTimeInvalid, NULL, NULL, NULL);
      if (statusCode != noErr) {
    
          // End the session
          VTCompressionSessionInvalidate(session);
          CFRelease(session);
          session = NULL;
          return;
      }
    
      VTCompressionSessionEndPass(session, NULL, NULL);
    }
    
    - (void) initEncoder:(size_t)width  height:(size_t)height
    {
      OSStatus status = VTCompressionSessionCreate(NULL, (int)width, (int)height, kCMVideoCodecType_H264, NULL, NULL, NULL, OutputCallback, NULL, &session);
    
      NSLog(@":VTCompressionSessionCreate %d", (int)status);
    
      if (status != noErr)
      {
        NSLog(@"Unable to create a H264 session");
        return ;
      }
    
      VTSessionSetProperty(session, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
      VTSessionSetProperty(session, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
      VTCompressionSessionPrepareToEncodeFrames(session);
    }
    
  4. Get the sampleBuffer from the callback and convert it to an elementary stream
      void OutputCallback(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
      {
          if (status != noErr) {
              NSLog(@"Error encoding video, err=%lld", (int64_t)status);
              return;
          }
      
          if (!CMSampleBufferDataIsReady(sampleBuffer))
          {
              NSLog(@"didCompressH264 data is not ready ");
              return;
          }
      
          // In this example we will use a NSMutableData object to store the
          // elementary stream.
          NSMutableData *elementaryStream = [NSMutableData data];
      
          // This is the start code that we will write to
          // the elementary stream before every NAL unit
          static const size_t startCodeLength = 4;
          static const uint8_t startCode[] = {0x00, 0x00, 0x00, 0x01};
      
          // Write the SPS and PPS NAL units to the elementary stream 
          CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
      
      
          // Find out how many parameter sets there are
          size_t numberOfParameterSets;
          int AVCCHeaderLength;
          CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
                                                             0, NULL, NULL,
                                                             &numberOfParameterSets,
                                                             &AVCCHeaderLength);
      
      
      
          // Write each parameter set to the elementary stream
          for (int i = 0; i < numberOfParameterSets; i++) {
              const uint8_t *parameterSetPointer;
              int  NALUnitHeaderLengthOut = 0;
      
              size_t parameterSetLength;
              CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
                                                                 i,
                                                                 &parameterSetPointer,
                                                                 &parameterSetLength,
                                                                 NULL, &NALUnitHeaderLengthOut);
      
              // Write the parameter set to the elementary stream
              [elementaryStream appendBytes:startCode length:startCodeLength];
              [elementaryStream appendBytes:parameterSetPointer length:parameterSetLength];
      
          }
      
      
          // Get a pointer to the raw AVCC NAL unit data in the sample buffer
           size_t blockBufferLength;
           uint8_t *bufferDataPointer = NULL;
           size_t lengthAtOffset = 0;
           size_t bufferOffset = 0;
      
      
          CMBlockBufferGetDataPointer(CMSampleBufferGetDataBuffer(sampleBuffer),
                                      bufferOffset,
                                      &lengthAtOffset,
                                      &blockBufferLength,
                                      (char **)&bufferDataPointer);
      
          // Loop through all the NAL units in the block buffer
          // and write them to the elementary stream with
          // start codes instead of AVCC length headers
      
          while (bufferOffset < blockBufferLength - AVCCHeaderLength) {
              // Read the NAL unit length
              uint32_t NALUnitLength = 0;
              memcpy(&NALUnitLength, bufferDataPointer + bufferOffset, AVCCHeaderLength);
              // Convert the length value from Big-endian to Little-endian
              NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
      
              // Write start code to the elementary stream
              [elementaryStream appendBytes:startCode length:startCodeLength];
              // Write the NAL unit without the AVCC length header to the elementary stream
              [elementaryStream appendBytes:bufferDataPointer + bufferOffset + AVCCHeaderLength
                                     length:NALUnitLength];
              // Move to the next NAL unit in the block buffer
              bufferOffset += AVCCHeaderLength + NALUnitLength;
          }
      
          char *bytePtr = (char *)[elementaryStream mutableBytes];
          long maxSize = (long)elementaryStream.length;
      
          CMTime presentationtime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
      
          vidplayer_stream(bytePtr, maxSize, (long)presentationtime.value);
      }
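
Step 1 above is not shown in code; a minimal sketch of an ARSessionDelegate handing frames to SendFrames could look like this (an illustration only, not the asker's actual delegate; assumes ARKit is imported):

    // ARSessionDelegate callback, fired for every new ARFrame
    - (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame
    {
        // capturedImage is a kCVPixelFormatType_420YpCbCr8BiPlanarFullRange pixel buffer
        CVPixelBufferRef pixelBuffer = frame.capturedImage;
        [self SendFrames:pixelBuffer :frame.timestamp];
    }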
      

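The vidplayer_stream function called at the end of OutputCallback, i.e. the piece that actually hands the bytes to GStreamer, is not included in the question. As an assumption about what such a function might do, an Annex-B elementary stream is usually pushed into a pipeline through an appsrc element, roughly like this:

    #include <gst/gst.h>
    #include <gst/app/gstappsrc.h>

    // Hypothetical sketch: wrap one Annex-B frame in a GstBuffer and hand it to an appsrc element
    static void push_frame(GstElement *appsrc, const char *data, long size, long pts_ns)
    {
        GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);
        gst_buffer_fill(buffer, 0, data, size);   // copy the elementary-stream bytes into the buffer
        GST_BUFFER_PTS(buffer) = pts_ns;          // presentation timestamp in nanoseconds

        // push_buffer takes ownership of the buffer; the flow return reports pipeline errors
        GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer);
        if (ret != GST_FLOW_OK) {
            g_printerr("appsrc push failed: %d\n", (int)ret);
        }
    }
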
0 Answers:

There are no answers.