Can NSInputStream data be received and used to play video?

Date: 2014-11-11 08:54:06

Tags: ios objective-c nsstream nsinputstream nsoutputstream

I am using NSInputStream & NSOutputStream to send and receive data. On the server (as I call it), I open the streams to send data to the client:

- (BOOL)openStreams
{
    //NSLog(@"SERVER: Open stream");
    [self.delegate callbackMessageFromServer:@"SERVER: Open stream"];

    NSURL *url = [[NSBundle mainBundle] URLForResource:@"video.mp4" withExtension:nil];
    NSLog(@"path %@", url);

    // Read the file incrementally through the input stream rather than
    // loading the whole video into memory at once.
    self.inputStream = [NSInputStream inputStreamWithURL:url];

    // self.outputStream is assumed to have been created elsewhere,
    // e.g. from the socket connection to the client.
    [self.inputStream setDelegate:self];
    [self.outputStream setDelegate:self];

    [self.inputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    [self.outputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];

    [self.inputStream open];
    [self.outputStream open];

    return YES;
}
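Note that the snippet above never creates self.outputStream; it presumably comes from the accepted socket connection. A minimal sketch of one way to obtain it, assuming you are handed a native socket handle from an accept callback (the method name here is illustrative, not from the question):

// Hypothetical helper: wrap an accepted native socket in a stream pair.
// CFStreamCreatePairWithSocket is a real CoreFoundation call; everything
// else here is an assumption for this sketch.
- (void)attachStreamsToNativeSocket:(CFSocketNativeHandle)fd
{
    CFReadStreamRef readStream = NULL;
    CFWriteStreamRef writeStream = NULL;
    CFStreamCreatePairWithSocket(kCFAllocatorDefault, fd, &readStream, &writeStream);

    // Close the underlying socket when the streams are closed.
    CFWriteStreamSetProperty(writeStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);

    self.outputStream = (__bridge_transfer NSOutputStream *)writeStream;

    // The file itself is read with the separate NSInputStream in openStreams;
    // readStream is unused in this direction.
    if (readStream) CFRelease(readStream);
}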

And in the stream event handler:

- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode
{
    switch (eventCode) {
        case NSStreamEventHasSpaceAvailable:
        {
            [self sendData];
        } break;

        default:
            break;
    }
}
- (void)sendData
{
    // Refill the buffer from the file once everything previously read has been sent.
    if (self.bufferOffset == self.bufferLimit)
    {
        NSInteger bytesRead = [self.inputStream read:self.buffer maxLength:kSendBufferSize];
        if (bytesRead == -1)
        {
            NSLog(@"Server : File read error");
            // close stream
        }
        else if (bytesRead == 0)
        {
            NSLog(@"Server: Sending data successful");
            // close stream
        }
        else
        {
            self.bufferOffset = 0;
            self.bufferLimit = bytesRead;
        }
    }
    // Write as much of the buffer as the output stream will accept right now.
    if (self.bufferOffset != self.bufferLimit)
    {
        NSInteger bytesWritten;
        bytesWritten = [self.outputStream write:&self.buffer[self.bufferOffset] maxLength:self.bufferLimit - self.bufferOffset];
        assert(bytesWritten != 0);
        if (bytesWritten == -1)
        {
            // close stream
        }
        else
        {
            self.bufferOffset += bytesWritten;
        }
    }
}
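The "// close stream" placeholders above would typically be a small teardown helper along these lines (the stream properties match the question; the rest is a sketch):

// Possible teardown for the "// close stream" placeholders above.
- (void)closeStreams
{
    [self.inputStream setDelegate:nil];
    [self.outputStream setDelegate:nil];
    [self.inputStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    [self.outputStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    [self.inputStream close];
    [self.outputStream close];
    self.inputStream = nil;
    self.outputStream = nil;
}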

On the client side, how do I receive the data and play the video? Thanks.

1 Answer:

Answer 0: (score: 0)

Here is more efficient code (I'm sure you can tell where everything goes; I have a working sample app, if you want it):

case NSStreamEventHasBytesAvailable: {
            NSLog(@"NSStreamEventHasBytesAvailable");
            uint8_t mbuf[DATA_LENGTH];   // a byte buffer, not an array of pointers
            // mdata (NSMutableData) and mlen are ivars that persist across events
            mlen = [(NSInputStream *)stream read:mbuf maxLength:DATA_LENGTH];
            NSLog(@"mlen == %lu", (unsigned long)mlen);
            [mdata appendBytes:(const void *)mbuf length:mlen];
            NSLog(@"mdata length == %lu", (unsigned long)mdata.length);
            // A short read is treated as the end of a frame
            if (mlen < DATA_LENGTH) {
                NSLog(@"displayImage");
                UIImage *image = [UIImage imageWithData:mdata];
                UIView *preview = self.peerConnectionViewController.view.subviews[0];
                [preview.layer setContents:(__bridge id)image.CGImage];
                mdata = nil;
                mlen  = DATA_LENGTH;
                mdata = [[NSMutableData alloc] init];
            }
        } break;
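One caveat: the short-read test above is fragile, because TCP gives no framing guarantees and a read can come back short in the middle of a frame. A more robust approach, not part of the original answer, is to length-prefix each frame; a minimal sketch:

// Sender: prefix each JPEG frame with a 4-byte big-endian length.
// (A real implementation must loop, since write: may accept fewer bytes.)
- (void)writeFrame:(NSData *)frame toStream:(NSOutputStream *)stream
{
    uint32_t len = CFSwapInt32HostToBig((uint32_t)frame.length);
    [stream write:(const uint8_t *)&len maxLength:sizeof(len)];
    [stream write:frame.bytes maxLength:frame.length];
}

// Receiver: pull complete frames out of the accumulated buffer.
- (void)drainFramesFromBuffer:(NSMutableData *)buffer
{
    while (buffer.length >= sizeof(uint32_t)) {
        uint32_t len;
        [buffer getBytes:&len length:sizeof(len)];
        len = CFSwapInt32BigToHost(len);
        if (buffer.length < sizeof(uint32_t) + len) break; // frame not complete yet
        NSData *frame = [buffer subdataWithRange:NSMakeRange(sizeof(uint32_t), len)];
        [buffer replaceBytesInRange:NSMakeRange(0, sizeof(uint32_t) + len) withBytes:NULL length:0];
        [self displayFrame:frame]; // hypothetical: decode with UIImage and display
    }
}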

...

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Convert the camera frame into a UIImage via a bitmap context.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
    CGImageRelease(newImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Compress the frame to JPEG before sending.
    NSData *data = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.25)];

    __block BOOL baseCaseCondition = NO; // obviously this should be data driven, not hardcoded
    __block NSInteger _len = DATA_LENGTH;
    __block NSInteger _byteIndex = 0;
    typedef void (^RecursiveBlock)(void (^)());
    RecursiveBlock aRecursiveBlock;

    // Send the frame in DATA_LENGTH-sized chunks, recursing via the main queue
    // so the run loop keeps turning between writes.
    aRecursiveBlock = ^(RecursiveBlock block) {
        NSLog(@"Block called...");
        baseCaseCondition = (data.length > 0 && _byteIndex < data.length) ? TRUE : FALSE;
        if ((baseCaseCondition) && block)
        {
            _len = (data.length - _byteIndex) == 0 ? 1 : (data.length - _byteIndex) < DATA_LENGTH ? (data.length - _byteIndex) : DATA_LENGTH;
            //
            NSLog(@"START | byteIndex: %ld/%lu  writing len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)_len);
            //
            uint8_t bytes[_len]; // a byte buffer, not an array of pointers
            [data getBytes:bytes range:NSMakeRange(_byteIndex, _len)];
            _byteIndex += [self.outputStream write:(const uint8_t *)bytes maxLength:_len];
            //
            NSLog(@"END | byteIndex: %ld/%lu wrote len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)_len);
            //
            // On a serial queue like the main queue, the barrier behaves like a plain dispatch_async.
            dispatch_barrier_async(dispatch_get_main_queue(), ^{
                block(block);
            });
        }
    };

    if (self.outputStream.hasSpaceAvailable)
            aRecursiveBlock(aRecursiveBlock);
}