EXC_BAD_ACCESS at avformat_find_stream_info in FFmpeg

Date: 2018-12-18 23:22:34

Tags: ios ffmpeg red5pro

I have implemented IP camera streaming with FFmpeg using this library: https://github.com/kolyvan/kxmovie. In addition, I am recording the incoming IP camera stream, also using FFmpeg.

Now I am facing a problem: as soon as I add R5ProStreaming.framework to the project and run the app on an actual device, it crashes at if (avformat_find_stream_info(formatCtx, NULL) < 0). When I remove the framework, move it to the trash, and run again, everything works fine.

- (kxMovieError) openInput: (NSString *) path
{
    AVFormatContext *formatCtx = NULL;

    if (_interruptCallback) {

        formatCtx = avformat_alloc_context();
        if (!formatCtx)
            return kxMovieErrorOpenFile;

        AVIOInterruptCB cb = {interrupt_callback, (__bridge void *)(self)};
        formatCtx->interrupt_callback = cb;
    }
    AVDictionary *opts = NULL;

    // Ask the RTSP demuxer to use TCP instead of UDP for transport.
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);

    // Open the input stream and read the header, applying the RTSP options above.
    if (avformat_open_input(&formatCtx, [path cStringUsingEncoding: NSUTF8StringEncoding], NULL, &opts) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
        if (formatCtx)
            avformat_free_context(formatCtx);
        av_dict_free(&opts);
        return kxMovieErrorOpenFile;
    }
    av_dict_free(&opts);

    //----- APP CRASHES HERE WITH EXC_BAD_ACCESS -----//
    if (avformat_find_stream_info(formatCtx, NULL) < 0)
    {
        avformat_close_input(&formatCtx);
        return kxMovieErrorStreamInfoNotFound;
    }
//
    av_dump_format(formatCtx, 0, [path.lastPathComponent cStringUsingEncoding: NSUTF8StringEncoding], false);

    _formatCtx = formatCtx;
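
    // ---- Recording setup: the rest of this method remuxes the opened input into a local output file. ----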

    inputFormatCtx = _formatCtx;
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"newdemo" ofType:@"mov"];

    if (filePath)
    {
        NSLog(@"%s - %d # File found", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        NSLog(@"%s - %d # File NOT found", __PRETTY_FUNCTION__, __LINE__);
    }

    /*
     *  av_find_input_format(const char *short_name)
     *
     *  Find AVInputFormat based on the short name of the input format.
     */
    AVInputFormat *inputFormat = av_find_input_format([@"mpeg" UTF8String]);

    if (inputFormat)
    {
        NSLog(@"%s - %d # inputFormat identifed", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        NSLog(@"%s - %d # inputFormat NOT identifed", __PRETTY_FUNCTION__, __LINE__) ;
    }

    const char *utf8FilePath = [filePath UTF8String];
    NSLog(@"%s - %d # utf8FilePath = %s", __PRETTY_FUNCTION__, __LINE__, utf8FilePath);

    /*
     *  avformat_open_input(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options)
     *
     *  Open an input stream and read the header. The codecs are not opened.
     */
    int openInputValue = 0;
    NSLog(@"%s - %d # openInputValue = %d", __PRETTY_FUNCTION__, __LINE__, openInputValue);

    if (openInputValue == 0)
    {
        NSLog(@"%s - %d # Can open the file", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        NSLog(@"%s - %d # Cannot open the file", __PRETTY_FUNCTION__, __LINE__);
        avformat_close_input(&inputFormatCtx);
    }

    /*
     *  Read packets of a media file to get stream information.
     *
     *  avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)
     */
    //    int streamInfoValue = avformat_find_stream_info(inputFormatCtx, NULL);
    //    NSLog(@"%s - %d # streamInfoValue = %d", __PRETTY_FUNCTION__, __LINE__, streamInfoValue);
    //
    //    if (streamInfoValue < 0)
    //    {
    //        NSLog(@"%s - %d # streamInfoValue Error", __PRETTY_FUNCTION__, __LINE__);
    //        avformat_close_input(&inputFormatCtx);
    //    }

    /*
     *  nb_streams : Number of Audio and Video streams of the input file
     */

    NSUInteger inputStreamCount = inputFormatCtx->nb_streams;
    NSLog(@"%s - %d # inputStreamCount = %lu", __PRETTY_FUNCTION__, __LINE__, (unsigned long)inputStreamCount);

    for(unsigned int i = 0; i<inputStreamCount; i++)
    {
        if(inputFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            NSLog(@"%s - %d # Found Video Stream", __PRETTY_FUNCTION__, __LINE__);
            inputVideoStreamIndex = i;
            inputVideoStream = inputFormatCtx->streams[i];
        }

        if(inputFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            NSLog(@"%s - %d # Found Audio Stream", __PRETTY_FUNCTION__, __LINE__);
            inputAudioStreamIndex = i;
            inputAudioStream = inputFormatCtx->streams[i];
        }
    }

    if(inputVideoStreamIndex == -1 && inputAudioStreamIndex == -1)
    {
        NSLog(@"%s - %d # Have not found any Video or Audio stream", __PRETTY_FUNCTION__, __LINE__);
    }

    /*
     *  Finding duration of the stream
     */
    if(inputFormatCtx->duration == AV_NOPTS_VALUE)
    {
        NSLog(@"%s - %d # Undefined timestamp value", __PRETTY_FUNCTION__, __LINE__);

        if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
        {
            //            if(inputFormatCtx->streams[_videoStream]->duration != AV_NOPTS_VALUE)
            //            {
            inputEndtimeInt64 = (inputFormatCtx->streams[_videoStream]->duration)/(inputFormatCtx->streams[_videoStream]->time_base.den/inputFormatCtx->streams[_videoStream]->time_base.num);
            //            }
            //            else
            //            {
            //                inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
            //
            //            }
        }
        else if(_audioStream != -1 && inputFormatCtx->streams[_audioStream])
        {
            //            if(inputFormatCtx->streams[_audioStream]->duration != AV_NOPTS_VALUE)
            //            {
            inputEndtimeInt64 = (inputFormatCtx->streams[_audioStream]->duration)/(AV_TIME_BASE);
            //            }
            //            else
            //            {
            //                inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
            //
            //            }
        }
    }
    else
    {
        NSLog(@"%s - %d # Defined timestamp value", __PRETTY_FUNCTION__, __LINE__);

        inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
    }
    NSLog(@"%s - %d # inputEndtimeInt64 = %lld", __PRETTY_FUNCTION__, __LINE__, inputEndtimeInt64);

    /*
     *  Finding out the frame rate
     */
    if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
    {

        framesPerSec =  (inputFormatCtx->streams[_videoStream]->r_frame_rate.num)/ (inputFormatCtx->streams[_videoStream]->r_frame_rate.den);
    }
    else
    {
        framesPerSec = 24;
    }

    numberOfFrames = framesPerSec * (int) inputEndtimeInt64;
    NSLog(@"%s - %d # numberOfFrames = %d", __PRETTY_FUNCTION__, __LINE__, numberOfFrames);


    /*
     *  Seek to timestamp ts.
     *
     *  avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags)
     */

    if(avformat_seek_file(inputFormatCtx, inputAudioStreamIndex, INT64_MIN, outputStartTimeInt64, INT64_MAX, AVSEEK_FLAG_FRAME) < 0)
    {
        // A negative return value from avformat_seek_file means the seek failed.
        NSLog(@"%s - %d # Seek ERROR", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        NSLog(@"%s - %d # Seek OK", __PRETTY_FUNCTION__, __LINE__);
    }

    /*
     *  Creating output file path1
     */

//    NSString * timestamp = [NSString stringWithFormat:@"%f",[[NSDate date] timeIntervalSince1970] * 1000];
//    
//    NSArray *directoryPathsArray = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
//    NSString *documentsDirectory = [directoryPathsArray objectAtIndex:0];
//    NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@.mov",documentsDirectory,timestamp]; // Not working if we replace .avi with .mp4

    NSString* filename = [NSString stringWithFormat:@"IPCamera%d.mov", _currentFile];
    NSString* outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
    _url = [NSURL fileURLWithPath:outputFilePath];

    /*
     *  Return the output format in the list of registered output formats
     *  which best matches the provided parameters, or return NULL if
     *  there is no match.
     *
     *  av_guess_format(const char *short_name, const char *filename, const char *mime_type)
     */
    outputFormat = av_guess_format(NULL, [outputFilePath UTF8String], NULL);

    if(outputFormat == NULL)
    {
        NSLog(@"%s - %d # outputFormat == NULL", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        // Dereference outputFormat only after the NULL check.
        NSLog(@"%s - %d # outputFormat->name = %s", __PRETTY_FUNCTION__, __LINE__, outputFormat->name);
        /*
         *  Allocate an AVFormatContext.
         */
        outputContext = avformat_alloc_context();

        if(outputContext)
        {
            outputContext->oformat = outputFormat;      // The output container format.

            snprintf(outputContext->filename, sizeof(outputContext->filename), "%s", [outputFilePath UTF8String]);
        }
        else
        {
            NSLog(@"%s - %d # outputContext == NULL", __PRETTY_FUNCTION__, __LINE__);
        }
    }

    outputVideoCodec = outputAudioCodec = NULL;

    /*
     *  video_codec = default video codec
     */
    if(outputFormat->video_codec != AV_CODEC_ID_NONE && inputVideoStream != NULL)
    {
        /*
         *  Find a registered encoder with a matching codec ID.
         *
         *  avcodec_find_encoder(enum AVCodecID id)
         */
        outputVideoCodec = avcodec_find_encoder(outputFormat->video_codec);

        if(NULL == outputVideoCodec)
        {
            NSLog(@"%s - %d # Could Not Find Vid Encoder", __PRETTY_FUNCTION__, __LINE__);
        }
        else
        {
            NSLog(@"%s - %d # Found Out Vid Encoder", __PRETTY_FUNCTION__, __LINE__);

            /*
             *  Add a new stream to a media file.
             *
             *  avformat_new_stream(AVFormatContext *s, const AVCodec *c)
             */
            outputVideoStream = avformat_new_stream(outputContext, outputVideoCodec);

            if(NULL == outputVideoStream)
            {
                NSLog(@"%s - %d # Failed to Allocate Output Vid Strm", __PRETTY_FUNCTION__, __LINE__);
            }
            else
            {
                NSLog(@"%s - %d # Allocated Video Stream", __PRETTY_FUNCTION__, __LINE__);

                /*
                 *  Copy the settings of the source AVCodecContext into the destination AVCodecContext.
                 *
                 *  avcodec_copy_context(AVCodecContext *dest, const AVCodecContext *src)
                 */

                if(avcodec_copy_context(outputVideoStream->codec, inputFormatCtx->streams[inputVideoStreamIndex]->codec) != 0)
                {
                    NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
                }
                else
                {
                    AVStream *st = _formatCtx->streams[_videoStream];
                    outputVideoStream->sample_aspect_ratio.den = outputVideoStream->codec->sample_aspect_ratio.den;     // denominator
                    outputVideoStream->sample_aspect_ratio.num = st->codec->sample_aspect_ratio.num;    // numerator
                    NSLog(@"%s - %d # Copied Context 1", __PRETTY_FUNCTION__, __LINE__);
                    outputVideoStream->codec->codec_id = st->codec->codec_id;
                    outputVideoStream->codec->time_base.num = st->codec->time_base.num;
                    outputVideoStream->codec->time_base.den = STREAM_FRAME_RATE;
                    outputVideoStream->time_base.num = st->time_base.num;
                    outputVideoStream->time_base.den =  st->time_base.den;
                    outputVideoStream->r_frame_rate.num =st->r_frame_rate.num;
                    outputVideoStream->nb_frames = STREAM_NB_FRAMES;
                    outputVideoStream->r_frame_rate.den = st->r_frame_rate.den;
                    outputVideoStream->avg_frame_rate.den = st->avg_frame_rate.den;
                    outputVideoStream->avg_frame_rate.num = st->avg_frame_rate.num;
//                  outputVideoStream->duration = st->duration;
                }
            }
        }
    }

    if(outputFormat->audio_codec != AV_CODEC_ID_NONE && inputAudioStream != NULL)
    {
        outputAudioCodec = avcodec_find_encoder(outputFormat->audio_codec);

        if(NULL == outputAudioCodec)
        {
            NSLog(@"%s - %d # Could Not Find Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);
        }
        else
        {
            NSLog(@"%s - %d # Found Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);

            outputAudioStream = avformat_new_stream(outputContext, outputAudioCodec);

            if(NULL == outputAudioStream)
            {
                NSLog(@"%s - %d # Failed to Allocate Out Aud Strm", __PRETTY_FUNCTION__, __LINE__);
            }
            else
            {
                if(avcodec_copy_context(outputAudioStream->codec, inputFormatCtx->streams[inputAudioStreamIndex]->codec) != 0)
                {
                    NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
                }
                else
                {
                    //                    AVStream *st = _formatCtx->streams[_audioStream];

                    NSLog(@"%s - %d # Copied Context 2", __PRETTY_FUNCTION__, __LINE__);
                    outputAudioStream->codec->codec_id = inputAudioStream->codec->codec_id;
                    outputAudioStream->codec->codec_tag = 0;
                    //                    outputAudioStream->pts = inputAudioStream->pts;
//                    outputAudioStream->duration = inputAudioStream->duration;
                    outputAudioStream->time_base.num = inputAudioStream->time_base.num;
                    outputAudioStream->time_base.den = inputAudioStream->time_base.den;
                }
            }
        }
    }

    if (!(outputFormat->flags & AVFMT_NOFILE))
    {
        /*
         *  Create and initialize a AVIOContext for accessing the resource indicated by url.
         *
         *  avio_open2(AVIOContext **s, const char *url, int flags, const AVIOInterruptCB *int_cb, AVDictionary **options)
         */
        if (avio_open2(&outputContext->pb, [outputFilePath UTF8String], AVIO_FLAG_WRITE, NULL, NULL) < 0)
        {
            NSLog(@"%s - %d # Could Not Open File", __PRETTY_FUNCTION__, __LINE__);
        }
    }

    /* Write the stream header, if any. */
    /*
     *  Allocate the stream private data and write the stream header to an output media file.
     *
     *  avformat_write_header(AVFormatContext *s, AVDictionary **options);
     */
    if (avformat_write_header(outputContext, NULL) < 0)
    {
        NSLog(@"%s - %d # Error Occurred While Writing Header", __PRETTY_FUNCTION__, __LINE__);
    }
    else
    {
        NSLog(@"%s - %d # Written Output header", __PRETTY_FUNCTION__, __LINE__);

        initDone = true;
    }

    return kxMovieErrorNone;
}

For more information, I contacted the Red5Pro team and asked them about the video demo. They answered me as follows:

Most likely what is happening is that the FFMPEG version the project is loading is incompatible with the custom version embedded in our SDK, and some duplicate symbol definitions are causing the wrong one to be loaded. There could also be a number of conflicts between one of the libraries in the project and the SDK, or one of the support libraries the SDK requires (I have to assume that, since you compiled it yourself, you did add the libraries listed in step four here: https://www.red5pro.com/docs/streaming/ios.html#project-setup). If that is the case, I don't know of a good way to resolve it, since chasing down issues with third-party libraries that are not compatible with our SDK is beyond the scope of our team. (edited)
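Before chasing that further, it may help to confirm at runtime which libavformat is actually being loaded. Below is a minimal sketch (the helper name logLoadedFFmpegBuild is made up here, and it assumes the same libavformat/avformat.h header the project already includes); it compares the version the app was compiled against with the version reported by the library that got loaded, and prints that library's configure line. A mismatch between the two would support the duplicate-symbol theory:

    #import <Foundation/Foundation.h>
    #include "libavformat/avformat.h"   // same header the project already compiles against

    static void logLoadedFFmpegBuild(void)
    {
        unsigned compiled = LIBAVFORMAT_VERSION_INT;   // version baked into the headers at compile time
        unsigned loaded   = avformat_version();        // version of the library picked up at runtime

        NSLog(@"libavformat compiled %u.%u.%u, loaded %u.%u.%u",
              AV_VERSION_MAJOR(compiled), AV_VERSION_MINOR(compiled), AV_VERSION_MICRO(compiled),
              AV_VERSION_MAJOR(loaded),   AV_VERSION_MINOR(loaded),   AV_VERSION_MICRO(loaded));

        // The configure line usually identifies whose build this is (your own copy vs. the SDK's).
        NSLog(@"libavformat configuration: %s", avformat_configuration());

        if (compiled != loaded)
            NSLog(@"WARNING: header/library version mismatch");
    }

Calling logLoadedFFmpegBuild() at the start of -openInput:, once with R5ProStreaming.framework linked and once without, should make any difference visible in the console.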

Does anyone have an idea where to look?

Thanks

1 Answer:

Answer 0: (score: 5)

Request the source code for the customized FFMPEG version embedded in the Red5Pro SDK. Using that version in your own code removes the incompatibility. Under FFmpeg's GPL/LGPL license, they are required to provide you with the modified source code.
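As a quick sanity check before and after swapping in their sources, the license and configure line of the library that actually gets loaded can be logged (a small sketch, using only public libavformat calls):

    NSLog(@"FFmpeg license: %s", avformat_license());               // e.g. "LGPL version 2.1 or later"
    NSLog(@"FFmpeg configuration: %s", avformat_configuration());   // configure flags of the loaded build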