我正在尝试在我的项目中使用 OMXCodec。我找到的唯一一篇关于 OMXCodec 的好教程是这篇 post。但是,作者没有说明这些头文件应从哪里获取,也没有说明 Android.mk 应该如何配置,这让我很困惑。我的问题是:应该到哪里找到所需的头文件,以及如何设置 Android.mk,才能让下面这些代码通过编译并正常工作?
#define FFMPEG_AVFORMAT_MOV "mov,mp4,m4a,3gp,3g2,mj2"
using namespace android;
// A stagefright MediaSource that demuxes a video file with FFmpeg
// (libavformat) and hands out raw, still-compressed video packets of the
// file's first video stream, suitable for feeding into OMXCodec::Create().
class AVFormatSource : public MediaSource {
public:
AVFormatSource(const char *videoPath);
// Pulls the next video packet into *buffer. Returns OK on success or
// ERROR_END_OF_STREAM when the demuxer runs out of data.
virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options);
// Track format (MIME type, width/height, codec config) built in the ctor.
virtual sp<MetaData> getFormat() { return mFormat; }
// No-op lifecycle hooks: the FFmpeg context is opened in the constructor
// and closed in the destructor.
virtual status_t start(MetaData *params) { return OK; }
virtual status_t stop() { return OK; }
protected:
// Protected per the MediaSource convention: lifetime is managed by sp<>.
virtual ~AVFormatSource();
private:
AVFormatContext *mDataSource;       // FFmpeg demuxer context (owned)
AVCodecContext *mVideoTrack;        // codec parameters of the video stream (borrowed)
AVBitStreamFilterContext *mConverter; // h264_mp4toannexb filter, H.264 input only
MediaBufferGroup mGroup;            // pool holding the single packet buffer
sp<MetaData> mFormat;               // format advertised via getFormat()
int mVideoIndex;                    // index of the chosen video stream
};
// Opens videoPath with FFmpeg, locates the first video stream and builds the
// MetaData (MIME type, dimensions, codec config) that OMXCodec needs.
AVFormatSource::AVFormatSource(const char *videoPath) {
    // BUGFIX: members were never initialized. mConverter in particular is
    // tested with `if (mConverter)` in read() and in the destructor, so for
    // any non-H.264 input the original read an uninitialized pointer (UB).
    mDataSource = NULL;
    mVideoTrack = NULL;
    mConverter = NULL;
    mVideoIndex = -1;

    av_register_all();
    mDataSource = avformat_alloc_context();
    // BUGFIX: the result was ignored. On failure avformat_open_input() frees
    // the context and NULLs the pointer, so continuing would dereference NULL.
    if (avformat_open_input(&mDataSource, videoPath, NULL, NULL) != 0 || mDataSource == NULL) {
        return;
    }

    // Pick the first video stream. nb_streams is unsigned in FFmpeg.
    for (unsigned int i = 0; i < mDataSource->nb_streams; i++) {
        if (mDataSource->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            mVideoIndex = (int)i;
            break;
        }
    }
    // BUGFIX: if no video stream exists, mVideoIndex stayed uninitialized and
    // streams[mVideoIndex] was an out-of-bounds read. Leave mFormat NULL so
    // the caller's OMXCodec::Create() fails cleanly instead.
    if (mVideoIndex < 0) {
        return;
    }
    mVideoTrack = mDataSource->streams[mVideoIndex]->codec;

    // One reusable buffer, sized for a worst-case YUV420 frame; compressed
    // packets are normally far smaller than this.
    size_t bufferSize = (mVideoTrack->width * mVideoTrack->height * 3) / 2;
    mGroup.add_buffer(new MediaBuffer(bufferSize));

    mFormat = new MetaData;
    switch (mVideoTrack->codec_id) {
    case CODEC_ID_H264:
        // MP4 carries H.264 length-prefixed; OMX decoders want Annex-B start
        // codes, so install the converting bitstream filter.
        mConverter = av_bitstream_filter_init("h264_mp4toannexb");
        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
        // BUGFIX: guard extradata against NULL/empty before dereferencing.
        // A leading byte of 1 marks an avcC configuration record.
        if (mVideoTrack->extradata != NULL && mVideoTrack->extradata_size > 0
                && mVideoTrack->extradata[0] == 1) {
            mFormat->setData(kKeyAVCC, kTypeAVCC, mVideoTrack->extradata, mVideoTrack->extradata_size);
        }
        break;
    case CODEC_ID_MPEG4:
        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
        // NOTE(review): MOVContext::esds_data/esds_size are not part of stock
        // FFmpeg — this relies on a patched build; verify against your tree.
        if (mDataSource->iformat && mDataSource->iformat->name
                && !strcasecmp(mDataSource->iformat->name, FFMPEG_AVFORMAT_MOV)) {
            MOVContext *mov = (MOVContext *)(mDataSource->priv_data);
            if (mov->esds_data != NULL && mov->esds_size > 0 && mov->esds_size < 256) {
                mFormat->setData(kKeyESDS, kTypeESDS, mov->esds_data, mov->esds_size);
            }
        }
        break;
    case CODEC_ID_H263:
        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
        break;
    default:
        // Unsupported codec: mFormat carries only width/height below.
        break;
    }
    mFormat->setInt32(kKeyWidth, mVideoTrack->width);
    mFormat->setInt32(kKeyHeight, mVideoTrack->height);
}
// Reads demuxed packets until one belonging to the video stream is found,
// optionally converts it to Annex-B, and copies it into a buffer from mGroup.
// Returns OK on success, ERROR_END_OF_STREAM at EOF/demux error, or the
// acquire_buffer() status.
status_t AVFormatSource::read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
    AVPacket packet;
    status_t ret = ERROR_END_OF_STREAM;
    bool found = false;

    while (!found) {
        // BUGFIX: the original called av_read_frame(is->formatCtx, ...) where
        // `is` is undefined; the demuxer context owned here is mDataSource.
        if (av_read_frame(mDataSource, &packet) < 0) {
            return ERROR_END_OF_STREAM;
        }
        if (packet.stream_index == mVideoIndex) {
            uint8_t *filteredData = packet.data;
            int filteredSize = packet.size;
            int filterRet = 0;
            if (mConverter) {
                // MP4-to-Annex-B conversion for H.264. BUGFIX: the filter may
                // allocate a fresh output buffer; the original overwrote
                // packet.data with it and leaked it, since av_free_packet()
                // only releases the packet's own data.
                filterRet = av_bitstream_filter_filter(mConverter, mVideoTrack, NULL,
                        &filteredData, &filteredSize,
                        packet.data, packet.size,
                        packet.flags & AV_PKT_FLAG_KEY);
            }
            ret = mGroup.acquire_buffer(buffer);
            if (ret == OK) {
                // BUGFIX: bound the copy — the pool buffer was sized to
                // width*height*3/2 in the ctor, but a packet is not
                // guaranteed to fit.
                size_t copySize = (size_t)filteredSize;
                if (copySize > (*buffer)->size()) {
                    copySize = (*buffer)->size();
                }
                memcpy((*buffer)->data(), filteredData, copySize);
                (*buffer)->set_range(0, copySize);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, packet.flags & AV_PKT_FLAG_KEY);
                // BUGFIX: kKeyTime is in microseconds in stagefright (the
                // render loop multiplies it by 1000 for ns); rescale the pts
                // from the stream's time_base instead of storing it raw.
                int64_t timeUs = av_rescale_q(packet.pts,
                        mDataSource->streams[mVideoIndex]->time_base,
                        AV_TIME_BASE_Q);
                (*buffer)->meta_data()->setInt64(kKeyTime, timeUs);
            }
            found = true;
            // Free a filter-allocated output buffer (leak fix, see above).
            if (filterRet > 0 && filteredData != packet.data) {
                av_free(filteredData);
            }
        }
        av_free_packet(&packet);
    }
    return ret;
}
// Releases the FFmpeg resources acquired by the constructor.
AVFormatSource::~AVFormatSource() {
    // The bitstream filter exists only for H.264 input.
    if (mConverter) {
        av_bitstream_filter_close(mConverter);
    }
    // BUGFIX: guard against NULL — avformat_open_input() frees the context
    // and NULLs the pointer on failure, and av_close_input_file() does not
    // tolerate a NULL argument.
    if (mDataSource) {
        av_close_input_file(mDataSource);
    }
}
和解码部分:
// First, obtain an ANativeWindow to render into.
sp<ANativeWindow> mNativeWindow = getNativeWindowFromSurface();
// Build the demuxing MediaSource from the video file.
sp<MediaSource> mVideoSource = new AVFormatSource(filePath);
// Wrap the source with a hardware decoder through OMXCodec.
OMXClient mClient;
mClient.connect();
sp<MediaSource> mVideoDecoder = OMXCodec::Create(mClient.interface(), mVideoSource->getFormat(), false, mVideoSource, NULL, 0, mNativeWindow);
mVideoDecoder->start();
// Loop, reading decoded frames from mVideoDecoder.
for (;;) {
    MediaBuffer *mVideoBuffer;
    MediaSource::ReadOptions options;
    status_t err = mVideoDecoder->read(&mVideoBuffer, &options);
    if (err == OK) {
        if (mVideoBuffer->range_length() > 0) {
            // A video frame is available: timestamp it and queue it to the
            // native window for display.
            sp<MetaData> metaData = mVideoBuffer->meta_data();
            int64_t timeUs = 0;
            // BUGFIX: this statement was missing its terminating semicolon.
            metaData->findInt64(kKeyTime, &timeUs);
            // kKeyTime is microseconds; the window wants nanoseconds.
            native_window_set_buffers_timestamp(mNativeWindow.get(), timeUs * 1000);
            err = mNativeWindow->queueBuffer(mNativeWindow.get(), mVideoBuffer->graphicBuffer().get());
            if (err == 0) {
                metaData->setInt32(kKeyRendered, 1);
            }
        }
        mVideoBuffer->release();
    } else if (err == INFO_FORMAT_CHANGED) {
        // Decoder output format changed (common after the first read with
        // OMX decoders); just keep reading.
        continue;
    } else {
        // BUGFIX: the original looped forever; leave the loop on
        // ERROR_END_OF_STREAM or any hard decoder error.
        break;
    }
}
// Finally release the resources. Stop the decoder first: OMXCodec::stop()
// drains the codec while the source is still alive.
mVideoDecoder->stop();
mVideoDecoder.clear();
mVideoSource.clear();
mClient.disconnect();