I am using FFmpeg for Android. At some point after a video is closed, I need to notify my Java code so it can play the second video, so I have a listener in the Java code that is supposed to be called from the C code. But when I try to call the Java static method from my C code, it crashes right after the FindClass() line. How can I solve this? I tried to catch the exception, but couldn't.
My code:
void onFinish(JNIEnv *pEnv) {
    LOGI("onFinish called in C code");
    jclass clazz = (jclass)(*pEnv)->FindClass(pEnv, "com/wiznsystems/android/utils/FFMPEGPlayer"); // HERE IT'S CRASHING, AS THE NEXT LINE IS NEVER PRINTED
    if ((*pEnv)->ExceptionCheck(pEnv)) {
        LOGI("onFinish exception occurred");
        return;
    }
    LOGI("onFinish class found");
    // Get the method that you want to call
    jmethodID onFinish = (*pEnv)->GetStaticMethodID(pEnv, clazz, "onFinish", "()V");
    // Call the method on the object
    LOGI("onFinish method found");
    (*pEnv)->CallStaticVoidMethod(pEnv, clazz, onFinish);
    LOGI("onFinish method calling finished");
}
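Since ExceptionCheck alone caught nothing, I also tried dumping and clearing any pending exception right after FindClass before bailing out. A minimal sketch of that variant (it assumes the crash leaves a pending exception behind, which may not actually be the case):

void onFinish(JNIEnv *pEnv) {
    LOGI("onFinish called in C code");
    jclass clazz = (*pEnv)->FindClass(pEnv, "com/wiznsystems/android/utils/FFMPEGPlayer");
    if (clazz == NULL || (*pEnv)->ExceptionCheck(pEnv)) {
        // ExceptionDescribe() writes the pending exception plus a backtrace
        // to logcat; ExceptionClear() removes it so later JNI calls stay legal.
        (*pEnv)->ExceptionDescribe(pEnv);
        (*pEnv)->ExceptionClear(pEnv);
        return;
    }
    jmethodID mid = (*pEnv)->GetStaticMethodID(pEnv, clazz, "onFinish", "()V");
    if (mid == NULL) {
        (*pEnv)->ExceptionDescribe(pEnv);
        (*pEnv)->ExceptionClear(pEnv);
        return;
    }
    (*pEnv)->CallStaticVoidMethod(pEnv, clazz, mid);
}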
My Java method looks like this:
public static void onFinish() {
    Log.d("ANURAN", "onFinish called");
}
EDIT (last resort)
Here is my entire JNI code:
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <android/bitmap.h>
#include <pthread.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <android/log.h>
#define LOG_TAG "android-ffmpeg-tutorial02"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
JNIEnv* gEnv;
ANativeWindow* window;
char *videoFileName;
AVFormatContext *formatCtx = NULL;
int videoStream;
AVCodecContext *codecCtx = NULL;
AVFrame *decodedFrame = NULL;
AVFrame *frameRGBA = NULL;
jobject bitmap;
void* buffer;
struct SwsContext *sws_ctx = NULL;
int width;
int height;
int stop;
void onFinish(JNIEnv *pEnv) {
    LOGI("onFinish called in C code");
    jclass clazz = (jclass)(*gEnv)->FindClass(gEnv, "com/wiznsystems/android/utils/FFMPEGPlayer");
    if ((*gEnv)->ExceptionCheck(gEnv)) {
        LOGI("onFinish exception occurred");
        return;
    }
    LOGI("onFinish class found");
    // Get the method that you want to call
    jmethodID onFinish = (*gEnv)->GetStaticMethodID(gEnv, clazz, "onFinish", "()V");
    // Call the method on the object
    LOGI("onFinish method found");
    (*gEnv)->CallStaticVoidMethod(gEnv, clazz, onFinish);
    LOGI("onFinish method calling finished");
}
jint naInit(JNIEnv *pEnv, jobject pObj, jstring pFileName) {
    AVCodec *pCodec = NULL;
    int i;
    AVDictionary *optionsDict = NULL;
    av_log_set_level(AV_LOG_DEBUG);
    videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);
    LOGI("video file name is %s", videoFileName);
    // Register all formats and codecs
    av_register_all();
    // Open video file
    LOGI("1");
    AVDictionary *opts = 0;
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);
    if (avformat_open_input(&formatCtx, videoFileName, NULL, &opts) != 0) {
        LOGI("DEFAULT failed. Trying TCP");
        // Trying with TCP.
        AVDictionary *opts = 0;
        av_dict_set(&opts, "rtsp_transport", "tcp", 0);
        if (avformat_open_input(&formatCtx, videoFileName, NULL, &opts) != 0) {
            return -1;
        }
    } // Couldn't open file
    LOGI("2 %d", formatCtx->video_codec_id);
    LOGI("3 %d", formatCtx->audio_codec_id);
    // Retrieve stream information
    if (avformat_find_stream_info(formatCtx, NULL) < 0) {
        return -1; // Couldn't find stream information
    }
    LOGI("4");
    // Dump information about file onto standard error
    av_dump_format(formatCtx, 0, videoFileName, 0);
    LOGI("5");
    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < formatCtx->nb_streams; i++) {
        if (formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    LOGI("6 %d", videoStream);
    if (videoStream == -1)
        return -1; // Didn't find a video stream
    // Get a pointer to the codec context for the video stream
    codecCtx = formatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(codecCtx->codec_id);
    if (pCodec == NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }
    LOGI("7 %d", pCodec->id);
    // Open codec
    if (avcodec_open2(codecCtx, pCodec, &optionsDict) < 0)
        return -1; // Could not open codec
    // Allocate video frame
    decodedFrame = av_frame_alloc();
    LOGI("8 %d", decodedFrame->pkt_size);
    // Allocate an AVFrame structure
    frameRGBA = av_frame_alloc();
    if (frameRGBA == NULL)
        return -1;
    LOGI("9 %d", frameRGBA->pkt_size);
    return 0;
}
jobject createBitmap(JNIEnv *pEnv, int pWidth, int pHeight) {
    int i;
    // get Bitmap class and createBitmap method ID
    jclass javaBitmapClass = (jclass)(*pEnv)->FindClass(pEnv, "android/graphics/Bitmap");
    jmethodID mid = (*pEnv)->GetStaticMethodID(pEnv, javaBitmapClass, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    // create Bitmap.Config
    // reference: https://forums.oracle.com/thread/1548728
    const wchar_t* configName = L"ARGB_8888";
    int len = wcslen(configName);
    jstring jConfigName;
    if (sizeof(wchar_t) != sizeof(jchar)) {
        // wchar_t is a different length than jchar (2 bytes)
        jchar* str = (jchar*)malloc((len+1)*sizeof(jchar));
        for (i = 0; i < len; ++i) {
            str[i] = (jchar)configName[i];
        }
        str[len] = 0;
        jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)str, len);
    } else {
        // wchar_t is the same length as jchar (2 bytes)
        jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)configName, len);
    }
    jclass bitmapConfigClass = (*pEnv)->FindClass(pEnv, "android/graphics/Bitmap$Config");
    jobject javaBitmapConfig = (*pEnv)->CallStaticObjectMethod(pEnv, bitmapConfigClass,
            (*pEnv)->GetStaticMethodID(pEnv, bitmapConfigClass, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;"), jConfigName);
    // create the bitmap
    return (*pEnv)->CallStaticObjectMethod(pEnv, javaBitmapClass, mid, pWidth, pHeight, javaBitmapConfig);
}
jintArray naGetVideoRes(JNIEnv *pEnv, jobject pObj) {
    jintArray lRes;
    if (NULL == codecCtx) {
        return NULL;
    }
    lRes = (*pEnv)->NewIntArray(pEnv, 2);
    if (lRes == NULL) {
        LOGI("cannot allocate memory for video size");
        return NULL;
    }
    jint lVideoRes[2];
    lVideoRes[0] = codecCtx->width;
    lVideoRes[1] = codecCtx->height;
    (*pEnv)->SetIntArrayRegion(pEnv, lRes, 0, 2, lVideoRes);
    return lRes;
}
void naSetSurface(JNIEnv *pEnv, jobject pObj, jobject pSurface) {
    if (NULL != pSurface) {
        // get the native window reference
        window = ANativeWindow_fromSurface(pEnv, pSurface);
        // set format and size of window buffer
        ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
    } else {
        // release the native window
        ANativeWindow_release(window);
    }
}
jint naSetup(JNIEnv *pEnv, jobject pObj, int pWidth, int pHeight) {
    av_free(buffer);
    width = pWidth;
    height = pHeight;
    // create a bitmap as the buffer for frameRGBA
    bitmap = createBitmap(pEnv, pWidth, pHeight);
    if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
        return -1;
    // get the scaling context
    sws_ctx = sws_getContext(
            codecCtx->width,
            codecCtx->height,
            codecCtx->pix_fmt,
            pWidth,
            pHeight,
            AV_PIX_FMT_RGBA,
            SWS_BILINEAR,
            NULL,
            NULL,
            NULL
    );
    // Assign appropriate parts of the bitmap to image planes in frameRGBA.
    // Note that frameRGBA is an AVFrame, but AVFrame is a superset
    // of AVPicture
    avpicture_fill((AVPicture *)frameRGBA, buffer, AV_PIX_FMT_RGBA,
            pWidth, pHeight);
    return 0;
}
void finish(JNIEnv *pEnv) {
    // unlock the bitmap
    AndroidBitmap_unlockPixels(pEnv, bitmap);
    // Free the RGB image
    av_free(frameRGBA);
    // Free the YUV frame
    av_free(decodedFrame);
    // Close the codec
    avcodec_close(codecCtx);
    // Close the video file
    avformat_close_input(&formatCtx);
    onFinish(pEnv);
}
void decodeAndRender(JNIEnv *pEnv) {
    ANativeWindow_Buffer windowBuffer;
    AVPacket packet;
    int i = 0;
    int frameFinished;
    int lineCnt;
    while (av_read_frame(formatCtx, &packet) >= 0 && !stop) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
                    &packet);
            // Did we get a video frame?
            if (frameFinished) {
                // Convert the image from its native format to RGBA
                sws_scale(
                        sws_ctx,
                        (uint8_t const * const *)decodedFrame->data,
                        decodedFrame->linesize,
                        0,
                        codecCtx->height,
                        frameRGBA->data,
                        frameRGBA->linesize
                );
                // lock the window buffer
                if (buffer == NULL || ANativeWindow_lock(window, &windowBuffer, NULL) < 0) {
                    LOGE("cannot lock window");
                } else {
                    // draw the frame on buffer
                    LOGI("copy buffer %d:%d:%d", width, height, width*height*4);
                    LOGI("window buffer: %d:%d:%d", windowBuffer.width,
                            windowBuffer.height, windowBuffer.stride);
                    for (int h = 0; h < height; h++) {
                        memcpy(windowBuffer.bits + h * windowBuffer.stride * 4,
                               buffer + h * frameRGBA->linesize[0],
                               width * 4);
                    }
                    // unlock the window buffer and post it to display
                    ANativeWindow_unlockAndPost(window);
                    // count number of frames
                    ++i;
                }
            }
        }
        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
    }
    LOGI("total No. of frames decoded and rendered %d", i);
    finish(pEnv);
}
/**
 * start the video playback
 */
void naPlay(JNIEnv *pEnv, jobject pObj) {
    // create a new thread for video decode and render
    pthread_t decodeThread;
    stop = 0;
    pthread_create(&decodeThread, NULL, decodeAndRender, NULL);
}
/**
 * stop the video playback
 */
void naStop(JNIEnv *pEnv, jobject pObj) {
    stop = 1;
}
jint JNI_OnLoad(JavaVM* pVm, void* reserved) {
    JNIEnv* env;
    if ((*pVm)->GetEnv(pVm, (void **)&env, JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }
    gEnv = env;
    JNINativeMethod nm[6];
    nm[0].name = "naInit";
    nm[0].signature = "(Ljava/lang/String;)I";
    nm[0].fnPtr = (void*)naInit;
    nm[1].name = "naSetSurface";
    nm[1].signature = "(Landroid/view/Surface;)V";
    nm[1].fnPtr = (void*)naSetSurface;
    nm[2].name = "naGetVideoRes";
    nm[2].signature = "()[I";
    nm[2].fnPtr = (void*)naGetVideoRes;
    nm[3].name = "naSetup";
    nm[3].signature = "(II)I";
    nm[3].fnPtr = (void*)naSetup;
    nm[4].name = "naPlay";
    nm[4].signature = "()V";
    nm[4].fnPtr = (void*)naPlay;
    nm[5].name = "naStop";
    nm[5].signature = "()V";
    nm[5].fnPtr = (void*)naStop;
    jclass cls = (*env)->FindClass(env, "com/wiznsystems/android/utils/FFMPEGPlayer");
    // Register methods with env->RegisterNatives.
    (*env)->RegisterNatives(env, cls, nm, 6);
    return JNI_VERSION_1_6;
}
size_t wcslen(const wchar_t *s) {
    const wchar_t *p;
    p = s;
    while (*p)
        p++;
    return p - s;
}
I thought the problem might be that the JNIEnv* was not being saved correctly, as some other SO answers suggest, so I stored the JNIEnv* in a global in JNI_OnLoad and used that in my onFinish method. That didn't work either.
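For reference, the pattern those answers actually seem to describe goes further than caching the env: it caches the JavaVM (which, unlike a JNIEnv, is valid on every thread), attaches the worker thread before calling back, and holds a global reference to the class, because FindClass on a natively created thread cannot see the application class loader. A minimal sketch of my understanding of it, where gVm and gPlayerClass are my own names:

static JavaVM *gVm = NULL;          // valid on every thread, unlike a JNIEnv
static jclass gPlayerClass = NULL;  // global ref created on the main thread

jint JNI_OnLoad(JavaVM *pVm, void *reserved) {
    JNIEnv *env;
    if ((*pVm)->GetEnv(pVm, (void **)&env, JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }
    gVm = pVm;
    // Look the class up here, on a thread that uses the app class loader,
    // and pin it with a global reference so other threads can use it.
    jclass cls = (*env)->FindClass(env, "com/wiznsystems/android/utils/FFMPEGPlayer");
    gPlayerClass = (jclass)(*env)->NewGlobalRef(env, cls);
    // ... RegisterNatives as before ...
    return JNI_VERSION_1_6;
}

void onFinish() {
    JNIEnv *env;
    // Attach the decode thread to the VM to obtain a JNIEnv valid on it.
    if ((*gVm)->AttachCurrentThread(gVm, &env, NULL) != JNI_OK) {
        return;
    }
    jmethodID mid = (*env)->GetStaticMethodID(env, gPlayerClass, "onFinish", "()V");
    if (mid != NULL) {
        (*env)->CallStaticVoidMethod(env, gPlayerClass, mid);
    }
    (*gVm)->DetachCurrentThread(gVm);
}

If this is the right direction for a thread created with pthread_create (which otherwise has no valid JNIEnv of its own), confirmation would already help.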