我有一段本机代码,其中我用 malloc 分配了一块缓冲区。我想使用 Canvas 的绘制操作直接绘制到这块内存中。但 Canvas 需要一个 Bitmap 作为其后备表面(backing surface)。我想知道是否有办法用 Android Bitmap 包装这块本机内存。
感谢
Videoguy
答案 0 :(得分:3)
您可以从 Java 端传入缓冲区,在本机(Native)代码中填充它,然后使用 Canvas 渲染它。我已经这样做过,运行得很好。
编辑添加示例:
提醒:下面是一大段 Java 代码
/*
* Copyright (C) 2009 The Android Open Source Project
*/
package com.example.hellojni;
import android.app.Activity;
import android.widget.TextView;
import android.os.Bundle;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.widget.Toast;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.MotionEvent;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.graphics.PixelFormat;
import java.nio.ByteBuffer;
public class HelloJni extends Activity
{
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(new Panel(this));
    }

    /** Releases the native engine (unlocks the bitmap pixels) on teardown. */
    @Override
    public void onDestroy() {
        super.onDestroy();
        myEngineDestroy();
    }

    /**
     * SurfaceView whose frames are rendered by native code directly into the
     * backing memory of {@code renderbmp}, then blitted to the surface canvas.
     */
    class Panel extends SurfaceView implements SurfaceHolder.Callback {
        Bitmap renderbmp = null;
        Paint paint = null;

        public Panel(Context context) {
            super(context);
            getHolder().addCallback(this);
            // RGB_565 must stay in sync with the 2-bytes-per-pixel stride the
            // native side assumes.
            getHolder().setFormat(PixelFormat.RGB_565);
            setFocusable(true);
            setFocusableInTouchMode(true);
        }

        @Override
        public boolean onKeyDown(int i, KeyEvent event) {
            // BUG FIX: the original declared a boolean return type but had no
            // return statement (a compile error). Delegate to the default
            // SurfaceView handling.
            return super.onKeyDown(i, event);
        }

        @Override
        public boolean onTouchEvent(MotionEvent event) {
            if ((event.getAction() == MotionEvent.ACTION_DOWN) ||
                (event.getAction() == MotionEvent.ACTION_MOVE)) {
                // The native side returns 1 when the input caused a redraw of
                // the shared buffer, in which case we blit it to the screen.
                if (myEngineMouseInput((int) event.getX(), (int) event.getY(), 0) == 1)
                    drawFrame();
                return true;
            }
            return false;
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            // Nothing to do: the bitmap is sized once in surfaceCreated().
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            if (renderbmp == null)
                renderbmp = Bitmap.createBitmap(
                        holder.getSurfaceFrame().width(),
                        holder.getSurfaceFrame().height(),
                        Bitmap.Config.RGB_565);
            if (paint == null)
                paint = new Paint(Paint.FILTER_BITMAP_FLAG);
            // Hand the bitmap to native code, which locks its pixels and keeps
            // drawing into the backing memory from then on.
            myEngineInit(renderbmp, renderbmp.getWidth(), renderbmp.getHeight(),
                    PixelFormat.RGB_565);
            drawFrame();
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
        }

        /** Blits the native-rendered bitmap onto the surface. */
        public void drawFrame() {
            // BUG FIX: lockCanvas() may return null; the original dereferenced
            // the canvas before performing its null check.
            Canvas c = getHolder().lockCanvas(null);
            if (c != null) {
                c.drawBitmap(renderbmp, 0, 0, paint);
                getHolder().unlockCanvasAndPost(c);
            }
        }
    }

    /* Native methods implemented by the 'hello-jni' native library,
     * which is packaged with this application. */
    public native void myEngineInit(Bitmap bmp, int w, int h, int pf);
    public native int myEngineMouseInput(int x, int y, int mt);
    public native void myEngineDestroy();
    public native String unimplementedStringFromJNI();

    static {
        System.loadLibrary("hello-jni");
    }
}
现在是NDK方
/*
* Copyright (C) 2010 The Android Open Source Project
*/
#include <android_native_app_glue.h>
#include <errno.h>
#include <jni.h>
#include <sys/time.h>
#include <time.h>
#include <android/log.h>
#include <android/bitmap.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define LOG_TAG "myapp"
/* Logging helpers wrapping the Android log API at three severities. */
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
/* Set to 1 to enable debug log traces. */
#define DEBUG 0
/* Current wall-clock time expressed in milliseconds. */
static double now_ms(void)
{
    struct timeval current;
    gettimeofday(&current, NULL);
    return 1000.0 * current.tv_sec + current.tv_usec / 1000.0;
}
/* Java Bitmap whose pixels are currently locked (NULL when none is held).
 * NOTE(review): stored as a plain jobject; to remain valid across JNI calls
 * this should be a global reference (NewGlobalRef) — confirm with callers. */
jobject jbmp = NULL;
/* Geometry and raw pixel pointer of the locked bitmap; `bits` is the native
 * memory block the engine draws into. */
ANativeWindow_Buffer draw_buffer = { 0 };
/*
 * Release the pixel lock held on the Java-side bitmap (if any) and clear the
 * cached pixel pointer so nothing draws into memory we no longer own.
 */
static void cleanup_draw_buffer( JNIEnv* env )
{
    if (jbmp) {
        AndroidBitmap_unlockPixels(env, jbmp);
        /* BUG FIX: the original left jbmp pointing at the unlocked bitmap, so
         * a second call would unlock the same bitmap twice. */
        /* NOTE(review): if jbmp is ever promoted to a global reference, it
         * must also be DeleteGlobalRef'd here. */
        jbmp = NULL;
    }
    draw_buffer.bits = 0;
}
/*
 * Cache the geometry of the Java bitmap and lock its pixels so native code
 * can render directly into the backing memory.
 * Returns 1 when the buffer dimensions changed, 0 otherwise.
 */
static int init_draw_buffer( JNIEnv* env, jobject jbitmap, int width, int height )
{
    int changed = 0, ret;
    LOGI("init_draw_buffer");
    LOGI("window w:%d, h:%d", width, height);
    if (draw_buffer.width != width ||
        draw_buffer.height != height)
    {
        draw_buffer.width = width;
        draw_buffer.height = height;
        /* RGB_565 => 2 bytes per pixel; must match the Java-side format. */
        draw_buffer.stride = draw_buffer.width * 2;
        changed = 1;
    }
    /* BUG FIX: release any previously locked bitmap before locking a new
     * one; the original simply overwrote jbmp, leaking the old lock. */
    if (jbmp) {
        AndroidBitmap_unlockPixels(env, jbmp);
        (*env)->DeleteGlobalRef(env, jbmp);
        jbmp = NULL;
    }
    if ((ret = AndroidBitmap_lockPixels(env, jbitmap, &draw_buffer.bits)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        draw_buffer.bits = 0;  /* do not leave a stale pixel pointer behind */
    }
    else {
        LOGI("Successfully acquired bitmap pixels: %p", draw_buffer.bits);
        /* BUG FIX: jbitmap is a JNI *local* reference, invalid once the
         * calling native method returns; keep a global reference instead. */
        jbmp = (*env)->NewGlobalRef(env, jbitmap);
    }
    return changed;
}
/* simple stats management */
/* One timing sample: how long a frame took to render and the interval
 * between it and the previous frame. */
typedef struct {
    double renderTime;   /* ms spent rendering the frame */
    double frameTime;    /* ms between consecutive frames */
} FrameStats;
#define MAX_FRAME_STATS 200   /* capacity of the sample ring buffer */
#define MAX_PERIOD_MS 1500    /* aggregate stats are logged about this often */
/* Rolling window of frame timings kept in a circular buffer. */
typedef struct {
    double firstTime;    /* start of the current measurement period */
    double lastTime;     /* timestamp of the previous frame's end */
    double frameTime;    /* timestamp when the current frame started */
    int firstFrame;      /* index of the oldest sample in frames[] */
    int numFrames;       /* number of valid samples stored */
    FrameStats frames[ MAX_FRAME_STATS ];
} Stats;
/* Reset the frame-statistics accumulator to an empty state. */
static void
stats_init( Stats* stats )
{
    stats->numFrames = 0;
    stats->firstFrame = 0;
    stats->firstTime = 0.;
    stats->lastTime = now_ms();
}
/* Record the timestamp at which the current frame's rendering begins. */
static void
stats_startFrame( Stats* stats )
{
    stats->frameTime = now_ms();
}
/*
 * Finish timing the current frame: record its render time and frame-to-frame
 * interval in the ring buffer, and roughly every MAX_PERIOD_MS dump
 * min/avg/max statistics for the window to the log.
 */
static void
stats_endFrame( Stats* s )
{
    double now = now_ms();
    double renderTime = now - s->frameTime;  /* time spent drawing this frame */
    double frameTime = now - s->lastTime;    /* interval since previous frame */
    int nn;
    /* Periodically log aggregates and restart the measurement window. */
    if (now - s->firstTime >= MAX_PERIOD_MS) {
        if (s->numFrames > 0) {
            double minRender, maxRender, avgRender;
            double minFrame, maxFrame, avgFrame;
            int count;
            /* Seed min/max/avg with the oldest sample in the ring buffer. */
            nn = s->firstFrame;
            minRender = maxRender = avgRender = s->frames[nn].renderTime;
            minFrame = maxFrame = avgFrame = s->frames[nn].frameTime;
            for (count = s->numFrames; count > 0; count-- ) {
                nn += 1;
                if (nn >= MAX_FRAME_STATS)
                    nn -= MAX_FRAME_STATS;  /* wrap around the ring buffer */
                double render = s->frames[nn].renderTime;
                if (render < minRender) minRender = render;
                if (render > maxRender) maxRender = render;
                double frame = s->frames[nn].frameTime;
                if (frame < minFrame) minFrame = frame;
                if (frame > maxFrame) maxFrame = frame;
                avgRender += render;
                avgFrame += frame;
            }
            /* NOTE(review): the loop visits numFrames entries starting one
             * past firstFrame, so numFrames+1 entries contribute to the sums
             * while the division uses numFrames — harmless skew for a debug
             * statistic, but worth confirming against the upstream sample. */
            avgRender /= s->numFrames;
            avgFrame /= s->numFrames;
            LOGI("frame/s (avg,min,max) = (%.1f,%.1f,%.1f) "
                "render time ms (avg,min,max) = (%.1f,%.1f,%.1f)\n",
                1000./avgFrame, 1000./maxFrame, 1000./minFrame,
                avgRender, minRender, maxRender);
        }
        /* Start a fresh measurement window. */
        s->numFrames = 0;
        s->firstFrame = 0;
        s->firstTime = now;
    }
    /* Store this frame's sample at the logical end of the circular buffer. */
    nn = s->firstFrame + s->numFrames;
    if (nn >= MAX_FRAME_STATS)
        nn -= MAX_FRAME_STATS;
    s->frames[nn].renderTime = renderTime;
    s->frames[nn].frameTime = frameTime;
    if (s->numFrames < MAX_FRAME_STATS) {
        s->numFrames += 1;
    } else {
        /* Buffer full: drop the oldest sample by advancing firstFrame. */
        s->firstFrame += 1;
        if (s->firstFrame >= MAX_FRAME_STATS)
            s->firstFrame -= MAX_FRAME_STATS;
    }
    s->lastTime = now;
}
// ----------------------------------------------------------------------
/* Engine state carried over from the native_app_glue sample this code is
 * based on. NOTE(review): appears unused in the code visible here. */
struct engine {
    struct android_app* app;
    Stats stats;
    int animating;
};
void
Java_com_example_hellojni_HelloJni_myEngineDestroy( JNIEnv* env,
jobject thiz )
{
LOGI("Java_com_example_hellojni_HelloJni_myEngineDestroy");
cleanup_draw_buffer(env);
}
void
Java_com_example_hellojni_HelloJni_myEngineInit( JNIEnv* env,
jobject thiz, jobject jbitmap, int w, int h, int pf )
{
LOGI("Java_com_example_hellojni_HelloJni_myEngineInit");
init_draw_buffer( env, jbitmap, w, h );
}
/*
 * JNI entry point for HelloJni.myEngineMouseInput(x, y, mt).
 * Returns 1 when the native engine redrew its buffer (the Java side then
 * blits the bitmap to the screen), 0 otherwise.
 * NOTE(review): `menuvisible`, `do_the_drawing_stuff` and `params_ommited`
 * are placeholders from the author's actual project and are not defined in
 * this excerpt — this function will not compile as-is.
 */
jint
Java_com_example_hellojni_HelloJni_myEngineMouseInput( JNIEnv* env,
    jobject thiz, int x, int y, int mt )
{
    if( menuvisible )
        /* Careful: the commented-out log line sits between `if` and its body,
         * so the `return` below is the actual body of the `if`. */
        // LOGI("Java_com_example_hellojni_HelloJni_myEngineMouseInput, x:%d y:%d mt:%d", x, y, mt);
        return do_the_drawing_stuff(params_ommited); //drawing buffer is unsigned char *dest = draw_buffer.bits;
    else
        return 0;
}
因此,主渲染部分就是上面那几行;如注释所述,你的绘制缓冲区就位于 draw_buffer 结构中(unsigned char *dest = draw_buffer.bits)。
请留意缓冲区的传递方式,并跟踪它经由 JNI 接口从 Java 到 C 的完整路径。我特意让最终的位图渲染操作不做任何格式转换——在我看来这是最快的方法。
你可以省略帧统计那部分代码;我只是以某个官方示例为基础,想在刚拿到的新设备上动手试试 :)
答案 1 :(得分:1)
你可以做的是将缓冲区通过JNI传递到java中并从中创建一个位图。见this page。然后,您可以使用Canvas.setBitmap使其绘制到缓冲区中。
答案 2 :(得分:0)
默认情况下,位图本来就使用本机内存(use native memory)。