
I have some native code in which I allocate (i.e., malloc) a buffer. I would like to use Canvas draw operations to draw into this memory, but the Canvas code uses a Bitmap as its backing surface. Is there a way to wrap a native memory block with an Android Bitmap?

Thanks
Videoguy


3 Answers


You can pass a Buffer from Java, fill it in native code, and then render it with a Canvas. Done that, works perfectly.

Edited to add an example:

Warning: Java bloat ahead.

/*
 * Copyright (C) 2009 The Android Open Source Project
 */
package com.example.hellojni;

import android.app.Activity;
import android.widget.TextView;
import android.os.Bundle;

import android.app.AlertDialog;
import android.content.DialogInterface;
import android.widget.Toast;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.MotionEvent;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;

import android.view.WindowManager;
import android.graphics.PixelFormat;

import java.nio.ByteBuffer;

public class HelloJni extends Activity
{

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        setContentView(new Panel(this));
    }

    public void onDestroy() {
        super.onDestroy();
        myEngineDestroy();
    }

    class Panel extends SurfaceView implements SurfaceHolder.Callback {
        Bitmap renderbmp = null;
        Paint paint = null;

        public Panel(Context context) {
            super(context);
            getHolder().addCallback(this);
            getHolder().setFormat(PixelFormat.RGB_565);
            setFocusable(true);
            setFocusableInTouchMode(true);
        }

        @Override
        public boolean onKeyDown(int i, KeyEvent event) {
            return false; // key input is not handled in this example
        }

        @Override
        public boolean onTouchEvent(MotionEvent event) {
            if ((event.getAction() == MotionEvent.ACTION_DOWN) ||
                (event.getAction() == MotionEvent.ACTION_MOVE)) {
                if (myEngineMouseInput((int) event.getX(), (int) event.getY(), 0) == 1)
                    drawFrame();
                return true;
            }
            return false;
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            // TODO Auto-generated method stub
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            // Create the backing bitmap at the surface's size, matching the RGB_565 surface format.
            if (renderbmp == null)
                renderbmp = Bitmap.createBitmap(
                        holder.getSurfaceFrame().right - holder.getSurfaceFrame().left,
                        holder.getSurfaceFrame().bottom - holder.getSurfaceFrame().top,
                        Bitmap.Config.RGB_565);

            if (paint == null)
                paint = new Paint(Paint.FILTER_BITMAP_FLAG);

            myEngineInit(renderbmp, renderbmp.getWidth(), renderbmp.getHeight(), PixelFormat.RGB_565);

            drawFrame();
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
        }

        public void drawFrame() {
            // Lock the surface's canvas, blit the backing bitmap, and post it.
            Canvas c = getHolder().lockCanvas(null);
            if (c != null) {
                c.drawBitmap(renderbmp, 0, 0, paint);
                getHolder().unlockCanvasAndPost(c);
            }
        }

    }

    /* Native methods implemented by the
     * 'hello-jni' native library, which is packaged
     * with this application.
     */
    public native void myEngineInit( Bitmap bmp, int w, int h, int pf );
    public native int  myEngineMouseInput( int x, int y, int mt );
    public native void myEngineDestroy();

    public native String  unimplementedStringFromJNI();

    static {
        System.loadLibrary("hello-jni");
    }
}

Now for the NDK side:

/*
 * Copyright (C) 2010 The Android Open Source Project
 */

#include <android_native_app_glue.h>

#include <errno.h>
#include <jni.h>
#include <sys/time.h>
#include <time.h>
#include <android/log.h>
#include <android/bitmap.h>

#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define  LOG_TAG    "myapp"
#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define  LOGW(...)  __android_log_print(ANDROID_LOG_WARN,LOG_TAG,__VA_ARGS__)
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

/* Set to 1 to enable debug log traces. */
#define DEBUG 0

/* Return current time in milliseconds */
static double now_ms(void)
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return tv.tv_sec*1000. + tv.tv_usec/1000.;
}

jobject jbmp = NULL;
ANativeWindow_Buffer draw_buffer = { 0 };

static void cleanup_draw_buffer( JNIEnv* env )
{
    if(jbmp) {
        AndroidBitmap_unlockPixels( env, jbmp);
        jbmp = NULL;
    }
    draw_buffer.bits = 0;
}

static int init_draw_buffer( JNIEnv* env, jobject jbitmap, int width, int height )
{
    int res = 0, ret;
    LOGI("init_draw_buffer");

    LOGI("window w:%d, h:%d, format: %d", width, height, 4 );

    if( draw_buffer.width != width || 
        draw_buffer.height != height )
    {   
        draw_buffer.width=width;
        draw_buffer.height=height;
        draw_buffer.stride = draw_buffer.width*2;
        res = 1;
    }

    jbmp = NULL;
    if ((ret = AndroidBitmap_lockPixels(env, jbitmap, &draw_buffer.bits)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed! error=%d", ret);
    }
    else {
        LOGI("Successfully acquired bitmap pixels: %p", draw_buffer.bits);
        jbmp = jbitmap;
    }

    return res;
}

/* simple stats management */
typedef struct {
    double  renderTime;
    double  frameTime;
} FrameStats;

#define  MAX_FRAME_STATS  200
#define  MAX_PERIOD_MS    1500

typedef struct {
    double  firstTime;
    double  lastTime;
    double  frameTime;

    int         firstFrame;
    int         numFrames;
    FrameStats  frames[ MAX_FRAME_STATS ];
} Stats;

static void
stats_init( Stats*  s )
{
    s->lastTime = now_ms();
    s->firstTime = 0.;
    s->firstFrame = 0;
    s->numFrames  = 0;
}

static void
stats_startFrame( Stats*  s )
{
    s->frameTime = now_ms();
}

static void
stats_endFrame( Stats*  s )
{
    double now = now_ms();
    double renderTime = now - s->frameTime;
    double frameTime  = now - s->lastTime;
    int nn;

    if (now - s->firstTime >= MAX_PERIOD_MS) {
        if (s->numFrames > 0) {
            double minRender, maxRender, avgRender;
            double minFrame, maxFrame, avgFrame;
            int count;

            nn = s->firstFrame;
            minRender = maxRender = avgRender = s->frames[nn].renderTime;
            minFrame  = maxFrame  = avgFrame  = s->frames[nn].frameTime;
            for (count = s->numFrames; count > 0; count-- ) {
                nn += 1;
                if (nn >= MAX_FRAME_STATS)
                    nn -= MAX_FRAME_STATS;
                double render = s->frames[nn].renderTime;
                if (render < minRender) minRender = render;
                if (render > maxRender) maxRender = render;
                double frame = s->frames[nn].frameTime;
                if (frame < minFrame) minFrame = frame;
                if (frame > maxFrame) maxFrame = frame;
                avgRender += render;
                avgFrame  += frame;
            }
            avgRender /= s->numFrames;
            avgFrame  /= s->numFrames;

            LOGI("frame/s (avg,min,max) = (%.1f,%.1f,%.1f) "
                 "render time ms (avg,min,max) = (%.1f,%.1f,%.1f)\n",
                 1000./avgFrame, 1000./maxFrame, 1000./minFrame,
                 avgRender, minRender, maxRender);
        }
        s->numFrames  = 0;
        s->firstFrame = 0;
        s->firstTime  = now;
    }

    nn = s->firstFrame + s->numFrames;
    if (nn >= MAX_FRAME_STATS)
        nn -= MAX_FRAME_STATS;

    s->frames[nn].renderTime = renderTime;
    s->frames[nn].frameTime  = frameTime;

    if (s->numFrames < MAX_FRAME_STATS) {
        s->numFrames += 1;
    } else {
        s->firstFrame += 1;
        if (s->firstFrame >= MAX_FRAME_STATS)
            s->firstFrame -= MAX_FRAME_STATS;
    }

    s->lastTime = now;
}

// ----------------------------------------------------------------------

struct engine {
    struct android_app* app;

    Stats stats;

    int animating;
};


void
Java_com_example_hellojni_HelloJni_myEngineDestroy( JNIEnv* env,
                                                  jobject thiz )
{
    LOGI("Java_com_example_hellojni_HelloJni_myEngineDestroy");
    cleanup_draw_buffer(env);
}

void
Java_com_example_hellojni_HelloJni_myEngineInit( JNIEnv* env,
jobject thiz, jobject jbitmap, jint w, jint h, jint pf )
{
    LOGI("Java_com_example_hellojni_HelloJni_myEngineInit");
    init_draw_buffer( env, jbitmap, w, h );
}

jint
Java_com_example_hellojni_HelloJni_myEngineMouseInput( JNIEnv* env,
jobject thiz, jint x, jint y, jint mt )
{
    /* menuvisible and do_the_drawing_stuff() belong to the answerer's own engine code. */
    if( menuvisible ) {
//      LOGI("Java_com_example_hellojni_HelloJni_myEngineMouseInput, x:%d y:%d mt:%d", x, y, mt);
        return do_the_drawing_stuff(params_ommited); /* drawing buffer is unsigned char *dest = draw_buffer.bits; */
    }
    else
        return 0;
}

So the main rendering is invoked in the lines above, and as noted there, your buffer is available in the draw_buffer structure.
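
For context, here is a minimal sketch (mine, not part of the original answer) of what a do_the_drawing_stuff-style renderer could do with draw_buffer once the pixels are locked. It is meant to sit in the same file as the listing above, assumes the RGB_565 configuration used there plus <stdint.h>, and treats stride as a byte count because init_draw_buffer() set it to width*2:

/* Hypothetical example: fill the locked RGB_565 pixels in draw_buffer
 * with a solid colour, honouring the stride in bytes. */
static void example_fill_rgb565( uint16_t colour )
{
    if( draw_buffer.bits == NULL )
        return;

    unsigned char* row = (unsigned char*) draw_buffer.bits;
    int x, y;
    for( y = 0; y < draw_buffer.height; y++ ) {
        uint16_t* pixel = (uint16_t*) row;
        for( x = 0; x < draw_buffer.width; x++ )
            pixel[x] = colour;
        row += draw_buffer.stride;   /* stride is width*2 bytes per row here */
    }
}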

Note how the buffer is passed, and follow its path from Java to C through the JNI interface. I took care that the final bitmap rendering happens without any conversion, which is the fastest approach in my opinion.

You can ignore the frame-statistics part; I started from one of the samples and just wanted to get my hands on the new device I had received :)

Answered 2011-07-08T23:14:03.170

What you can do is pass the buffer to Java via JNI and create a Bitmap from it; see this page. You can then use Canvas.setBitmap to draw into that buffer.
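
As an illustration of the native half of this approach (my sketch, not from the answer), a malloc'd buffer can be handed to Java as a direct ByteBuffer; the Java side could then copy it into a Bitmap with Bitmap.copyPixelsFromBuffer() before drawing, or make that Bitmap the Canvas target via Canvas.setBitmap(). The function name getNativeBuffer and the fixed RGB_565 layout are assumptions:

#include <jni.h>
#include <stdlib.h>

/* Hypothetical helper: wrap a malloc'd RGB_565 buffer in a direct ByteBuffer
 * so Java can feed it to Bitmap.copyPixelsFromBuffer(). Width/height are
 * whatever the Java side created its Bitmap with; the memory must be freed
 * elsewhere when it is no longer needed. */
jobject
Java_com_example_hellojni_HelloJni_getNativeBuffer( JNIEnv* env, jobject thiz,
                                                    jint width, jint height )
{
    size_t size = (size_t) width * height * 2;   /* 2 bytes per RGB_565 pixel */
    void* pixels = malloc( size );
    if( pixels == NULL )
        return NULL;
    return (*env)->NewDirectByteBuffer( env, pixels, (jlong) size );
}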

Answered 2011-07-08T23:43:35.553

Bitmaps use native memory by default.
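
Building on that point, the NDK bitmap API lets native code operate on that storage directly; here is a minimal sketch (mine, not the answerer's) that locks a Java Bitmap's pixels and clears them, assuming the Bitmap was created with Bitmap.Config.RGB_565 and passed over JNI to a hypothetical clearBitmap method:

#include <jni.h>
#include <string.h>
#include <android/bitmap.h>

/* Hypothetical example: lock a Java Bitmap's native pixel storage and zero it. */
void
Java_com_example_hellojni_HelloJni_clearBitmap( JNIEnv* env, jobject thiz,
                                                jobject jbitmap )
{
    AndroidBitmapInfo info;
    void* pixels;

    if( AndroidBitmap_getInfo( env, jbitmap, &info ) < 0 )
        return;
    if( info.format != ANDROID_BITMAP_FORMAT_RGB_565 )
        return;
    if( AndroidBitmap_lockPixels( env, jbitmap, &pixels ) < 0 )
        return;

    /* info.stride is in bytes; clear every row of the native pixel buffer. */
    unsigned char* row = (unsigned char*) pixels;
    unsigned int y;
    for( y = 0; y < info.height; y++ ) {
        memset( row, 0, info.width * 2 );
        row += info.stride;
    }

    AndroidBitmap_unlockPixels( env, jbitmap );
}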

Answered 2011-07-08T23:10:16.350