I'm modifying the code from here:
https://github.com/Sveder/CardboardPassthrough
to make it work on the latest Google VR SDK (1.180.0), but I don't know what should happen to the following function:
@Override
public void onFrameAvailable(SurfaceTexture arg0)
{
this.cardboardView.requestRender();
}
cardboardView is of type com.google.vrtoolkit.cardboard.CardboardView.
requestRender() no longer exists on GvrView, so what is the current equivalent in the new Google VR SDK?
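My best guess so far: GvrView no longer exposes a render mode, so presumably it always renders continuously and an explicit render request is simply unnecessary. If that's right, maybe onFrameAvailable() only needs to record that a camera frame is pending and let onNewFrame() consume it on the GL thread. A minimal sketch of what I mean (frameAvailable is a java.util.concurrent.atomic.AtomicBoolean field I would add myself; it is not part of the SDK):
private final AtomicBoolean frameAvailable = new AtomicBoolean(false);
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture)
{
    // No requestRender() on GvrView; just remember that a new camera frame is ready.
    frameAvailable.set(true);
}
@Override
public void onNewFrame(HeadTransform headTransform)
{
    // Consume the pending frame on the GL thread, where updateTexImage() is legal.
    if (frameAvailable.compareAndSet(true, false))
    {
        surface.updateTexImage();
    }
}
Is that the intended replacement, or is there an actual equivalent of requestRender() in the new SDK?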
Right now I only see a black screen with a thin white line down the middle separating the two views. That is probably not caused by the missing requestRender() call alone, so I've included the full code below for more context.
MainActivity.java
public class MainActivity extends GvrActivity implements GvrView.StereoRenderer, OnFrameAvailableListener
{
private static final String LOG_TAG = "PassthroughCardboardRMD";
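// Same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES; GLES20 itself doesn't define this constant.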
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private Camera camera;
private static final int REQUEST_ALL_PERMISSIONS_ID = 1;
private static final String[] REQUIRED_PERMISSIONS = {CAMERA};
private final String vertexShaderCode =
"attribute vec4 position;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{" +
"gl_Position = position;" +
"textureCoordinate = inputTextureCoordinate;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"varying vec2 textureCoordinate; \n" +
"uniform samplerExternalOES s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
//" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer, vertexBuffer2;
private ShortBuffer drawListBuffer, buf2;
private int mProgram;
private int mPositionHandle, mPositionHandle2;
private int mColorHandle;
private int mTextureCoordHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float squareVertices[] = { // in counterclockwise order:
-1.0f, -1.0f, // 0.left - mid
1.0f, -1.0f, // 1. right - mid
-1.0f, 1.0f, // 2. left - top
1.0f, 1.0f, // 3. right - top
};
private short drawOrder[] = {0, 2, 1, 1, 2, 3}; // order to draw vertices
private short drawOrder2[] = {2, 0, 3, 3, 0, 1}; // order to draw vertices
static float textureVertices[] = {
0.0f, 1.0f, // A. left-bottom
1.0f, 1.0f, // B. right-bottom
0.0f, 0.0f, // C. left-top
1.0f, 0.0f // D. right-top
};
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per coordinate (float)
private ByteBuffer indexBuffer; // Buffer for index-array
private int texture;
//private CardboardOverlayView mOverlayView;
private GvrView gvrView;
private SurfaceTexture surface;
private float[] mCamera;
public void startCamera(int texture)
{
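// Wrap the GL texture in a SurfaceTexture that the camera preview can render into.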
surface = new SurfaceTexture(texture);
surface.setOnFrameAvailableListener(this);
camera = Camera.open();
try
{
camera.setPreviewTexture(surface);
camera.startPreview();
} catch (IOException ioe)
{
Log.w(LOG_TAG, "CAM LAUNCH FAILED");
}
}
static private int createTexture()
{
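// Generate one texture name and configure it as an external (camera) texture.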
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
/**
* Compiles a GLSL shader from its source code.
*
* @param type The type of shader we will be creating (vertex or fragment).
* @param code The GLSL source code of the shader.
* @return The OpenGL handle of the compiled shader.
*/
private int loadGLShader(int type, String code)
{
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
Log.e(LOG_TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0)
{
throw new RuntimeException("Error creating shader.");
}
return shader;
}
/**
* Checks if we've had an error inside of OpenGL ES, and if so what that error is.
*
* @param func The name of the GL call, used to label the error log.
*/
private static void checkGLError(String func)
{
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR)
{
Log.e(LOG_TAG, func + ": glError " + error);
throw new RuntimeException(func + ": glError " + error);
}
}
/**
* Sets the view to our GvrView and initializes the transformation matrices we will use
* to render our scene.
*
* @param savedInstanceState The previously saved instance state, if any.
*/
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.common_ui);
gvrView = (GvrView) findViewById(R.id.gvr_view);
//RENDERMODE_CONTINUOUSLY
setGvrView(gvrView);
mCamera = new float[16];
// mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
// mOverlayView.show3DToast("Pull the magnet when you find an object.");
}
@Override
public void onRendererShutdown()
{
Log.i(LOG_TAG, "onRendererShutdown");
}
@Override
public void onSurfaceChanged(int width, int height)
{
Log.i(LOG_TAG, "onSurfaceChanged");
}
/**
* Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
* arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
*
* @param config The EGL configuration used when creating the surface.
*/
@Override
public void onSurfaceCreated(EGLConfig config)
{
Log.i(LOG_TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
texture = createTexture();
}
/**
* Prepares OpenGL ES before we draw a frame.
*
* @param headTransform The head transformation in the new frame.
*/
@Override
public void onNewFrame(HeadTransform headTransform)
{
float[] mtx = new float[16];
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
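// updateTexImage() must be called on the GL thread; onNewFrame() runs there.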
surface.updateTexImage();
surface.getTransformMatrix(mtx);
}
@Override
public void onFrameAvailable(SurfaceTexture arg0)
{
// TODO: requestRender() no longer exists on GvrView; commented out so this compiles.
// this.gvrView.requestRender();
}
/**
* Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
* a parameter.
*
* @param eyeTransform The transformations to apply to render this eye.
*/
@Override
public void onDrawEye(Eye eyeTransform)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // glActiveTexture() expects a texture unit (GL_TEXTUREi), not a texture target
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetUniformLocation(mProgram, "s_texture"); // s_texture is a uniform, not an attribute
GLES20.glUniform1i(mColorHandle, 0); // sample from texture unit 0
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
}
@Override
public void onFinishFrame(Viewport viewport)
{
}
/**
* Increment the score, hide the object, and give feedback if the user pulls the magnet while
* looking at the object. Otherwise, remind the user what to do.
*/
@Override
public void onCardboardTrigger()
{
}
@Override
protected void onPostCreate(@Nullable final Bundle savedInstanceState)
{
super.onPostCreate(savedInstanceState);
final boolean canFilm =
ContextCompat.checkSelfPermission(getApplicationContext(), CAMERA) ==
PackageManager.PERMISSION_GRANTED;
if (!canFilm)
{
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS,
REQUEST_ALL_PERMISSIONS_ID);
}
else
{
kickoffWithPermissions();
}
}
@Override
public void onRequestPermissionsResult(final int requestCode,
@NonNull final String[] permissions,
@NonNull final int[] grantResults)
{
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_ALL_PERMISSIONS_ID)
{
if (checkAllPermissionsGranted(grantResults))
{
Log.d(LOG_TAG, "All required permissions granted.");
kickoffWithPermissions();
}
else
{
Log.e(LOG_TAG,
"Failed to get all required camera permissions. Grant Results: " + Arrays.toString(
grantResults) + ". Permissions: " + Arrays.toString(permissions));
}
}
}
private boolean checkAllPermissionsGranted(final int[] grantResults)
{
// An empty or short result array means the request was interrupted; treat that as denied.
if (grantResults.length != REQUIRED_PERMISSIONS.length)
{
return false;
}
for (final int grantResult : grantResults)
{
if (grantResult != PackageManager.PERMISSION_GRANTED)
{
return false;
}
}
return true;
}
private void kickoffWithPermissions()
{
startCamera(texture);
}
}
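Another thing I'm now wondering about while porting: the original project calls startCamera(texture) at the end of onSurfaceCreated(), after createTexture(), whereas my permission flow calls it from onPostCreate(), which runs before the GL surface exists, so texture is presumably still 0 when the SurfaceTexture is created. If the permission is already granted, something like this is probably closer to the original behavior (sketch; the onPostCreate() path would then only request the permission):
@Override
public void onSurfaceCreated(EGLConfig config)
{
    // ... buffer and shader setup as above ...
    texture = createTexture();
    // Start the camera only once a valid GL texture name exists.
    startCamera(texture);
}
Could the texture-id-of-0 ordering be a contributor to the black screen, independently of requestRender()?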
common_ui.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/ui_layout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<com.google.vr.sdk.base.GvrView
android:id="@+id/gvr_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentTop="true"
android:layout_alignParentLeft="true" />
</RelativeLayout>
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.passthroughcardboard">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus"/>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>