First, let's take a look at the ovrFramebuffer structure:
typedef struct {
    int Width;
    int Height;
    int Multisamples;
    int TextureSwapChainLength;
    int TextureSwapChainIndex;
    ovrTextureSwapChain * ColorTextureSwapChain;
    GLuint * DepthBuffers;
    GLuint * FrameBuffers;
} ovrFramebuffer;
// Create a texture swap chain: type 2D, format colorFormat, 1 mip level, buffered
frameBuffer->ColorTextureSwapChain = vrapi_CreateTextureSwapChain(
VRAPI_TEXTURE_TYPE_2D, colorFormat, width, height, 1, true);
The width and height used here typically come from ovrHmdInfo, a structure describing the VR headset (goggles): it contains the display resolution, refresh rate, default (suggested) eye-buffer resolution, and the horizontal and vertical field-of-view angles.
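Before the buffers below can be allocated, the sample queries how many textures the chain actually contains; a one-line sketch using the VrApi call vrapi_GetTextureSwapChainLength:

frameBuffer->TextureSwapChainLength =
        vrapi_GetTextureSwapChainLength(frameBuffer->ColorTextureSwapChain);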
frameBuffer->DepthBuffers = (GLuint *) malloc(
frameBuffer->TextureSwapChainLength * sizeof(GLuint));
frameBuffer->FrameBuffers = (GLuint *) malloc(
frameBuffer->TextureSwapChainLength * sizeof(GLuint));
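// The renderbuffer/framebuffer setup that follows runs once per swap-chain
// entry. The loop header and the colorTexture lookup are not shown in this
// excerpt, so they are reconstructed here (the loop's closing brace is
// likewise omitted). vrapi_GetTextureSwapChainHandle is the VrApi call that
// returns the GL texture name backing entry i.
for (int i = 0; i < frameBuffer->TextureSwapChainLength; i++) {
    const GLuint colorTexture = vrapi_GetTextureSwapChainHandle(
            frameBuffer->ColorTextureSwapChain, i);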
// Create renderbuffer objects (glGenRenderbuffers allocates n renderbuffer object names; see chapter 12, p. 233)
// One renderbuffer name is allocated here and stored in frameBuffer->DepthBuffers[i]; names are nonzero unsigned integers
// Create multisampled depth buffer.
GL(glGenRenderbuffers(1, &frameBuffer->DepthBuffers[i]));
// Bind the renderbuffer; the first argument must be GL_RENDERBUFFER
GL(glBindRenderbuffer(GL_RENDERBUFFER, frameBuffer->DepthBuffers[i]));
GL(glRenderbufferStorageMultisampleEXT(GL_RENDERBUFFER, multisamples,
        GL_DEPTH_COMPONENT24, width, height));
GL(glBindRenderbuffer(GL_RENDERBUFFER, 0));
// Create the frame buffer.
GL(glGenFramebuffers(1, &frameBuffer->FrameBuffers[i]));
GL(glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer->FrameBuffers[i]));
GL(glFramebufferTexture2DMultisampleEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
        GL_TEXTURE_2D, colorTexture, 0, multisamples));
GL(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
        GL_RENDERBUFFER, frameBuffer->DepthBuffers[i]));
GL(GLenum renderFramebufferStatus = glCheckFramebufferStatus( GL_FRAMEBUFFER ));
GL(glBindFramebuffer(GL_FRAMEBUFFER, 0));
if (renderFramebufferStatus != GL_FRAMEBUFFER_COMPLETE) {
ALOGE("Incomplete frame buffer object: %s",
EglFrameBufferStatusString(renderFramebufferStatus));
return false;
}
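For completeness, teardown mirrors the creation above; a minimal sketch modeled on the sample's ovrFramebuffer_Destroy (vrapi_DestroyTextureSwapChain is the VrApi counterpart to vrapi_CreateTextureSwapChain):

static void ovrFramebuffer_Destroy(ovrFramebuffer * frameBuffer) {
    // Delete the per-entry GL objects first, then the swap chain itself.
    GL(glDeleteFramebuffers(frameBuffer->TextureSwapChainLength, frameBuffer->FrameBuffers));
    GL(glDeleteRenderbuffers(frameBuffer->TextureSwapChainLength, frameBuffer->DepthBuffers));
    vrapi_DestroyTextureSwapChain(frameBuffer->ColorTextureSwapChain);
    free(frameBuffer->DepthBuffers);
    free(frameBuffer->FrameBuffers);
}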
With that covered, let's now look at how the program uses these data structures through the swap chain:
ovrFramebuffer_Resolve(frameBuffer);
parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].ColorTextureSwapChain =
frameBuffer->ColorTextureSwapChain;
parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].TextureSwapChainIndex =
frameBuffer->TextureSwapChainIndex;
parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].TexCoordsFromTanAngles =
renderer->TexCoordsTanAnglesMatrix; // texture-coordinate (tan-angle) matrix
parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].HeadPose =
updatedTracking.HeadPose;
ovrFramebuffer_Advance(frameBuffer);
Both the left-eye and the right-eye image have to be drawn, so the parms object carries one texture entry per eye. parms is of type ovrFrameParms:
typedef struct
{
    // Layers composited in the time warp.
    ovrFrameLayer Layers[VRAPI_FRAME_LAYER_TYPE_MAX];
    int LayerCount;
    // Combination of ovrFrameOption flags.
    int WarpOptions;
    // Which program to run with these layers.
    ovrFrameProgram WarpProgram;
    // Program-specific tuning values.
    float ProgramParms[4];
    // Application controlled frame index that uniquely identifies this particular frame.
    // This must be the same frame index that was passed to vrapi_GetPredictedDisplayTime()
    // when synthesis of this frame started.
    long long FrameIndex;
    // WarpSwap will not return until at least this many V-syncs have
    // passed since the previous WarpSwap returned.
    // Setting to 2 will reduce power consumption and may make animation
    // more regular for applications that can't hold full frame rate.
    int MinimumVsyncs;
    // Latency Mode.
    ovrExtraLatencyMode ExtraLatencyMode;
    // Rotation from a joypad can be added on generated frames to reduce
    // judder in FPS style experiences when the application framerate is
    // lower than the V-sync rate.
    // This will be applied to the view space distorted
    // eye vectors before applying the rest of the time warp.
    // This will only be added when the same ovrFrameParms is used for
    // more than one V-sync.
    ovrMatrix4f ExternalVelocity;
    // jobject that will be updated before each eye for minimal
    // latency with VRAPI_FRAME_PROGRAM_MASKED_PLANE_EXTERNAL.
    // IMPORTANT: This should be a JNI weak reference to the object.
    // The system will try to convert it into a global reference before
    // calling SurfaceTexture->Update, which allows it to be safely
    // freed by the application.
    jobject SurfaceTextureObject;
    // CPU/GPU performance parameters.
    ovrPerformanceParms PerformanceParms;
    // For handling HMD events and power level state changes.
    ovrJava Java;
} ovrFrameParms;
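How does parms get its defaults? In this VrApi generation the usual pattern is to initialize it with vrapi_DefaultFrameParms and hand it to vrapi_SubmitFrame each frame (the successor of the WarpSwap call the comments mention). A hedged sketch; java, ovr, frameIndex, and perfParms are assumed to come from the surrounding application code:

ovrFrameParms parms = vrapi_DefaultFrameParms(&java, VRAPI_FRAME_INIT_DEFAULT,
        vrapi_GetTimeInSeconds(), NULL);
parms.FrameIndex = frameIndex;      // must match the index passed to vrapi_GetPredictedDisplayTime()
parms.MinimumVsyncs = 1;            // run at the full V-sync rate
parms.PerformanceParms = perfParms;
// ... the per-eye layer textures are filled in as shown earlier ...
vrapi_SubmitFrame(ovr, &parms);     // 'ovr' is the ovrMobile * returned by vrapi_EnterVrMode

Each entry of Layers[].Textures[] written above is an ovrFrameLayerTexture: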
typedef struct
{
    // Because OpenGL ES does not support clampToBorder, it is the
    // application's responsibility to make sure that all mip levels
    // of the primary eye texture have a black border that will show
    // up when time warp pushes the texture partially off screen.
    // CLAMP_TO_BORDER causes OpenGL to only take the border color at the
    // edge of the texture rather than the average of the border color and
    // texture edge texels. This allows for a perfect border around the texture.
    ovrTextureSwapChain * ColorTextureSwapChain;
    // The depth texture is optional for positional time warp.
    ovrTextureSwapChain * DepthTextureSwapChain;
    // Index to the texture from the set that should be displayed.
    int TextureSwapChainIndex;
    // Points on the screen are mapped by a distortion correction
    // function into ( TanX, TanY, -1, 1 ) vectors that are transformed
    // by this matrix to get ( S, T, Q, _ ) vectors that are looked
    // up with texture2dproj() to get texels.
    ovrMatrix4f TexCoordsFromTanAngles;
    // Only texels within this range should be drawn.
    // This is a sub-rectangle of the [(0,0)-(1,1)] texture coordinate range.
    ovrRectf TextureRect;
    // The tracking state for which ModelViewMatrix is correct.
    // It is ok to update the orientation for each eye, which
    // can help minimize black edge pull-in, but the position
    // must remain the same for both eyes, or the position would
    // seem to judder "backwards in time" if a frame is dropped.
    ovrRigidBodyPosef HeadPose;
} ovrFrameLayerTexture;
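Where does the TexCoordsTanAnglesMatrix assigned earlier come from? It is normally derived from the eye projection using the VrApi helpers ovrMatrix4f_CreateProjectionFov and ovrMatrix4f_TanAngleMatrixFromProjection; a minimal sketch, with fovDegreesX/fovDegreesY assumed to come from the headset info:

// Build a symmetric-FOV projection, then the matrix that maps
// ( TanX, TanY, -1, 1 ) vectors to ( S, T, Q, _ ) texture coordinates.
const ovrMatrix4f projectionMatrix = ovrMatrix4f_CreateProjectionFov(
        fovDegreesX, fovDegreesY, 0.0f, 0.0f, 1.0f, 0.0f);
renderer->TexCoordsTanAnglesMatrix =
        ovrMatrix4f_TanAngleMatrixFromProjection(&projectionMatrix);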
static void ovrFramebuffer_SetCurrent(ovrFramebuffer * frameBuffer) {
    GL(glBindFramebuffer(GL_FRAMEBUFFER,
            frameBuffer->FrameBuffers[frameBuffer->TextureSwapChainIndex]));
}
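The sample also defines a counterpart that unbinds the framebuffer once an eye has been resolved, so the swap-chain texture can be consumed by time warp:

static void ovrFramebuffer_SetNone() {
    GL(glBindFramebuffer(GL_FRAMEBUFFER, 0));
}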
static void ovrFramebuffer_Resolve(ovrFramebuffer * frameBuffer) {
    // Discard the depth buffer, so the tiler won't need to write it back out to memory.
    const GLenum depthAttachment[1] = { GL_DEPTH_ATTACHMENT };
    glInvalidateFramebuffer(GL_FRAMEBUFFER, 1, depthAttachment);
    // Flush this frame worth of commands.
    glFlush();
}
5. Updating the swap chain
Once the world layer's Textures[eye] entry points at the swap-chain entry that was just rendered (the parms assignments shown above), the index is advanced so that the next frame renders into the next entry of the chain:
static void ovrFramebuffer_Advance(ovrFramebuffer * frameBuffer) {
    // Advance to the next texture from the set.
    frameBuffer->TextureSwapChainIndex =
            (frameBuffer->TextureSwapChainIndex + 1) % frameBuffer->TextureSwapChainLength;
}
By cycling through steps 1 to 6 on every frame, the program draws the left-eye and the right-eye image in turn, frame after frame.
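Putting the pieces together, the per-frame eye loop looks roughly like this (a hedged sketch modeled on the VrCubeWorld sample; ovrRenderer_RenderEye is a placeholder for the application's actual draw code):

for (int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++) {
    ovrFramebuffer * frameBuffer = &renderer->FrameBuffer[eye];
    ovrFramebuffer_SetCurrent(frameBuffer);   // bind this eye's FBO
    ovrRenderer_RenderEye(renderer, eye);     // draw the scene (placeholder)
    ovrFramebuffer_Resolve(frameBuffer);      // invalidate depth, flush
    // Point the world layer at the entry that was just rendered.
    parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].ColorTextureSwapChain =
            frameBuffer->ColorTextureSwapChain;
    parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye].TextureSwapChainIndex =
            frameBuffer->TextureSwapChainIndex;
    ovrFramebuffer_Advance(frameBuffer);      // advance to the next chain entry
}
ovrFramebuffer_SetNone();
vrapi_SubmitFrame(ovr, &parms);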