
C++ Depth Buffer Oculus Rift DK2 VR HMD

This is a question for anybody experienced with the Oculus Rift C++ SDK.


SDK Version: ovr_sdk_win_1.20.0_public

IDE: VS2013


I am trying to resolve a depth buffer problem in my code for a simplified 3D engine for the Oculus Rift DK2.

The OculusRoomTiny example supplied with the SDK is quite complex because it is designed with versatility in mind, so I've taken the SuperMinimal sample code and used that as the basis for my engine. The SuperMinimal sample did not support multi-coloured vertices or depth buffers, so my code adds both.

I've had no problem creating depth buffers with the Microsoft DirectX 11 libraries in desktop PC apps, so I think this is an Oculus-specific challenge. Note: the depth buffer in the OculusRoomTiny example works fine, but it is encapsulated in classes that are too complex for my code.
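
For reference, the pattern that works for me on the desktop is the textbook one, roughly this (a from-memory sketch, not my actual PC code; backBufferWidth, backBufferHeight and backBufferRtv are placeholders, and there is no error checking):

// Standard desktop D3D11 depth buffer setup, shown only for comparison with the VR path below.
D3D11_TEXTURE2D_DESC dtd;
ZeroMemory(&dtd, sizeof(dtd));
dtd.Width = backBufferWidth;               // must match the render target dimensions
dtd.Height = backBufferHeight;
dtd.MipLevels = 1;
dtd.ArraySize = 1;
dtd.Format = DXGI_FORMAT_D32_FLOAT;
dtd.SampleDesc.Count = 1;
dtd.BindFlags = D3D11_BIND_DEPTH_STENCIL;

ID3D11Texture2D * depthTex = NULL;
ID3D11DepthStencilView * dsv = NULL;
Device->CreateTexture2D(&dtd, NULL, &depthTex);
Device->CreateDepthStencilView(depthTex, NULL, &dsv);   // NULL desc: view format taken from the texture

// Bind alongside the back-buffer RTV, then clear to the far plane at the start of each frame.
Context->OMSetRenderTargets(1, &backBufferRtv, dsv);
Context->ClearDepthStencilView(dsv, D3D11_CLEAR_DEPTH, 1.0f, 0);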

In my code the cube renders as expected, but as soon as I include the depth buffer only the world background colour is rendered.

I took the depth buffer code from the OculusRoomTiny_Advanced demo and am confident I integrated it faithfully. I've posted my code below for comment. Please note it is super minimal and does not clean up the COM objects.

Any advice would be welcome, as I've been playing with this code for six weeks now!

main.cpp

#include "d3d11.h"
#include "d3dcompiler.h"
#include "OVR_CAPI_D3D.h"
#include "DirectXMath.h"
#include "models.h"

using namespace DirectX;
#pragma comment(lib, "d3dcompiler.lib")
#pragma comment(lib, "dxgi.lib")
#pragma comment(lib, "d3d11.lib")
void CreateSampleModel(ID3D11Device * Device, ID3D11DeviceContext * Context);
void RenderSampleModel(XMMATRIX * viewProj, ID3D11Device * Device, ID3D11DeviceContext * Context);

ID3D11Buffer * IndexBuffer;
ID3D11RenderTargetView * eyeRenderTexRtv[2][3];
ID3D11DepthStencilView *zbuffer[2]; // containing one for each eye
ovrLayerEyeFov ld = { { ovrLayerType_EyeFov } };    // Only using one layer; update this to an array if using several
ovrSession session;
ID3D11Device * Device;
ID3D11DeviceContext * Context;


void CreateDepthBufferForBothEyes(int eye)
{
    // Create the Depth Buffer Texture
    D3D11_TEXTURE2D_DESC texd;
    ZeroMemory(&texd, sizeof(texd));

    texd.Width = ld.Viewport[eye].Size.w;
    texd.Height = ld.Viewport[eye].Size.h;
    texd.ArraySize = 1;
    texd.MipLevels = 1;
    texd.SampleDesc.Count = 1; // This matches the RTV
    texd.Format = DXGI_FORMAT_D32_FLOAT;
    texd.BindFlags = D3D11_BIND_DEPTH_STENCIL;
    ID3D11Texture2D *pDepthBuffer;
    Device->CreateTexture2D(&texd, NULL, &pDepthBuffer);

    // Describe specific properties of the Depth Stencil Buffer
    D3D11_DEPTH_STENCIL_VIEW_DESC dsvd;
    ZeroMemory(&dsvd, sizeof(dsvd));

    dsvd.Format = DXGI_FORMAT_D32_FLOAT; // Make the same as the texture format
    dsvd.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;

    Device->CreateDepthStencilView(pDepthBuffer, &dsvd, &zbuffer[eye]);
    pDepthBuffer->Release();

}


//-------------------------------------------------------------------------------------------------
int WINAPI wWinMain(HINSTANCE hInstance, HINSTANCE, LPWSTR, int)
{
    // Init Rift and device
    ovr_Initialize(0);  
    ovrGraphicsLuid luid;
    ovr_Create(&session, &luid);
    IDXGIFactory * DXGIFactory; CreateDXGIFactory1(__uuidof(IDXGIFactory), (void**)(&DXGIFactory));
    IDXGIAdapter * DXGIAdapter;  DXGIFactory->EnumAdapters(0, &DXGIAdapter);

    D3D11CreateDevice(DXGIAdapter, D3D_DRIVER_TYPE_UNKNOWN, 0, 0, 0, 0, D3D11_SDK_VERSION, &Device, 0, &Context);

    // Create eye render buffers
    for (int eye = 0; eye < 2; eye++)
    {
        ld.Fov[eye] = ovr_GetHmdDesc(session).DefaultEyeFov[eye];
        ld.Viewport[eye].Size = ovr_GetFovTextureSize(session, (ovrEyeType)eye, ld.Fov[eye], 1.0f);

        ovrTextureSwapChainDesc desc = {};
        desc.Type = ovrTexture_2D;
        desc.ArraySize = 1;
        desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
        desc.Width = ld.Viewport[eye].Size.w;
        desc.Height = ld.Viewport[eye].Size.h;
        desc.MipLevels = 1;
        desc.SampleCount = 1;
        desc.StaticImage = ovrFalse;
        desc.MiscFlags = ovrTextureMisc_DX_Typeless;
        desc.BindFlags = ovrTextureBind_DX_RenderTarget;

        ovr_CreateTextureSwapChainDX(session, Device, &desc, &ld.ColorTexture[eye]);
        int textureCount = 0; ovr_GetTextureSwapChainLength(session, ld.ColorTexture[eye], &textureCount);

        for (int j = 0; j < textureCount; j++)  // Creates 3 backbuffers for each eye
        {
            ID3D11Texture2D* tex; ovr_GetTextureSwapChainBufferDX(session, ld.ColorTexture[eye], j, IID_PPV_ARGS(&tex));
            D3D11_RENDER_TARGET_VIEW_DESC rtvd = { DXGI_FORMAT_R8G8B8A8_UNORM, D3D11_RTV_DIMENSION_TEXTURE2D };
            Device->CreateRenderTargetView(tex, &rtvd, &eyeRenderTexRtv[eye][j]);
        }
        CreateDepthBufferForBothEyes(eye);
    }

    // Create sample model to be rendered in VR
    CreateSampleModel(Device,Context);

    // Loop for some frames, then terminate
    float camX = 0.0f;
    float camY = 0.0f;
    float camZ = 0.0f;

    for (long long frameIndex = 0; frameIndex < 1000;)
    {

        if (GetKeyState(VK_LEFT) & 0x8000)
            camX -= 0.001f;
        if (GetKeyState(VK_RIGHT) & 0x8000)
            camX += 0.001f;

        if (GetKeyState(VK_UP) & 0x8000)
            camY += 0.001f;
        if (GetKeyState(VK_DOWN) & 0x8000)
            camY -= 0.001f;

        if (GetKeyState(VK_NEXT) & 0x8000)
            camZ += 0.001f;
        if (GetKeyState(VK_PRIOR) & 0x8000)
            camZ -= 0.001f;

        // Get pose using a default IPD
        ovrPosef HmdToEyePose[2] = {{{0,0,0,1}, {-0.032f, 0, 0}},
                                    {{0,0,0,1}, {+0.032f, 0, 0}}};
        ovrPosef pose[2]; ovr_GetEyePoses(session, 0, ovrTrue, HmdToEyePose, pose, &ld.SensorSampleTime);

        //for (int eye = 0; eye < 2; eye++) 
        //  ld.RenderPose[eye] = pose[eye]; // Update the Layer description with the new head position for each eye

        // Render to each eye
        for (int eye = 0; eye < 2; eye++)
        {
            ld.RenderPose[eye] = pose[eye]; // Update the Layer description with the new head position for each eye
            // Set and clear current render target, and set viewport
            int index = 0;  ovr_GetTextureSwapChainCurrentIndex(session, ld.ColorTexture[eye], &index);
            Context->OMSetRenderTargets(1, &eyeRenderTexRtv[eye][index], zbuffer[eye]); // zbuffer[eye]
            float clearColour[] = { 0.1f, 0.0f, 0.0f, 0.0f };   // avoid leaking a heap array every frame
            Context->ClearRenderTargetView(eyeRenderTexRtv[eye][index], clearColour);
            Context->ClearDepthStencilView(zbuffer[eye], D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0, 0); 

            D3D11_VIEWPORT D3Dvp; // = { 0, 0, (float)ld.Viewport[eye].Size.w, (float)ld.Viewport[eye].Size.h };
            D3Dvp.TopLeftX = 0.0f;
            D3Dvp.TopLeftY = 0.0f;
            D3Dvp.Width = (float)ld.Viewport[eye].Size.w;
            D3Dvp.Height = (float)ld.Viewport[eye].Size.h;
            D3Dvp.MinDepth = 0;
            D3Dvp.MaxDepth = 1;

            Context->RSSetViewports(1, &D3Dvp);

            pose[eye].Position.z = 2.0f + camZ; // Move camera 2m from cube
            pose[eye].Position.x = camX;
            pose[eye].Position.y = camY;
            // Calculate view and projection matrices using pose and SDK
            XMVECTOR rot = XMLoadFloat4((XMFLOAT4 *)&pose[eye].Orientation);
            XMVECTOR pos = XMLoadFloat3((XMFLOAT3 *)&pose[eye].Position);
            XMVECTOR up = XMVector3Rotate(XMVectorSet(0, 1, 0, 0), rot);
            XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, 1, 0), rot);
            XMMATRIX view = XMMatrixLookAtLH(pos, XMVectorAdd(pos, forward), up);
            ovrMatrix4f p = ovrMatrix4f_Projection(ld.Fov[eye], 0, 1, ovrProjection_None);
            XMMATRIX proj = XMMatrixTranspose(XMLoadFloat4x4((XMFLOAT4X4 *)&p)); 

            // Render model and commit frame
            XMMATRIX viewProj = XMMatrixMultiply(view, proj);   // taking the address of a temporary is non-standard
            RenderSampleModel(&viewProj, Device, Context);
            ovr_CommitTextureSwapChain(session, ld.ColorTexture[eye]);
        }



        // Send rendered eye buffers to HMD, and increment the frame if we're visible
        ovrLayerHeader* layers[1] = { &ld.Header };
        if (ovrSuccess == ovr_SubmitFrame(session, 0, nullptr, layers, 1)) 
            frameIndex; // was frameIndex++; but changed to loop forever
    }

    ovr_Shutdown();
}




//---------------------------------------------------------------------------------------
// THIS CODE IS NOT SPECIFIC TO VR OR THE SDK, JUST USED TO DRAW SOMETHING IN VR
//---------------------------------------------------------------------------------------
void CreateSampleModel(ID3D11Device * Device, ID3D11DeviceContext * Context)
{
    // Create Vertex Buffer
    //#define V(n) (n&1?+1.0f:-1.0f), (n&2?-1.0f:+1.0f), (n&4?+1.0f:-1.0f) 
    //float vertices[] = { V(0), V(3), V(2), V(6), V(3), V(7), V(4), V(2), V(6), V(1), V(5), V(3), V(4), V(1), V(0), V(5), V(4), V(7) };

    D3D11_BUFFER_DESC vbDesc = { sizeof(VERTEX) * NUM_OF_VERTICES, D3D11_USAGE_DEFAULT, D3D11_BIND_VERTEX_BUFFER };
    D3D11_SUBRESOURCE_DATA initData = { tList };
    ID3D11Buffer* VertexBuffer;  
    Device->CreateBuffer(&vbDesc, &initData, &VertexBuffer);


    //create the index buffer
    D3D11_BUFFER_DESC bd;
    bd.Usage = D3D11_USAGE_DYNAMIC;
    bd.ByteWidth = sizeof(short) * NUM_OF_INDICES;    // 3 per triangle, 12 triangles

    bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
    bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
    bd.MiscFlags = 0;
    Device->CreateBuffer(&bd, NULL, &IndexBuffer);
    D3D11_MAPPED_SUBRESOURCE ms;
    Context->Map(IndexBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);      // map the buffer
    memcpy(ms.pData, indexes, NUM_OF_INDICES * sizeof(short));                              // copy the data
    Context->Unmap(IndexBuffer, NULL);


    // Create Vertex Shader
    const char* vShader = "float4x4 m;"

        "struct VOut { "
        "   float4 position : SV_POSITION;"
        "   float4 color : COLOR;"
        "}; "

        "VOut VS(float4 p1 : POSITION, float4 colour: COLOR)"
        "{"
        "   VOut output;"
        "   output.position = mul(m, p1);"
        "   output.color = colour;"
        "   return output;"
        "}";
    ID3D10Blob * pBlob; D3DCompile(vShader, strlen(vShader), "VS", 0, 0, "VS", "vs_4_0", 0, 0, &pBlob, 0);
    ID3D11VertexShader * VertexShader; 
    Device->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), 0, &VertexShader);

    // Create Input Layout
    D3D11_INPUT_ELEMENT_DESC elements[] = { 
        { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
        { "COLOR", 0, DXGI_FORMAT_B8G8R8A8_UNORM, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },

    };
    ID3D11InputLayout  * InputLayout; 
    Device->CreateInputLayout(elements, 2, pBlob->GetBufferPointer(), pBlob->GetBufferSize(), &InputLayout);

    // Create Pixel Shader
    const char* pShader = "float4 PS(float4 position : POSITION, float4 colour : COLOR) : SV_Target { return colour; }";
    D3DCompile(pShader, strlen(pShader), "PS", 0, 0, "PS", "ps_4_0", 0, 0, &pBlob, 0);
    ID3D11PixelShader  * PixelShader; 
    Device->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), 0, &PixelShader);


    Context->IASetInputLayout(InputLayout);
    Context->IASetIndexBuffer(IndexBuffer, DXGI_FORMAT_R16_UINT, 0);
    UINT stride = sizeof(VERTEX);   // 3 floats + packed 32-bit colour = 16 bytes
    UINT offset = 0;
    Context->IASetVertexBuffers(0, 1, &VertexBuffer, &stride, &offset);
    Context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
    Context->VSSetShader(VertexShader, 0, 0);
    Context->PSSetShader(PixelShader, 0, 0);
}
//------------------------------------------------------
void RenderSampleModel(XMMATRIX * viewProj, ID3D11Device * Device, ID3D11DeviceContext * Context)
{
    D3D11_BUFFER_DESC desc = { sizeof(XMMATRIX), D3D11_USAGE_DYNAMIC, D3D11_BIND_CONSTANT_BUFFER, D3D11_CPU_ACCESS_WRITE };
    D3D11_SUBRESOURCE_DATA initData = { viewProj };
    ID3D11Buffer * ConstantBuffer;  Device->CreateBuffer(&desc, &initData, &ConstantBuffer); 
    Context->VSSetConstantBuffers(0, 1, &ConstantBuffer);
    Context->DrawIndexed(NUM_OF_INDICES, 0, 0);
}

models.h

#ifndef MODELS_H
#define MODELS_H

#include "DirectXMath.h"

using namespace DirectX;

#define NUM_OF_MODELS 1

struct VERTEX{
    float x;
    float y;
    float z;
    uint32_t  C;    // Colour
};

#define NUM_OF_VERTICES 24
#define NUM_OF_INDICES 36

extern VERTEX tList[];
extern short indexes[];
#endif

models.cpp

#include "models.h"

VERTEX tList[] = {
    { -0.1f, 0.1f, 0.2f, 0xFF0000FF },      // Cube Vertex Index 0 
    { 0.1f, 0.1f, 0.2f, 0xFF0000FF },       // 1 
    { -0.1f, -0.1f, 0.2f, 0xFF0000FF },     // 2 
    { 0.1f, -0.1f, 0.2f, 0xFF0000FF },      // 3 

    { -0.1f, 0.1f, -0.0f, 0x00FF00FF },     // 4 
    { 0.1f, 0.1f, -0.0f, 0x00FF00FF },      // 5 
    { -0.1f, -0.1f, -0.0f, 0x00FF00FF },    // 6 
    { 0.1f, -0.1f, -0.0f, 0x00FF00FF },     // 7 

    { 0.1f, 0.1f, 0.2f, 0x0000FFFF },       // 8
    { 0.1f, 0.1f, -0.0f, 0x0000FFFF },      // 9
    { 0.08f, -0.1f, 0.2f, 0x0000FFFF },     // 10 Distorted in prep to test Depth
    { 0.08f, -0.1f, -0.0f, 0x0000FFFF },    // 11 Distorted in prep to test Depth

    { -0.1f, 0.1f, 0.2f, 0xFF0000FF },      // 12
    { -0.1f, 0.1f, -0.0f, 0xFF0000FF },     // 13
    { -0.1f, -0.1f, 0.2f, 0xFF0000FF },     // 14
    { -0.1f, -0.1f, -0.0f, 0xFF0000FF },        // 15 

    { -0.1f, 0.1f, -0.0f, 0x00FF00FF },     // 16
    { 0.1f, 0.1f, -0.0f, 0x00FF00FF },      // 17
    { -0.1f, 0.1f, 0.2f, 0x00FF00FF },      // 18
    { 0.1f, 0.1f, 0.2f, 0x00FF00FF },       // 19 

    { -0.1f, -0.1f, -0.0f, 0x00FFFFFF },        // 20
    { 0.1f, -0.1f, -0.0f, 0x00FFFFFF },     // 21
    { -0.1f, -0.1f, 0.2f, 0x00FFFFFF },     // 22
    { 0.1f, -0.1f, 0.2f, 0x00FFFFFF },      // 23 


};

short indexes[] = {
    0, 1, 2,    // FRONT QUAD 
    2, 1, 3,
    5, 4, 6,    // BACK QUAD
    5, 6, 7,
    8, 9, 11,   // RIGHT QUAD
    8, 11, 10,
    13, 12,14,  // LEFT QUAD
    13, 14, 15,
    18,16, 17,  // TOP QUAD
    18, 17, 19,
    22, 23, 21, // BOTTOM QUAD
    22, 21, 20
};

After much research I've found my own solution:

The line

ovrMatrix4f p = ovrMatrix4f_Projection(ld.Fov[eye], 0, 1, ovrProjection_None);

creates the projection matrix. It was simply a case of setting the projection depth range (near and far planes) correctly:

float zNear = 0.1f;
float zFar = 1000.0f;

ovrMatrix4f p = ovrMatrix4f_Projection(ld.Fov[eye], zNear, zFar, ovrProjection_None);

Amazing how, after 7 weeks of investigating, the flaw turned out to be a simple omission!
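
A note on why the original call broke the depth test: with zNear = 0 the zNear * zFar term of the projection drops out, so after the perspective divide every vertex ends up at the same depth value regardless of its distance (with the standard right-handed, [0,1] clip conventions the constant works out to 1.0). The buffer is cleared to 1.0 and the default depth comparison is LESS, so no fragment can ever pass and only the clear colour is visible. A quick standalone check makes this visible (sketch only: the fov tangents are made-up values, and it assumes OVR_CAPI_Util.h and the LibOVR library are available to the project):

#include <cstdio>
#include "OVR_CAPI_Util.h"   // ovrMatrix4f_Projection, ovrProjection_None

// Depth value the rasteriser would write for a view-space point (0, 0, zView):
// clip-space z divided by clip-space w.
static float DepthAfterDivide(const ovrMatrix4f & p, float zView)
{
    float zClip = p.M[2][2] * zView + p.M[2][3];
    float wClip = p.M[3][2] * zView + p.M[3][3];
    return zClip / wClip;
}

int main()
{
    ovrFovPort fov = { 1.3f, 1.3f, 1.2f, 1.2f };   // UpTan, DownTan, LeftTan, RightTan (illustrative only)

    ovrMatrix4f broken = ovrMatrix4f_Projection(fov, 0.0f, 1.0f, ovrProjection_None);
    ovrMatrix4f mended = ovrMatrix4f_Projection(fov, 0.1f, 1000.0f, ovrProjection_None);

    float zViews[] = { -0.5f, -2.0f };   // points in front of the eye (right-handed view space)
    for (int i = 0; i < 2; i++)
        printf("zView = %5.2f   zNear=0 depth = %f   zNear=0.1 depth = %f\n",
            zViews[i], DepthAfterDivide(broken, zViews[i]), DepthAfterDivide(mended, zViews[i]));
    return 0;
}

With the zero near plane both test points print the same depth; with the corrected near/far pair the nearer point gets the smaller depth and the depth test behaves as expected.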
