[英]Trying to render a 3D triangle crashes the Nvidia driver! Why? [C++ && DirectX11 SDK]
Here is the Triangle.cpp code: 这是Triangle.cpp代码:
#include "Triangle.h"
// Puts the object into a fully-determined state: every owned pointer is
// nulled so the destructor's test-and-release logic is always safe, even
// if Initialize() is never called or fails part-way through.
// (The original left _vertices, _indices, _constantBuffer and _graphics
// uninitialized.)
Triangle::Triangle()
{
	_vertices = NULL;       // CPU-side geometry; allocated in InitTriangleData()
	_indices = NULL;
	_vertexBuffer = NULL;
	_indexBuffer = NULL;
	_constantBuffer = NULL; // declared in the header but never created; nulled for safety
	_vertexShader = NULL;
	_pixelShader = NULL;
	_inputLayout = NULL;
	_graphics = NULL;       // non-owning; assigned in Initialize()
	_stride = sizeof(Vertex); // bytes per vertex, used by IASetVertexBuffers
	_offset = 0;
}
// Releases every COM interface this object created, in reverse order of
// creation, and clears each pointer so a double-destroy is harmless.
Triangle::~Triangle()
{
	if (_inputLayout)
	{
		_inputLayout->Release();
		_inputLayout = NULL;
	}
	if (_pixelShader)
	{
		_pixelShader->Release();
		_pixelShader = NULL;
	}
	if (_vertexShader)
	{
		_vertexShader->Release();
		_vertexShader = NULL;
	}
	if (_indexBuffer)
	{
		_indexBuffer->Release();
		_indexBuffer = NULL;
	}
	if (_vertexBuffer)
	{
		_vertexBuffer->Release();
		_vertexBuffer = NULL;
	}
}
// Builds all GPU resources for the triangle. Returns false if either the
// vertex/index buffers or the shaders + input layout fail to initialize.
// @param graphics  non-owning pointer to the engine's DirectX wrapper;
//                  cached for use in Render().
const bool Triangle::Initialize(DirectX * graphics)
{
	_graphics = graphics;
	InitTriangleData();
	ID3D11Device* const device = graphics->GetDevice();
	// Short-circuits: shader setup is skipped if buffer creation fails.
	return InitializeVertexAndIndexBuffers(device)
		&& InitializeShadersAndinputLayout(device);
}
void Triangle::Render()
{
_graphics->GetDeviceContext()->IASetVertexBuffers(0, 1, &_vertexBuffer, &_stride, &_offset);
_graphics->GetDeviceContext()->IASetIndexBuffer(_indexBuffer, DXGI_FORMAT_D32_FLOAT, 0);
_graphics->GetDeviceContext()->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, 0, 1);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, 0, 1);
_graphics->GetDeviceContext()->DrawIndexed(3, 0, 0);
_graphics->GetDeviceContext()->PSSetShader(0, 0, 0);
_graphics->GetDeviceContext()->VSSetShader(0, 0, 0);
}
// Fills the CPU-side vertex and index arrays. Ownership of both arrays
// passes to InitializeVertexAndIndexBuffers, which frees them after the
// GPU buffers are created.
void Triangle::InitTriangleData()
{
	_vertices = new Vertex[3];
	_indices = new unsigned long[3];

	// Half-extents of the triangle in object space.
	float halfX = 50;
	float halfY = 50;

	// Clockwise winding (D3D11's default front face):
	// top, bottom-right, bottom-left.
	_vertices[0].position = D3DXVECTOR3(0.0f, halfY, 0.0f);
	_vertices[0].uv = D3DXVECTOR2(0.0f, 0.0f);
	_vertices[1].position = D3DXVECTOR3(halfX, -halfY, 0.0f);
	_vertices[1].uv = D3DXVECTOR2(0.0f, 0.0f);
	_vertices[2].position = D3DXVECTOR3(-halfX, -halfY, 0.0f);
	_vertices[2].uv = D3DXVECTOR2(0.0f, 0.0f);

	// BUG FIX: the index array was allocated but never written, so the
	// index buffer was created from uninitialized memory.
	_indices[0] = 0;
	_indices[1] = 1;
	_indices[2] = 2;
}
// Uploads the CPU-side vertex/index arrays into immutable-usage GPU
// buffers and frees the CPU copies.
// BUG FIX: the original leaked _vertices and _indices whenever either
// CreateBuffer call failed; the arrays are now released on every path.
// @param device  the D3D11 device used to create the buffers.
// @return true on success, false if either buffer creation failed.
const bool Triangle::InitializeVertexAndIndexBuffers(ID3D11Device * device)
{
	HRESULT result;
	D3D11_BUFFER_DESC bufferDesc;
	D3D11_SUBRESOURCE_DATA bufferData;

	// Vertex buffer: GPU-only (DEFAULT usage, no CPU access).
	ZeroMemory(&bufferDesc, sizeof bufferDesc);
	bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
	bufferDesc.ByteWidth = _stride * 3;
	bufferDesc.Usage = D3D11_USAGE_DEFAULT;

	ZeroMemory(&bufferData, sizeof bufferData);
	bufferData.pSysMem = _vertices;
	result = device->CreateBuffer(&bufferDesc, &bufferData, &_vertexBuffer);

	if (SUCCEEDED(result))
	{
		// Index buffer: reuse the same descriptor; only the bind flag,
		// size and source data change (everything else stays zero/DEFAULT).
		bufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bufferDesc.ByteWidth = sizeof(unsigned long) * 3;
		bufferData.pSysMem = _indices;
		result = device->CreateBuffer(&bufferDesc, &bufferData, &_indexBuffer);
	}

	// The GPU buffers own copies of the data now; free the CPU arrays on
	// both the success and the failure path so nothing leaks.
	delete[] _vertices;
	_vertices = 0;
	delete[] _indices;
	_indices = 0;

	return SUCCEEDED(result);
}
// Compiles the pixel and vertex shaders from disk, creates the shader
// objects, and builds the input layout matching the Vertex struct
// (POSITION float3 + TEXCOORD float2).
// BUG FIXES vs. the original:
//  - 'data' and 'error' blobs are now initialized to NULL and released on
//    every path (the original leaked 'data' when CreatePixelShader /
//    CreateVertexShader failed, and leaked a warnings blob on success).
// @param device  the D3D11 device used to create the shader objects.
// @return true on success, false on any compile/create failure.
const bool Triangle::InitializeShadersAndinputLayout(ID3D11Device * device)
{
	HRESULT result;
	ID3D10Blob *data = NULL;
	ID3D10Blob *error = NULL;

	// --- Pixel shader -------------------------------------------------
	std::string name = ".//Resources//Shaders//BasicTextureShader//color.ps";
	result = D3DX11CompileFromFileA(name.c_str(), NULL, NULL, "ColorPixelShader", "ps_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL, &data, &error, NULL);
	if (FAILED(result)) {
		if (error)
		{
			PrintOutMessage(error);
			error->Release();
		}
		return false;
	}
	if (error) // compile succeeded but produced warnings; don't leak the blob
	{
		error->Release();
		error = NULL;
	}
	result = device->CreatePixelShader(data->GetBufferPointer(), data->GetBufferSize(), NULL, &_pixelShader);
	data->Release(); // bytecode no longer needed regardless of outcome
	data = NULL;
	if (FAILED(result))
		return false;

	// --- Vertex shader ------------------------------------------------
	name = ".//Resources//Shaders//BasicTextureShader//color.vs";
	result = D3DX11CompileFromFileA(name.c_str(), NULL, NULL, "ColorVertexShader", "vs_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL, &data, &error, NULL);
	if (FAILED(result)) {
		if (error)
		{
			PrintOutMessage(error);
			error->Release();
		}
		return false;
	}
	if (error)
	{
		error->Release();
		error = NULL;
	}
	result = device->CreateVertexShader(data->GetBufferPointer(), data->GetBufferSize(), NULL, &_vertexShader);
	if (FAILED(result))
	{
		data->Release(); // was leaked in the original on this path
		return false;
	}

	// --- Input layout (must match struct Vertex field order) ----------
	const D3D11_INPUT_ELEMENT_DESC elements[] =
	{
		{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0,                            D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT,    0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
	};
	// The vertex-shader bytecode is needed here so D3D can validate the
	// layout against the shader's input signature.
	result = device->CreateInputLayout(elements, 2, data->GetBufferPointer(), data->GetBufferSize(), &_inputLayout);
	data->Release();
	return SUCCEEDED(result);
}
// Dumps the shader compiler's error/warning text to ShaderErrorReport.txt.
// BUG FIX: the original allocated 'new char[size]' and immediately
// overwrote the pointer with the blob's own buffer, leaking the
// allocation; the blob's buffer is now written out directly.
// @param error  compiler message blob; may be NULL (no-op). Ownership
//               stays with the caller (this function does not Release it).
void Triangle::PrintOutMessage(ID3D10Blob * error)
{
	if (!error)
		return;
	// The blob owns the message text; no copy is needed.
	const char * message = static_cast<const char *>(error->GetBufferPointer());
	std::ofstream file("ShaderErrorReport.txt", std::fstream::trunc);
	if (!file.is_open())
		return;
	file.write(message, static_cast<std::streamsize>(error->GetBufferSize()));
	file.close();
}
And the Triangle.h code: 和Triangle.h代码:
#ifndef TRIANGLE_H
#define TRIANGLE_H
#include "Engine\DirectX.h"
#include <D3DX10math.h>
#include <string>
#include <fstream>

// Renders a single hard-coded triangle through the engine's DirectX
// wrapper: owns its vertex/index buffers, shaders and input layout.
class Triangle
{
private:
	// Per-object transform matrices for a constant buffer.
	// NOTE(review): declared but not yet uploaded/used anywhere in
	// Triangle.cpp — presumably for a future shader stage.
	struct ConstantBuffer {
		D3DXMATRIX world;
		D3DXMATRIX view;
		D3DXMATRIX proj;
	};
	// CPU-side vertex format; must stay in sync with the
	// POSITION (float3) + TEXCOORD (float2) input layout.
	struct Vertex
	{
		D3DXVECTOR3 position;
		D3DXVECTOR2 uv;
	};
public:
	Triangle();
	~Triangle();
	// Creates all GPU resources; returns false on failure.
	const bool Initialize(DirectX* graphics);
	// Binds geometry + shaders and issues the indexed draw.
	void Render();
private:
	void InitTriangleData();
	const bool InitializeVertexAndIndexBuffers(ID3D11Device* device);
	const bool InitializeShadersAndinputLayout(ID3D11Device* device);
	void PrintOutMessage(ID3D10Blob * error);
private:
	// In-class initializers (already used below for the counts) give every
	// member a determinate value even before the constructor body runs —
	// the original left several of these pointers uninitialized.
	Vertex* _vertices = nullptr;             // transient CPU copy, freed after upload
	unsigned long * _indices = nullptr;      // transient CPU copy, freed after upload
	unsigned int _vertexCount = 3;
	unsigned int indexCount = 3;
	unsigned int _stride = sizeof(Vertex);   // bytes per vertex
	unsigned int _offset = 0;                // byte offset into the vertex buffer
	ID3D11Buffer* _vertexBuffer = nullptr, *_indexBuffer = nullptr, *_constantBuffer = nullptr;
	ID3D11InputLayout *_inputLayout = nullptr;
	ID3D11PixelShader* _pixelShader = nullptr;
	ID3D11VertexShader* _vertexShader = nullptr;
	DirectX* _graphics = nullptr;            // non-owning back-pointer to the engine
};
#endif
I have an Nvidia GTX 760 GPU, and when I run this the graphics driver crashes and recovers... and my window goes white. [the 'clear' color is black] 我有一个Nvidia GTX 760 gpu，当我运行它时，图形驱动程序崩溃并恢复......我的窗口变白了。['清除'颜色为黑色]
The second and third arguments of the APIs ID3D11DeviceContext::PSSetShader
and ID3D11DeviceContext::VSSetShader
are used for dynamic shader linkage; in your case they should be nullptr
and 0
. API ID3D11DeviceContext::PSSetShader
和ID3D11DeviceContext::VSSetShader
第二个和第三个参数用于动态链接,在您的情况下它们应该是nullptr
和0
。 By telling the device 0,1
, you claim one class instance but give a null pointer for it. 通过告诉设备0,1
，您声明了一个类实例，但为它指定了一个空指针。 The device rejects your invalid shader binding, and when you attempt to render, the GPU hangs because it does not have a proper shader to run. 设备拒绝无效的着色器绑定，当您尝试渲染时，GPU会挂起，因为它没有正确的着色器来运行。
In your code, replace 在您的代码中,替换
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, 0, 1);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, 0, 1);
by 通过
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, nullptr, 0);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, nullptr, 0);
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.