I am trying to make a simple test to understand DirectX11 better and how to work with ID3D11Texture2D
.
I am willing to create an ID3D11Texture2D
from unsigned char[]
, but for some reason it is giving me the following error:
Exception thrown at 0x00007FFBBCB9C21E (nvwgf2umx.dll) in TestDX.exe: 0xC0000005: Access violation reading location 0x0000024B1A7A4000.
I have created the D3D11CreateDevice
with debug flags and it does not show any more useful information.
After searching on the internet, I found a topic suggesting that my D3D11_TEXTURE2D_DESC
Format
property does not match the format of the input data.
To reproduce, here is my code:
const int width = 1920;
const int height = 1080;
unsigned char* pixels = new unsigned char[width * height * 4];
int numPixels = (int)(width * height);
// Setting up pixels
for (int i = 0; i < numPixels; i++)
{
*(pixels) = 255;
*(pixels + 1) = 255;
*(pixels + 2) = 255;
*(pixels + 3) = 255;
pixels += 4; // move the pointer along to the next rgba pixel
}
// Creating DirectX11 Device
UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG;
D3D_FEATURE_LEVEL featureLevels[] =
{
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3,
D3D_FEATURE_LEVEL_9_1
};
ID3D11Texture2D* tex = nullptr;
ID3D11Device* device = nullptr;
ID3D11DeviceContext* context = nullptr;
if (FAILED(D3D11CreateDevice(
nullptr, // specify nullptr to use the default adapter
D3D_DRIVER_TYPE_HARDWARE,
nullptr, // specify nullptr because D3D_DRIVER_TYPE_HARDWARE
// indicates that this function uses hardware
creationFlags, // optionally set debug and Direct2D compatibility flags
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION, // always set this to D3D11_SDK_VERSION
&device,
nullptr,
&context)))
{
cout << "Failed creating device" << endl;
return 0;
}
else
{
cout << "Created device" << endl;
}
D3D11_SUBRESOURCE_DATA initData = { 0 };
initData.pSysMem = (const void*)pixels;
initData.SysMemPitch = width * 4 * sizeof(unsigned char);
initData.SysMemSlicePitch = height * width * 4;
D3D11_TEXTURE2D_DESC tex_desc = {};
tex_desc.Height = height;
tex_desc.Width = width;
tex_desc.MipLevels = 1;
tex_desc.ArraySize = 1;
tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
tex_desc.SampleDesc.Count = 1;
tex_desc.SampleDesc.Quality = 0;
tex_desc.Usage = D3D11_USAGE_DEFAULT;
tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
tex_desc.CPUAccessFlags = 0;
tex_desc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
HRESULT hres = device->CreateTexture2D(&tex_desc, &initData, &tex); // Causes exception
if (FAILED(hres))
{
cout << "Could not create texture" << endl;
return 0;
}
else
{
cout << "Created texture" << endl;
}
if (tex)
{
tex->Release();
}
I also tried creating an empty texture and filling it with UpdateSubresource, as someone suggested on the internet, which gives this second scenario:
const int width = 1920;
const int height = 1080;
unsigned char* pixels = new unsigned char[width * height * 4];
int numPixels = (int)(width * height);
// Setting up pixels
for (int i = 0; i < numPixels; i++)
{
*(pixels) = 255;
*(pixels + 1) = 255;
*(pixels + 2) = 255;
*(pixels + 3) = 255;
pixels += 4; // move the pointer along to the next rgba pixel
}
// Creating DirectX11 Device
UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG;
D3D_FEATURE_LEVEL featureLevels[] =
{
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3,
D3D_FEATURE_LEVEL_9_1
};
ID3D11Texture2D* tex = nullptr;
ID3D11Device* device = nullptr;
ID3D11DeviceContext* context = nullptr;
if (FAILED(D3D11CreateDevice(
nullptr, // specify nullptr to use the default adapter
D3D_DRIVER_TYPE_HARDWARE,
nullptr, // specify nullptr because D3D_DRIVER_TYPE_HARDWARE
// indicates that this function uses hardware
creationFlags, // optionally set debug and Direct2D compatibility flags
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION, // always set this to D3D11_SDK_VERSION
&device,
nullptr,
&context)))
{
cout << "Failed creating device" << endl;
return 0;
}
else
{
cout << "Created device" << endl;
}
D3D11_TEXTURE2D_DESC tex_desc = {};
tex_desc.Height = height;
tex_desc.Width = width;
tex_desc.MipLevels = 1;
tex_desc.ArraySize = 1;
tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
tex_desc.SampleDesc.Count = 1;
tex_desc.SampleDesc.Quality = 0;
tex_desc.Usage = D3D11_USAGE_DEFAULT;
tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
tex_desc.CPUAccessFlags = 0;
tex_desc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
HRESULT hres = device->CreateTexture2D(&tex_desc, NULL, &tex);
if (FAILED(hres))
{
cout << "Could not create texture" << endl;
return 0;
}
else
{
cout << "Created texture" << endl;
}
int row_pitch = (width * 4) * sizeof(unsigned char);
context->UpdateSubresource(tex, 0, NULL, pixels, row_pitch, 0); // Causes exception
if (tex)
{
tex->Release();
}
However, both scenarios lead to the same exception and I think it suggests that my input data is actually wrong, but I don't see why it is wrong. My pixels
do represent an RGBA image and I set the tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM
which looks correct to me but I am not experienced at all with this so I can't really judge.
I would appreciate any help.
This has nothing to do with DirectX. Your code modifies the original pixels
pointer itself (pixels += 4
inside the fill loop), so by the time it is passed as initial data it points one-past-the-end of the allocation — and, because the buffer is large, eventually into unmapped/protected OS memory pages — hence the access violation.
So, for example, change this:
unsigned char* pixels = new unsigned char[width * height * 4];
to this:
unsigned char* pixels = new unsigned char[width * height * 4];
unsigned char* mem = pixels;
and change this:
initData.pSysMem = (const void*)pixels;
to this:
initData.pSysMem = (const void*)mem;