DirectX 11 mipmaps

How can I create texture mipmaps in DirectX 11? This is my code, in which I tried to do this, but it doesn't work:
D3D11_TEXTURE2D_DESC desc{};
desc.Width = dims.X;
desc.Height = dims.Y;
desc.ArraySize = 1;
desc.SampleDesc.Count = 1;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
desc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
D3D11_SUBRESOURCE_DATA initData{};
initData.pSysMem = pixels;
initData.SysMemPitch = sizeof(unsigned char) * dims.X * 4;
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc{};
srvDesc.Format = desc.Format;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MipLevels = 1;
Device->CreateTexture2D(&desc, nullptr, Texture.GetAddressOf());
Device->CreateShaderResourceView(Texture.Get(), &srvDesc, ShaderResource.GetAddressOf());
DeviceContext->UpdateSubresource(Texture.Get(), 0, 0, pixels, initData.SysMemPitch, 0);
DeviceContext->GenerateMips(ShaderResource.Get());
The texture now looks like this:

OK, I changed srvDesc.Texture2D.MipLevels to -1 and now it works. Thanks.
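For reference, here is a minimal sketch of the working setup (reusing dims, pixels, Device, DeviceContext, Texture, and ShaderResource from the code above): the texture keeps MipLevels = 0 so the full mip chain is allocated, the SRV exposes every level via MipLevels = -1, mip 0 is filled with UpdateSubresource, and GenerateMips fills the rest.
D3D11_TEXTURE2D_DESC desc{};
desc.Width = dims.X;
desc.Height = dims.Y;
desc.MipLevels = 0;                        // 0 = allocate the full mip chain
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.SampleDesc.Count = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
desc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc{};
srvDesc.Format = desc.Format;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip = 0;
srvDesc.Texture2D.MipLevels = UINT(-1);    // -1 = view all mip levels
Device->CreateTexture2D(&desc, nullptr, Texture.GetAddressOf());
Device->CreateShaderResourceView(Texture.Get(), &srvDesc, ShaderResource.GetAddressOf());
// Fill mip 0, then let the GPU generate the remaining levels.
DeviceContext->UpdateSubresource(Texture.Get(), 0, nullptr, pixels, dims.X * 4, 0);
DeviceContext->GenerateMips(ShaderResource.Get());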

Related

Memory leak with CreateTexture2D

I take a DX11 frame, convert it to a cv::Mat, and put it on an eCAL message buffer. Profiling shows memory ballooning at CreateTexture2D.
I've tried releasing the texture, the device, and the context. I tried flushing the context. I put everything in a method, which I thought was supposed to let it deconstruct. I've tried COM pointers, and maybe I'm doing it wrong, but it has the same effect.
There's noise in this snippet, just because I've been trying everything. This whole DX11 space sucks. What am I supposed to do to get these things to actually release? Is there a way to brute-force them to free the memory?
flatbuffers::FlatBufferBuilder builder(1024);
D3D11_TEXTURE2D_DESC desc;
surfaceTexture->GetDesc(&desc);
D3D11_BOX my_box;
cv::Mat mat;
ID3D11Texture2D* myText;
my_box.front = 0;
my_box.back = 1;
my_box.left = 1600;
my_box.top = 480;
my_box.right = 2240;
my_box.bottom = -160;
desc.Width = 640;
desc.Height = 640;
desc.ArraySize = 1;
desc.BindFlags = 0;
desc.MiscFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.MipLevels = 1;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
desc.Usage = D3D11_USAGE_STAGING;
auto d3dDevice2 = GetDXGIInterfaceFromObject<ID3D11Device>(m_device2);
d3dDevice2->GetImmediateContext(m_d3dContext2.put());
d3dDevice2->CreateTexture2D(&desc, NULL, &myText);
m_d3dContext2->CopySubresourceRegion(myText, D3D11CalcSubresource(0, 0, 1), 0, 0, 0, surfaceTexture.get(), 0, &my_box);
/*cv::directx::convertFromD3D11Texture2D(myText, mat);*/
d3dDevice2->Release();
m_d3dContext2->Flush();
m_d3dContext1->Flush();
m_d3dContext2->Release();
m_d3dContext2.get()->Flush();
m_d3dContext1.get()->Flush();
m_d3dContext2.get()->Release();
cv::cvtColor(mat, mat, cv::COLOR_RGBA2RGB);
auto byte_buffer = builder.CreateVector(mat.data, sizeof(mat.data));
//
//
auto mloc = Dx11::Frame::CreateFrame(builder, byte_buffer);
builder.Finish(mloc);
pub.Send(builder, -1);
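One thing worth noting about the snippet above: the staging texture myText is created every frame and never released, while the device and immediate context (which should live for the whole capture session) are the objects being Release()d. A minimal sketch of the per-frame copy using Microsoft::WRL::ComPtr, reusing desc, my_box, d3dDevice2, m_d3dContext2, and surfaceTexture from the snippet, so the staging texture is freed automatically when it goes out of scope:
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;
// Per-frame: copy a region of the captured frame into a CPU-readable
// staging texture, then let ComPtr release it at the end of the scope.
{
    ComPtr<ID3D11Texture2D> staging;
    HRESULT hr = d3dDevice2->CreateTexture2D(&desc, nullptr, staging.GetAddressOf());
    if (SUCCEEDED(hr))
    {
        m_d3dContext2->CopySubresourceRegion(staging.Get(), 0, 0, 0, 0,
                                             surfaceTexture.get(), 0, &my_box);
        // ... Map/read or convert the staging texture here ...
    }
}   // staging's reference count drops to zero here; the texture memory is freed
Releasing the device or the context does not free textures that still hold a reference; scoping each per-frame resource in a smart pointer (or explicitly calling Release() on the texture itself) is what actually lets the memory go.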

Retrieving the palette of a bitmap image

I am loading a bitmap image from a file (type BMP) via the GDI function LoadImage, which returns a bitmap handle (HBITMAP).
I know how to access the bitmap bits. But the image is in 8 BPP format, hence palettized. How can I obtain the palette entries?
Select the bitmap into a memory DC and call GetDIBColorTable. A temporary memory DC can be used here:
RGBQUAD rgb[256] = { 0 };
HDC memdc = CreateCompatibleDC(hdc);
auto oldbmp = SelectObject(memdc, hbitmap);
GetDIBColorTable(memdc, 0, 256, rgb);
SelectObject(memdc, oldbmp);
DeleteDC(memdc);
Alternatively, use GetDIBits to read the bitmap into a BITMAPINFO. You have to reserve enough memory for sizeof(BITMAPINFO) plus the color table, and a separate buffer for all the pixel bytes.
The color table will be copied to BITMAPINFO -> bmiColors.
GDI+ is another option. Here is a GDI example:
int main()
{
    HBITMAP hbitmap = (HBITMAP)LoadImage(0, L"source.bmp",
        IMAGE_BITMAP, 0, 0, LR_CREATEDIBSECTION | LR_DEFAULTSIZE | LR_LOADFROMFILE);
    if (!hbitmap)
        return 0;

    BITMAP bm;
    GetObject(hbitmap, sizeof(bm), &bm);
    int width = bm.bmWidth;
    int height = bm.bmHeight;

    // Round the color depth up to a standard bit count
    WORD clrbits = (WORD)(bm.bmPlanes * bm.bmBitsPixel);
    if (clrbits == 1) clrbits = 1;
    else if (clrbits <= 4) clrbits = 4;
    else if (clrbits <= 8) clrbits = 8;
    else if (clrbits <= 16) clrbits = 16;
    else if (clrbits <= 24) clrbits = 24;
    else clrbits = 32;

    HDC hdc = GetDC(0);

    // Option 1: read the palette directly with GetDIBColorTable
    if (clrbits == 8)
    {
        RGBQUAD rgb[256] = { 0 };
        HDC memdc = CreateCompatibleDC(hdc);
        auto oldbmp = SelectObject(memdc, hbitmap);
        GetDIBColorTable(memdc, 0, 256, rgb);
        SelectObject(memdc, oldbmp);
        DeleteDC(memdc);
    }

    // Option 2: read header + palette + pixels with GetDIBits
    int palette_size = (clrbits < 24) ? sizeof(RGBQUAD) * (1 << clrbits) : 0;
    BITMAPINFO* bmpinfo = (BITMAPINFO*)new BYTE[sizeof(BITMAPINFO) + palette_size];
    int width_in_bytes = ((width * clrbits + 31) & ~31) / 8;
    DWORD size = width_in_bytes * height;
    bmpinfo->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmpinfo->bmiHeader.biWidth = width;
    bmpinfo->bmiHeader.biHeight = height;
    bmpinfo->bmiHeader.biPlanes = bm.bmPlanes;
    bmpinfo->bmiHeader.biBitCount = bm.bmBitsPixel;
    bmpinfo->bmiHeader.biClrUsed = (clrbits < 24) ? (1 << clrbits) : 0;
    bmpinfo->bmiHeader.biCompression = BI_RGB;
    bmpinfo->bmiHeader.biSizeImage = size;

    BYTE* bits = new BYTE[size];
    GetDIBits(hdc, hbitmap, 0, height, bits, bmpinfo, DIB_RGB_COLORS);
    // palette size should be 1024 bytes (256 * sizeof(RGBQUAD)) for a 256-color image
    // it is stored in bmpinfo->bmiColors

    delete[] bits;
    delete[] (BYTE*)bmpinfo;
    DeleteObject(hbitmap);
    ReleaseDC(0, hdc);
    return 0;
}

Can't get BITMAPINFOHEADER data to display odd width bmp images correctly

I am trying to display a 24-bit uncompressed bitmap with an odd width using standard Win32 API calls, but it seems like I have a stride problem.
According to MSDN:
https://msdn.microsoft.com/en-us/library/windows/desktop/dd318229%28v=vs.85%29.aspx
"For uncompressed RGB formats, the minimum stride is always the image width in bytes, rounded up to the nearest DWORD. You can use the following formula to calculate the stride:
stride = ((((biWidth * biBitCount) + 31) & ~31) >> 3)"
but this simply does not work for me, and below is the code:
void Init()
{
    pImage = ReadBMP("data\\bird.bmp");
    size_t imgSize = pImage->width * pImage->height * 3;

    BITMAPINFOHEADER bmih;
    bmih.biSize = sizeof(BITMAPINFOHEADER);
    bmih.biBitCount = 24;
    // This is probably where the bug is
    LONG stride = ((((pImage->width * bmih.biBitCount) + 31) & ~31) >> 3);
    //bmih.biWidth = pImage->width;
    bmih.biWidth = stride;
    bmih.biHeight = -((LONG)pImage->height);
    bmih.biPlanes = 1;
    bmih.biCompression = BI_RGB;
    bmih.biSizeImage = 0;
    bmih.biXPelsPerMeter = 1;
    bmih.biYPelsPerMeter = 1;
    bmih.biClrUsed = 0;
    bmih.biClrImportant = 0;

    BITMAPINFO dbmi;
    ZeroMemory(&dbmi, sizeof(dbmi));
    dbmi.bmiHeader = bmih;
    dbmi.bmiColors->rgbBlue = 0;
    dbmi.bmiColors->rgbGreen = 0;
    dbmi.bmiColors->rgbRed = 0;
    dbmi.bmiColors->rgbReserved = 0;

    HDC hdc = ::GetDC(NULL);
    mTestBMP = CreateDIBitmap(hdc,
                              &bmih,
                              CBM_INIT,
                              pImage->pSrc,
                              &dbmi,
                              DIB_RGB_COLORS);
    hdc = ::GetDC(NULL);
}
and here is the drawing function:
RawBMP *pImage;
HBITMAP mTestBMP;

void UpdateScreen(HDC srcHDC)
{
    if (pImage != nullptr && mTestBMP != 0x00)
    {
        HDC hdc = CreateCompatibleDC(srcHDC);
        SelectObject(hdc, mTestBMP);
        BitBlt(srcHDC,
               0,               // x
               0,               // y
               // I tried passing the stride here and it did not work either
               pImage->width,   // width of the image
               pImage->height,  // height
               hdc,
               0,               // x and
               0,               // y of upper left corner
               SRCCOPY);
        DeleteDC(hdc);
    }
}
If I pass the original image width (odd number) instead of the stride
LONG stride = ((((pImage->width * bmih.biBitCount) + 31) & ~31) >> 3);
//bmih.biWidth = stride;
bmih.biWidth = pImage->width;
the picture looks skewed (the comparison below shows the difference), and if I pass the stride according to MSDN, then nothing shows up because the stride is too large.
Any clues? Thank you!
Thanks Jonathan for the solution. I need to copy row by row with the proper padding for odd-width images. More or less the code for 24-bit uncompressed images:
const uint32_t bitCount = 24;
// Round each row up to a DWORD (4-byte) boundary; for widths whose row size
// is already DWORD-aligned this is simply width * 3.
LONG strideInBytes = ((((width * bitCount) + 31) & ~31) >> 3);

// allocate the new, padded buffer
unsigned char *pBuffer = new unsigned char[strideInBytes * height];
memset(pBuffer, 0xaa, strideInBytes * height);

// Copy row by row, leaving the padding bytes at the end of each row
for (uint32_t yy = 0; yy < height; yy++)
{
    uint32_t rowSizeInBytes = width * 3;
    unsigned char *pDest = &pBuffer[yy * strideInBytes];
    unsigned char *pSrc  = &pData[yy * rowSizeInBytes];
    memcpy(pDest, pSrc, rowSizeInBytes);
}

rawBMP->pSrc = pBuffer;
rawBMP->width = width;
rawBMP->height = height;
rawBMP->stride = strideInBytes;
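With rows padded like this, the BITMAPINFOHEADER can be filled with the real pixel width rather than the stride. A minimal sketch of that last step, reusing rawBMP from the snippet above (the header setup mirrors the Init code from the question):
BITMAPINFOHEADER bmih = {};
bmih.biSize = sizeof(BITMAPINFOHEADER);
bmih.biBitCount = 24;
bmih.biWidth = rawBMP->width;              // the real width, NOT the stride
bmih.biHeight = -((LONG)rawBMP->height);   // negative height = top-down DIB
bmih.biPlanes = 1;
bmih.biCompression = BI_RGB;
BITMAPINFO dbmi = {};
dbmi.bmiHeader = bmih;
HDC hdc = ::GetDC(NULL);
// The bits passed here must already be DWORD-padded per row,
// which is exactly what the copy loop above produced.
HBITMAP bmp = CreateDIBitmap(hdc, &bmih, CBM_INIT, rawBMP->pSrc, &dbmi, DIB_RGB_COLORS);
::ReleaseDC(NULL, hdc);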

How do I create a 3D texture programmatically?

I am trying to create a Texture3D programmatically, but I am not really understanding how it is done. Should each slice of the texture be a subresource? This is what I am trying to do, but it is not working:
// Create texture3d
const int32 cWidth = 6;
const int32 cHeight = 7;
const int32 cDepth = 3;

D3D11_TEXTURE3D_DESC desc;
desc.Width = cWidth;
desc.Height = cHeight;
desc.MipLevels = 1;
desc.Depth = cDepth;
desc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_RENDER_TARGET;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;

const uint32 bytesPerPixel = 4;
uint32 sliceSize = cWidth*cHeight*bytesPerPixel;
float tex3d[cWidth*cHeight*cDepth];
memset(tex3d, 0x00, sizeof(tex3d));

uint32 colorIndex = 0;
for (uint32 depthCount = 0; depthCount<depthSize; depthCount++)
{
    for (uint32 ii=0; ii<cHeight; ii++)
    {
        for (uint32 jj=0; jj<cWidth; jj++)
        {
            // Add some dummy color
            tex3d[colorIndex++] = 1.f;
            tex3d[colorIndex++] = 0.f;
            tex3d[colorIndex++] = 1.f;
            tex3d[colorIndex++] = 0.f;
        }
    }
}

D3D11_SUBRESOURCE_DATA initData[cDepth] = {0};
uint8 *pMem = (uint8*)tex3d;

// What do I pass here? Each slice?
for (uint32 depthCount = 0; depthCount<depthSize; depthCount++)
{
    initData[depthCount].pSysMem = static_cast<const void*>(pMem);
    initData[depthCount].SysMemPitch = static_cast<UINT>(sliceSize); // not sure
    initData[depthCount].SysMemSlicePitch = static_cast<UINT>(sliceSize); // not sure
    pMem += sliceSize;
}

ID3D11Texture3D* tex = nullptr;
hr = m_d3dDevice->CreateTexture3D(&desc, &initData[0], &tex);

ID3D11RenderTargetView *pRTV = nullptr;
hr = m_d3dDevice->CreateRenderTargetView(tex, nullptr, &pRTV);
This creates the texture, but it gives me only 1 subresource. Shouldn't it be 3?
I looked at this article, but it refers to Texture2D:
D3D11: Creating a cube map from 6 images
If anyone has a snippet of code that works, I'd like to take a look.
Thanks!
In Direct3D, 3D textures are laid out such that sub-resources are mipmap levels. Each mipmap level contains 1/2 as many slices as the previous, but in this case you only have 1 mipmap LOD, so you will only have 1 subresource (containing 3 slices).
As for the pitch, SysMemPitch is the number of bytes between rows in each image slice (cWidth * bytesPerPixel, assuming the rows are tightly packed). SysMemSlicePitch is the number of bytes between 2D slices (cWidth * cHeight * bytesPerPixel). Thus, the memory for each mipmap level needs to be arranged as a contiguous series of 2D images with the same dimensions.
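A minimal sketch of that layout, using the dimensions from the question (note that DXGI_FORMAT_R32G32B32A32_FLOAT is 16 bytes per texel, so the bytesPerPixel used in the question's sliceSize should be 16, not 4):
// One D3D11_SUBRESOURCE_DATA per mip level; this texture has a single
// mip level, so one entry covers all 3 slices.
const UINT bytesPerTexel = 16;  // 4 floats * 4 bytes
float texels[cWidth * cHeight * cDepth * 4] = {};
// ... fill texels slice by slice, row by row, 4 floats per texel ...
D3D11_SUBRESOURCE_DATA initData = {};
initData.pSysMem = texels;
initData.SysMemPitch = cWidth * bytesPerTexel;                // bytes between rows
initData.SysMemSlicePitch = cWidth * cHeight * bytesPerTexel; // bytes between 2D slices
ID3D11Texture3D* tex = nullptr;
HRESULT hr = m_d3dDevice->CreateTexture3D(&desc, &initData, &tex);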

Transparency of Icons In Windows 7 Explorer

I'm on Windows 7, and I am trying to display an icon with transparency on my contextual menu, but it doesn't work.
I am trying to use LoadImage like this:
m_hMenuBmp = (HBITMAP)::LoadImage(g_hInst, L"C:\\Users\\nicolas\\AppData\\Roaming\\MyApp\\icon.bmp", IMAGE_BITMAP, 16, 16, LR_LOADFROMFILE | LR_LOADTRANSPARENT );
and my icon.bmp is set to 256 colors with white (255, 255, 255) as the background...
I don't know why this isn't working.
I tried the ARGB method of Raymond Chen, but it didn't work either:
int cx = GetSystemMetrics(SM_CXSMICON);
int cy = GetSystemMetrics(SM_CYSMICON);

BITMAPINFO bmi = {0};
bmi.bmiHeader.biSize = sizeof(bmi.bmiHeader);
bmi.bmiHeader.biWidth = cx;
bmi.bmiHeader.biHeight = cy;
bmi.bmiHeader.biPlanes = 1;
bmi.bmiHeader.biBitCount = 32;
bmi.bmiHeader.biCompression = BI_RGB;

DWORD *pBits;
m_hMenuBmp = CreateDIBSection(NULL, &bmi, DIB_RGB_COLORS, (void **)&pBits, NULL, 0);
if (m_hMenuBmp)
{
    for (int y = 0; y < cy; y++)
    {
        for (int x = 0; x < cx; x++)
        {
            BYTE bAlpha = x * x * 255 / cx / cx;
            DWORD dv = (bAlpha << 24) | (bAlpha << 16) | bAlpha;
            pBits[y * cx + x] - dv;
        }
    }
}
And I don't know why my icon isn't displayed with this method either.
I found a way to do this easily:
HICON hIcon = (HICON)LoadImage( NULL, L"icon.ico", IMAGE_ICON, 16, 16, LR_LOADFROMFILE );
HDC hDC = ::GetDC( NULL );
m_hMenuBmp = ::CreateCompatibleBitmap( hDC, 16, 16 );
HDC hDCTemp = ::CreateCompatibleDC( hDC );
::ReleaseDC( NULL, hDC );
HBITMAP hBitmapOld = ( HBITMAP ) ::SelectObject( hDCTemp, m_hMenuBmp );
::DrawIconEx( hDCTemp, 0, 0, hIcon, 16, 16, 0, ::GetSysColorBrush( COLOR_MENU ), DI_NORMAL );
::SelectObject( hDCTemp, hBitmapOld );
::DeleteDC( hDCTemp );
I was able to get this to work:
HBITMAP hBitmap = (HBITMAP)::LoadImage(NULL, "C:\\moo\\res\\bitmap1.bmp", IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE | LR_LOADTRANSPARENT | LR_LOADMAP3DCOLORS);
m_pic.SetBitmap(hBitmap);
The trick was LR_LOADMAP3DCOLORS together with LR_LOADTRANSPARENT. This was for a dialog box, by the way. Without LR_LOADMAP3DCOLORS, my white background stayed white.
