I have two buffers pointing to RGB32 images of different sizes, so my idea is to scale one buffer to match the other and alpha-blend the two images.
Currently I mix StretchBlt (for scaling performance) with the GDI+ DrawImage function and a ColorMatrix for the alpha blending. This seems to be a bit slow, and it also has issues with the buffer being consumed by a different component that uses DirectX. For the buffer issue I tried copying the rows in reverse order, and that works everywhere except in the DirectX-related component.
Bitmap bmp1(width, height, width * 4, PixelFormat32bppRGB, bufferpointer1);
Bitmap bmp2(scaleWidth, scaleHeight, scaleWidth * 4, PixelFormat32bppRGB, bufferpointer2);
Bitmap blend(width, height, PixelFormat32bppRGB);
Graphics g(&blend);
//// scaling bufferpointer2 to the actual width & height using a GDI function
HDC memdc = g.GetHDC();
StretchDIBits(memdc, x, y, width, height, 0, 0, scaleWidth, scaleHeight, bufferpointer2, ...);
g.ReleaseDC(memdc); // so that the content is copied back to the bitmap
//// then alpha-blending bmp1 on top of the scaled image from bmp2 (DrawImage + ColorMatrix)
//// using LockBits to copy out the bitmap bytes and unlocking it
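For reference, the DrawImage + ColorMatrix step currently looks roughly like this (just a sketch of my current approach; the 0.5f constant alpha is only an example value):
Gdiplus::ColorMatrix cm = {
    1, 0, 0, 0,    0,
    0, 1, 0, 0,    0,
    0, 0, 1, 0,    0,
    0, 0, 0, 0.5f, 0,
    0, 0, 0, 0,    1 };
Gdiplus::ImageAttributes ia;
ia.SetColorMatrix(&cm);
g.DrawImage(&bmp1, Gdiplus::Rect(0, 0, width, height),
    0, 0, width, height, Gdiplus::UnitPixel, &ia);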
So I would like to replace the GDI+ functions with Win32 functions such as AlphaBlend. I tried something like the following, but it shows a black screen:
BITMAPINFO bminfo1 = {};
bminfo1.bmiHeader.biSize = sizeof( BITMAPINFO );
bminfo1.bmiHeader.biWidth = w;
bminfo1.bmiHeader.biHeight = h;
bminfo1.bmiHeader.biBitCount = m_nBytesPerPixel * 8;
bminfo1.bmiHeader.biCompression = BI_RGB;
bminfo1.bmiHeader.biPlanes = 1;
BITMAPINFO bminfo2 = {};
bminfo2.bmiHeader.biSize = sizeof( BITMAPINFO );
bminfo2.bmiHeader.biWidth = sW;
bminfo2.bmiHeader.biHeight = sH;
bminfo2.bmiHeader.biBitCount = m_nBytesPerPixel * 8;
bminfo2.bmiHeader.biCompression = BI_RGB;
bminfo2.bmiHeader.biPlanes = 1;
char* pBytes1, *pBytes2;
HDC hmemdc1 = CreateCompatibleDC(GetDC(0));
HDC hmemdc2 = CreateCompatibleDC(GetDC(0));
HBITMAP hBitmap1 = CreateDIBSection(hmemdc1, &bminfo1, DIB_RGB_COLORS, (void**) &pBytes1, NULL, 0);
SetDIBits(hmemdc1, hBitmap1, 0, bminfo1.bmiHeader.biHeight, pBuffer[0], &bminfo1, DIB_RGB_COLORS);
HBITMAP hBitmap2 = CreateDIBSection(hmemdc2, &bminfo2, DIB_RGB_COLORS, (void**) &pBytes2, NULL, 0);
SelectObject(hmemdc2,hBitmap2);
StretchDIBits(hmemdc2, 0, 0, w, h, 0, 0,
sW, sH, pBuffer[1], &bminfo2, DIB_RGB_COLORS, SRCCOPY );
BLENDFUNCTION bStruct;
bStruct.BlendOp = AC_SRC_OVER;
bStruct.BlendFlags = 0;
bStruct.SourceConstantAlpha = 255;
bStruct.AlphaFormat = AC_SRC_ALPHA;
SelectObject(hmemdc1,hBitmap1);
SelectObject(hmemdc2,hBitmap2);
//blend bmp2 on bmp1
BOOL res = AlphaBlend(hmemdc1, 0, 0, w, h, hmemdc2, 0, 0, w, h, bStruct);
//for testing output
SelectObject(hmemdc1,hBitmap1);
BitBlt(GetDC(0),0,0,width,height,hmemdc1,100,100,SRCCOPY);
//copy the bitmap buffer
memcpy(out, pBytes1, (w * m_nBytesPerPixel) * h);
I am not sure whether the AlphaBlend function can be used to blend bitmaps per-pixel between two memory DCs. Any help would be highly appreciated.
This part is wrong:
bminfo1.bmiHeader.biSize = sizeof( BITMAPINFO );
It should be sizeof(BITMAPINFOHEADER); otherwise it ruins everything. Also, you can't use GetDC(0) for proper painting. Use instead:
HDC hdc = GetDC(hwnd);
...
ReleaseDC(hwnd, hdc);
or use the HDC from BeginPaint. Since you are using GDI+, you can get HBITMAP handles from bmp->GetHBITMAP(); there is no reason to convert to a memory buffer and back to an HBITMAP.
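For example, a rough sketch of getting the handles from the GDI+ bitmaps in the question (bmp1/bmp2 are assumed from the pseudocode above; error checking omitted):
HBITMAP hbitmap1 = NULL, hbitmap2 = NULL;
bmp1.GetHBITMAP(Gdiplus::Color(0, 0, 0, 0), &hbitmap1);
bmp2.GetHBITMAP(Gdiplus::Color(0, 0, 0, 0), &hbitmap2);
// pass them to blend() below, then DeleteObject() both handles when done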
For AlphaBlend, set SourceConstantAlpha = 128 in case the per-pixel alpha channel is not set.
void blend(HDC hdc, RECT rc, HBITMAP hbitmap1, HBITMAP hbitmap2)
{
    HDC memdc1 = CreateCompatibleDC(hdc);
    HDC memdc2 = CreateCompatibleDC(hdc);

    BITMAP bmp1, bmp2;
    GetObject(hbitmap1, sizeof(BITMAP), &bmp1);
    GetObject(hbitmap2, sizeof(BITMAP), &bmp2);

    SelectObject(memdc1, hbitmap1);
    SelectObject(memdc2, hbitmap2);

    BLENDFUNCTION blend = { 0 };
    blend.SourceConstantAlpha = 128;

    SetStretchBltMode(hdc, COLORONCOLOR);
    AlphaBlend(memdc2, 0, 0, bmp2.bmWidth, bmp2.bmHeight,
               memdc1, 0, 0, bmp1.bmWidth, bmp1.bmHeight, blend);
    StretchBlt(hdc, 0, 0, rc.right, rc.bottom,
               memdc2, 0, 0, bmp2.bmWidth, bmp2.bmHeight, SRCCOPY);
    //or create another memdc to get dibs

    DeleteDC(memdc1);
    DeleteDC(memdc2);
}
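A minimal usage sketch, assuming this runs in a WM_PAINT handler and that hwnd, hbitmap1 and hbitmap2 already exist:
case WM_PAINT:
{
    PAINTSTRUCT ps;
    HDC hdc = BeginPaint(hwnd, &ps);
    RECT rc;
    GetClientRect(hwnd, &rc);
    blend(hdc, rc, hbitmap1, hbitmap2);
    EndPaint(hwnd, &ps);
}
break;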
In case you want to get dibs, then don't draw on hdc, instead create a third memdc and another HBITMAP, then use GetDIBits
HDC memdc = CreateCompatibleDC(hdc);
HBITMAP hbmp = CreateCompatibleBitmap(hdc, rc.right, rc.bottom);
SelectObject(memdc, hbmp);
SetStretchBltMode(memdc, COLORONCOLOR);
StretchBlt(memdc, 0, 0, rc.right, rc.bottom,
memdc2, 0, 0, bmp2.bmWidth, bmp2.bmHeight, SRCCOPY);
int w = rc.right;
int h = rc.bottom;
BITMAPINFOHEADER bmpInfoHeader = { sizeof(BITMAPINFOHEADER) };
bmpInfoHeader.biWidth = w;
bmpInfoHeader.biHeight = h;
bmpInfoHeader.biBitCount = 32;
bmpInfoHeader.biCompression = BI_RGB;
bmpInfoHeader.biPlanes = 1;
DWORD size = w * 4 * h;
char *dib = new char[size];
GetDIBits(hdc, hbmp, 0, h, dib, (BITMAPINFO*)&bmpInfoHeader, DIB_RGB_COLORS);
...
DeleteDC(memdc);
DeleteObject(hbmp);
delete[]dib;
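One optional tweak, relevant to the row-reversal issue in the question: if the consumer wants top-down rows, passing a negative height to GetDIBits should give you a top-down DIB directly instead of the default bottom-up order (a sketch only, not verified against the DirectX component):
bmpInfoHeader.biHeight = -h;   // request a top-down DIB
GetDIBits(hdc, hbmp, 0, h, dib, (BITMAPINFO*)&bmpInfoHeader, DIB_RGB_COLORS);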
Edit
Method 2: This method should be faster because it uses a single StretchBlt and a single AlphaBlend. This way you can use pre-computed alpha values, although that isn't strictly necessary.
Use the other method, with two AlphaBlend calls, only if you want to blend both images with the background.
void modify_bits(HDC hdc, HBITMAP hbitmap)
{   //expecting a 32-bit bitmap
    BITMAP bm = { 0 };
    GetObject(hbitmap, sizeof(bm), &bm);
    int w = bm.bmWidth;
    int h = bm.bmHeight;
    BITMAPINFOHEADER bmpInfoHeader = { sizeof(BITMAPINFOHEADER),
        w, h, 1, 32, BI_RGB, 0, 0, 0, 0, 0 };
    BYTE* bits = new BYTE[w * h * 4];
    if (GetDIBits(hdc, hbitmap, 0, h, bits, (BITMAPINFO*)&bmpInfoHeader, DIB_RGB_COLORS)) {
        BYTE* p = bits;
        for (int y = 0; y < h; y++) {        // walk the rows...
            for (int x = 0; x < w; x++) {    // ...and the pixels within each row
                p[3] = 128;                  // force a per-pixel alpha
                p[0] = p[0] * p[3] / 255;    // premultiply B, G, R by that alpha
                p[1] = p[1] * p[3] / 255;
                p[2] = p[2] * p[3] / 255;
                p += 4;
            }
        }
        SetDIBits(hdc, hbitmap, 0, h, bits, (BITMAPINFO*)&bmpInfoHeader, DIB_RGB_COLORS);
    }
    delete[] bits;
}
void blend2(HDC hdc, RECT rc, HBITMAP hbitmap1, HBITMAP hbitmap2)
{
    int w = rc.right;
    int h = rc.bottom;

    modify_bits(hdc, hbitmap2);

    HDC memdc1 = CreateCompatibleDC(hdc);
    HDC memdc2 = CreateCompatibleDC(hdc);

    BITMAP bmp1, bmp2;
    GetObject(hbitmap1, sizeof(BITMAP), &bmp1);
    GetObject(hbitmap2, sizeof(BITMAP), &bmp2);
    int w1 = bmp1.bmWidth;
    int h1 = bmp1.bmHeight;
    int w2 = bmp2.bmWidth;
    int h2 = bmp2.bmHeight;

    SelectObject(memdc1, hbitmap1);
    SelectObject(memdc2, hbitmap2);

    BLENDFUNCTION blend = { 0 };
    blend.BlendOp = AC_SRC_OVER;
    blend.BlendFlags = 0;
    blend.SourceConstantAlpha = 255;
    blend.AlphaFormat = AC_SRC_ALPHA;

    SetStretchBltMode(hdc, COLORONCOLOR);
    //draw the first image normally:
    StretchBlt(hdc, 0, 0, w, h, memdc1, 0, 0, w1, h1, SRCCOPY);
    //AlphaBlend the second image on top:
    AlphaBlend(hdc, 0, 0, w, h, memdc2, 0, 0, w2, h2, blend);

    DeleteDC(memdc1);
    DeleteDC(memdc2);
}
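One caveat: modify_bits rewrites the second bitmap's pixels in place, so if blend2 runs on every repaint the premultiplication is applied again and again and the overlay keeps getting darker. In practice you would most likely premultiply once up front and only repeat the StretchBlt/AlphaBlend pair per frame.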
Related
I am loading a bitmap image from a file (a BMP) via the GDI function LoadImage, which returns a bitmap handle (HBITMAP).
I know how to access the bitmap bits. But the image is 8 BPP, hence palettized. How can I obtain the palette entries?
Select the bitmap into a DC and call GetDIBColorTable. A temporary memory DC can be used here:
RGBQUAD rgb[256] = { 0 };
HDC memdc = CreateCompatibleDC(hdc);
auto oldbmp = SelectObject(memdc, hbitmap);
GetDIBColorTable(memdc, 0, 256, rgb);
SelectObject(memdc, oldbmp);
DeleteDC(memdc);
Alternatively, use GetDIBits to read a BITMAPINFO. You have to reserve enough memory for sizeof(BITMAPINFO) plus the color table, plus a buffer for all the pixel bytes. The color table will be copied to BITMAPINFO -> bmiColors.
GDI+ is another option. Here is a GDI example:
int main()
{
    HBITMAP hbitmap = (HBITMAP)LoadImage(0, L"source.bmp",
        IMAGE_BITMAP, 0, 0, LR_CREATEDIBSECTION | LR_DEFAULTSIZE | LR_LOADFROMFILE);
    if (!hbitmap)
        return 0;

    BITMAP bm;
    GetObject(hbitmap, sizeof(bm), &bm);
    int width = bm.bmWidth;
    int height = bm.bmHeight;

    WORD clrbits = (WORD)(bm.bmPlanes * bm.bmBitsPixel);
    if (clrbits == 1) clrbits = 1;
    else if (clrbits <= 4) clrbits = 4;
    else if (clrbits <= 8) clrbits = 8;
    else if (clrbits <= 16) clrbits = 16;
    else if (clrbits <= 24) clrbits = 24;
    else clrbits = 32;

    HDC hdc = GetDC(0);

    if (clrbits == 8)
    {
        RGBQUAD rgb[256] = { 0 };
        HDC memdc = CreateCompatibleDC(hdc);
        auto oldbmp = SelectObject(memdc, hbitmap);
        GetDIBColorTable(memdc, 0, 256, rgb);
        SelectObject(memdc, oldbmp);
        DeleteDC(memdc);
    }

    int palette_size = (clrbits < 24) ? sizeof(RGBQUAD) * (1 << clrbits) : 0;
    BITMAPINFO* bmpinfo = (BITMAPINFO*)new BYTE[sizeof(BITMAPINFO) + palette_size];

    int width_in_bytes = ((width * clrbits + 31) & ~31) / 8;
    DWORD size = width_in_bytes * height;

    bmpinfo->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmpinfo->bmiHeader.biWidth = width;
    bmpinfo->bmiHeader.biHeight = height;
    bmpinfo->bmiHeader.biPlanes = bm.bmPlanes;
    bmpinfo->bmiHeader.biBitCount = bm.bmBitsPixel;
    bmpinfo->bmiHeader.biClrUsed = (clrbits < 24) ? (1 << clrbits) : 0;
    bmpinfo->bmiHeader.biCompression = BI_RGB;
    bmpinfo->bmiHeader.biSizeImage = size;

    BYTE* bits = new BYTE[size];
    GetDIBits(hdc, hbitmap, 0, height, bits, bmpinfo, DIB_RGB_COLORS);
    //palette size should be 1024 bytes for a 256 color image
    //it should be stored in `bmpinfo->bmiColors`

    delete[] bits;
    delete[] (BYTE*)bmpinfo;
    DeleteObject(hbitmap);
    ReleaseDC(0, hdc);
    return 0;
}
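For comparison, a rough GDI+ sketch of reading the same palette (assumes GdiplusStartup has already been called and that <gdiplus.h> and <vector> are included; untested):
Gdiplus::Bitmap bmp(L"source.bmp");
INT size = bmp.GetPaletteSize();                      // bytes needed for the ColorPalette
std::vector<BYTE> buf(size);
Gdiplus::ColorPalette* pal = (Gdiplus::ColorPalette*)buf.data();
bmp.GetPalette(pal, size);
// pal->Count entries; each pal->Entries[i] is a 32-bit ARGB value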
I have a layered window that I create myself with the WS_EX_LAYERED extended style and the UpdateLayeredWindow function.
Then I draw some text in it using GDI+ library, Graphics::DrawString method.
And the result is this:
Screenshot of the layered window.
As you can see, the Japanese, Korean and Chinese characters are completely transparent. They even punch a hole through the window's white background, which should not be transparent at all.
The problem occurs only on Windows Vista and Windows 7 when Desktop Composition (Aero theme) is disabled.
On Windows 10 it works fine, as Desktop Composition is always enabled there.
Why does this strange effect happen only with East-Asian characters?
And how can this be solved?
I don't have a Windows 7 machine to test on, so I don't know whether the alpha channel is the real issue, but assuming it is, you can work around it by restoring the alpha channel to the correct state after drawing the problematic text:
enum { WIDTH = 255 * 3, HEIGHT = 25 };
#define CalcStride(w, bpp) ( ((((w) * (bpp)) + 31) & ~31) >> 3 )
#define PMC(c, a) ( (c) = ((int)(c) * (a) / 255) )
#define PM(q) PMC( (q).rgbRed, (q).rgbReserved), PMC( (q).rgbGreen, (q).rgbReserved), PMC( (q).rgbBlue, (q).rgbReserved)
RGBQUAD* GetPxPtr32(void*pBits, UINT x, UINT y)
{
return ((RGBQUAD*) ( ((char*)pBits) + (y * CalcStride(WIDTH, 32)) )) + x;
}
void SaveAlpha32(void*pBits, BYTE*buf)
{
for (UINT x = 0; x < WIDTH; ++x)
for (UINT y = 0; y < HEIGHT; ++y)
buf[(y * WIDTH) + x] = GetPxPtr32(pBits, x, y)->rgbReserved;
}
void RestoreAlpha32(void*pBits, const BYTE*buf)
{
for (UINT x = 0; x < WIDTH; ++x)
for (UINT y = 0; y < HEIGHT; ++y)
GetPxPtr32(pBits, x, y)->rgbReserved = buf[(y * WIDTH) + x];
}
void Draw(HDC hDC, HBITMAP hBM, void*pBits, UINT w, UINT h, bool isDwmActive)
{
// Fill with white and a silly gradient alpha channel:
for (UINT y = 0; y < h; ++y)
for (UINT x = 0; x < w; ++x)
(*(UINT32*)GetPxPtr32(pBits, x, y)) = 0xffffffff, GetPxPtr32(pBits, x, y)->rgbReserved = max(42, x % 255);
BYTE *alphas = isDwmActive ? 0 : (BYTE*) LocalAlloc(LPTR, sizeof(BYTE) * w * h), fillWithRed = true;
if (!isDwmActive) SaveAlpha32(pBits, alphas);
HGDIOBJ hBmOld = SelectObject(hDC, hBM);
RECT r = { 0, 0, WIDTH, HEIGHT };
int cbk = SetBkColor(hDC, RGB(255, 0, 0));
if (fillWithRed) ExtTextOut(hDC, 0, 0, ETO_OPAQUE, &r, NULL, 0, NULL);
int ctx = SetTextColor(hDC, RGB(0, 0, 0));
int mode = SetBkMode(hDC, TRANSPARENT);
DrawText(hDC, TEXT("Hello World Hello World Hello World Hello World Hello World"), -1, &r, DT_SINGLELINE|DT_VCENTER|DT_CENTER); // Plain GDI always destroys the alpha
SetBkMode(hDC, mode), SetBkColor(hDC, cbk), SetTextColor(hDC, ctx);
SelectObject(hDC, hBmOld), GdiFlush();
if (!isDwmActive) RestoreAlpha32(pBits, alphas), LocalFree(alphas);
for (UINT y = 0; y < h; ++y) for (UINT x = 0; x < w; ++x) PM(*GetPxPtr32(pBits, x, y));
}
int main()
{
const INT w = WIDTH, h = HEIGHT, bpp = 32, x = 222, y = 222;
HWND hWnd = CreateWindowEx(WS_EX_LAYERED|WS_EX_TOPMOST, WC_STATIC, 0, WS_VISIBLE|WS_POPUP, x, y, WIDTH, HEIGHT, 0, 0, 0, 0);
SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) DefWindowProc); // HACK
BITMAPINFO bi;
ZeroMemory(&bi, sizeof(bi));
BITMAPINFOHEADER&bih = bi.bmiHeader;
bih.biSize = sizeof(BITMAPINFOHEADER);
bih.biWidth = w, bih.biHeight = -h;
bih.biPlanes = 1, bih.biBitCount = bpp;
bih.biCompression = BI_RGB;
void*bits;
HBITMAP hBmp = CreateDIBSection(NULL, &bi, DIB_RGB_COLORS, &bits, NULL, 0);
HDC hDCScreen = GetDC(NULL), hDC = CreateCompatibleDC(hDCScreen);
Draw(hDC, hBmp, bits, w, h, false);
HGDIOBJ hBmOld = SelectObject(hDC, hBmp);
BLENDFUNCTION blend = { 0 };
blend.BlendOp = AC_SRC_OVER, blend.AlphaFormat = AC_SRC_ALPHA, blend.SourceConstantAlpha = 255;
POINT location = { x, y }, srcpt = { 0, 0 };
SIZE szWnd = { w, h };
UpdateLayeredWindow(hWnd, hDCScreen, &location, &szWnd, hDC, &srcpt, 0, &blend, ULW_ALPHA);
SelectObject(hDC, hBmOld), DeleteObject(hBmp);
DeleteDC(hDC), ReleaseDC(NULL, hDCScreen);
struct Closer { Closer(HWND h) { SetTimer(h, 1, 1000 * 11, TP); } static void CALLBACK TP(HWND h,UINT,UINT_PTR,DWORD) { ExitProcess(666); } } closer(hWnd); // HACK
for (MSG msg; GetMessage(&msg, 0, 0, 0); ) DispatchMessage(&msg);
return 666;
}
If you don't care about Vista without the platform update, you can try using Direct2D instead of GDI+.
I have a bitmap image in the form of an array of 32-bit integers (ARGB pixels: uint32 *mypixels), plus an int width and int height. I need to output them to a printer.
I have the printer context: HDC hdcPrinter;
As I learned, I first need to create a compatible context:
HDC hdcMem = CreateCompatibleDC(hdcPrinter);
Then I need to create an HBITMAP object, select it into the compatible context, and render:
HBITMAP hBitmap = ...?
SelectObject(hdcMem, hBitmap);
BitBlt(printerContext, 0, 0, width, height, hdcMem, 0, 0, SRCCOPY);
And finally clean up:
DeleteObject(hBitmap);
DeleteDC(hdcMem);
My question is how do I create an HBITMAP object and put mypixels into it?
I found two options:
HBITMAP hBitmap = CreateCompatibleBitmap(hdcPrinter, width, height);
Looks good, but how do mypixels get into this bitmap?
HBITMAP hBitmap = CreateDIBSection(hdcPrinter /*or hdcMem?*/, ...);
Will it work? Is it better than option 1?
This function creates a bitmap and sets it to an initial image.
It's a bit fiddly to access the bits directly, but it can be done.
HBITMAP MakeBitmap(unsigned char *rgba, int width, int height, VOID **buff)
{
    VOID *pvBits;           // pointer to DIB section
    HBITMAP answer;
    BITMAPINFO bmi = { 0 };
    HDC screen, hdc;
    int x, y;
    int red, green, blue, alpha;

    // set up the bitmap info (32 bpp, BI_RGB)
    bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmi.bmiHeader.biWidth = width;
    bmi.bmiHeader.biHeight = height;
    bmi.bmiHeader.biPlanes = 1;
    bmi.bmiHeader.biBitCount = 32;       // four 8-bit components
    bmi.bmiHeader.biCompression = BI_RGB;
    bmi.bmiHeader.biSizeImage = width * height * 4;

    screen = GetDC(NULL);
    hdc = CreateCompatibleDC(screen);
    answer = CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &pvBits, NULL, 0x0);

    for (y = 0; y < height; y++)
    {
        for (x = 0; x < width; x++)
        {
            red = rgba[(y * width + x) * 4];
            green = rgba[(y * width + x) * 4 + 1];
            blue = rgba[(y * width + x) * 4 + 2];
            alpha = rgba[(y * width + x) * 4 + 3];
            // premultiply (>> 8 is a cheap approximation of / 255)
            red = (red * alpha) >> 8;
            green = (green * alpha) >> 8;
            blue = (blue * alpha) >> 8;
            // the DIB is bottom-up, so flip the row; pack as 0xAARRGGBB
            ((UINT32 *)pvBits)[(height - y - 1) * width + x] =
                (alpha << 24) | (red << 16) | (green << 8) | blue;
        }
    }

    DeleteDC(hdc);
    ReleaseDC(NULL, screen);
    *buff = pvBits;
    return answer;
}
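A possible usage sketch for the printing question above (hdcPrinter, mypixels, width and height come from the question; note that MakeBitmap reads R,G,B,A byte order, while little-endian ARGB DWORDs are laid out B,G,R,A in memory, so the red and blue channels may need swapping first):
VOID *bits = NULL;
HBITMAP hBitmap = MakeBitmap((unsigned char *)mypixels, width, height, &bits);
HDC hdcMem = CreateCompatibleDC(hdcPrinter);
HGDIOBJ hOld = SelectObject(hdcMem, hBitmap);
BitBlt(hdcPrinter, 0, 0, width, height, hdcMem, 0, 0, SRCCOPY);
SelectObject(hdcMem, hOld);
DeleteDC(hdcMem);
DeleteObject(hBitmap);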
Win32 graphics is not my gig, but I have to do some alpha blending. The following code works fine in 32-bit color mode but displays nothing except the white background in 16-bit mode. Sorry for the length, but I don't know where it's going wrong. This is as compact as I could make it.
hbm is a 32-bit ARGB bitmap with varying per-pixel alpha, size 16x16 (so, cx = cy = 16).
// Create a memory DC to construct the bits
HDC hdc = GetDC(hWnd);
HDC hdcMem = CreateCompatibleDC(hdc);
HBITMAP hbmMem = CreateBitmap(cx, cy, 1, 32, NULL);
SelectObject(hdcMem, hbmMem);
// Fill the BG
RECT rc = { 0, 0, cx, cy };
FillRect(hdcMem, &rc, (HBRUSH)GetStockObject(WHITE_BRUSH));
// Get the bitmap bits
BITMAPINFO bmi;
ZeroMemory(&bmi, sizeof(bmi));
bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
bmi.bmiHeader.biWidth = cx;
bmi.bmiHeader.biHeight = cy;
bmi.bmiHeader.biBitCount = 32;
bmi.bmiHeader.biPlanes = 1;
bmi.bmiHeader.biCompression = BI_RGB;
std::unique_ptr<BYTE[]> pvBits(new BYTE[cx * cy * 4]);
GetDIBits(hdcMem, hbm, 0, cy, reinterpret_cast<void*>(pvBits.get()), &bmi, DIB_RGB_COLORS);
// Premultiply all color channel values by the per-pixel alpha.
int ctPixels = cx * cy;
BYTE *prgba = pvBits.get();
for (int i = 0; i < ctPixels; ++i)
{
int alpha = *(prgba + 3);
for (int j = 0; j <= 2; ++j)
{
int k = *prgba;
*prgba++ = k * alpha / 255;
}
++prgba;
}
// Put the new bits back
SetDIBits(hdcMem, hbm, 0, cy, reinterpret_cast<void*>(pvBits.get()), &bmi, DIB_RGB_COLORS);
// Alpha blend into memory DC
HDC hdcSrc = CreateCompatibleDC(hdcMem);
HBITMAP hbmOld = static_cast<HBITMAP>(SelectObject(hdcSrc, hbm));
BLENDFUNCTION bfn = { AC_SRC_OVER, 0, 255, AC_SRC_ALPHA };
AlphaBlend(hdcMem, 0, 0, cx, cy, hdcSrc, 0, 0, cx, cy, bfn);
SelectObject(hdcSrc, hbmOld);
DeleteDC(hdcSrc);
// Blit the memory DC to the screen
BitBlt(hdc, 0, 0, cx, cy, hdcMem, 0, 0, SRCCOPY);
I have some vague suspicions about CreateCompatibleDC but other than that I'm flying blind.
Any help appreciated. TIA.
According to Microsoft's documentation, per-pixel alpha values are only supported with 32-bit bitmaps. SetDIBits converts your 32-bit DIB into a 16-bit DDB if the display is using 16-bit colour, and a 16-bit DDB has no room to store alpha values. You'll have to set bfn.SourceConstantAlpha to your alpha value instead to get this to work. You won't need to premultiply your bitmap in that case either.
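A minimal sketch of that fallback (128 is just an example constant alpha):
BLENDFUNCTION bfn = { AC_SRC_OVER, 0, 128, 0 };   // AlphaFormat = 0: ignore per-pixel alpha
AlphaBlend(hdcMem, 0, 0, cx, cy, hdcSrc, 0, 0, cx, cy, bfn);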
So, I'm testing the following function FindPixel with the following app. The HWND and COLORREF are constants that I've determined with Spy++ and Color Cop for debugging; the final version of the program will find these automatically.
I've confirmed that the part of this algorithm which determines whether the color exists works (i.e., if the color exists anywhere in the window, the if statement for it eventually becomes true, and if it doesn't, it never does); however, I cannot figure out how to isolate which pixel it occurs on. The line SetCursorPos(rect.left+i, rect.top+i2); does not move the mouse anywhere near the correct location.
The window I'm debugging this with is entirely white save for the one pixel with the value 16776960. The function can tell that it's there, but the values of (i, i2) are not the (x, y) coordinates of where it occurs.
Is there something I'm missing here?
#include <Windows.h>
void FindPixel(HWND hWnd, COLORREF target)
{
HDC hDC = GetDC(hWnd);
HDC memDC = CreateCompatibleDC (hDC);
BYTE *ScreenData = NULL;
HBITMAP hBitmap;
BITMAPINFOHEADER bmHeader = {0};
RECT rect;
int width, height;
int i, i2;
GetWindowRect(hWnd, &rect);
width = rect.right-rect.left;
height = rect.bottom-rect.top;
ScreenData = (BYTE*)malloc(4*width*height);
hBitmap = CreateCompatibleBitmap(hDC, width, height);
bmHeader.biSize = sizeof(BITMAPINFOHEADER);
bmHeader.biPlanes = 1;
bmHeader.biBitCount = 24;
bmHeader.biWidth = width;
bmHeader.biHeight = -height;
bmHeader.biCompression = BI_RGB;
SelectObject(memDC, hBitmap);
BitBlt(memDC, 0, 0, width, height, hDC, 0, 0, SRCCOPY);
GetDIBits(hDC, hBitmap, 0, height, ScreenData, (BITMAPINFO*)&bmHeader, DIB_RGB_COLORS);
// i=0;
for(i = 0; i < width; i++)
{
for(i2 = 0; i2 < height; i2++)
{
if(RGB((ScreenData[3*((i2*width)+i)+2]),(ScreenData[3*((i2*width)+i)+1]), ScreenData[3*((i2*width)+i)])==target)
{
SetCursorPos(rect.left+i, rect.top+i2);
DeleteObject(hBitmap);
DeleteDC(memDC);
free(ScreenData);
ReleaseDC(hWnd, hDC);
return;
}
}
}
DeleteObject(hBitmap);
DeleteDC(memDC);
free(ScreenData);
ReleaseDC(hWnd, hDC);
}
int APIENTRY WinMain(HINSTANCE hi, HINSTANCE hpi, LPSTR lpcl, int nsc)
{
const COLORREF px = 16776960;
const HWND hWnd = (HWND)0x000C04BC;
Sleep(1000);
FindPixel(hWnd, px);
return 0;
}
The problem was indeed that I was not taking the stride into account: each scanline of the DIB is padded up to a DWORD boundary, so a row occupies stride bytes rather than width * 3. Here is the working loop:
int stride = (((width * 24) + 31) & ~31) >> 3;   // bytes per scanline, DWORD-aligned
for(i2 = 0; i2 < height; i2++)   // rows (biHeight is negative, so row 0 is the top)
{
    for(i = 0; i < width; i++)   // columns
    {
        const BYTE *px = ScreenData + (i2 * stride) + (i * 3);
        if(RGB(px[2], px[1], px[0]) == target)
        {
            SetCursorPos(rect.left + i, rect.top + i2);
            DeleteObject(hBitmap);
            DeleteDC(memDC);
            free(ScreenData);
            ReleaseDC(hWnd, hDC);
            return;
        }
    }
}
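For reference, the same DWORD-aligned stride calculation works for any bit depth; as a small helper (not part of the original code):
static int DibStride(int width, int bpp)
{
    return (((width * bpp) + 31) & ~31) >> 3;   // bytes per scanline, rounded up to a 4-byte boundary
}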