TCP buffer parameters not being honoured on Win7 machine - windows-7

Note: I have tagged this with both programming and windows networking tags, so please don't shout, I'm just trying to expose this to as many people as may be able to help!
I am trying to set the receive and send buffers for a small client and server I have written, so that when I perform a network capture, I see the window size I have set in the TCP handshake.
For the programmers, please consider the following very simple code for a client and server.
For the non-programmers, please skip past this section to my image.
Client:
#include <WinSock2.h>
#include <mstcpip.h>
#include <Ws2tcpip.h>
#include <thread>
#include <iostream>
using namespace std;
int OutputWindowSize(SOCKET s, unsigned int nType)
{
int buflen = 0;
int nSize = sizeof(buflen);
if (getsockopt(s, SOL_SOCKET, nType, (char *)&buflen, &nSize) == 0)
return buflen;
return -1;
}
bool SetWindowSizeVal(SOCKET s, unsigned int nSize)
{
if (setsockopt(s, SOL_SOCKET, SO_SNDBUF, (char *)&nSize, sizeof(nSize)) == 0)
if (setsockopt(s, SOL_SOCKET, SO_RCVBUF, (char *)&nSize, sizeof(nSize)) == 0)
return true;
return false;
}
int main(int argc, char** argv)
{
if (argc != 3) { cout << "not enough args!\n"; return 0; }
const char* pszHost = argv[1];
const int nPort = atoi(argv[2]);
WSADATA wsaData;
DWORD Ret = 0;
if ((Ret = WSAStartup(MAKEWORD(2, 2), &wsaData)) != 0)
{
printf("WSAStartup() failed with error %d\n", Ret);
return 1;
}
struct sockaddr_in sockaddr_IPv4;
memset(&sockaddr_IPv4, 0, sizeof(struct sockaddr_in));
sockaddr_IPv4.sin_family = AF_INET;
sockaddr_IPv4.sin_port = htons(nPort);
if (!InetPtonA(AF_INET, pszHost, &sockaddr_IPv4.sin_addr)) { return 0; }
SOCKET clientSock = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); // Create active socket: one which is passed to connect().
if (!SetWindowSizeVal(clientSock, 12345))
{
cout << "Failed to set window size " << endl;
return -1;
}
cout << "Set window size on client socket as: RECV" << OutputWindowSize(clientSock, SO_RCVBUF) <<
" SEND: " << OutputWindowSize(clientSock, SO_SNDBUF) << endl;
int nRet = connect(clientSock, (sockaddr*)&sockaddr_IPv4, sizeof(sockaddr_in));
if (nRet != 0) { return 0; }
char buf[100] = { 0 };
nRet = recv(clientSock, buf, 100, 0);
cout << "Received " << buf << " from the server!" << endl;
nRet = send(clientSock, "Hello from the client!\n", strlen("Hello from the client!\n"), 0);
closesocket(clientSock);
return 0;
}
Server:
#include <WinSock2.h>
#include <mstcpip.h>
#include <Ws2tcpip.h>
#include <iostream>
using namespace std;
int OutputWindowSize(SOCKET s, unsigned int nType)
{
int buflen = 0;
int nSize = sizeof(buflen);
if (getsockopt(s, SOL_SOCKET, nType, (char *)&buflen, &nSize) == 0)
return buflen;
return -1;
}
bool SetWindowSizeVal(SOCKET s, unsigned int nSize)
{
if (setsockopt(s, SOL_SOCKET, SO_SNDBUF, (char *)&nSize, sizeof(nSize)) == 0)
if (setsockopt(s, SOL_SOCKET, SO_RCVBUF, (char *)&nSize, sizeof(nSize)) == 0)
return true;
return false;
}
int main()
{
WSADATA wsaData;
DWORD Ret = 0;
if ((Ret = WSAStartup(MAKEWORD(2, 2), &wsaData)) != 0)
{
printf("WSAStartup() failed with error %d\n", Ret);
return 1;
}
struct sockaddr_in sockaddr_IPv4;
memset(&sockaddr_IPv4, 0, sizeof(struct sockaddr_in));
sockaddr_IPv4.sin_family = AF_INET;
sockaddr_IPv4.sin_port = htons(19982);
int y = InetPton(AF_INET, L"127.0.0.1", &sockaddr_IPv4.sin_addr);
if (y != 1) return 0;
socklen_t addrlen = sizeof(sockaddr_IPv4);
SOCKET sock = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (!SetWindowSizeVal(sock, 12345))
{
cout << "Failed to set window size " << endl;
return -1;
}
cout << "Set window size on listen socket as: RECV" << OutputWindowSize(sock, SO_RCVBUF) <<
" SEND: " << OutputWindowSize(sock, SO_SNDBUF) << endl;
if (bind(sock, (sockaddr*)&sockaddr_IPv4, sizeof(sockaddr_IPv4)) != 0) { /* error */ }
if (listen(sock, SOMAXCONN) != 0) { return 0; }
while (1)
{
SOCKET sockAccept = accept(sock, (struct sockaddr *) &sockaddr_IPv4, &addrlen);
if (!SetWindowSizeVal(sockAccept, 12345))
{
cout << "Failed to set window size " << endl;
return -1;
}
cout << "Set window size as on accepted socket as: RECV" << OutputWindowSize(sock, SO_RCVBUF) <<
" SEND: " << OutputWindowSize(sock, SO_SNDBUF) << endl;
if (sockAccept == -1) return 0;
int nRet = send(sockAccept, "Hello from the server!\n", strlen("Hello from the server!\n"), 0);
if (!nRet) return 0;
char buf[100] = { 0 };
nRet = recv(sockAccept, buf, 100, 0);
cout << "Received " << buf << " from the client!" << endl;
if (nRet == 0) { cout << "client disconnected!" << endl; }
closesocket(sockAccept);
}
return 0;
}
The output from my program states that the window sizes have been set successfully:
Set window size on listen socket as: RECV12345 SEND: 12345
Set window size on accepted socket as: RECV12345 SEND: 12345
for the server, and for the client:
Set window size on client socket as: RECV12345 SEND: 12345
However, when I capture the traffic using RawCap, I see that the client's window size is set fine, but the server's window size is not what I set it to; it is 8192.
Now, I have read this MS link and it says to add a registry value; I did this, adding the value 0x00001234, but it still made no difference.
The interesting thing is that the same code works fine on a Windows 10 machine, which makes me think the issue is Windows 7 specific. However, I'm not 100% sure about my code; there might be some errors in it.
Can anyone suggest how I can get Windows to honour my requested parameters please?

These are not 'window sizes'. They are send and receive buffer sizes.
There is no such thing as 'output window size'. There is a receive window and a congestion window, and the latter is not relevant to your question.
The send buffer size has exactly nothing to do with the receive window size, and the receive buffer size only determines the maximum receive window size.
The actual receive window size is adjusted dynamically by the protocol. It is the actual size that you are seeing in Wireshark.
The platform is entitled by the specification to adjust the supplied values for the send and receive buffers up or down, and the documentation advises you to get the corresponding values if you want to be sure what they really are.
There is no problem here to solve.
NB You don't have to set the receive window size on an accepted socket if you already set it on the listening socket. It is inherited.
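For completeness, a minimal sketch of the set-then-verify pattern the documentation recommends (illustrative code, not the poster's; it assumes an already-created SOCKET s and the usual Winsock headers):
// Request a receive buffer size, then read back the value the stack actually applied.
int requested = 12345;
if (setsockopt(s, SOL_SOCKET, SO_RCVBUF, (const char*)&requested, sizeof(requested)) != 0)
    cout << "setsockopt failed: " << WSAGetLastError() << endl;
int actual = 0;
int actualLen = sizeof(actual);
if (getsockopt(s, SOL_SOCKET, SO_RCVBUF, (char*)&actual, &actualLen) == 0)
{
    // 'actual' is the buffer size the stack is really using; it caps the receive
    // window, but the window advertised in each segment is still chosen dynamically.
    cout << "Effective SO_RCVBUF: " << actual << endl;
}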

Related

WSAEFAULT 10014: Why is setsockopt returning -1?

My setsockopt() call is returning -1. I used WSAGetLastError(), which returns WSAEFAULT.
WSAEFAULT (10014): Bad address.
The system detected an invalid pointer address in attempting to use a pointer argument of a call. This error occurs if an application passes an invalid pointer value, or if the length of the buffer is too small. For instance, if the length of an argument, which is a sockaddr structure, is smaller than the sizeof(sockaddr).
I am using MSVC 2022
#include<iostream>
#include<WinSock2.h>
#pragma comment(lib, "ws2_32.lib")
using namespace std;
#define PORT 9909
struct sockaddr_in srv;
fd_set fr, fw, fe;
int nMaxFd;
int main() {
int nRet = 0;
// Initialize the WSA variables
WSADATA ws;
if (WSAStartup(MAKEWORD(2, 2), &ws) < 0) {
cout << endl << "WSA failed to initialize";
WSACleanup();
exit(EXIT_FAILURE);
}
else
cout << endl << "WSA initialized";
// Initialize the socket
int nSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (nSocket < 0) {
cout << endl << "The socket not opened";
WSACleanup();
exit(EXIT_FAILURE);
}
else {
cout << endl << "The socket opened successfully"<<nSocket;
}
// Initialize the environment for sockaddr structure
srv.sin_family = AF_INET;
srv.sin_port = htons(PORT);
srv.sin_addr.s_addr = INADDR_ANY;
memset(srv.sin_zero, 0, 8);
// setsockpot
int nOptVal = 1;
int nOptLen = sizeof(nOptVal);
nRet = setsockopt(nSocket, SOL_SOCKET, SO_REUSEADDR, (const char*)nOptVal, nOptLen);
cout << nRet;
if (!nRet) {
cout << endl << "The setsockopt call successful";
}
else {
cout << endl << "THe setsockopt call failed";
cout << endl << WSAGetLastError();
WSACleanup();
exit(EXIT_FAILURE);
}
/* By default, every socket is a blocking socket; we have to set this explicitly if we want non-blocking sockets
// About the Blocking and Non Blocking sockets
u_long optval = 0;
nRet = ioctlsocket(nSocket, FIONBIO, &optval);
if (nRet != 0) {
cout << endl << "ioctlsocket call failed";
}
else {
cout << endl << "ioctlsocket call passed";
}
*/
/*
// Bind the socket to the local port
nRet = bind(nSocket, (sockaddr*)&srv, sizeof(sockaddr));
if (nRet < 0) {
cout << endl << "Fail to bind to local port";
exit(EXIT_FAILURE);
}
else
cout << endl << "Successfully bind to local port";
// Listen the request from client (queues the requests)
nRet = listen(nSocket, 5);
if (nRet < 0) {
cout << endl << "Fail to start listen to local port";
WSACleanup();
exit(EXIT_FAILURE);
}
else {
cout << endl << "Started listening to local port";
}
nMaxFd = nSocket;
struct timeval tv;
tv.tv_sec = 1;
tv.tv_usec = 0;
while (1) {
FD_ZERO(&fr);
FD_ZERO(&fw);
FD_ZERO(&fe);
FD_SET(nSocket, &fr);
FD_SET(nSocket, &fe);
cout << endl << "Before select call: " << fr.fd_count;
// Keep waiting for new requests and proceed as per the request
nRet = select(nMaxFd + 1, &fr, &fw, &fe, &tv);
if (nRet > 0) {
// When someone connects or communicates with a message over a dedicated connection
}
else if (nRet == 0) {
// No connection or any communication request made or you can say that none of the
// socket descriptors are ready
cout << endl << "Nothing on port: " << PORT;
}
else {
// It failed and your application should show some useful message
cout << endl << "I failed...";
WSACleanup();
exit(EXIT_FAILURE);
}
cout << endl << "After the select call: " << fr.fd_count;
}
*/
}
I assumed my setsockopt() will return 0, but it returns -1:
int nOptVal = 1;
int nOptLen = sizeof(nOptVal);
nRet = setsockopt(nSocket, SOL_SOCKET, SO_REUSEADDR, (const char*)nOptVal, nOptLen);
Per the setsockopt() documentation, the WSAEFAULT error is telling you that you are passing in an invalid pointer in the optval parameter:
Error code: WSAEFAULT
Meaning: The buffer pointed to by the optval parameter is not in a valid part of the process address space or the optlen parameter is too small.
When calling setsockopt(), you need to pass the memory address of the nOptVal variable, but you are passing its value instead, cast to a pointer. 1 is not a valid memory address, hence the error.
Try this instead:
int nOptVal = 1;
int nOptLen = sizeof(nOptVal);
nRet = setsockopt(nSocket, SOL_SOCKET, SO_REUSEADDR, (const char*)&nOptVal, nOptLen);
(note the added & in front of nOptVal)
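If you want to confirm the option actually took effect, here is a short sketch (reusing the question's nSocket variable) that reads the value back:
// Read SO_REUSEADDR back; a non-zero value confirms it was applied.
int optCheck = 0;
int optCheckLen = sizeof(optCheck);
if (getsockopt(nSocket, SOL_SOCKET, SO_REUSEADDR, (char*)&optCheck, &optCheckLen) == 0)
    cout << endl << "SO_REUSEADDR is now " << optCheck;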

win32 socket bind func fails with error 10014 c++

I am trying to run a TCP server on my machine; this is mostly tutorial code, but for some reason bind is failing with error 10014. sizeof(serverAddr) returns 16 in the code below, if anyone is interested. I know what error 10014 means but can't figure out why it happens here on bind; everything seems OK.
#include <iostream>
#include <winsock2.h>
#pragma comment(lib,"Ws2_32.lib")
int main()
{
WSADATA WSAData;
SOCKET server, client;
SOCKADDR_IN serverAddr, clientAddr;
int iResult = WSAStartup(MAKEWORD(2, 2), &WSAData);
if (iResult != NO_ERROR) {
std::cout << "startup error" << std::endl;
return 1;
}
server = socket(AF_INET, SOCK_STREAM, 0);
serverAddr.sin_addr.s_addr = INADDR_ANY;
serverAddr.sin_family = AF_INET;
serverAddr.sin_port = htons(5555);
if (bind(server, (SOCKADDR*)&serverAddr, sizeof(serverAddr) != 0))
{
std::cout << "bind " << WSAGetLastError() << " sizeof " << sizeof(serverAddr) << std::endl;
std::cout << "cannot start server" << std::endl;
return 0;
}
listen(server, 0);
std::cout << "Listening for incoming connections..." << std::endl;
char buffer[1024];
int clientAddrSize = sizeof(clientAddr);
if ((client = accept(server, (SOCKADDR*)&clientAddr, &clientAddrSize)) != INVALID_SOCKET)
{
std::cout << "Client connected!" << std::endl;
recv(client, buffer, sizeof(buffer), 0);
std::cout << "Client says: " << buffer << std::endl;
memset(buffer, 0, sizeof(buffer));
closesocket(client);
std::cout << "Client disconnected." << std::endl;
}
return 0;
}
There is an error with the parentheses. This line:
if (bind(server, (SOCKADDR*)&serverAddr, sizeof(serverAddr) != 0))
Should be:
if (bind(server, (SOCKADDR*)&serverAddr, sizeof(serverAddr)) != 0)
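One way to make this kind of slip harder to repeat (a sketch using the question's variables) is to store the result in a named variable before testing it:
// Separating the call from the test keeps the parentheses honest.
int bindResult = bind(server, (SOCKADDR*)&serverAddr, sizeof(serverAddr));
if (bindResult == SOCKET_ERROR)
{
    std::cout << "bind failed: " << WSAGetLastError() << std::endl;
    closesocket(server);
    WSACleanup();
    return 1;
}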

Windows - sound recording program giving noise

I wrote the following program to record sound through the sound card on Windows and print the PCM data from the wave header's buffer. But it only gives data in the range 32600-32700. Is the problem with my sound card? I have used WAVE_MAPPER, which automatically selects the source... Please help me.
#include <Windows.h>
#include <MMSystem.h>
#include <iostream>
using namespace std;
int main(){
HWAVEIN microHandle;
WAVEHDR waveHeader;
MIXERCAPS mixerCaps;
WAVEFORMATEX format;
//HWAVEOUT hwo; // play
while (1){
const int NUMPTS = 44100 * 0.01; // 0.01 seconds of samples, i.e. 441
int sampleRate = 44100; //can get frequency from here
short int waveIn[NUMPTS]; // 'short int' is a 16-bit type; I request 16-bit samples below
// for 8-bit capture, you'd use 'unsigned char' or 'BYTE' 8-bit types
MMRESULT result = 0;
format.wFormatTag = WAVE_FORMAT_PCM; // simple, uncompressed format
format.wBitsPerSample = 8; // 16 for high quality, 8 for telephone-grade
format.nChannels = 1; // 1=mono, 2=stereo
format.nSamplesPerSec = sampleRate; // 22050
format.nAvgBytesPerSec = format.nSamplesPerSec*format.nChannels*format.wBitsPerSample / 8;
// = nSamplesPerSec * n.Channels * wBitsPerSample/8
format.nBlockAlign = format.nChannels*format.wBitsPerSample / 8;
// = n.Channels * wBitsPerSample/8
format.cbSize = 0;
result = waveInOpen(&microHandle, WAVE_MAPPER, &format, 0L, 0L, WAVE_FORMAT_DIRECT);
cout << "checking step 1" << endl;
if (result)
{
cout << "Fail step 1" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
// Set up and prepare header for input
waveHeader.lpData = (LPSTR)waveIn;
waveHeader.dwBufferLength = NUMPTS;// *2;//why *2
waveHeader.dwBytesRecorded = 0;
waveHeader.dwUser = 0L;
waveHeader.dwFlags = 0L;
waveHeader.dwLoops = 0L;
waveInPrepareHeader(microHandle, &waveHeader, sizeof(WAVEHDR));
// Insert a wave input buffer
result = waveInAddBuffer(microHandle, &waveHeader, sizeof(WAVEHDR));
int NumOfMixers = mixerGetNumDevs();
cout << NumOfMixers << endl;
//cout<<(char*)mixerCaps.szPname<<endl;
//system("pause");
cout << "checking step 2" << endl;
if (result)
{
cout << "Fail step 2" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
//system("pause");
result = waveInStart(microHandle);
cout << "checking step 3......started recording..." << endl;
if (result)
{
cout << "Fail step 3" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
// Wait until finished recording
do { cout << "still"; } while (waveInUnprepareHeader(microHandle, &waveHeader, sizeof(WAVEHDR)) == WAVERR_STILLPLAYING);
waveInStop(microHandle);
waveInReset(microHandle);
//waveInUnprepareHeader(hwi, lpWaveHdr, sizeof(WAVEHDR));
waveInClose(microHandle);
//printing the buffer
for (int i = 0; i < waveHeader.dwBufferLength; i++)
{
if (waveIn[i] > 0)
cout << i << "\t" << waveIn[i] << endl;
}
}
system("pause");
//waveInClose(microHandle);
return 0;
}
I would be very grateful if someone could help me...
One obvious problem is that you're mixing 16-bit and 8-bit usage. Your buffer is defined as a 16-bit short. Notice your own comment:
short int waveIn[NUMPTS]; // 'short int' is a 16-bit type; I request 16-bit samples below
// for 8-bit capture, you'd use 'unsigned char' or 'BYTE' 8-bit types
Yet, when defining the audio format you are specifying 8 bits:
format.wBitsPerSample = 8; // 16 for high quality, 8 for telephone-grade
Either change to format.wBitsPerSample = 16, or if you want 8-bit audio then do something like this:
unsigned char waveIn[NUMPTS];
...
format.wBitsPerSample = 8; // 16 for high quality, 8 for telephone-grade
//printing the buffer
for (int i = 0; i < waveHeader.dwBufferLength; i++)
{
cout << i << "\t" << (int)waveIn[i] << endl;
}
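If you actually want 16-bit capture instead, here is a sketch of the corresponding changes, assuming the rest of the question's setup stays the same. Note that WAVEHDR's dwBufferLength is a byte count, which is what the commented-out *2 in the question was about:
short int waveIn[NUMPTS];                 // 16-bit samples
...
format.wBitsPerSample = 16;
format.nBlockAlign = format.nChannels * format.wBitsPerSample / 8;
format.nAvgBytesPerSec = format.nSamplesPerSec * format.nBlockAlign;
...
waveHeader.lpData = (LPSTR)waveIn;
waveHeader.dwBufferLength = NUMPTS * sizeof(short);   // length in BYTES, not samples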

ReadProcessMemory returns the same data for any address

Working on WinXP SP3.
Visual Studio 2005.
Trying to read memory of another process.
std::cout<<"Reading Process Memory\n";
const DWORD pid = 3476;
HANDLE handle = OpenProcess(PROCESS_VM_READ,FALSE,pid);
if(handle == NULL) {std::cout<<"Failed to open process\n";return 0;}
char* buffer1 = new char[256];
char* buffer2 = new char[256];
memset(buffer1,0,256*sizeof(char));
memset(buffer2,0,256*sizeof(char));
DWORD nbr = 0;
int address = 0x400000;
BOOL result = ReadProcessMemory(handle,&address,buffer1,32,&nbr);
if(result!=1) std::cout<<"Failed to read memory\n";
address = 0x400000+0x1000;
result = ReadProcessMemory(handle,&address,buffer2,32,&nbr);
if(result!=1) std::cout<<"Failed to read memory\n";
int i = 0;
while(i++<10)
{
if(buffer1[i]!=buffer2[i]) {std::cout<<"Buffers are different\n";break;}
}
delete[] buffer1;
delete[] buffer2;
CloseHandle(handle);
std::cin>>i;
return 0;
The problem is that both buffers get the same values. ReadProcessMemory returns 1 and the number of bytes read is the same as requested.
Your calls to ReadProcessMemory are incorrect. You should be using address directly, not &address. You may need to cast it to a const void *.
result = ReadProcessMemory(handle, reinterpret_cast<const void *>(address), buffer, 32, &nbr);
And you should probably declare address as a type large enough to hold a pointer, like std::size_t or INT_PTR.
INT_PTR address = 0x400000;
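Put together, the two reads from the question would look something like this sketch:
// The remote address is passed by value, cast to a pointer type;
// &address would read this process's own stack variable instead.
INT_PTR address = 0x400000;
BOOL result = ReadProcessMemory(handle, reinterpret_cast<const void *>(address), buffer1, 32, &nbr);
if (!result) std::cout << "Failed to read memory\n";
address = 0x400000 + 0x1000;
result = ReadProcessMemory(handle, reinterpret_cast<const void *>(address), buffer2, 32, &nbr);
if (!result) std::cout << "Failed to read memory\n";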
The buffer shouldn't be a char; it has to be an int. Here's a working example:
#include <windows.h>
#include <iostream>
#include <string.h>
using namespace std;
int main()
{
int point1=0;
int i=0;
int d=0;
char* value[4];
SIZE_T stBytes = 0;
HWND hwnd;
HANDLE phandle;
DWORD pid;
hwnd = FindWindow(NULL, "calc"); // calc is the name of the windows process
if (hwnd != 0) {
GetWindowThreadProcessId(hwnd, &pid);
phandle = OpenProcess(PROCESS_ALL_ACCESS, 0, pid);
} else {
cout << "process is not executing";
cin.get();
return 0;
}
if (phandle != 0) {
for(i=0;i<4;i++) // 4 or whatever
{
cout << "The pointer is 0x1001000" << endl; //Print the pointer
ReadProcessMemory(phandle, (LPVOID)0x1001000+i, &point1, 4, &stBytes); //Get the content from 0x1001000 and store it in point1
cout << "decimal content point1 " << point1 << " (DEC)" << endl; //Print the decimal content of point1
printf("%x \n",point1); // print hexadecimal content of point1
char *p=(char*)&point1; // point point1 buffer
for(d=0;d<4;d++)
printf("%x",(unsigned int)(unsigned char) *(p+d)); // print backwards (because the buffer is like a LIFO) and see the dbg debugger
}
ReadProcessMemory(phandle, (LPVOID)point1, &value, 6, &stBytes); //Get the value that is in the address pointed by the pointer
cout << "The value in the non-static address is " << (char*)value << endl << endl; //Print the value
cout << "Press ENTER to exit." << endl;
cin.get();
} else {
cout << "Couldn't get a handle";
cin.get();
// address 0x1001000 content hex 5278DA77
}
}

Why does the audio recorder code work for 8-bit but not for 16-bit?

I am trying to record audio on Windows; here is my code. It works well for 8-bit, but it does not work for 16-bit. Can anyone help me?
#include <Windows.h>
#include <MMSystem.h>
#include <iostream>
#pragma comment(lib,"winmm.lib")
using namespace std;
int test(){
HWAVEIN microHandle;
WAVEHDR waveHeader;
MMRESULT result = 0;
WAVEFORMATEX waveformat;
waveformat.wFormatTag = WAVE_FORMAT_PCM;
waveformat.wBitsPerSample=8;
waveformat.nSamplesPerSec=16000;//8000;
waveformat.nAvgBytesPerSec=waveformat.nSamplesPerSec*waveformat.nSamplesPerSec/8;
waveformat.nChannels=1;
waveformat.nBlockAlign=waveformat.nChannels*waveformat.wBitsPerSample/8;
waveformat.cbSize=0;
result = waveInOpen(&microHandle, WAVE_MAPPER, &waveformat, 0L, 0L, CALLBACK_EVENT);
if (result)
{
cout << "Fail step 1" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
const int BUFSIZE = 16000*4;
char * buf = (char *)malloc(BUFSIZE);
// Set up and prepare header for input
waveHeader.lpData = (LPSTR)buf;
waveHeader.dwBufferLength = BUFSIZE;
waveHeader.dwBytesRecorded=0;
waveHeader.dwUser = 0L;
waveHeader.dwFlags = 0L;
waveHeader.dwLoops = 0L;
waveInPrepareHeader(microHandle, &waveHeader, sizeof(WAVEHDR));
// Insert a wave input buffer
result = waveInAddBuffer(microHandle, &waveHeader, sizeof(WAVEHDR));
if (result)
{
cout << "Fail step 2" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
result = waveInStart(microHandle);
if (result)
{
cout << "Fail step 3" << endl;
cout << result << endl;
Sleep(10000);
return 0;
}
// Wait until finished recording
do {} while (waveInUnprepareHeader(microHandle, &waveHeader, sizeof(WAVEHDR))==WAVERR_STILLPLAYING);
FILE *fp = fopen("output.pcm","w");
fwrite(buf,1,BUFSIZE,fp);
fclose(fp);
waveInClose(microHandle);
return 0;
}
void main()
{
test();
}
If I set the parameter waveformat.wBitsPerSample = 8, it records the audio correctly, but if I set waveformat.wBitsPerSample = 16, it records noise!
Can anyone help me? Thanks.
It should be FILE *fp = fopen("output.pcm","wb"); NOT FILE *fp = fopen("output.pcm","w");
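The reason the mode matters: in text mode the C runtime translates every 0x0A byte written to the file into a 0x0D 0x0A pair, which silently corrupts raw PCM data; with 16-bit samples the inserted bytes also break the two-byte sample alignment, so playback is pure noise. A sketch of the fix using the question's variables:
// Open the PCM file in binary mode so 0x0A bytes are written verbatim.
FILE *fp = fopen("output.pcm", "wb");
if (fp) {
    fwrite(buf, 1, BUFSIZE, fp);
    fclose(fp);
}
// Unrelated to the text/binary issue, the documented byte-rate formula uses the
// block align, not nSamplesPerSec twice:
// waveformat.nAvgBytesPerSec = waveformat.nSamplesPerSec * waveformat.nBlockAlign;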
