Code::Blocks include order cwchar error - C++11

I was trying to include windows.h and iostream together in the following little test program.
When iostream was included after windows.h, as in the example below:
#include <windows.h>
#include <stdio.h>
#include <iostream>
int main(void)
{
    printf("hello world");
    return 0;
}
it caused the following errors:
||=== Build: Debug in TEST10 (compiler: GNU GCC Compiler) ===|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|177|error: '::wcscat' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|178|error: '::wcscmp' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|179|error: '::wcscoll' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|180|error: '::wcscpy' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|181|error: '::wcscspn' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|183|error: '::wcslen' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|184|error: '::wcsncat' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|185|error: '::wcsncmp' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|186|error: '::wcsncpy' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|188|error: '::wcsspn' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|193|error: '::wcstok' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|196|error: '::wcsxfrm' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|204|error: '::wcschr' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|205|error: '::wcspbrk' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|206|error: '::wcsrchr' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|207|error: '::wcsstr' has not been declared|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar||In function 'wchar_t* std::wcschr(wchar_t*, wchar_t)':|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|213|error: invalid conversion from 'const wchar_t*' to 'wchar_t*' [-fpermissive]|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|212|note: initializing argument 1 of 'wchar_t* std::wcschr(wchar_t*, wchar_t)'|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar||In function 'wchar_t* std::wcspbrk(wchar_t*, const wchar_t*)':|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|217|error: invalid conversion from 'const wchar_t*' to 'wchar_t*' [-fpermissive]|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|216|note: initializing argument 1 of 'wchar_t* std::wcspbrk(wchar_t*, const wchar_t*)'|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar||In function 'wchar_t* std::wcsrchr(wchar_t*, wchar_t)':|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|221|error: invalid conversion from 'const wchar_t*' to 'wchar_t*' [-fpermissive]|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|220|note: initializing argument 1 of 'wchar_t* std::wcsrchr(wchar_t*, wchar_t)'|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar||In function 'wchar_t* std::wcsstr(wchar_t*, const wchar_t*)':|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|225|error: invalid conversion from 'const wchar_t*' to 'wchar_t*' [-fpermissive]|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\cwchar|224|note: initializing argument 1 of 'wchar_t* std::wcsstr(wchar_t*, const wchar_t*)'|
c:\mingw\lib\gcc\mingw32\5.3.0\include\c++\bits\char_traits.h|358|error: 'wcslen' was not declared in this scope|
||=== Build failed: 21 error(s), 0 warning(s) (0 minute(s), 5 second(s)) ===|
However, when the includes were reordered as follows, it worked fine:
#include <iostream>
#include <windows.h>
#include <stdio.h>
int main(void)
{
    printf("hello world");
    return 0;
}
I'm not sure why the order of the includes should matter, and I'm curious why this occurs.
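For reference, here is a small sketch (untested against this exact MinGW 5.3.0 / Code::Blocks setup) of the ordering that avoids the errors, keeping the C++ standard library headers ahead of <windows.h> while still using both APIs:
#include <iostream>   // standard C++ headers first...
#include <cstdio>
#include <windows.h>  // ...then the Windows headers

int main()
{
    std::cout << "tick count: " << GetTickCount() << '\n';
    std::printf("hello world\n");
    return 0;
}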

Related

An error occurred while converting the C file (test1.c), generated from a Pro*C file, into a WASM file through Emscripten

I am using Emscripten to convert legacy code to a WebAssembly WASM file. After precompiling the Pro*C file into a .c file and converting it with Emscripten, the following errors occurred.
Thank you in advance.
The following is the output of the execution command and errors.
D:\C-connection\FASSDB>emcc test1.c -s WASM=1 -s -o public/test.js
test1.c:117:8: error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
extern sqlcxt ( void **, unsigned int *,
~~~~~~ ^
int
test1.c:119:8: error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
extern sqlcx2t( void **, unsigned int *,
~~~~~~ ^
int
test1.c:121:8: error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
extern sqlbuft( void **, char * );
~~~~~~ ^
int
test1.c:122:8: error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
extern sqlgs2t( void **, char * );
~~~~~~ ^
int
test1.c:123:8: error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
extern sqlorat( void **, unsigned int *, void * );
~~~~~~ ^
int
5 errors generated.
emcc: error: 'D:/C-connection/emsdk/upstream/bin\clang.exe -target wasm32-unknown-emscripten -fignore-exceptions -fvisibility=default -mllvm -combiner-global-alias-analysis=false -mllvm -enable-emscripten-sjlj -mllvm -disable-lsr -DEMSCRIPTEN -D__EMSCRIPTEN_major__=3 -D__EMSCRIPTEN_minor__=1 -D__EMSCRIPTEN_tiny__=18 -Werror=implicit-function-declaration -ID:\C-connection\emsdk\upstream\emscripten\cache\sysroot\include\SDL --sysroot=D:\C-connection\emsdk\upstream\emscripten\cache\sysroot -Xclang -iwithsysroot/include\compat test1.c -c -o C:\Users\REALHO~1\AppData\Local\Temp\emscripten_temp_337o8xxh\test1_0.o' failed (returned 1)
The following is the Pro*C file before it was precompiled into *.c.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int main()
{
    EXEC SQL INCLUDE sqlca.h;
    EXEC SQL BEGIN DECLARE SECTION;
    varchar id[50];
    varchar password[50];
    varchar tns[50];
    EXEC SQL END DECLARE SECTION;
    memset(&id, 0x00, sizeof(id));
    memset(&password, 0x00, sizeof(password));
    memset(&tns, 0x00, sizeof(tns));
    strcpy((char*)id.arr, "dbid");
    strcpy((char*)password.arr, "dbpassword");
    strcpy((char*)tns.arr, "orcl");
    id.len = strlen((char*)id.arr);
    password.len = strlen((char*)password.arr);
    tns.len = strlen((char*)tns.arr);
    // EXEC SQL CONNECT :id IDENTIFIED BY :password ;
    EXEC SQL CONNECT :id IDENTIFIED BY :password USING :tns ;
    if(sqlca.sqlcode < 0) {
        printf("connect error\n");
    }
    else {
        printf("connect success\n");
    }
    return 0;
}
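The errors themselves point at the fix: the declarations that Pro*C emitted for its runtime functions (sqlcxt, sqlcx2t, and so on) rely on implicit int, which C99 and later reject. Below is a minimal sketch of the pattern, using a placeholder name since the full generated parameter lists are truncated in the output above:
/* Rejected by C99 and later (implicit int return type), as in the generated test1.c:
   extern sqlplaceholder( void **, unsigned int * );
   Accepted: spell out the return type, as clang's "int" fix-it hint suggests. */
extern int sqlplaceholder( void **, unsigned int * );

int main(void)
{
    return 0;
}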

LNK2019 when defining UNICODE/_UNICODE

I am using TCHAR, and I get this error when I define UNICODE / _UNICODE. Here is my code.
function.h
#define UNICODE
#define _UNICODE
#include <tchar.h>
#include <windows.h>
BOOL bLogging(LPCTSTR szContent, ...);
function.cpp
#include "function.h"
BOOL bLogging(LPCTSTR szContent, ...)
{
    body of function
}
plugin.cpp
#include "function.h"
bool pluginInit(PLUG_INITSTRUCT* initStruct)
{
    bLogging(TEXT("hello, world!"));
}
The above code produces this error:
========== Build: 0 succeeded, 1 failed, 0 up-to-date, 0 skipped ==========
LNK2019 unresolved external symbol "int __cdecl bLogging(char const *,...)" (?bLogging@@YAHPBDZZ) referenced in function "bool __cdecl pluginInit(struct PLUG_INITSTRUCT *)" (?pluginInit@@YA_NPAUPLUG_INITSTRUCT@@@Z)
But without UNICODE / _UNICODE, it builds with narrow (ANSI) characters and works well.
========== Build: 1 succeeded, 0 failed, 0 up-to-date, 0 skipped ==========
Raymond's comment is probably the correct answer. Otherwise, your code looks fine.
Unless you are trying to keep backwards compatibility with Windows 95, you don't need the TCHAR macros. It's simpler to avoid them and use either const wchar_t* or LPCWSTR as your string type.
Adjusting your code as an example:
function.h
#pragma once
#include <windows.h>
BOOL bLogging(const wchar_t* szContent, ...);
function.cpp
#include "function.h"
BOOL bLogging(const wchar_t* szContent, ...)
{
    body of function
}
plugin.cpp
#include "function.h"
bool pluginInit(PLUG_INITSTRUCT* initStruct)
{
    bLogging(L"Hello World");
}

The result of using strtol() with stdio.h vs. stdlib.h is different

When trying to parse a number too big to fit in a long, strtol() returns 0 instead of LONG_MAX when only stdio.h is included. If I read the POSIX spec correctly, it should return LONG_MAX. There is a difference between stdio.h and stdlib.h:
#include "stdio.h"
int main(void){
printf("%ld\n", strtol("99999999999999999999999"));
return 0;
} # 0
#include "stdio.h"
//#include "stdlib.h"
int main(void){
    char *end[500];
    printf("%ld\n", strtol("99999999999999999999999", end, 10));
    return 0;
} # 9223372036854775807
strtol is declared in header <stdlib.h> as
long strtol( const char *restrict str, char **restrict str_end, int base );
// ^^^^^^^^ ^^^^^^^^ since C99
In the first posted snippet, <stdlib.h> is not included and the function is called with a single argument, so if compiled with -Wall -Wextra -std=gnu11, gcc produces the following explanatory warnings before the program outputs 0:
prog.c: In function 'main':
prog.c:5:21: warning: implicit declaration of function 'strtol' [-Wimplicit-function-declaration]
printf("%ld\n", strtol("99999999999999999999999"));
^~~~~~
prog.c:5:15: warning: format '%ld' expects argument of type 'long int', but argument 2 has type 'int' [-Wformat=]
printf("%ld\n", strtol("99999999999999999999999"));
~~^ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%d
This means the library function is not called at all; an implicitly declared function with the same name is called instead, and an int with value 0 is returned and printed (with the wrong format specifier, which is itself undefined behavior).
Note that the same code fails to compile with clang, which reports the following:
prog.c:4:21: warning: implicitly declaring library function 'strtol' with type 'long (const char *, char **, int)' [-Wimplicit-function-declaration]
printf("%ld\n", strtol("99999999999999999999999"));
^
prog.c:4:21: note: include the header <stdlib.h> or explicitly provide a declaration for 'strtol'
prog.c:4:53: error: too few arguments to function call, expected 3, have 1
printf("%ld\n", strtol("99999999999999999999999"));
~~~~~~ ^
1 warning and 1 error generated.
In the second snippet, strtol is called with the right number of arguments but, as posted (with the #include commented out), has the same missing-header problem. To produce the expected output, LONG_MAX, the header <stdlib.h> has to be included.
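For reference, here is a corrected sketch of the second snippet: it includes <stdlib.h>, replaces the char *end[500] array of pointers with a single end pointer, and checks errno, which is set to ERANGE on overflow:
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>

int main(void)
{
    char *end;
    errno = 0;
    long v = strtol("99999999999999999999999", &end, 10);
    /* on overflow strtol returns LONG_MAX and sets errno to ERANGE */
    printf("%ld (overflow: %d)\n", v, errno == ERANGE);
    return 0;
}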

Linking with two versions of ICU

I tried to create a simple program:
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include "/usr/local/icu/include/unicode/ustring.h"
UChar*
u_strdup(const UChar *in)
{
    uint32_t len = u_strlen(in) + 1;
    UChar *result = malloc(sizeof(UChar) * len);
    if (!result)
        return NULL;
    u_memcpy(result, in, len);
    return result;
}
int main()
{
    return 0;
}
When I compile with
gcc test1.c -o tes1 `/usr/local/icu/bin/icu-config --ldflags`
/tmp/cc5h5bjr.o: In function `u_strdup':
test1.c:(.text+0x14): undefined reference to `u_strlen_50'
test1.c:(.text+0x50): undefined reference to `u_memcpy_50'
collect2: error: ld returned 1 exit status
As you can see, the compiler still uses the old version of ICU.
ICU versions:
50.1.2 (old, system)
60.2 (new)
What am I missing?
Change your compile line to
gcc test1.c -o tes1 `/usr/local/icu/bin/icu-config --ldflags --cppflags`
and use #include <unicode/ustring.h> instead of the absolute path.
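If it is unclear which set of headers the compiler is actually resolving, one quick check (an illustrative sketch; U_ICU_VERSION is defined by unicode/uvernum.h) is to print the version string reported by the headers that get picked up:
#include <stdio.h>
#include <unicode/uvernum.h>  /* defines U_ICU_VERSION for whichever ICU headers are found */

int main(void)
{
    /* should print 60.2 once the --cppflags include path takes precedence over /usr/include */
    printf("ICU headers: %s\n", U_ICU_VERSION);
    return 0;
}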

Go runtime fails to compile

I am just wondering why the Go runtime would fail to build. How do we pass flags (-fpermissive in this case) to the C compiler that the Go compiler uses to build the runtime? I am using gcc 4.6.2 on Ubuntu 12.04.
../../../thirdparty/go1.4.2/x86_64-unknown-linux-gnu/x86_64-unknown-linux-gnu/src/runtime/cgo/gcc_linux_amd64.c: In function ‘void _cgo_sys_thread_start(ThreadStart*)’:
../../../thirdparty/go1.4.2/x86_64-unknown-linux-gnu/x86_64-unknown-linux-gnu/src/runtime/cgo/gcc_linux_amd64.c:45:41: error: invalid conversion from ‘void*’ to ‘__sigset_t*’ [-fpermissive]
A sample program I wrote also fails to compile; it seems the nil defined in the Go code is the problem. I wonder how this works for others, and when does the Go compiler compile this runtime code?
gcc t.c -lpthread -o t
t.c: In function ‘void* hello_world(void*)’:
t.c:12:41: error: invalid conversion from ‘void*’ to ‘__sigset_t*’ [-fpermissive]
/usr/include/x86_64-linux-gnu/bits/sigthread.h:31:12: error: initializing argument 3 of ‘int pthread_sigmask(int, const __sigset_t*, __sigset_t*)’ [-fpermissive]
rk@rk-VirtualBox:~$ gcc -fpermissive t.c -lpthread -o t
t.c: In function ‘void* hello_world(void*)’:
t.c:12:41: warning: invalid conversion from ‘void*’ to ‘__sigset_t*’ [-fpermissive]
rk@rk-VirtualBox:~$ cat t.c
#include<pthread.h>
#include<stdio.h>
#include<signal.h>
#define nil ((void*)0)
static void*
hello_world(void *vptr)
{
    sigset_t set;
    sigemptyset(&set);
    pthread_sigmask(SIG_BLOCK, &set, nil);
    printf("hello world");
    return NULL;
}
int main(int ac, char **av)
{
    pthread_t t;
    pthread_create(&t, NULL, hello_world, NULL);
    pthread_join(t, NULL);
    return 0;
}
/usr/include/x86_64-linux-gnu/bits/sigthread.h:31:12: error: initializing argument 3 of ‘int pthread_sigmask(int, const __sigset_t*, __sigset_t*)’ [-fpermissive]
make: *** [rulemanager] Error 2
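For what it's worth, the sample t.c is valid C; "invalid conversion" and -fpermissive are C++ diagnostics, which suggests the file is being run through a C++ front end rather than a C compiler. If extra flags really are needed, cgo normally picks them up from the CGO_CFLAGS environment variable (a general cgo mechanism, not verified against this particular Go 1.4.2 source build). As a sketch, here is a version of the sample that also compiles cleanly under a C++ compiler, simply by dropping the void* nil macro:
#include <pthread.h>
#include <stdio.h>
#include <signal.h>

static void *hello_world(void *vptr)
{
    sigset_t set;
    sigemptyset(&set);
    /* NULL (instead of ((void*)0)) is accepted for the sigset_t* argument
       even when the translation unit is compiled as C++ */
    pthread_sigmask(SIG_BLOCK, &set, NULL);
    printf("hello world\n");
    return NULL;
}

int main(void)
{
    pthread_t t;
    pthread_create(&t, NULL, hello_world, NULL);
    pthread_join(t, NULL);
    return 0;
}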
