I am testing C data type sizes with the following program:
#include <stdio.h>

/* All-ones bit pattern of type n (GCC typeof extension). */
#define MAXOF(n) (typeof(n))(-1)
#define hexfmt "%#lx"
#define printtype(n)                                                           \
    {                                                                          \
        n _a;                                                                  \
        printf("%25s \t(%zu bytes)\t " hexfmt "\n", #n, sizeof(n), MAXOF(_a)); \
    }

int main(int argc, char const *argv[])
{
    printtype(signed char);
    printtype(unsigned char);
    printtype(signed short);
    printtype(unsigned short);
    printtype(signed short int);
    printtype(unsigned short int);
    printtype(signed int);
    printtype(unsigned int);
    printtype(signed);
    printtype(unsigned);
    printtype(signed long);
    printtype(unsigned long);
    printtype(signed long int);
    printtype(unsigned long int);
    printtype(signed long long);
    printtype(unsigned long long);
    printtype(signed long long int);
    printtype(unsigned long long int);
    return 0;
}
Here is the output, compiled with gcc version 5.4.0 20160609 (Ubuntu 5.4.0-6ubuntu1~16.04.11):
              signed char 	(1 bytes)	 0xffffffff
            unsigned char 	(1 bytes)	 0xff
             signed short 	(2 bytes)	 0xffffffff
           unsigned short 	(2 bytes)	 0xffff
         signed short int 	(2 bytes)	 0xffffffff
       unsigned short int 	(2 bytes)	 0xffff
               signed int 	(4 bytes)	 0xffffffff
             unsigned int 	(4 bytes)	 0xffffffff
                   signed 	(4 bytes)	 0xffffffff
                 unsigned 	(4 bytes)	 0xffffffff
              signed long 	(8 bytes)	 0xffffffffffffffff
            unsigned long 	(8 bytes)	 0xffffffffffffffff
          signed long int 	(8 bytes)	 0xffffffffffffffff
        unsigned long int 	(8 bytes)	 0xffffffffffffffff
         signed long long 	(8 bytes)	 0xffffffffffffffff
       unsigned long long 	(8 bytes)	 0xffffffffffffffff
     signed long long int 	(8 bytes)	 0xffffffffffffffff
   unsigned long long int 	(8 bytes)	 0xffffffffffffffff
Although sizeof(signed char) is 1 byte, the value printed by printf is 0xffffffff (which is 4 bytes wide). Does that mean a signed type is always 32 bits?
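For comparison, here is a minimal check of what I suspect is going on, assuming the char argument is promoted to int (and therefore sign-extended) before printf reads it:

#include <stdio.h>

int main(void)
{
    signed char c = -1;                           /* one byte, all bits set */
    printf("%#x\n", (unsigned)(int)c);            /* prints 0xffffffff      */
    printf("%#x\n", (unsigned)(unsigned char)c);  /* prints 0xff            */
    return 0;
}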