G
Guest
I get odd behavior in the debugger with the following code. The output of
the program itself is correct, but when I add the g_bad_int64_max and
g_bad_int64_min to the watch window they show values of -1 and 0
respectively. This only happens when initializing a const __int64 in global
scope with a literal constant. I fooled around with the initial value and it
looks like the upper 32 bits are being ignored. Is this a known issue?
#include <iostream>
#include <limits.h>
using namespace std;
// Repro case: const __int64 globals initialized directly from a literal
// constant (LLONG_MAX / LLONG_MIN). The program prints these correctly, but
// per the report above the debugger watch window shows -1 and 0 for them —
// apparently only the low 32 bits are honored (debugger display issue only).
const __int64 g_bad_int64_max = LLONG_MAX;
const __int64 g_bad_int64_min = LLONG_MIN;
// Workaround: route the same literal values through non-const globals; the
// const globals initialized from these reportedly display correctly.
__int64 g_int64_max_initializer = LLONG_MAX;
__int64 g_int64_min_initializer = LLONG_MIN;
const __int64 g_good_int64_max = g_int64_max_initializer;
const __int64 g_good_int64_min = g_int64_min_initializer;
// Prints all six __int64 values so the (correct) runtime output can be
// compared against what the debugger's watch window shows for the globals.
// Fixes vs. original: main() had no return type (implicit int is ill-formed
// in standard C++), and the label for g_good_int64_max wrongly read
// "g_odd_int64_max".
int main()
{
    // Function-local const __int64 initialized from the same literals —
    // reportedly displays correctly in the debugger, unlike the globals.
    const __int64 my_int64_max = LLONG_MAX;
    const __int64 my_int64_min = LLONG_MIN;
    cout << "\n g_bad_int64_max: " << g_bad_int64_max
         << "\n g_bad_int64_min: " << g_bad_int64_min
         << "\n g_good_int64_max: " << g_good_int64_max  // label fixed (was "g_odd_int64_max")
         << "\n g_good_int64_min: " << g_good_int64_min
         << "\n my_int64_max: " << my_int64_max
         << "\n my_int64_min: " << my_int64_min
         << endl;
    return 0;
}
the program itself is correct, but when I add the g_bad_int64_max and
g_bad_int64_min to the watch window they show values of -1 and 0
respectively. This only happens when initializing a const __int64 in global
scope with a literal constant. I fooled around with the initial value and it
looks like the upper 32 bits are being ignored. Is this a known issue?
#include <iostream>
#include <limits.h>
using namespace std;
// Repro case: const __int64 globals initialized directly from a literal
// constant (LLONG_MAX / LLONG_MIN). The program prints these correctly, but
// per the report above the debugger watch window shows -1 and 0 for them —
// apparently only the low 32 bits are honored (debugger display issue only).
const __int64 g_bad_int64_max = LLONG_MAX;
const __int64 g_bad_int64_min = LLONG_MIN;
// Workaround: route the same literal values through non-const globals; the
// const globals initialized from these reportedly display correctly.
__int64 g_int64_max_initializer = LLONG_MAX;
__int64 g_int64_min_initializer = LLONG_MIN;
const __int64 g_good_int64_max = g_int64_max_initializer;
const __int64 g_good_int64_min = g_int64_min_initializer;
// Prints all six __int64 values so the (correct) runtime output can be
// compared against what the debugger's watch window shows for the globals.
// Fixes vs. original: main() had no return type (implicit int is ill-formed
// in standard C++), and the label for g_good_int64_max wrongly read
// "g_odd_int64_max".
int main()
{
    // Function-local const __int64 initialized from the same literals —
    // reportedly displays correctly in the debugger, unlike the globals.
    const __int64 my_int64_max = LLONG_MAX;
    const __int64 my_int64_min = LLONG_MIN;
    cout << "\n g_bad_int64_max: " << g_bad_int64_max
         << "\n g_bad_int64_min: " << g_bad_int64_min
         << "\n g_good_int64_max: " << g_good_int64_max  // label fixed (was "g_odd_int64_max")
         << "\n g_good_int64_min: " << g_good_int64_min
         << "\n my_int64_max: " << my_int64_max
         << "\n my_int64_min: " << my_int64_min
         << endl;
    return 0;
}