// Script-declared enum used to reproduce the dictionary retrieval bug (a=0, b=1, c=2).
enum foo { a, b, c };
// Repro entry point: stores script-declared and application-registered enum
// values in a dictionary and reads them back with different casts.
// The trailing comments record the output observed on the reporter's build
// (32-bit Linux, r1985); the bogus values are the bug being reported.
void main()
{
print("b: " + b); // "b: 1"
{
// Control case: storing a plain int literal and reading it back as int works.
dictionary d = { {'enumVal', 1} };
int val = int(d['enumVal']);
print("val: " + val); // "val: 1"
}
{
// Bug: storing the script enum value 'b' but retrieving it as int yields
// garbage — the script-declared enum is apparently not stored as a plain int.
dictionary d = { {'enumVal', b} };
int val = int(d['enumVal']);
print("val: " + val); // "val: -1076139224"
}
{
// Retrieving the same stored value as the enum type itself works.
dictionary d = { {'enumVal', b} };
foo val = foo(d['enumVal']);
print("val: " + val); // "val: 1"
}
// MyAppDefinedEnum is registered by the host application (not declared in script).
MyAppDefinedEnum mode = MyAppDefinedEnum(10);
print("mode: " + mode); // "mode: 10"
{
// Dictionary built with an init-list: reading back as the app enum works in
// this scope, but one() gets a wrong value when the dictionary is passed by handle.
dictionary d = { {'mode', mode} };
MyAppDefinedEnum m = MyAppDefinedEnum(d['mode']);
print("m: " + m); // "m: 10"
one(d); // "m: 211380360"
two(d); // "m: 10"
}
{
// Same scenario, but the value is stored via the index operator instead of an
// init-list; one() still prints a wrong value (a different one this time).
dictionary d;
d['mode'] = mode;
MyAppDefinedEnum m = MyAppDefinedEnum(d['mode']);
print("m: " + m); // "m: 10"
one(d); // "m: 3"
two(d); // "m: 10"
}
}
// Retrieves 'mode' as the application-registered enum type. When the
// dictionary arrives by handle, this prints a wrong value (the observed
// outputs are recorded at the call sites).
void one(dictionary@ d)
{
MyAppDefinedEnum m = MyAppDefinedEnum(d['mode']);
print("m: " + m);
}
// Retrieves 'mode' as a plain int. This is the workaround: it consistently
// prints the correct value even when the dictionary arrives by handle.
void two(dictionary@ d)
{
int m = int(d['mode']);
print("m: " + m);
}
When storing a script-declared enum value in a dictionary, it is not implicitly converted to an int: retrieving it as an int (which, logically, is what it is) returns a bogus value.
When storing an application-registered enum value in a dictionary and passing the dictionary by handle to a function, retrieving the value as the application-registered enum type yields a wrong value; it must be retrieved as an int to get the correct result.
32-bit Linux, revision 1985 (Version 2.29.2 WIP - 2014/07/20).
Thank you!