|
|
|
|
@@ -94,7 +94,39 @@ static BOOL selector_types_equal(const char *t1, const char *t2)
|
|
|
|
|
{
	// Advance both cursors past qualifiers / frame info that is
	// irrelevant when deciding whether two selector type encodings match.
	t1 = skip_irrelevant_type_info(t1);
	t2 = skip_irrelevant_type_info(t2);
	if (*t1 != *t2)
	{
		// This is a really ugly hack. For some stupid reason, the people
		// designing Objective-C type encodings decided to allow * as a
		// shorthand for char*, because strings are 'special'. Unfortunately,
		// FSF GCC generates "*" for @encode(BOOL*), while Clang and Apple GCC
		// generate "^c" or "^C" (depending on whether BOOL is declared
		// unsigned).
		//
		// The correct fix is to remove * completely from type encodings, but
		// unfortunately my time machine is broken so I can't travel to 1986
		// and apply a cluebat to those responsible.
		if ((*t1 == '*') && (*t2 != '*'))
		{
			// BUG FIX: the pointee type character follows '^' at offset 1,
			// not 2, and it may be either 'C' or 'c' (the original read one
			// character too far and compared against 'C' on both sides of
			// the ||, so the signed-BOOL encoding "^c" was never accepted).
			if ((*t2 == '^') && ((*(t2+1) == 'C') || (*(t2+1) == 'c')))
			{
				// Skip the '^' so t2 now points at the char-sized pointee,
				// treating "^c"/"^C" as equivalent to "*".
				t2++;
			}
			else
			{
				return NO;
			}
		}
		else if ((*t2 == '*') && (*t1 != '*'))
		{
			// Mirror image of the branch above, with the roles of the two
			// encodings swapped. Same offset/case fixes applied.
			if ((*t1 == '^') && ((*(t1+1) == 'C') || (*(t1+1) == 'c')))
			{
				t1++;
			}
			else
			{
				return NO;
			}
		}
		else
		{
			// Genuinely different type characters (and neither side is the
			// '*' shorthand): the encodings do not match. Under the outer
			// *t1 != *t2 guard this branch is reached exactly when the
			// original's redundant `else if (*t1 != *t2)` fired.
			return NO;
		}
	}
|
|
|
|
|
|