Fix VASprintfTest on darwin
The way of injecting an error into the printf call was not working on
darwin - the C library still happily formats the character. It only
returns an error after we use a wide character that does not fit into a
single byte, so switch the test to use that.
llvm-svn: 295443
diff --git a/lldb/unittests/Utility/VASprintfTest.cpp b/lldb/unittests/Utility/VASprintfTest.cpp
index 73a43dd..0b440942e 100644
--- a/lldb/unittests/Utility/VASprintfTest.cpp
+++ b/lldb/unittests/Utility/VASprintfTest.cpp
@@ -49,11 +49,11 @@
setlocale(LC_ALL, ".932");
wchar_t Invalid[2];
- Invalid[0] = 129;
+ Invalid[0] = 0x100;
Invalid[1] = 0;
llvm::SmallString<32> Buffer;
EXPECT_FALSE(Sprintf(Buffer, "%ls", Invalid));
EXPECT_EQ("<Encoding error>", Buffer);
- setlocale(LC_CTYPE, Current.c_str());
+ setlocale(LC_ALL, Current.c_str());
}