Output UTF-16 string literals independent of host byte order.
 - Steve, can you take a look at this? It seems like this code should live
   elsewhere, and there is a FIXME about having Sema validate the UTF-8 to
   UTF-16 conversion.


git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@76915 91177308-0d34-0410-b5e6-96231b3b80d8
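
Background: the updated test below checks that the emitted bytes follow the
target's byte order (little-endian for i386, big-endian for powerpc) rather
than the host's. The following is a minimal sketch of byte-order-independent
serialization, not the actual Clang code; serializeUTF16, CodeUnits, and
TargetIsBigEndian are illustrative names.

// Sketch only: serialize UTF-16 code units into raw bytes in the
// *target's* byte order, so the output never depends on the byte
// order of the host running the compiler.
#include <cstdint>
#include <string>
#include <vector>

std::string serializeUTF16(const std::vector<uint16_t> &CodeUnits,
                           bool TargetIsBigEndian) {
  std::string Bytes;
  Bytes.reserve(CodeUnits.size() * 2);
  for (uint16_t CU : CodeUnits) {
    // Split each code unit into bytes explicitly instead of copying
    // host memory, so host endianness cannot leak into the result.
    uint8_t Hi = uint8_t(CU >> 8), Lo = uint8_t(CU & 0xFF);
    if (TargetIsBigEndian) {
      Bytes.push_back(char(Hi));
      Bytes.push_back(char(Lo));
    } else {
      Bytes.push_back(char(Lo));
      Bytes.push_back(char(Hi));
    }
  }
  return Bytes;
}

Under this scheme an i386 target serializes 'h' (U+0068) as "h\00" and a
powerpc target as "\00h", which is what the CHECK-LSB and CHECK-MSB lines
below expect.
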
diff --git a/test/CodeGen/darwin-string-literals.c b/test/CodeGen/darwin-string-literals.c
index 90662d1..2f94d55 100644
--- a/test/CodeGen/darwin-string-literals.c
+++ b/test/CodeGen/darwin-string-literals.c
@@ -1,8 +1,14 @@
-// RUN: clang-cc -triple i386-apple-darwin9 -emit-llvm %s -o - | FileCheck %s
+// RUN: clang-cc -triple i386-apple-darwin9 -emit-llvm %s -o - | FileCheck -check-prefix LSB %s
 
-// CHECK: @.str = private constant [8 x i8] c"string0\00"
-// CHECK: @.str1 = private constant [8 x i8] c"string1\00", section "__TEXT,__cstring,cstring_literals"
-// CHECK: @__utf16_string_ = internal global [35 x i8] c"h\00e\00l\00l\00o\00 \00\92! \00\03& \00\90! \00w\00o\00r\00l\00d\00\00", section "__TEXT,__ustring", align 2
+// CHECK-LSB: @.str = private constant [8 x i8] c"string0\00"
+// CHECK-LSB: @.str1 = private constant [8 x i8] c"string1\00", section "__TEXT,__cstring,cstring_literals"
+// CHECK-LSB: @__utf16_string_ = internal global [35 x i8] c"h\00e\00l\00l\00o\00 \00\92! \00\03& \00\90! \00w\00o\00r\00l\00d\00\00", section "__TEXT,__ustring", align 2
+
+// RUN: clang-cc -triple powerpc-apple-darwin9 -emit-llvm %s -o - | FileCheck -check-prefix MSB %s
+
+// CHECK-MSB: @.str = private constant [8 x i8] c"string0\00"
+// CHECK-MSB: @.str1 = private constant [8 x i8] c"string1\00", section "__TEXT,__cstring,cstring_literals"
+// CHECK-MSB: @__utf16_string_ = internal global [35 x i8] c"\00h\00e\00l\00l\00o\00 !\92\00 &\03\00 !\90\00 \00w\00o\00r\00l\00d\00", section "__TEXT,__ustring", align 2
 
 const char *g0 = "string0";
 const void *g1 = __builtin___CFStringMakeConstantString("string1");