Change the driver's logic about Objective-C runtimes:  abstract out a
structure to hold inferred information, then propagate each individual
bit down to -cc1.  Separate the bits of "supports weak" and "has a native
ARC runtime";  make the latter a CodeGenOption.

The toolchain still drives this decision because it's the place that has the
required deployment-target information on Darwin, but at least it's better
factored now.
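
A minimal sketch, with hypothetical names, of the structure the driver can
use to hold the inferred runtime properties before forwarding each bit to
-cc1; on Darwin the toolchain would fill it in from the deployment target,
and the version threshold below is purely illustrative.

    // Hypothetical driver-side summary of the target runtime's capabilities.
    struct ObjCRuntimeInfo {
      bool HasWeak = false;       // supports zeroing __weak references
      bool HasNativeARC = false;  // provides the ARC runtime functions itself
    };

    // Hypothetical helper: a Darwin toolchain would fill this in from the
    // deployment target; the cutoff used here is not the real one.
    ObjCRuntimeInfo inferObjCRuntime(unsigned DeploymentMajorVersion) {
      ObjCRuntimeInfo Info;
      Info.HasWeak = DeploymentMajorVersion >= 11;
      Info.HasNativeARC = DeploymentMajorVersion >= 11;
      return Info;
    }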



git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@134453 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Frontend/InitPreprocessor.cpp b/lib/Frontend/InitPreprocessor.cpp
index b07fb43..9428cd5 100644
--- a/lib/Frontend/InitPreprocessor.cpp
+++ b/lib/Frontend/InitPreprocessor.cpp
@@ -250,7 +250,7 @@
         << "}\n"
         << "\n";
       
-    if (!LangOpts.ObjCNoAutoRefCountRuntime) {
+    if (LangOpts.ObjCRuntimeHasWeak) {
       Out << "template <class _Tp>\n"
           << "inline __attribute__ ((__visibility__(\"hidden\"),"
           << "__always_inline__))\n"
@@ -318,7 +318,7 @@
         << "};\n"
         << "\n";
       
-    if (!LangOpts.ObjCNoAutoRefCountRuntime) {
+    if (LangOpts.ObjCRuntimeHasWeak) {
       Out << "template<typename _Tp>\n"
           << "struct __is_scalar<__attribute__((objc_ownership(weak))) _Tp> {\n"
           << "  enum { __value = 0 };\n"