Our commands that end up displaying a ValueObject as part of their workflow use OptionGroupValueObjectDisplay as their currency for deciding the final representation.
ValueObjects themselves use DumpValueObjectOptions as the currency for the same purpose.
The code to convert between these two units was replicated (to varying degrees of correctness) in several spots in the code.
This check-in provides one and only one (and hopefully correct :-) ) entry point for this conversion.
git-svn-id: https://llvm.org/svn/llvm-project/lldb/trunk@178044 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/source/Commands/CommandObjectTarget.cpp b/source/Commands/CommandObjectTarget.cpp
index 3869efb..14a743a 100644
--- a/source/Commands/CommandObjectTarget.cpp
+++ b/source/Commands/CommandObjectTarget.cpp
@@ -637,19 +637,8 @@
void
DumpValueObject (Stream &s, VariableSP &var_sp, ValueObjectSP &valobj_sp, const char *root_name)
{
- ValueObject::DumpValueObjectOptions options;
+ ValueObject::DumpValueObjectOptions options(m_varobj_options.GetAsDumpOptions());
- options.SetMaximumPointerDepth(m_varobj_options.ptr_depth)
- .SetMaximumDepth(m_varobj_options.max_depth)
- .SetShowTypes(m_varobj_options.show_types)
- .SetShowLocation(m_varobj_options.show_location)
- .SetUseObjectiveC(m_varobj_options.use_objc)
- .SetUseDynamicType(m_varobj_options.use_dynamic)
- .SetUseSyntheticValue(m_varobj_options.use_synth)
- .SetFlatOutput(m_varobj_options.flat_output)
- .SetOmitSummaryDepth(m_varobj_options.no_summary_depth)
- .SetIgnoreCap(m_varobj_options.ignore_cap);
-
switch (var_sp->GetScope())
{
case eValueTypeVariableGlobal: