+/* This is a specialized subset of expand_expr for use by dbxout_symbol in
+ evaluating DECL_VALUE_EXPR. In particular, we stop if we find decls that
+ haven't been expanded, or if the expression is getting so complex we won't
+ be able to represent it in stabs anyway. Returns NULL on failure. */
+
+static rtx
+dbxout_expand_expr (tree expr)
+{
+ switch (TREE_CODE (expr))
+ {
+ case VAR_DECL:
+ /* We can't handle emulated tls variables, because the address is an
+ offset to the return value of __emutls_get_address, and there is no
+ way to express that in stabs. Also, there are name mangling issues
+ here. We end up with references to undefined symbols if we don't
+ disable debug info for these variables. */
+ if (!targetm.have_tls && DECL_THREAD_LOCAL_P (expr))
+ return NULL;
+ if (TREE_STATIC (expr)
+ && !TREE_ASM_WRITTEN (expr)
+ && !DECL_HAS_VALUE_EXPR_P (expr)
+ && !TREE_PUBLIC (expr)
+ && DECL_RTL_SET_P (expr)
+ && MEM_P (DECL_RTL (expr)))
+ {
+ /* If this is a var that might not be actually output,
+ return NULL, otherwise stabs might reference an undefined
+ symbol. */
+ struct varpool_node *node = varpool_get_node (expr);
+ if (!node || !node->needed)
+ return NULL;
+ }
+ /* FALLTHRU */
+
+ case PARM_DECL:
+ case RESULT_DECL:
+ /* A decl with a DECL_VALUE_EXPR stands for that expression;
+ recurse on the expression instead of the decl. */
+ if (DECL_HAS_VALUE_EXPR_P (expr))
+ return dbxout_expand_expr (DECL_VALUE_EXPR (expr));
+ /* FALLTHRU */
+
+ case CONST_DECL:
+ /* Use the RTL already assigned to the decl; NULL if none has been
+ set, which callers treat as failure. */
+ return DECL_RTL_IF_SET (expr);
+
+ case INTEGER_CST:
+ /* A constant can always be expanded without touching any other
+ decls. */
+ return expand_expr (expr, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+
+ case COMPONENT_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case BIT_FIELD_REF:
+ {
+ enum machine_mode mode;
+ HOST_WIDE_INT bitsize, bitpos;
+ tree offset, tem;
+ int volatilep = 0, unsignedp = 0;
+ rtx x;
+
+ /* Split the reference into its base object TEM plus a (possibly
+ NULL) variable byte offset OFFSET and a constant bit position
+ BITPOS relative to that base. */
+ tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep, true);
+
+ /* Expand the base recursively; only a MEM result has an address
+ we can adjust below. */
+ x = dbxout_expand_expr (tem);
+ if (x == NULL || !MEM_P (x))
+ return NULL;
+ if (offset != NULL)
+ {
+ /* A variable (non-constant) offset cannot be expressed in
+ stabs. */
+ if (!host_integerp (offset, 0))
+ return NULL;
+ x = adjust_address_nv (x, mode, tree_low_cst (offset, 0));
+ }
+ if (bitpos != 0)
+ x = adjust_address_nv (x, mode, bitpos / BITS_PER_UNIT);
+
+ return x;
+ }
+
+ default:
+ /* Anything else is too complex to represent in stabs. */
+ return NULL;
+ }
+}
+
+/* Helper function for output_used_types. Queue one entry from the
+ used types hash to be output. Always returns 1 so htab_traverse
+ keeps going. */
+
+static int
+output_used_types_helper (void **slot, void *data)
+{
+ tree type = (tree) *slot;
+ VEC(tree, heap) **queue = (VEC(tree, heap) **) data;
+ enum tree_code code = TREE_CODE (type);
+ tree stub = TYPE_STUB_DECL (type);
+
+ /* Tagged types are represented by their stub decl, provided it is a
+ real decl that has not been excluded from debug output. */
+ if ((code == RECORD_TYPE
+ || code == UNION_TYPE
+ || code == QUAL_UNION_TYPE
+ || code == ENUMERAL_TYPE)
+ && stub
+ && DECL_P (stub)
+ && !DECL_IGNORED_P (stub))
+ VEC_quick_push (tree, *queue, stub);
+ /* Otherwise fall back to the TYPE_DECL naming the type, if any. */
+ else if (TYPE_NAME (type)
+ && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL)
+ VEC_quick_push (tree, *queue, TYPE_NAME (type));
+
+ return 1;
+}
+
+/* This is a qsort callback giving types and declarations a
+ predictable order: declarations first, then types, each group
+ ordered by UID. */
+
+static int
+output_types_sort (const void *pa, const void *pb)
+{
+ const tree lhs = *((const tree *)pa);
+ const tree rhs = *((const tree *)pb);
+
+ /* Mixed comparison: a type sorts after a declaration. */
+ if (TYPE_P (lhs) != TYPE_P (rhs))
+ return TYPE_P (lhs) ? 1 : -1;
+
+ /* Same kind: compare by UID for a hash-order-independent result. */
+ if (TYPE_P (lhs))
+ return TYPE_UID (lhs) - TYPE_UID (rhs);
+ return DECL_UID (lhs) - DECL_UID (rhs);
+}
+
+
+/* Force all types used by this function to be output in debug
+ information. No-op when there is no current function or it
+ recorded no used types. */
+
+static void
+output_used_types (void)
+{
+ VEC(tree, heap) *queue;
+ int ix;
+ tree decl;
+
+ if (!cfun || !cfun->used_types_hash)
+ return;
+
+ /* Collect the stub or name decl for every used type. */
+ queue = VEC_alloc (tree, heap, htab_elements (cfun->used_types_hash));
+ htab_traverse (cfun->used_types_hash, output_used_types_helper, &queue);
+
+ /* Sort by UID to prevent dependence on hash table ordering. */
+ VEC_qsort (tree, queue, output_types_sort);
+
+ FOR_EACH_VEC_ELT (tree, queue, ix, decl)
+ debug_queue_symbol (decl);
+
+ VEC_free (tree, heap, queue);
+}
+