/* Xtensa-specific support for 32-bit ELF.
- Copyright (C) 2003-2015 Free Software Foundation, Inc.
+ Copyright (C) 2003-2016 Free Software Foundation, Inc.
This file is part of BFD, the Binary File Descriptor library.
#include "libbfd.h"
#include "elf-bfd.h"
#include "elf/xtensa.h"
+#include "splay-tree.h"
#include "xtensa-isa.h"
#include "xtensa-config.h"
if (r_type >= (unsigned int) R_XTENSA_max)
{
- _bfd_error_handler (_("%A: invalid XTENSA reloc number: %d"), abfd, r_type);
+ _bfd_error_handler (_("%B: invalid XTENSA reloc number: %d"), abfd, r_type);
r_type = 0;
}
cache_ptr->howto = &elf_howto_table[r_type];
static const bfd_byte elf_xtensa_be_plt_entry[PLT_ENTRY_SIZE] =
{
+#if XSHAL_ABI == XTHAL_ABI_WINDOWED
0x6c, 0x10, 0x04, /* entry sp, 32 */
+#endif
0x18, 0x00, 0x00, /* l32r a8, [got entry for rtld's resolver] */
0x1a, 0x00, 0x00, /* l32r a10, [got entry for rtld's link map] */
0x1b, 0x00, 0x00, /* l32r a11, [literal for reloc index] */
static const bfd_byte elf_xtensa_le_plt_entry[PLT_ENTRY_SIZE] =
{
+#if XSHAL_ABI == XTHAL_ABI_WINDOWED
0x36, 0x41, 0x00, /* entry sp, 32 */
+#endif
0x81, 0x00, 0x00, /* l32r a8, [got entry for rtld's resolver] */
0xa1, 0x00, 0x00, /* l32r a10, [got entry for rtld's link map] */
0xb1, 0x00, 0x00, /* l32r a11, [literal for reloc index] */
const Elf_Internal_Rela *rel;
const Elf_Internal_Rela *rel_end;
- if (info->relocatable || (sec->flags & SEC_ALLOC) == 0)
+ if (bfd_link_relocatable (info) || (sec->flags & SEC_ALLOC) == 0)
return TRUE;
BFD_ASSERT (is_xtensa_elf (abfd));
switch (r_type)
{
case R_XTENSA_TLSDESC_FN:
- if (info->shared)
+ if (bfd_link_pic (info))
{
tls_type = GOT_TLS_GD;
is_got = TRUE;
break;
case R_XTENSA_TLSDESC_ARG:
- if (info->shared)
+ if (bfd_link_pic (info))
{
tls_type = GOT_TLS_GD;
is_got = TRUE;
break;
case R_XTENSA_TLS_DTPOFF:
- if (info->shared)
+ if (bfd_link_pic (info))
tls_type = GOT_TLS_GD;
else
tls_type = GOT_TLS_IE;
case R_XTENSA_TLS_TPOFF:
tls_type = GOT_TLS_IE;
- if (info->shared)
+ if (bfd_link_pic (info))
info->flags |= DF_STATIC_TLS;
- if (info->shared || h)
+ if (bfd_link_pic (info) || h)
is_got = TRUE;
break;
elf_xtensa_make_sym_local (struct bfd_link_info *info,
struct elf_link_hash_entry *h)
{
- if (info->shared)
+ if (bfd_link_pic (info))
{
if (h->plt.refcount > 0)
{
if (htab == NULL)
return FALSE;
- if (info->relocatable)
+ if (bfd_link_relocatable (info))
return TRUE;
if ((sec->flags & SEC_ALLOC) == 0)
switch (r_type)
{
case R_XTENSA_TLSDESC_FN:
- if (info->shared)
+ if (bfd_link_pic (info))
{
is_got = TRUE;
is_tlsfunc = TRUE;
break;
case R_XTENSA_TLSDESC_ARG:
- if (info->shared)
+ if (bfd_link_pic (info))
is_got = TRUE;
else
{
break;
case R_XTENSA_TLS_TPOFF:
- if (info->shared || h)
+ if (bfd_link_pic (info) || h)
is_got = TRUE;
break;
{
if (is_plt)
{
+ /* If the symbol has been localized its plt.refcount got moved
+ to got.refcount. Handle it as GOT. */
if (h->plt.refcount > 0)
h->plt.refcount--;
+ else
+ is_got = TRUE;
}
- else if (is_got)
+ if (is_got)
{
if (h->got.refcount > 0)
h->got.refcount--;
&& htab->sgotloc != NULL);
/* Set the contents of the .interp section to the interpreter. */
- if (info->executable)
+ if (bfd_link_executable (info) && !info->nointerp)
{
s = bfd_get_linker_section (dynobj, ".interp");
if (s == NULL)
/* If we are generating a shared object, we also need space in
".rela.got" for R_XTENSA_RELATIVE relocs for literals that
reference local symbols. */
- if (info->shared)
+ if (bfd_link_pic (info))
elf_xtensa_allocate_local_got_size (info);
/* Allocate space in ".plt" to match the size of ".rela.plt". For
#define add_dynamic_entry(TAG, VAL) \
_bfd_elf_add_dynamic_entry (info, TAG, VAL)
- if (info->executable)
+ if (bfd_link_executable (info))
{
if (!add_dynamic_entry (DT_DEBUG, 0))
return FALSE;
{
asection *splt, *sgotplt;
bfd_vma plt_base, got_base;
- bfd_vma code_offset, lit_offset;
+ bfd_vma code_offset, lit_offset, abi_offset;
int chunk;
chunk = reloc_index / PLT_ENTRIES_PER_CHUNK;
? elf_xtensa_be_plt_entry
: elf_xtensa_le_plt_entry),
PLT_ENTRY_SIZE);
+ abi_offset = XSHAL_ABI == XTHAL_ABI_WINDOWED ? 3 : 0;
bfd_put_16 (output_bfd, l32r_offset (got_base + 0,
- plt_base + code_offset + 3),
- splt->contents + code_offset + 4);
+ plt_base + code_offset + abi_offset),
+ splt->contents + code_offset + abi_offset + 1);
bfd_put_16 (output_bfd, l32r_offset (got_base + 4,
- plt_base + code_offset + 6),
- splt->contents + code_offset + 7);
+ plt_base + code_offset + abi_offset + 3),
+ splt->contents + code_offset + abi_offset + 4);
bfd_put_16 (output_bfd, l32r_offset (got_base + lit_offset,
- plt_base + code_offset + 9),
- splt->contents + code_offset + 10);
+ plt_base + code_offset + abi_offset + 6),
+ splt->contents + code_offset + abi_offset + 7);
return plt_base + code_offset;
}
unresolved_reloc = FALSE;
warned = FALSE;
- if (howto->partial_inplace && !info->relocatable)
+ if (howto->partial_inplace && !bfd_link_relocatable (info))
{
/* Because R_XTENSA_32 was made partial_inplace to fix some
problems with DWARF info in partial links, there may be
RELOC_AGAINST_DISCARDED_SECTION (info, input_bfd, input_section,
rel, 1, relend, howto, 0, contents);
- if (info->relocatable)
+ if (bfd_link_relocatable (info))
{
bfd_vma dest_addr;
asection * sym_sec = get_elf_r_symndx_section (input_bfd, r_symndx);
case R_XTENSA_PLT:
if (elf_hash_table (info)->dynamic_sections_created
&& (input_section->flags & SEC_ALLOC) != 0
- && (dynamic_symbol || info->shared))
+ && (dynamic_symbol || bfd_link_pic (info)))
{
Elf_Internal_Rela outrel;
bfd_byte *loc;
case R_XTENSA_TLS_TPOFF:
/* Switch to LE model for local symbols in an executable. */
- if (! info->shared && ! dynamic_symbol)
+ if (! bfd_link_pic (info) && ! dynamic_symbol)
{
relocation = tpoff (info, relocation);
break;
{
if (r_type == R_XTENSA_TLSDESC_FN)
{
- if (! info->shared || (tls_type & GOT_TLS_IE) != 0)
+ if (! bfd_link_pic (info) || (tls_type & GOT_TLS_IE) != 0)
r_type = R_XTENSA_NONE;
}
else if (r_type == R_XTENSA_TLSDESC_ARG)
{
- if (info->shared)
+ if (bfd_link_pic (info))
{
if ((tls_type & GOT_TLS_IE) != 0)
r_type = R_XTENSA_TLS_TPOFF;
break;
case R_XTENSA_TLS_DTPOFF:
- if (! info->shared)
+ if (! bfd_link_pic (info))
/* Switch from LD model to LE model. */
relocation = tpoff (info, relocation);
else
}
/* Combine adjacent literal table entries. */
- BFD_ASSERT (! info->relocatable);
+ BFD_ASSERT (! bfd_link_relocatable (info));
sxtlit = bfd_get_section_by_name (output_bfd, ".xt.lit");
sgotloc = htab->sgotloc;
BFD_ASSERT (sgotloc);
break;
case DT_XTENSA_GOT_LOC_OFF:
- dyn.d_un.d_ptr = htab->sgotloc->output_section->vma;
+ dyn.d_un.d_ptr = (htab->sgotloc->output_section->vma
+ + htab->sgotloc->output_offset);
break;
case DT_PLTGOT:
- dyn.d_un.d_ptr = htab->sgot->output_section->vma;
+ dyn.d_un.d_ptr = (htab->sgot->output_section->vma
+ + htab->sgot->output_offset);
break;
case DT_JMPREL:
- dyn.d_un.d_ptr = htab->srelplt->output_section->vma;
+ dyn.d_un.d_ptr = (htab->srelplt->output_section->vma
+ + htab->srelplt->output_offset);
break;
case DT_PLTRELSZ:
- dyn.d_un.d_val = htab->srelplt->output_section->size;
+ dyn.d_un.d_val = htab->srelplt->size;
break;
case DT_RELASZ:
for .rela.plt to follow all other relocation sections, we
don't have to worry about changing the DT_RELA entry. */
if (htab->srelplt)
- dyn.d_un.d_val -= htab->srelplt->output_section->size;
+ dyn.d_un.d_val -= htab->srelplt->size;
break;
}
bfd_vma virtual_offset; /* Zero except for adding literals. */
int removed_bytes;
literal_value value; /* Only valid when adding literals. */
+};
- text_action *next;
+struct removal_by_action_entry_struct
+{
+ bfd_vma offset;
+ int removed;
+ int eq_removed;
+ int eq_removed_before_fill;
+};
+typedef struct removal_by_action_entry_struct removal_by_action_entry;
+
+struct removal_by_action_map_struct
+{
+ unsigned n_entries;
+ removal_by_action_entry *entry;
};
+typedef struct removal_by_action_map_struct removal_by_action_map;
/* List of all of the actions taken on a text section. */
struct text_action_list_struct
{
- text_action *head;
+ unsigned count;
+ splay_tree tree;
+ removal_by_action_map map;
};
static text_action *
find_fill_action (text_action_list *l, asection *sec, bfd_vma offset)
{
- text_action **m_p;
+ text_action a;
/* It is not necessary to fill at the end of a section. */
if (sec->size == offset)
return NULL;
- for (m_p = &l->head; *m_p && (*m_p)->offset <= offset; m_p = &(*m_p)->next)
- {
- text_action *t = *m_p;
- /* When the action is another fill at the same address,
- just increase the size. */
- if (t->offset == offset && t->action == ta_fill)
- return t;
- }
+ a.offset = offset;
+ a.action = ta_fill;
+
+ splay_tree_node node = splay_tree_lookup (l->tree, (splay_tree_key)&a);
+ if (node)
+ return (text_action *)node->value;
return NULL;
}
}
+/* Ordering predicate for the text_action splay tree.  Sort primarily
+   by offset; for equal offsets use a fixed per-action priority so the
+   in-order walk is deterministic — in particular a fill sorts before a
+   widen at the same address, so fill bytes are not inserted into the
+   middle of a widened instruction (this preserves the ordering rule of
+   the old linked-list implementation).  */
+static int
+text_action_compare (splay_tree_key a, splay_tree_key b)
+{
+ text_action *pa = (text_action *)a;
+ text_action *pb = (text_action *)b;
+ static const int action_priority[] =
+ {
+ [ta_fill] = 0,
+ [ta_none] = 1,
+ [ta_convert_longcall] = 2,
+ [ta_narrow_insn] = 3,
+ [ta_remove_insn] = 4,
+ [ta_remove_longcall] = 5,
+ [ta_remove_literal] = 6,
+ [ta_widen_insn] = 7,
+ [ta_add_literal] = 8,
+ };
+
+ if (pa->offset == pb->offset)
+ {
+ if (pa->action == pb->action)
+ return 0;
+ return action_priority[pa->action] - action_priority[pb->action];
+ }
+ else
+ return pa->offset < pb->offset ? -1 : 1;
+}
+
+/* Return the first action (lowest key per text_action_compare) in
+   ACTION_LIST, or NULL if the list is empty.  */
+static text_action *
+action_first (text_action_list *action_list)
+{
+ splay_tree_node node = splay_tree_min (action_list->tree);
+ return node ? (text_action *)node->value : NULL;
+}
+
+/* Return the in-order successor of ACTION in ACTION_LIST, or NULL if
+   ACTION is the last one.  ACTION must currently be in the tree.  */
+static text_action *
+action_next (text_action_list *action_list, text_action *action)
+{
+ splay_tree_node node = splay_tree_successor (action_list->tree,
+ (splay_tree_key)action);
+ return node ? (text_action *)node->value : NULL;
+}
+
/* Add a modification action to the text. For the case of adding or
removing space, modify any current fill and assume that
"unreachable_space" bytes can be freely contracted. Note that a
bfd_vma offset,
int removed)
{
- text_action **m_p;
text_action *ta;
+ text_action a;
/* It is not necessary to fill at the end of a section. */
if (action == ta_fill && sec->size == offset)
if (action == ta_fill && removed == 0)
return;
- for (m_p = &l->head; *m_p && (*m_p)->offset <= offset; m_p = &(*m_p)->next)
+ a.action = action;
+ a.offset = offset;
+
+ if (action == ta_fill)
{
- text_action *t = *m_p;
+ splay_tree_node node = splay_tree_lookup (l->tree, (splay_tree_key)&a);
- if (action == ta_fill)
+ if (node)
{
- /* When the action is another fill at the same address,
- just increase the size. */
- if (t->offset == offset && t->action == ta_fill)
- {
- t->removed_bytes += removed;
- return;
- }
- /* Fills need to happen before widens so that we don't
- insert fill bytes into the instruction stream. */
- if (t->offset == offset && t->action == ta_widen_insn)
- break;
+ ta = (text_action *)node->value;
+ ta->removed_bytes += removed;
+ return;
}
}
+ else
+ BFD_ASSERT (splay_tree_lookup (l->tree, (splay_tree_key)&a) == NULL);
- /* Create a new record and fill it up. */
ta = (text_action *) bfd_zmalloc (sizeof (text_action));
ta->action = action;
ta->sec = sec;
ta->offset = offset;
ta->removed_bytes = removed;
- ta->next = (*m_p);
- *m_p = ta;
+ splay_tree_insert (l->tree, (splay_tree_key)ta, (splay_tree_value)ta);
+ ++l->count;
}
const literal_value *value,
int removed)
{
- text_action **m_p;
text_action *ta;
asection *sec = r_reloc_get_section (loc);
bfd_vma offset = loc->target_offset;
BFD_ASSERT (action == ta_add_literal);
- for (m_p = &l->head; *m_p != NULL; m_p = &(*m_p)->next)
- {
- if ((*m_p)->offset > offset
- && ((*m_p)->offset != offset
- || (*m_p)->virtual_offset > virtual_offset))
- break;
- }
-
/* Create a new record and fill it up. */
ta = (text_action *) bfd_zmalloc (sizeof (text_action));
ta->action = action;
ta->virtual_offset = virtual_offset;
ta->value = *value;
ta->removed_bytes = removed;
- ta->next = (*m_p);
- *m_p = ta;
+
+ BFD_ASSERT (splay_tree_lookup (l->tree, (splay_tree_key)ta) == NULL);
+ splay_tree_insert (l->tree, (splay_tree_key)ta, (splay_tree_value)ta);
+ ++l->count;
}
so that each search may begin where the previous one left off. */
static int
-removed_by_actions (text_action **p_start_action,
+removed_by_actions (text_action_list *action_list,
+ text_action **p_start_action,
bfd_vma offset,
bfd_boolean before_fill)
{
int removed = 0;
r = *p_start_action;
+ if (r)
+ {
+ splay_tree_node node = splay_tree_lookup (action_list->tree,
+ (splay_tree_key)r);
+ BFD_ASSERT (node != NULL && r == (text_action *)node->value);
+ }
+
while (r)
{
if (r->offset > offset)
removed += r->removed_bytes;
- r = r->next;
+ r = action_next (action_list, r);
}
*p_start_action = r;
static bfd_vma
offset_with_removed_text (text_action_list *action_list, bfd_vma offset)
{
- text_action *r = action_list->head;
- return offset - removed_by_actions (&r, offset, FALSE);
+ text_action *r = action_first (action_list);
+
+ return offset - removed_by_actions (action_list, &r, offset, FALSE);
}
static unsigned
action_list_count (text_action_list *action_list)
{
- text_action *r = action_list->head;
- unsigned count = 0;
- for (r = action_list->head; r != NULL; r = r->next)
+ return action_list->count;
+}
+
+typedef struct map_action_fn_context_struct map_action_fn_context;
+struct map_action_fn_context_struct
+{
+ int removed;
+ removal_by_action_map map;
+ bfd_boolean eq_complete;
+};
+
+/* splay_tree_foreach callback used by map_removal_by_action: fold one
+   text action into the removal_by_action_map under construction in
+   CTX.  Actions are visited in increasing offset order; all actions at
+   one offset are collapsed into a single map entry recording the
+   cumulative removed-byte counts before/at that offset.  */
+static int
+map_action_fn (splay_tree_node node, void *p)
+{
+ map_action_fn_context *ctx = p;
+ text_action *r = (text_action *)node->value;
+ removal_by_action_entry *ientry = ctx->map.entry + ctx->map.n_entries;
+
+ /* Reuse the previous map entry when this action is at the same
+    offset as the one before it; otherwise start a new entry.  */
+ if (ctx->map.n_entries && (ientry - 1)->offset == r->offset)
+ {
+ --ientry;
+ }
+ else
+ {
+ ++ctx->map.n_entries;
+ ctx->eq_complete = FALSE;
+ ientry->offset = r->offset;
+ /* Count of bytes removed strictly before any fill at this offset
+    takes effect.  */
+ ientry->eq_removed_before_fill = ctx->removed;
+ }
+
+ if (!ctx->eq_complete)
+ {
+ if (r->action != ta_fill || r->removed_bytes >= 0)
+ {
+ ientry->eq_removed = ctx->removed;
+ ctx->eq_complete = TRUE;
+ }
+ else
+ /* A fill with negative removed_bytes inserts bytes; keep
+    eq_removed provisional (net of the insertion) until a
+    non-inserting action at this offset finalizes it.  */
+ ientry->eq_removed = ctx->removed + r->removed_bytes;
+ }
+
+ ctx->removed += r->removed_bytes;
+ ientry->removed = ctx->removed;
+ return 0;
+}
+
+/* Lazily build ACTION_LIST's offset -> cumulative-removed-bytes lookup
+   map by walking the action splay tree in order (called on first use
+   from removed_by_actions_map).  One entry is allocated per action;
+   map_action_fn may merge same-offset actions, so n_entries can end up
+   smaller than the allocation.
+   NOTE(review): the bfd_malloc result is not checked before use —
+   confirm OOM handling is acceptable here.  */
+static void
+map_removal_by_action (text_action_list *action_list)
+{
+ map_action_fn_context ctx;
+
+ ctx.removed = 0;
+ ctx.map.n_entries = 0;
+ ctx.map.entry = bfd_malloc (action_list_count (action_list) *
+ sizeof (removal_by_action_entry));
+ ctx.eq_complete = FALSE;
+
+ splay_tree_foreach (action_list->tree, map_action_fn, &ctx);
+ action_list->map = ctx.map;
+}
+
+static int
+removed_by_actions_map (text_action_list *action_list, bfd_vma offset,
+ bfd_boolean before_fill)
+{
+ unsigned a, b;
+
+ if (!action_list->map.entry)
+ map_removal_by_action (action_list);
+
+ if (!action_list->map.n_entries)
+ return 0;
+
+ a = 0;
+ b = action_list->map.n_entries;
+
+ while (b - a > 1)
+ {
+ unsigned c = (a + b) / 2;
+
+ if (action_list->map.entry[c].offset <= offset)
+ a = c;
+ else
+ b = c;
+ }
+
+ if (action_list->map.entry[a].offset < offset)
+ {
+ return action_list->map.entry[a].removed;
+ }
+ else if (action_list->map.entry[a].offset == offset)
+ {
+ return before_fill ?
+ action_list->map.entry[a].eq_removed_before_fill :
+ action_list->map.entry[a].eq_removed;
+ }
+ else
{
- count++;
+ return 0;
}
- return count;
+}
+
+/* Map the pre-relaxation section OFFSET to its post-relaxation value
+   by subtracting the bytes removed before it (binary-search variant of
+   offset_with_removed_text).  */
+static bfd_vma
+offset_with_removed_text_map (text_action_list *action_list, bfd_vma offset)
+{
+ int removed = removed_by_actions_map (action_list, offset, FALSE);
+ return offset - removed;
}
static text_action *
find_insn_action (text_action_list *action_list, bfd_vma offset)
{
- text_action *t;
- for (t = action_list->head; t; t = t->next)
+ static const text_action_t action[] =
{
- if (t->offset == offset)
- {
- switch (t->action)
- {
- case ta_none:
- case ta_fill:
- break;
- case ta_remove_insn:
- case ta_remove_longcall:
- case ta_convert_longcall:
- case ta_narrow_insn:
- case ta_widen_insn:
- return t;
- case ta_remove_literal:
- case ta_add_literal:
- BFD_ASSERT (0);
- break;
- }
- }
+ ta_convert_longcall,
+ ta_remove_longcall,
+ ta_widen_insn,
+ ta_narrow_insn,
+ ta_remove_insn,
+ };
+ text_action a;
+ unsigned i;
+
+ a.offset = offset;
+ for (i = 0; i < sizeof (action) / sizeof (*action); ++i)
+ {
+ splay_tree_node node;
+
+ a.action = action[i];
+ node = splay_tree_lookup (action_list->tree, (splay_tree_key)&a);
+ if (node)
+ return (text_action *)node->value;
}
return NULL;
}
#if DEBUG
static void
-print_action_list (FILE *fp, text_action_list *action_list)
+print_action (FILE *fp, text_action *r)
+{
+ const char *t = "unknown";
+ switch (r->action)
+ {
+ case ta_remove_insn:
+ t = "remove_insn"; break;
+ case ta_remove_longcall:
+ t = "remove_longcall"; break;
+ case ta_convert_longcall:
+ t = "convert_longcall"; break;
+ case ta_narrow_insn:
+ t = "narrow_insn"; break;
+ case ta_widen_insn:
+ t = "widen_insn"; break;
+ case ta_fill:
+ t = "fill"; break;
+ case ta_none:
+ t = "none"; break;
+ case ta_remove_literal:
+ t = "remove_literal"; break;
+ case ta_add_literal:
+ t = "add_literal"; break;
+ }
+
+ fprintf (fp, "%s: %s[0x%lx] \"%s\" %d\n",
+ r->sec->owner->filename,
+ r->sec->name, (unsigned long) r->offset, t, r->removed_bytes);
+}
+
+static int
+print_action_list_fn (splay_tree_node node, void *p)
{
- text_action *r;
+ text_action *r = (text_action *)node->value;
- fprintf (fp, "Text Action\n");
- for (r = action_list->head; r != NULL; r = r->next)
- {
- const char *t = "unknown";
- switch (r->action)
- {
- case ta_remove_insn:
- t = "remove_insn"; break;
- case ta_remove_longcall:
- t = "remove_longcall"; break;
- case ta_convert_longcall:
- t = "convert_longcall"; break;
- case ta_narrow_insn:
- t = "narrow_insn"; break;
- case ta_widen_insn:
- t = "widen_insn"; break;
- case ta_fill:
- t = "fill"; break;
- case ta_none:
- t = "none"; break;
- case ta_remove_literal:
- t = "remove_literal"; break;
- case ta_add_literal:
- t = "add_literal"; break;
- }
+ print_action (p, r);
+ return 0;
+}
- fprintf (fp, "%s: %s[0x%lx] \"%s\" %d\n",
- r->sec->owner->filename,
- r->sec->name, (unsigned long) r->offset, t, r->removed_bytes);
- }
+static void
+print_action_list (FILE *fp, text_action_list *action_list)
+{
+ fprintf (fp, "Text Action\n");
+ splay_tree_foreach (action_list->tree, print_action_list_fn, fp);
}
#endif /* DEBUG */
by the "from" offset field. */
typedef struct removed_literal_struct removed_literal;
+typedef struct removed_literal_map_entry_struct removed_literal_map_entry;
typedef struct removed_literal_list_struct removed_literal_list;
struct removed_literal_struct
removed_literal *next;
};
+/* One element of the sorted array used to bsearch removed literals by
+   their original address (built by map_removed_literal).  */
+struct removed_literal_map_entry_struct
+{
+ /* Copy of literal->from.target_offset.  Must remain the first
+    member: find_removed_literal passes a bare bfd_vma pointer as the
+    bsearch key, which aliases this field.  */
+ bfd_vma addr;
+ removed_literal *literal;
+};
+
struct removed_literal_list_struct
{
removed_literal *head;
removed_literal *tail;
+
+ unsigned n_map;
+ removed_literal_map_entry *map;
};
}
}
+/* Build the binary-searchable address map for REMOVED_LIST from its
+   linked list.  The list is kept sorted by the "from" offset (see the
+   description of removed_literal_list above), so the resulting array
+   is sorted and suitable for bsearch.  The array grows geometrically.
+   NOTE(review): the bfd_realloc result is not checked — TODO confirm
+   OOM handling is acceptable here.  */
+static void
+map_removed_literal (removed_literal_list *removed_list)
+{
+ unsigned n_map = 0;
+ unsigned i;
+ removed_literal_map_entry *map = NULL;
+ removed_literal *r = removed_list->head;
+
+ for (i = 0; r; ++i, r = r->next)
+ {
+ if (i == n_map)
+ {
+ n_map = (n_map * 2) + 2;
+ map = bfd_realloc (map, n_map * sizeof (*map));
+ }
+ map[i].addr = r->from.target_offset;
+ map[i].literal = r;
+ }
+ removed_list->map = map;
+ removed_list->n_map = i;
+}
+
+/* qsort/bsearch comparator for removed_literal_map_entry, ordering by
+   ADDR.  NOTE: find_removed_literal passes a pointer to a bare bfd_vma
+   as the bsearch key; this only works because ADDR is the first member
+   of the entry struct.  */
+static int
+removed_literal_compare (const void *a, const void *b)
+{
+ const removed_literal_map_entry *pa = a;
+ const removed_literal_map_entry *pb = b;
+
+ if (pa->addr == pb->addr)
+ return 0;
+ else
+ return pa->addr < pb->addr ? -1 : 1;
+}
/* Check if the list of removed literals contains an entry for the
given address. Return the entry if found. */
static removed_literal *
find_removed_literal (removed_literal_list *removed_list, bfd_vma addr)
{
- removed_literal *r = removed_list->head;
- while (r && r->from.target_offset < addr)
- r = r->next;
- if (r && r->from.target_offset == addr)
- return r;
- return NULL;
+ removed_literal_map_entry *p;
+ removed_literal *r = NULL;
+
+ if (removed_list->map == NULL)
+ map_removed_literal (removed_list);
+
+ p = bsearch (&addr, removed_list->map, removed_list->n_map,
+ sizeof (*removed_list->map), removed_literal_compare);
+ if (p)
+ {
+ while (p != removed_list->map && (p - 1)->addr == addr)
+ --p;
+ r = p->literal;
+ }
+ return r;
}
relax_info->removed_list.head = NULL;
relax_info->removed_list.tail = NULL;
- relax_info->action_list.head = NULL;
+ relax_info->action_list.tree = splay_tree_new (text_action_compare,
+ NULL, NULL);
+ relax_info->action_list.map.n_entries = 0;
+ relax_info->action_list.map.entry = NULL;
relax_info->fix_list = NULL;
relax_info->fix_array = NULL;
(bfd *, asection *, struct bfd_link_info *);
static bfd_boolean compute_ebb_proposed_actions (ebb_constraint *);
static bfd_boolean compute_ebb_actions (ebb_constraint *);
+typedef struct reloc_range_list_struct reloc_range_list;
static bfd_boolean check_section_ebb_pcrels_fit
- (bfd *, asection *, bfd_byte *, Elf_Internal_Rela *, const ebb_constraint *,
+ (bfd *, asection *, bfd_byte *, Elf_Internal_Rela *,
+ reloc_range_list *, const ebb_constraint *,
const xtensa_opcode *);
static bfd_boolean check_section_ebb_reduces (const ebb_constraint *);
static void text_action_add_proposed
/* For relocatable sections, we can only simplify when the output
section of the target is the same as the output section of the
source. */
- if (link_info->relocatable
+ if (bfd_link_relocatable (link_info)
&& (target_sec->output_section != sec->output_section
|| is_reloc_sym_weak (abfd, irel)))
return FALSE;
return reloc_opcodes;
}
+struct reloc_range_struct
+{
+ bfd_vma addr;
+ bfd_boolean add; /* TRUE if start of a range, FALSE otherwise. */
+ /* Original irel index in the array of relocations for a section. */
+ unsigned irel_index;
+};
+typedef struct reloc_range_struct reloc_range;
+
+typedef struct reloc_range_list_entry_struct reloc_range_list_entry;
+struct reloc_range_list_entry_struct
+{
+ reloc_range_list_entry *next;
+ reloc_range_list_entry *prev;
+ Elf_Internal_Rela *irel;
+ xtensa_opcode opcode;
+ int opnum;
+};
+
+/* Sliding window over a section's PC-relative relocations, used to
+   limit check_section_ebb_pcrels_fit to relocs whose [site, target]
+   span overlaps the current EBB.  Built by build_reloc_ranges.  */
+struct reloc_range_list_struct
+{
+ /* The rest of the structure is only meaningful when ok is TRUE. */
+ bfd_boolean ok;
+
+ unsigned n_range; /* Number of range markers. */
+ reloc_range *range; /* Sorted range markers. */
+
+ unsigned first; /* Index of a first range element in the list. */
+ unsigned last; /* One past index of a last range element in the list. */
+
+ unsigned n_list; /* Number of list elements. */
+ reloc_range_list_entry *reloc; /* Per-reloc entries, indexed by reloc
+    number; an entry is linked into the list below while its range
+    overlaps the current window. */
+ reloc_range_list_entry list_root; /* Sentinel node of the circular
+    doubly linked list of currently in-range entries. */
+};
+
+/* qsort comparator for reloc_range markers: order by address; at equal
+   addresses, range-start markers (add == TRUE) sort before range-end
+   markers.  */
+static int
+reloc_range_compare (const void *a, const void *b)
+{
+ const reloc_range *ra = a;
+ const reloc_range *rb = b;
+
+ if (ra->addr != rb->addr)
+ return ra->addr < rb->addr ? -1 : 1;
+ if (ra->add != rb->add)
+ return ra->add ? -1 : 1;
+ return 0;
+}
+
+/* Scan SEC's relocations and initialize LIST for use by
+   check_section_ebb_pcrels_fit: for every PC-relative reloc whose
+   target lies in SEC, emit a pair of sorted start/end markers covering
+   the span between the reloc site and its target, and fill the
+   per-reloc entry (irel, and cached opcode/opnum).  On any failure to
+   identify a reloc's opcode or operand, set LIST->ok to FALSE and free
+   the partial arrays.
+   NOTE(review): bfd_malloc/bfd_realloc results are not checked —
+   confirm OOM handling is acceptable here.  */
+static void
+build_reloc_ranges (bfd *abfd, asection *sec,
+ bfd_byte *contents,
+ Elf_Internal_Rela *internal_relocs,
+ xtensa_opcode *reloc_opcodes,
+ reloc_range_list *list)
+{
+ unsigned i;
+ size_t n = 0;
+ size_t max_n = 0;
+ reloc_range *ranges = NULL;
+ reloc_range_list_entry *reloc =
+ bfd_malloc (sec->reloc_count * sizeof (*reloc));
+
+ memset (list, 0, sizeof (*list));
+ list->ok = TRUE;
+
+ for (i = 0; i < sec->reloc_count; i++)
+ {
+ Elf_Internal_Rela *irel = &internal_relocs[i];
+ int r_type = ELF32_R_TYPE (irel->r_info);
+ reloc_howto_type *howto = &elf_howto_table[r_type];
+ r_reloc r_rel;
+
+ /* Only PC-relative relocs matter for range checking.  */
+ if (r_type == R_XTENSA_ASM_SIMPLIFY
+ || r_type == R_XTENSA_32_PCREL
+ || !howto->pc_relative)
+ continue;
+
+ r_reloc_init (&r_rel, abfd, irel, contents,
+ bfd_get_section_limit (abfd, sec));
+
+ /* Cross-section targets are not tracked by the range list.  */
+ if (r_reloc_get_section (&r_rel) != sec)
+ continue;
+
+ /* Grow the marker array geometrically; two markers per reloc.  */
+ if (n + 2 > max_n)
+ {
+ max_n = (max_n + 2) * 2;
+ ranges = bfd_realloc (ranges, max_n * sizeof (*ranges));
+ }
+
+ ranges[n].addr = irel->r_offset;
+ ranges[n + 1].addr = r_rel.target_offset;
+
+ /* Mark the lower-addressed end of the span as the start.  */
+ ranges[n].add = ranges[n].addr < ranges[n + 1].addr;
+ ranges[n + 1].add = !ranges[n].add;
+
+ ranges[n].irel_index = i;
+ ranges[n + 1].irel_index = i;
+
+ n += 2;
+
+ reloc[i].irel = irel;
+
+ /* Not every relocation is examined by the optimized
+ check_section_ebb_pcrels_fit, so compute and cache the opcode
+ and operand number here. */
+ if (is_alt_relocation (ELF32_R_TYPE (irel->r_info)))
+ {
+ /* None of the current alternate relocs are PC-relative,
+ and only PC-relative relocs matter here. */
+ }
+ else
+ {
+ xtensa_opcode opcode;
+ int opnum;
+
+ if (reloc_opcodes)
+ opcode = reloc_opcodes[i];
+ else
+ opcode = get_relocation_opcode (abfd, sec, contents, irel);
+
+ if (opcode == XTENSA_UNDEFINED)
+ {
+ list->ok = FALSE;
+ break;
+ }
+
+ opnum = get_relocation_opnd (opcode, ELF32_R_TYPE (irel->r_info));
+ if (opnum == XTENSA_UNDEFINED)
+ {
+ list->ok = FALSE;
+ break;
+ }
+
+ /* Record relocation opcode and opnum as we've calculated them
+ anyway and they won't change. */
+ reloc[i].opcode = opcode;
+ reloc[i].opnum = opnum;
+ }
+ }
+
+ if (list->ok)
+ {
+ /* Shrink to the exact marker count and sort for the sliding
+    window walk; initialize the in-range list to empty.  */
+ ranges = bfd_realloc (ranges, n * sizeof (*ranges));
+ qsort (ranges, n, sizeof (*ranges), reloc_range_compare);
+
+ list->n_range = n;
+ list->range = ranges;
+ list->reloc = reloc;
+ list->list_root.prev = &list->list_root;
+ list->list_root.next = &list->list_root;
+ }
+ else
+ {
+ free (ranges);
+ free (reloc);
+ }
+}
+
+/* Link the entry for reloc IREL_INDEX at the tail of LIST's circular
+   doubly linked list of in-range relocations.  */
+static void reloc_range_list_append (reloc_range_list *list,
+ unsigned irel_index)
+{
+ reloc_range_list_entry *entry = list->reloc + irel_index;
+
+ entry->prev = list->list_root.prev;
+ entry->next = &list->list_root;
+ entry->prev->next = entry;
+ entry->next->prev = entry;
+ ++list->n_list;
+}
+
+/* Unlink the entry for reloc IREL_INDEX from LIST's in-range list.
+   The entry must currently be linked.  */
+static void reloc_range_list_remove (reloc_range_list *list,
+ unsigned irel_index)
+{
+ reloc_range_list_entry *entry = list->reloc + irel_index;
+
+ entry->next->prev = entry->prev;
+ entry->prev->next = entry->next;
+ --list->n_list;
+}
+
+/* Update relocation list object so that it lists all relocations that cross
+ [first; last] range. Range bounds should not decrease with successive
+ invocations. */
+static void reloc_range_list_update_range (reloc_range_list *list,
+ bfd_vma first, bfd_vma last)
+{
+ /* This should not happen: EBBs are iterated from lower addresses to higher.
+ But even if that happens there's no need to break: just flush current list
+ and start from scratch. */
+ if ((list->last > 0 && list->range[list->last - 1].addr > last) ||
+ (list->first > 0 && list->range[list->first - 1].addr >= first))
+ {
+ list->first = 0;
+ list->last = 0;
+ list->n_list = 0;
+ list->list_root.next = &list->list_root;
+ list->list_root.prev = &list->list_root;
+ /* Diagnostic for the unexpected backwards move; the window is
+    rebuilt from scratch below, so this is recoverable.  */
+ fprintf (stderr, "%s: move backwards requested\n", __func__);
+ }
+
+ /* Bring in every range that begins at or before LAST.  */
+ for (; list->last < list->n_range &&
+ list->range[list->last].addr <= last; ++list->last)
+ if (list->range[list->last].add)
+ reloc_range_list_append (list, list->range[list->last].irel_index);
+
+ /* Drop every range that ends before FIRST.  */
+ for (; list->first < list->n_range &&
+ list->range[list->first].addr < first; ++list->first)
+ if (!list->range[list->first].add)
+ reloc_range_list_remove (list, list->range[list->first].irel_index);
+}
+
+/* Release the arrays owned by LIST.  LIST itself is caller-owned
+   (a stack object in compute_text_actions) and is not freed.  */
+static void free_reloc_range_list (reloc_range_list *list)
+{
+ free (list->range);
+ free (list->reloc);
+}
/* The compute_text_actions function will build a list of potential
transformation actions for code in the extended basic block of each
property_table_entry *prop_table = 0;
int ptblsize = 0;
bfd_size_type sec_size;
+ reloc_range_list relevant_relocs;
relax_info = get_xtensa_relax_info (sec);
BFD_ASSERT (relax_info);
goto error_return;
}
+ /* Precompute the opcode for each relocation. */
+ reloc_opcodes = build_reloc_opcodes (abfd, sec, contents, internal_relocs);
+
+ build_reloc_ranges (abfd, sec, contents, internal_relocs, reloc_opcodes,
+ &relevant_relocs);
+
for (i = 0; i < sec->reloc_count; i++)
{
Elf_Internal_Rela *irel = &internal_relocs[i];
ebb->start_reloc_idx = i;
ebb->end_reloc_idx = i;
- /* Precompute the opcode for each relocation. */
- if (reloc_opcodes == NULL)
- reloc_opcodes = build_reloc_opcodes (abfd, sec, contents,
- internal_relocs);
-
if (!extend_ebb_bounds (ebb)
|| !compute_ebb_proposed_actions (&ebb_table)
|| !compute_ebb_actions (&ebb_table)
|| !check_section_ebb_pcrels_fit (abfd, sec, contents,
- internal_relocs, &ebb_table,
- reloc_opcodes)
+ internal_relocs,
+ &relevant_relocs,
+ &ebb_table, reloc_opcodes)
|| !check_section_ebb_reduces (&ebb_table))
{
/* If anything goes wrong or we get unlucky and something does
free_ebb_constraint (&ebb_table);
}
+ free_reloc_range_list (&relevant_relocs);
+
#if DEBUG
- if (relax_info->action_list.head)
+ if (action_list_count (&relax_info->action_list))
print_action_list (stderr, &relax_info->action_list);
#endif
return e->new_address - e->orig_address + offset;
}
+typedef struct xlate_map_context_struct xlate_map_context;
+struct xlate_map_context_struct
+{
+ xlate_map_t *map;
+ xlate_map_entry_t *current_entry;
+ int removed;
+};
+
+/* splay_tree_foreach callback used by build_xlate_map: convert one
+   text action into an offset-translation map entry in CTX.  Actions
+   are visited in increasing offset order.  ORIG_SIZE is the original
+   byte size of the instruction at the action site for size-changing
+   actions (6-byte longcall sequence, 3-byte insn being narrowed,
+   2-byte insn being widened); it extends the current entry past the
+   affected instruction before a new entry is started.  */
+static int
+xlate_map_fn (splay_tree_node node, void *p)
+{
+ text_action *r = (text_action *)node->value;
+ xlate_map_context *ctx = p;
+ unsigned orig_size = 0;
+
+ switch (r->action)
+ {
+ case ta_none:
+ case ta_remove_insn:
+ case ta_convert_longcall:
+ case ta_remove_literal:
+ case ta_add_literal:
+ break;
+ case ta_remove_longcall:
+ orig_size = 6;
+ break;
+ case ta_narrow_insn:
+ orig_size = 3;
+ break;
+ case ta_widen_insn:
+ orig_size = 2;
+ break;
+ case ta_fill:
+ break;
+ }
+ /* Close the current entry at the end of the affected region; only
+    keep it if it is non-empty.  */
+ ctx->current_entry->size =
+ r->offset + orig_size - ctx->current_entry->orig_address;
+ if (ctx->current_entry->size != 0)
+ {
+ ctx->current_entry++;
+ ctx->map->entry_count++;
+ }
+ /* Start the next entry just past the action, shifted by the running
+    count of removed bytes.  */
+ ctx->current_entry->orig_address = r->offset + orig_size;
+ ctx->removed += r->removed_bytes;
+ ctx->current_entry->new_address = r->offset + orig_size - ctx->removed;
+ ctx->current_entry->size = 0;
+ return 0;
+}
/* Build a binary searchable offset translation map from a section's
action list. */
static xlate_map_t *
build_xlate_map (asection *sec, xtensa_relax_info *relax_info)
{
- xlate_map_t *map = (xlate_map_t *) bfd_malloc (sizeof (xlate_map_t));
text_action_list *action_list = &relax_info->action_list;
unsigned num_actions = 0;
- text_action *r;
- int removed;
- xlate_map_entry_t *current_entry;
+ xlate_map_context ctx;
- if (map == NULL)
+ ctx.map = (xlate_map_t *) bfd_malloc (sizeof (xlate_map_t));
+
+ if (ctx.map == NULL)
return NULL;
num_actions = action_list_count (action_list);
- map->entry = (xlate_map_entry_t *)
+ ctx.map->entry = (xlate_map_entry_t *)
bfd_malloc (sizeof (xlate_map_entry_t) * (num_actions + 1));
- if (map->entry == NULL)
+ if (ctx.map->entry == NULL)
{
- free (map);
+ free (ctx.map);
return NULL;
}
- map->entry_count = 0;
+ ctx.map->entry_count = 0;
- removed = 0;
- current_entry = &map->entry[0];
+ ctx.removed = 0;
+ ctx.current_entry = &ctx.map->entry[0];
- current_entry->orig_address = 0;
- current_entry->new_address = 0;
- current_entry->size = 0;
+ ctx.current_entry->orig_address = 0;
+ ctx.current_entry->new_address = 0;
+ ctx.current_entry->size = 0;
- for (r = action_list->head; r != NULL; r = r->next)
- {
- unsigned orig_size = 0;
- switch (r->action)
- {
- case ta_none:
- case ta_remove_insn:
- case ta_convert_longcall:
- case ta_remove_literal:
- case ta_add_literal:
- break;
- case ta_remove_longcall:
- orig_size = 6;
- break;
- case ta_narrow_insn:
- orig_size = 3;
- break;
- case ta_widen_insn:
- orig_size = 2;
- break;
- case ta_fill:
- break;
- }
- current_entry->size =
- r->offset + orig_size - current_entry->orig_address;
- if (current_entry->size != 0)
- {
- current_entry++;
- map->entry_count++;
- }
- current_entry->orig_address = r->offset + orig_size;
- removed += r->removed_bytes;
- current_entry->new_address = r->offset + orig_size - removed;
- current_entry->size = 0;
- }
+ splay_tree_foreach (action_list->tree, xlate_map_fn, &ctx);
- current_entry->size = (bfd_get_section_limit (sec->owner, sec)
- - current_entry->orig_address);
- if (current_entry->size != 0)
- map->entry_count++;
+ ctx.current_entry->size = (bfd_get_section_limit (sec->owner, sec)
+ - ctx.current_entry->orig_address);
+ if (ctx.current_entry->size != 0)
+ ctx.map->entry_count++;
- return map;
+ return ctx.map;
}
asection *sec,
bfd_byte *contents,
Elf_Internal_Rela *internal_relocs,
+ reloc_range_list *relevant_relocs,
const ebb_constraint *constraint,
const xtensa_opcode *reloc_opcodes)
{
unsigned i, j;
+ unsigned n = sec->reloc_count;
Elf_Internal_Rela *irel;
xlate_map_t *xmap = NULL;
bfd_boolean ok = TRUE;
xtensa_relax_info *relax_info;
+ reloc_range_list_entry *entry = NULL;
relax_info = get_xtensa_relax_info (sec);
can still be used. */
}
- for (i = 0; i < sec->reloc_count; i++)
+ if (relevant_relocs && constraint->action_count)
+ {
+ if (!relevant_relocs->ok)
+ {
+ ok = FALSE;
+ n = 0;
+ }
+ else
+ {
+ bfd_vma min_offset, max_offset;
+ min_offset = max_offset = constraint->actions[0].offset;
+
+ for (i = 1; i < constraint->action_count; ++i)
+ {
+ proposed_action *action = &constraint->actions[i];
+ bfd_vma offset = action->offset;
+
+ if (offset < min_offset)
+ min_offset = offset;
+ if (offset > max_offset)
+ max_offset = offset;
+ }
+ reloc_range_list_update_range (relevant_relocs, min_offset,
+ max_offset);
+ n = relevant_relocs->n_list;
+ entry = &relevant_relocs->list_root;
+ }
+ }
+ else
+ {
+ relevant_relocs = NULL;
+ }
+
+ for (i = 0; i < n; i++)
{
r_reloc r_rel;
bfd_vma orig_self_offset, orig_target_offset;
reloc_howto_type *howto;
int self_removed_bytes, target_removed_bytes;
- irel = &internal_relocs[i];
+ if (relevant_relocs)
+ {
+ entry = entry->next;
+ irel = entry->irel;
+ }
+ else
+ {
+ irel = internal_relocs + i;
+ }
r_type = ELF32_R_TYPE (irel->r_info);
howto = &elf_howto_table[r_type];
xtensa_opcode opcode;
int opnum;
- if (reloc_opcodes)
- opcode = reloc_opcodes[i];
- else
- opcode = get_relocation_opcode (abfd, sec, contents, irel);
- if (opcode == XTENSA_UNDEFINED)
+ if (relevant_relocs)
{
- ok = FALSE;
- break;
+ opcode = entry->opcode;
+ opnum = entry->opnum;
}
-
- opnum = get_relocation_opnd (opcode, ELF32_R_TYPE (irel->r_info));
- if (opnum == XTENSA_UNDEFINED)
+ else
{
- ok = FALSE;
- break;
+ if (reloc_opcodes)
+ opcode = reloc_opcodes[relevant_relocs ?
+ (unsigned)(entry - relevant_relocs->reloc) : i];
+ else
+ opcode = get_relocation_opcode (abfd, sec, contents, irel);
+ if (opcode == XTENSA_UNDEFINED)
+ {
+ ok = FALSE;
+ break;
+ }
+
+ opnum = get_relocation_opnd (opcode, ELF32_R_TYPE (irel->r_info));
+ if (opnum == XTENSA_UNDEFINED)
+ {
+ ok = FALSE;
+ break;
+ }
}
if (!pcrel_reloc_fits (opcode, opnum, self_offset, target_offset))
sec_size = bfd_get_section_limit (abfd, sec);
final_static_link =
- (!link_info->relocatable
+ (!bfd_link_relocatable (link_info)
&& !elf_hash_table (link_info)->dynamic_sections_created);
/* The placement algorithm first checks to see if the literal is
/* For relocatable links, do not try to move literals. To do it
correctly might increase the number of relocations in an input
section making the default relocatable linking fail. */
- if (!link_info->relocatable && !literal_placed
+ if (!bfd_link_relocatable (link_info) && !literal_placed
&& values->has_last_loc && !(*last_loc_is_prev_p))
{
asection *target_sec = r_reloc_get_section (&values->last_loc);
/* Check all of the PC-relative relocations to make sure they still fit. */
relocs_fit = check_section_ebb_pcrels_fit (target_sec->owner, target_sec,
target_sec_cache->contents,
- target_sec_cache->relocs,
+ target_sec_cache->relocs, NULL,
&ebb_table, NULL);
if (!relocs_fit)
\f
/* Second relaxation pass. */
+static int
+action_remove_bytes_fn (splay_tree_node node, void *p)
+{
+ bfd_size_type *final_size = p;
+ text_action *action = (text_action *)node->value;
+
+ *final_size -= action->removed_bytes;
+ return 0;
+}
+
/* Modify all of the relocations to point to the right spot, and if this
is a relaxable section, delete the unwanted literals and fix the
section size. */
internal_relocs = retrieve_internal_relocs (abfd, sec,
link_info->keep_memory);
- if (!internal_relocs && !relax_info->action_list.head)
+ if (!internal_relocs && !action_list_count (&relax_info->action_list))
return TRUE;
contents = retrieve_contents (abfd, sec, link_info->keep_memory);
if (elf_hash_table (link_info)->dynamic_sections_created)
shrink_dynamic_reloc_sections (link_info, abfd, sec, irel);
irel->r_info = ELF32_R_INFO (0, R_XTENSA_NONE);
- irel->r_offset = offset_with_removed_text
+ irel->r_offset = offset_with_removed_text_map
(&relax_info->action_list, irel->r_offset);
continue;
}
}
/* Update the action so that the code that moves
the contents will do the right thing. */
+		  /* ta_remove_longcall and ta_remove_insn actions are
+		     grouped together in the tree, as are ta_convert_longcall
+		     and ta_none, so that the changes below can be made
+		     without removing the action from the tree and
+		     reinserting it.  */
+
if (action->action == ta_remove_longcall)
action->action = ta_remove_insn;
else
}
}
- source_offset = offset_with_removed_text
+ source_offset = offset_with_removed_text_map
(&relax_info->action_list, irel->r_offset);
irel->r_offset = source_offset;
}
break;
}
- new_end_offset = offset_with_removed_text
+ new_end_offset = offset_with_removed_text_map
(&target_relax_info->action_list,
r_rel.target_offset + diff_value);
diff_value = new_end_offset - new_reloc.target_offset;
if ((relax_info->is_relaxable_literal_section
|| relax_info->is_relaxable_asm_section)
- && relax_info->action_list.head)
+ && action_list_count (&relax_info->action_list))
{
/* Walk through the planned actions and build up a table
of move, copy and fill records. Use the move, copy and
fill records to perform the actions once. */
- int removed = 0;
bfd_size_type final_size, copy_size, orig_insn_size;
bfd_byte *scratch = NULL;
bfd_byte *dup_contents = NULL;
bfd_vma orig_dot_vo = 0; /* Virtual offset from orig_dot. */
bfd_vma dup_dot = 0;
- text_action *action = relax_info->action_list.head;
+ text_action *action;
final_size = sec->size;
- for (action = relax_info->action_list.head; action;
- action = action->next)
- {
- final_size -= action->removed_bytes;
- }
+ splay_tree_foreach (relax_info->action_list.tree,
+ action_remove_bytes_fn, &final_size);
scratch = (bfd_byte *) bfd_zmalloc (final_size);
dup_contents = (bfd_byte *) bfd_zmalloc (final_size);
print_action_list (stderr, &relax_info->action_list);
#endif
- for (action = relax_info->action_list.head; action;
- action = action->next)
+ for (action = action_first (&relax_info->action_list); action;
+ action = action_next (&relax_info->action_list, action))
{
virtual_action = FALSE;
if (action->offset > orig_dot)
break;
}
- removed += action->removed_bytes;
BFD_ASSERT (dup_dot <= final_size);
BFD_ASSERT (orig_dot <= orig_size);
}
xtensa_relax_info *relax_info;
removed_literal *removed;
bfd_vma target_offset, base_offset;
- text_action *act;
*new_rel = *orig_rel;
offset. */
base_offset = r_reloc_get_target_offset (new_rel) - new_rel->rela.r_addend;
- act = relax_info->action_list.head;
if (base_offset <= target_offset)
{
- int base_removed = removed_by_actions (&act, base_offset, FALSE);
- int addend_removed = removed_by_actions (&act, target_offset, FALSE);
+ int base_removed = removed_by_actions_map (&relax_info->action_list,
+ base_offset, FALSE);
+ int addend_removed = removed_by_actions_map (&relax_info->action_list,
+ target_offset, FALSE) -
+ base_removed;
+
new_rel->target_offset = target_offset - base_removed - addend_removed;
new_rel->rela.r_addend -= addend_removed;
}
else
{
/* Handle a negative addend. The base offset comes first. */
- int tgt_removed = removed_by_actions (&act, target_offset, FALSE);
- int addend_removed = removed_by_actions (&act, base_offset, FALSE);
+ int tgt_removed = removed_by_actions_map (&relax_info->action_list,
+ target_offset, FALSE);
+ int addend_removed = removed_by_actions_map (&relax_info->action_list,
+ base_offset, FALSE) -
+ tgt_removed;
+
new_rel->target_offset = target_offset - tgt_removed;
new_rel->rela.r_addend += addend_removed;
}
if ((r_type == R_XTENSA_32 || r_type == R_XTENSA_PLT)
&& (input_section->flags & SEC_ALLOC) != 0
- && (dynamic_symbol || info->shared))
+ && (dynamic_symbol || bfd_link_pic (info)))
{
asection *srel;
bfd_boolean is_plt = FALSE;
bfd_put_32 (abfd, lit->value, contents + offset);
/* Currently, we cannot move relocations during a relocatable link. */
- BFD_ASSERT (!link_info->relocatable);
+ BFD_ASSERT (!bfd_link_relocatable (link_info));
fix = reloc_bfd_fix_init (sec, offset, r_type,
r_reloc_get_section (r_rel),
r_rel->target_offset + r_rel->virtual_offset,
bfd_vma old_offset = val.r_rel.target_offset;
bfd_vma new_offset;
long old_size, new_size;
- text_action *act = target_relax_info->action_list.head;
- new_offset = old_offset -
- removed_by_actions (&act, old_offset, FALSE);
+ int removed_by_old_offset =
+ removed_by_actions_map (&target_relax_info->action_list,
+ old_offset, FALSE);
+ new_offset = old_offset - removed_by_old_offset;
/* Assert that we are not out of bounds. */
old_size = bfd_get_32 (abfd, size_p);
/* Recompute the new_offset, but this time don't
include any fill inserted by relaxation. */
- act = target_relax_info->action_list.head;
- new_offset = old_offset -
- removed_by_actions (&act, old_offset, TRUE);
+ removed_by_old_offset =
+ removed_by_actions_map (&target_relax_info->action_list,
+ old_offset, TRUE);
+ new_offset = old_offset - removed_by_old_offset;
/* If it is not unreachable and we have not yet
seen an unreachable at this address, place it
}
}
else
- new_size -=
- removed_by_actions (&act, old_offset + old_size, TRUE);
+ {
+ int removed_by_old_offset_size =
+ removed_by_actions_map (&target_relax_info->action_list,
+ old_offset + old_size, TRUE);
+ new_size -= removed_by_old_offset_size - removed_by_old_offset;
+ }
if (new_size != old_size)
{
finish_dynamic_sections() but at that point it's too late to
reclaim the space in the output section, so we do this twice. */
- if (internal_relocs && (!link_info->relocatable
+ if (internal_relocs && (!bfd_link_relocatable (link_info)
|| xtensa_is_littable_section (sec)))
{
Elf_Internal_Rela *last_irel = NULL;
if (isym->st_shndx == sec_shndx)
{
- text_action *act = relax_info->action_list.head;
bfd_vma orig_addr = isym->st_value;
+ int removed = removed_by_actions_map (&relax_info->action_list,
+ orig_addr, FALSE);
- isym->st_value -= removed_by_actions (&act, orig_addr, FALSE);
-
+ isym->st_value -= removed;
if (ELF32_ST_TYPE (isym->st_info) == STT_FUNC)
isym->st_size -=
- removed_by_actions (&act, orig_addr + isym->st_size, FALSE);
+ removed_by_actions_map (&relax_info->action_list,
+ orig_addr + isym->st_size, FALSE) -
+ removed;
}
}
|| sym_hash->root.type == bfd_link_hash_defweak)
&& sym_hash->root.u.def.section == sec)
{
- text_action *act = relax_info->action_list.head;
bfd_vma orig_addr = sym_hash->root.u.def.value;
+ int removed = removed_by_actions_map (&relax_info->action_list,
+ orig_addr, FALSE);
- sym_hash->root.u.def.value -=
- removed_by_actions (&act, orig_addr, FALSE);
+ sym_hash->root.u.def.value -= removed;
if (sym_hash->type == STT_FUNC)
sym_hash->size -=
- removed_by_actions (&act, orig_addr + sym_hash->size, FALSE);
+ removed_by_actions_map (&relax_info->action_list,
+ orig_addr + sym_hash->size, FALSE) -
+ removed;
}
}