#include <stage0.h>

#include <algorithm>
#include <cstddef>
#include <functional>
#include <iterator>
#include <map>
#include <string>
#include <unordered_map>
#include <utility>

#include <safe-langhooks.h>
|
|
|
|
static UID next_uid = 0;
|
|
static std::unordered_map<UID, TargetType> targets;
|
|
|
|
static tree get_record_main_variant(tree t) {
|
|
if (!t || TREE_CODE(t) != RECORD_TYPE)
|
|
return NULL_TREE;
|
|
|
|
return TYPE_MAIN_VARIANT(t);
|
|
}
|
|
|
|
TargetType::TargetType(tree t) : m_uid{ UID_INVALID }, m_flags{ 0 } {
|
|
if (!(m_main_variant = get_record_main_variant(t)))
|
|
return;
|
|
|
|
m_flags |= FLAG_MAIN_VARIANT;
|
|
}
|
|
|
|
TargetType::~TargetType() {}
|
|
|
|
bool TargetType::valid() const {
|
|
return (m_flags & FLAG_MAIN_VARIANT) != 0;
|
|
}
|
|
|
|
bool TargetType::fields() const {
|
|
return (m_flags & FLAG_FIELDS) != 0;
|
|
}
|
|
|
|
std::string TargetType::name() const {
|
|
const char* error_name = "<error>";
|
|
const char* anonymous_name = "<anonymous>";
|
|
|
|
if (!valid())
|
|
return { error_name };
|
|
|
|
tree name_tree = TYPE_NAME(m_main_variant);
|
|
if (!name_tree)
|
|
return { anonymous_name };
|
|
|
|
if (TREE_CODE(name_tree) == TYPE_DECL && DECL_NAME(name_tree))
|
|
return { IDENTIFIER_POINTER(DECL_NAME(name_tree)) };
|
|
else if (TREE_CODE(name_tree) == IDENTIFIER_NODE)
|
|
return { IDENTIFIER_POINTER(name_tree) };
|
|
|
|
return { anonymous_name };
|
|
}
|
|
|
|
const TargetType::Field* TargetType::field(std::size_t off, bool exact) const {
|
|
if (!valid() || !(m_flags & FLAG_FIELDS))
|
|
return nullptr;
|
|
|
|
auto it = m_fields.upper_bound(off); // Next element
|
|
if (it == m_fields.begin())
|
|
return nullptr;
|
|
|
|
--it; // Element of interest
|
|
|
|
const TargetType::Field& maybe = it->second;
|
|
|
|
if (off >= maybe.offset + maybe.size)
|
|
return nullptr;
|
|
|
|
if (exact && maybe.offset != off)
|
|
return nullptr;
|
|
|
|
return &maybe;
|
|
}
|
|
|
|
void TargetType::add(tree t) {
|
|
if (find(t) != nullptr)
|
|
return;
|
|
|
|
TargetType tmp { t };
|
|
if (!tmp.valid())
|
|
return;
|
|
|
|
tmp.m_uid = next_uid++;
|
|
targets.emplace(tmp.m_uid, tmp);
|
|
}
|
|
|
|
std::size_t TargetType::count() {
|
|
return targets.size();
|
|
}
|
|
|
|
const TargetType* TargetType::find(tree t) {
|
|
tree main_variant = get_record_main_variant(t);
|
|
if (!main_variant)
|
|
return nullptr;
|
|
|
|
for (const auto& [uid, target] : targets) {
|
|
if (lang_hooks.types_compatible_p(main_variant, target.m_main_variant))
|
|
return ⌖
|
|
}
|
|
|
|
return nullptr;
|
|
}
|
|
|
|
TargetType* TargetType::find_mutable(tree t) {
|
|
tree main_variant = get_record_main_variant(t);
|
|
if (!main_variant)
|
|
return nullptr;
|
|
|
|
for (auto& [uid, target] : targets) {
|
|
if (lang_hooks.types_compatible_p(main_variant, target.m_main_variant))
|
|
return ⌖
|
|
}
|
|
|
|
return nullptr;
|
|
}
|
|
|
|
const TargetType* TargetType::find(UID uid) {
|
|
auto it = targets.find(uid);
|
|
if (it == targets.end())
|
|
return nullptr;
|
|
|
|
return &it->second;
|
|
}
|
|
|
|
static bool foreach_record_field(tree t, std::function<bool(const TargetType::Field&)> callback) {
|
|
if (!t || TREE_CODE(t) != RECORD_TYPE)
|
|
return false;
|
|
|
|
if (!COMPLETE_TYPE_P(t))
|
|
return false;
|
|
|
|
for (tree field_decl = TYPE_FIELDS(t); field_decl; field_decl = DECL_CHAIN(field_decl)) {
|
|
if (TREE_CODE(field_decl) != FIELD_DECL)
|
|
continue;
|
|
|
|
HOST_WIDE_INT field_byte_offset = 0;
|
|
if (TREE_CODE(DECL_FIELD_OFFSET(field_decl)) == INTEGER_CST)
|
|
field_byte_offset = tree_to_uhwi(DECL_FIELD_OFFSET(field_decl));
|
|
|
|
HOST_WIDE_INT field_bit_offset = 0;
|
|
if (TREE_CODE(DECL_FIELD_BIT_OFFSET(field_decl)) == INTEGER_CST)
|
|
field_bit_offset = tree_to_uhwi(DECL_FIELD_BIT_OFFSET(field_decl));
|
|
|
|
HOST_WIDE_INT field_bit_offset_bytes = field_bit_offset / 8;
|
|
field_byte_offset += field_bit_offset_bytes;
|
|
field_bit_offset -= field_bit_offset_bytes * 8;
|
|
|
|
HOST_WIDE_INT field_bit_size = 0;
|
|
if (TREE_CODE(DECL_SIZE(field_decl)) == INTEGER_CST)
|
|
field_bit_size = tree_to_uhwi(DECL_SIZE(field_decl));
|
|
|
|
bool is_bitfield = (DECL_BIT_FIELD_TYPE(field_decl) != NULL_TREE);
|
|
bool is_multibyte = (field_bit_size % 8 == 0 && field_bit_offset == 0);
|
|
bool is_dangerous = (is_bitfield || !is_multibyte);
|
|
|
|
HOST_WIDE_INT field_offset_bit_size = field_bit_offset + field_bit_size;
|
|
HOST_WIDE_INT effective_field_size = field_offset_bit_size / 8;
|
|
if (field_offset_bit_size % 8 != 0)
|
|
effective_field_size += 1;
|
|
|
|
TargetType::Field field;
|
|
field.offset = static_cast<decltype(field.offset)>(field_byte_offset);
|
|
field.size = static_cast<decltype(field.size)>(effective_field_size);
|
|
field.flags = (is_dangerous ? TargetType::Field::FLAG_DANGEROUS : 0);
|
|
|
|
if (!callback(field))
|
|
return false;
|
|
}
|
|
|
|
return true;
|
|
}
|
|
|
|
static bool field_map_add(std::map<std::size_t, TargetType::Field>& map, const TargetType::Field& field) {
|
|
TargetType::Field tmp_field;
|
|
tmp_field.offset = field.offset;
|
|
tmp_field.size = (field.size == 0 ? 1 : field.size);
|
|
tmp_field.flags = (field.size == 0 ? TargetType::Field::FLAG_DANGEROUS : 0) | field.flags;
|
|
|
|
// Overlaps are dangerous -> remove and integrate into member
|
|
auto overlap_end = map.lower_bound(tmp_field.offset + tmp_field.size);
|
|
for (auto it = std::make_reverse_iterator(overlap_end); it != map.rend();) {
|
|
const TargetType::Field& existing_field = it->second;
|
|
|
|
if (existing_field.offset + existing_field.size <= tmp_field.offset)
|
|
break;
|
|
|
|
auto combined_end = std::max<decltype(tmp_field.offset)>(tmp_field.offset + tmp_field.size,
|
|
existing_field.offset + existing_field.size);
|
|
|
|
auto combined_offset = std::min<decltype(tmp_field.offset)>(tmp_field.offset, existing_field.offset);
|
|
auto combined_size = combined_end - combined_offset;
|
|
|
|
tmp_field.flags |= (existing_field.flags | TargetType::Field::FLAG_DANGEROUS);
|
|
tmp_field.offset = combined_offset;
|
|
tmp_field.size = combined_size;
|
|
|
|
// Erase overlapping member
|
|
auto tmp_forward = std::prev(it.base());
|
|
tmp_forward = map.erase(tmp_forward);
|
|
it = std::make_reverse_iterator(tmp_forward);
|
|
}
|
|
|
|
map.emplace(tmp_field.offset, tmp_field);
|
|
return true;
|
|
}
|
|
|
|
bool TargetType::fetch_fields(bool redo) {
|
|
if (!valid())
|
|
return false;
|
|
|
|
if ((m_flags & FLAG_FIELDS) != 0 && !redo)
|
|
return true;
|
|
|
|
m_flags &= ~FLAG_FIELDS;
|
|
m_fields.clear();
|
|
|
|
std::map<std::size_t, Field> tmp_fields;
|
|
|
|
auto per_field_callback = [&tmp_fields](const Field& field) -> bool {
|
|
return field_map_add(tmp_fields, field);
|
|
};
|
|
|
|
if (!foreach_record_field(m_main_variant, per_field_callback))
|
|
return false;
|
|
|
|
m_fields = std::move(tmp_fields);
|
|
m_flags |= FLAG_FIELDS;
|
|
return true;
|
|
}
|
|
|