Pinpoint plugin type logging

York Jasper Niebuhr 2025-10-24 20:50:47 +02:00
parent bfb64990dc
commit b725c35be5
3 changed files with 129 additions and 186 deletions

View File

@@ -27,7 +27,7 @@ private:
    tree m_main_variant;
    // Fields are identified by their offsets
    std::map<std::size_t, Field> m_field;
    std::map<std::size_t, Field> m_fields;
public:
    TargetType(const TargetType& other) = default;
    TargetType& operator=(const TargetType& other) = default;
@@ -40,6 +40,7 @@ public:
    bool valid() const;
    bool fields() const;
    std::string name() const;
    const Field* field(std::size_t off, bool exact = true) const;
    static void add(tree t);
    static std::size_t count();
@@ -47,7 +48,7 @@ public:
    static const TargetType* find(UID uid); // O(1)
private:
    friend void on_finish_type(void*, void*);
    bool fetch_fields();
    bool fetch_fields(bool redo = false);
    static TargetType* find_mutable(tree t);
};

View File

@@ -1,5 +1,6 @@
#include <stage0.h>
#include <unordered_map>
#include <functional>
#include <safe-langhooks.h>
@@ -49,6 +50,27 @@ std::string TargetType::name() const {
    return { anonymous_name };
}
const TargetType::Field* TargetType::field(std::size_t off, bool exact) const {
    if (!valid() || !(m_flags & FLAG_FIELDS))
        return nullptr;
    auto it = m_fields.upper_bound(off); // Next element
    if (it == m_fields.begin())
        return nullptr;
    --it; // Element of interest
    const TargetType::Field& maybe = it->second;
    if (off >= maybe.offset + maybe.size)
        return nullptr;
    if (exact && maybe.offset != off)
        return nullptr;
    return &maybe;
}
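
The lookup above is the classic upper_bound-then-decrement scan of an interval map: the candidate is the last entry whose key is not greater than off, and it matches only if its half-open range [offset, offset + size) still covers off. A standalone sketch of the same pattern, with a minimal Field stand-in rather than the plugin's class:

#include <cstddef>
#include <map>

struct Field { std::size_t offset = 0, size = 0; };

// Mirrors TargetType::field(): returns the entry covering `off`, or
// nullptr; with exact == true the entry must also start at `off`.
static const Field* lookup(const std::map<std::size_t, Field>& fields,
                           std::size_t off, bool exact = true) {
    auto it = fields.upper_bound(off); // first entry starting past off
    if (it == fields.begin())
        return nullptr;                // nothing starts at or before off
    --it;                              // candidate covering off
    if (off >= it->second.offset + it->second.size)
        return nullptr;                // off lies past the candidate's end
    if (exact && it->second.offset != off)
        return nullptr;                // inside the field, not at its start
    return &it->second;
}

With entries {0,4} and {4,2}, lookup(m, 5, false) yields the second field, while lookup(m, 5, true) yields nullptr because 5 is not a field start.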
void TargetType::add(tree t) {
    if (find(t) != nullptr)
        return;
@@ -99,8 +121,109 @@ const TargetType* TargetType::find(UID uid) {
    return &it->second;
}
bool TargetType::fetch_fields() {
    // TODO
static bool foreach_record_field(tree t, std::function<bool(const TargetType::Field&)> callback) {
    if (!t || TREE_CODE(t) != RECORD_TYPE)
        return false;
    if (!COMPLETE_TYPE_P(t))
        return false;
    for (tree field_decl = TYPE_FIELDS(t); field_decl; field_decl = DECL_CHAIN(field_decl)) {
        if (TREE_CODE(field_decl) != FIELD_DECL)
            continue;
        HOST_WIDE_INT field_byte_offset = 0;
        if (TREE_CODE(DECL_FIELD_OFFSET(field_decl)) == INTEGER_CST)
            field_byte_offset = tree_to_uhwi(DECL_FIELD_OFFSET(field_decl));
        HOST_WIDE_INT field_bit_offset = 0;
        if (TREE_CODE(DECL_FIELD_BIT_OFFSET(field_decl)) == INTEGER_CST)
            field_bit_offset = tree_to_uhwi(DECL_FIELD_BIT_OFFSET(field_decl));
        HOST_WIDE_INT field_bit_offset_bytes = field_bit_offset / 8;
        field_byte_offset += field_bit_offset_bytes;
        field_bit_offset -= field_bit_offset_bytes * 8;
        HOST_WIDE_INT field_bit_size = 0;
        if (TREE_CODE(DECL_SIZE(field_decl)) == INTEGER_CST)
            field_bit_size = tree_to_uhwi(DECL_SIZE(field_decl));
        bool is_bitfield = (DECL_BIT_FIELD_TYPE(field_decl) != NULL_TREE);
        bool is_multibyte = (field_bit_size % 8 == 0 && field_bit_offset == 0);
        bool is_dangerous = (is_bitfield || !is_multibyte);
        HOST_WIDE_INT field_offset_bit_size = field_bit_offset + field_bit_size;
        HOST_WIDE_INT effective_field_size = field_offset_bit_size / 8;
        if (field_offset_bit_size % 8 != 0)
            effective_field_size += 1;
        TargetType::Field field;
        field.offset = static_cast<decltype(field.offset)>(field_byte_offset);
        field.size = static_cast<decltype(field.size)>(effective_field_size);
        field.flags = (is_dangerous ? TargetType::Field::FLAG_DANGEROUS : 0);
        if (!callback(field))
            return false;
    }
    return true;
}
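
GCC reports a field's position as DECL_FIELD_OFFSET (bytes) plus DECL_FIELD_BIT_OFFSET (bits), so the loop folds whole bytes of the bit offset into the byte offset and rounds the occupied span up to whole bytes. A self-contained rehearsal of that arithmetic with assumed input values (HOST_WIDE_INT replaced by long long):

#include <cstdio>

int main() {
    // Assumed values as GCC might report them for a 3-bit bitfield:
    // DECL_FIELD_OFFSET = 4 bytes, DECL_FIELD_BIT_OFFSET = 11 bits, DECL_SIZE = 3 bits
    long long byte_off = 4, bit_off = 11, bit_size = 3;
    byte_off += bit_off / 8;                                 // fold whole bytes in -> 5
    bit_off %= 8;                                            // residual bits -> 3
    long long end_bits = bit_off + bit_size;                 // 6 bits occupied
    long long eff_size = end_bits / 8 + (end_bits % 8 != 0); // round up -> 1 byte
    bool not_multibyte = (bit_off != 0 || bit_size % 8 != 0); // the loop's !is_multibyte test
    std::printf("offset=%lld size=%lld dangerous=%d\n", byte_off, eff_size, not_multibyte);
    return 0; // prints: offset=5 size=1 dangerous=1
}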
static bool field_map_add(std::map<std::size_t, TargetType::Field>& map, const TargetType::Field& field) {
    TargetType::Field tmp_field;
    tmp_field.offset = field.offset;
    tmp_field.size = (field.size == 0 ? 1 : field.size);
    tmp_field.flags = (field.size == 0 ? TargetType::Field::FLAG_DANGEROUS : 0) | field.flags;
    // Overlaps are dangerous -> erase overlapping entries and merge them into this field
    auto overlap_end = map.lower_bound(tmp_field.offset + tmp_field.size);
    for (auto it = std::make_reverse_iterator(overlap_end); it != map.rend();) {
        const TargetType::Field& existing_field = it->second;
        if (existing_field.offset + existing_field.size <= tmp_field.offset)
            break;
        auto combined_end = std::max<decltype(tmp_field.offset)>(tmp_field.offset + tmp_field.size,
                                                                 existing_field.offset + existing_field.size);
        auto combined_offset = std::min<decltype(tmp_field.offset)>(tmp_field.offset, existing_field.offset);
        auto combined_size = combined_end - combined_offset;
        tmp_field.flags |= (existing_field.flags | TargetType::Field::FLAG_DANGEROUS);
        tmp_field.offset = combined_offset;
        tmp_field.size = combined_size;
        // Erase the overlapping field
        auto tmp_forward = std::prev(it.base());
        tmp_forward = map.erase(tmp_forward);
        it = std::make_reverse_iterator(tmp_forward);
    }
    map.emplace(tmp_field.offset, tmp_field);
    return true;
}
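
Overlaps typically come from unions, where several members share offsets; the loop above collapses every entry intersecting the incoming field into one widened entry and marks it FLAG_DANGEROUS. A standalone sketch of the merge with a minimal Field stand-in (names assumed, not the plugin's):

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <iterator>
#include <map>

struct Field { std::size_t offset = 0, size = 0; int flags = 0; };
static constexpr int FLAG_DANGEROUS = 1;

static void add(std::map<std::size_t, Field>& map, Field f) {
    if (f.size == 0) { f.size = 1; f.flags |= FLAG_DANGEROUS; } // zero-sized entries are suspect
    auto overlap_end = map.lower_bound(f.offset + f.size); // first entry at or past the new end
    for (auto it = std::make_reverse_iterator(overlap_end); it != map.rend();) {
        const Field& old = it->second;
        if (old.offset + old.size <= f.offset)
            break; // no more overlaps to the left
        std::size_t hi = std::max(f.offset + f.size, old.offset + old.size);
        f.flags |= old.flags | FLAG_DANGEROUS; // merged ranges are dangerous
        f.offset = std::min(f.offset, old.offset);
        f.size = hi - f.offset;
        it = std::make_reverse_iterator(map.erase(std::prev(it.base())));
    }
    map.emplace(f.offset, f);
}

int main() {
    std::map<std::size_t, Field> m;
    add(m, {0, 4, 0}); // e.g. the int arm of a union
    add(m, {0, 8, 0}); // overlapping double arm -> merged and flagged
    std::printf("entries=%zu size=%zu flags=%d\n",
                m.size(), m.begin()->second.size, m.begin()->second.flags);
    return 0; // prints: entries=1 size=8 flags=1
}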
bool TargetType::fetch_fields(bool redo) {
    if (!valid())
        return false;
    if ((m_flags & FLAG_FIELDS) != 0 && !redo)
        return true;
    m_flags &= ~FLAG_FIELDS;
    m_fields.clear();
    std::map<std::size_t, Field> tmp_fields;
    auto per_field_callback = [&tmp_fields](const Field& field) -> bool {
        return field_map_add(tmp_fields, field);
    };
    if (!foreach_record_field(m_main_variant, per_field_callback))
        return false;
    m_fields = std::move(tmp_fields);
    m_flags |= FLAG_FIELDS;
    return true;
}
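
fetch_fields clears the old state up front and builds into a temporary map, so a failed re-fetch leaves the type in a consistent empty state rather than half-populated. The only caller with access is the on_finish_type friend declared in the header; a plausible hook body, sketched here as an assumption (this commit only shows the declaration):

// Hypothetical shape of the friend hook; not part of this diff.
void on_finish_type(void* event_data, void*) {
    tree t = static_cast<tree>(event_data);
    if (TargetType* tt = TargetType::find_mutable(t)) // friend may use the private lookup
        tt->fetch_fields(/*redo=*/true); // re-scan now that the layout is final
}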

View File

@@ -34,187 +34,6 @@ int plugin_is_GPL_compatible;
#define UNSPEC_SPSLR_OFFSETOF 1042
#endif
// Recognize __attribute__((spslr)) on structures and track their layout
struct Member {
    using OFF = unsigned long;
    using SIZE = unsigned long;
    static constexpr int FLAG_DANGERZONE = 1;
    OFF offset = 0;
    SIZE size = 0;
    int flags = 0;
};
struct Target {
    using UID = unsigned long;
    UID uid;
    std::map<Member::OFF, Member> members;
    void log_member(const Member& member);
    const Member* get_member(Member::OFF offset) const;
};
struct TargetTree {
    tree t;
    Target::UID uid;
};
static Target::UID next_target_uid = 0;
static std::unordered_map<Target::UID, Target> targets;
static std::list<TargetTree> target_trees;
static std::unordered_set<tree> target_markings;
static bool find_target_log(tree t, Target::UID& uid) {
    for (const TargetTree& tt : target_trees) {
        if (lang_hooks.types_compatible_p(tt.t, t)) {
            uid = tt.uid;
            return true;
        }
    }
    return false;
}
void Target::log_member(const Member& member) {
    Member tmp_member;
    tmp_member.offset = member.offset;
    tmp_member.size = (member.size == 0 ? 1 : member.size);
    tmp_member.flags = (member.size == 0 ? Member::FLAG_DANGERZONE : 0) | member.flags;
    // Overlaps are dangerous -> remove and integrate into member
    auto overlap_end = members.lower_bound(tmp_member.offset + tmp_member.size);
    for (auto it = std::make_reverse_iterator(overlap_end); it != members.rend();) {
        const Member& existing_member = it->second;
        if (existing_member.offset + existing_member.size <= tmp_member.offset)
            break;
        Member::OFF combined_end = std::max<Member::OFF>(tmp_member.offset + tmp_member.size,
                                                         existing_member.offset + existing_member.size);
        Member::OFF combined_offset = std::min<Member::OFF>(tmp_member.offset, existing_member.offset);
        Member::SIZE combined_size = combined_end - combined_offset;
        tmp_member.flags |= (existing_member.flags | Member::FLAG_DANGERZONE);
        tmp_member.offset = combined_offset;
        tmp_member.size = combined_size;
        // Erase overlapping member
        auto tmp_forward = std::prev(it.base());
        tmp_forward = members.erase(tmp_forward);
        it = std::make_reverse_iterator(tmp_forward);
    }
    members.emplace(tmp_member.offset, tmp_member);
}
const Member* Target::get_member(Member::OFF offset) const {
    auto it = members.find(offset);
    if (it == members.end())
        return nullptr;
    return &it->second;
}
static void log_target(tree node) {
    node = TYPE_MAIN_VARIANT(node);
    if (!node)
        return;
    if (TREE_CODE(node) != RECORD_TYPE)
        return;
    auto marking = target_markings.find(node);
    if (marking == target_markings.end())
        return;
    Target::UID existing_target;
    if (find_target_log(node, existing_target))
        return;
    Target::UID tuid = next_target_uid++;
    TargetTree new_tt;
    new_tt.t = node;
    new_tt.uid = tuid;
    target_trees.push_back(new_tt);
    targets.emplace(tuid, Target{});
    Target& target = targets.at(tuid);
    target.uid = tuid;
    // Log all members
    for (tree field = TYPE_FIELDS(node); field; field = DECL_CHAIN(field)) {
        if (TREE_CODE(field) != FIELD_DECL)
            continue;
        HOST_WIDE_INT field_byte_offset = 0;
        if (TREE_CODE(DECL_FIELD_OFFSET(field)) == INTEGER_CST)
            field_byte_offset = tree_to_uhwi(DECL_FIELD_OFFSET(field));
        HOST_WIDE_INT field_bit_offset = 0;
        if (TREE_CODE(DECL_FIELD_BIT_OFFSET(field)) == INTEGER_CST)
            field_bit_offset = tree_to_uhwi(DECL_FIELD_BIT_OFFSET(field));
        HOST_WIDE_INT field_bit_offset_bytes = field_bit_offset / 8;
        field_byte_offset += field_bit_offset_bytes;
        field_bit_offset -= field_bit_offset_bytes * 8;
        HOST_WIDE_INT field_bit_size = 0;
        if (TREE_CODE(DECL_SIZE(field)) == INTEGER_CST)
            field_bit_size = tree_to_uhwi(DECL_SIZE(field));
        bool is_bitfield = (DECL_BIT_FIELD_TYPE(field) != NULL_TREE);
        bool is_multibyte = (field_bit_size % 8 == 0 && field_bit_offset == 0);
        bool is_dangerous = (is_bitfield || !is_multibyte);
        HOST_WIDE_INT field_offset_bit_size = field_bit_offset + field_bit_size;
        HOST_WIDE_INT effective_field_size = field_offset_bit_size / 8;
        if (field_offset_bit_size % 8 != 0)
            effective_field_size += 1;
        Member member;
        member.offset = (Member::OFF)field_byte_offset;
        member.size = (Member::SIZE)effective_field_size;
        member.flags = (is_dangerous ? Member::FLAG_DANGERZONE : 0);
        target.log_member(member);
    }
}
static tree log_target_attribute(tree* node, tree name, tree args, int flags, bool* no_add_attrs) {
    if (!node)
        return NULL_TREE;
    if (TREE_CODE(*node) != RECORD_TYPE)
        return NULL_TREE;
    tree type_main_variant = TYPE_MAIN_VARIANT(*node);
    if (!type_main_variant)
        return NULL_TREE;
    target_markings.insert(type_main_variant);
    return NULL_TREE;
}
static struct attribute_spec spslr_attribute = {
    "spslr", 0, 0, false, false, false, false, log_target_attribute, NULL
};
void on_register_attributes(void* event_data, void* data) {
    register_attribute(&spslr_attribute);
}
static void on_type_complete(void* event_data, void* user_data) {
    tree type = (tree)event_data;
    log_target(type);
}
// Early hook to make COMPONENT_REF nodes survive the front end
struct SPSLROffsetofCallData {