Fix generic_ivar_set_shape_ivptr for table rebuild

[Bug #21438]

Previously, GC could trigger a rebuild of the generic ivar st_table in
the middle of calling the st_update callback. This could cause entries
to be reallocated or rearranged, so the update would be applied to the
wrong entry.
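
As a minimal self-contained sketch of that failure mode (the toy_*
names below are invented for illustration, not st.c's API): an update
routine that holds a pointer into the entry array across a user
callback will write through a dangling pointer if the callback
reallocates that array.

#include <stdio.h>
#include <stdlib.h>

/* Toy growable table, invented for illustration; it is not st_table. */
struct toy_entry { int key; int value; };
struct toy_table { struct toy_entry *entries; size_t len, capa; };

static struct toy_table *g_tab; /* reachable from the callback, much as the table is reachable from GC */

static void toy_rebuild(struct toy_table *t)
{
    /* Reallocating may move every entry, invalidating outstanding pointers. */
    t->capa *= 2;
    t->entries = realloc(t->entries, t->capa * sizeof(*t->entries));
}

/* Unsafe update: keeps a pointer into the entry array across the callback. */
static void toy_update(struct toy_table *t, int key, void (*func)(int *value))
{
    for (size_t i = 0; i < t->len; i++) {
        if (t->entries[i].key == key) {
            struct toy_entry *entry = &t->entries[i]; /* captured before the callback */
            func(&entry->value);                      /* callback may rebuild the table */
            entry->value++;                           /* may write through a dangling pointer */
            return;
        }
    }
}

static void callback_that_rebuilds(int *value)
{
    *value = 42;
    toy_rebuild(g_tab); /* stands in for an allocation that triggers GC and a rebuild */
}

int main(void)
{
    struct toy_table tab = { malloc(4 * sizeof(struct toy_entry)), 0, 4 };
    g_tab = &tab;
    tab.entries[tab.len++] = (struct toy_entry){ .key = 1, .value = 0 };

    toy_update(&tab, 1, callback_that_rebuilds);
    /* If realloc moved the entries, the ++ above hit freed memory and was lost. */
    printf("value: %d\n", tab.entries[0].value);
    free(tab.entries);
    return 0;
}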

This commit adds an assertion to make that case easier to detect, and
replaces the st_update with a separate st_lookup and st_insert.
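
A rough standalone sketch of the assertion half of that change, with
invented mini_* names rather than st.c's real code:

#include <assert.h>

/* Invented miniature: the table counts rebuilds, and the update routine
 * asserts that the user callback left the count untouched, since a rebuild
 * would invalidate the entry pointer it is still holding. */
struct mini_table {
    unsigned int rebuilds_num; /* bumped whenever entries are reallocated */
    int entry;                 /* stands in for the entry being updated */
};

typedef int (*mini_update_func)(int *value, void *arg);

static void mini_update(struct mini_table *tab, mini_update_func func, void *arg)
{
    int *value = &tab->entry;  /* pointer into the table */
    unsigned int rebuilds_num = tab->rebuilds_num;

    (void)(*func)(value, arg);

    /* If the callback triggered a rebuild, `value` may be stale; fail loudly. */
    assert(rebuilds_num == tab->rebuilds_num);
    (void)rebuilds_num; /* avoid an unused-variable warning under NDEBUG */
}

static int set_to_arg(int *value, void *arg)
{
    *value = *(int *)arg;
    return 0;
}

int main(void)
{
    struct mini_table tab = { 0, 0 };
    int v = 7;
    mini_update(&tab, set_to_arg, &v); /* passes: no rebuild happened */
    return tab.entry == 7 ? 0 : 1;
}

The st_lookup/st_insert half of the fix is visible in the variable.c
hunk further below.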

Also free after insert in generic_ivar_set_shape_ivptr

Previously we were performing a realloc and then inserting the new
value into the table. If the table was flagged as requiring a rebuild,
the insert could trigger GC work, and marking within that GC could
access the ivptr that had just been freed by the realloc.
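
A minimal sketch of that ordering and the fix, again with invented
slot_* names rather than the real variable.c code: allocate and copy
first, publish the new buffer in the table, and only free the old
buffer after the insert, so anything the insert scans is still live.

#include <stdlib.h>
#include <string.h>

/* Invented toy "table" holding one buffer; inserting may scan what it
 * currently holds, standing in for a rebuild that triggers GC marking. */
struct slot_table { void *stored; };

static void slot_scan(struct slot_table *t)
{
    if (t->stored) {
        volatile char c = *(const char *)t->stored; /* GC-mark stand-in: reads the stored buffer */
        (void)c;
    }
}

static void slot_insert(struct slot_table *t, void *value)
{
    slot_scan(t);      /* may run before the new value is published... */
    t->stored = value; /* ...so it still reads the previously stored buffer */
}

/* Buggy ordering: realloc may free the buffer the table still points at,
 * and the scan inside slot_insert then reads freed memory. */
static void grow_unsafe(struct slot_table *t, size_t new_size)
{
    void *grown = realloc(t->stored, new_size);
    slot_insert(t, grown);
}

/* Fixed ordering: allocate, copy, publish, and only then free the old buffer. */
static void grow_safe(struct slot_table *t, size_t old_size, size_t new_size)
{
    void *old = t->stored;
    void *grown = malloc(new_size);
    if (old) memcpy(grown, old, old_size);
    slot_insert(t, grown); /* any scan during the insert sees a live buffer */
    free(old);
}

int main(void)
{
    struct slot_table tab = { calloc(1, 8) };
    grow_safe(&tab, 8, 16);
    (void)grow_unsafe; /* the buggy ordering; calling it can read freed memory */
    free(tab.stored);
    return 0;
}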

Co-authored-by: Aaron Patterson <tenderlove@ruby-lang.org>
Co-authored-by: Jean Boussier <byroot@ruby-lang.org>
Authored by John Hawthorn on 2025-06-13 18:52:32 -07:00; committed by nagachika
parent 111a49e77e
commit 3471ee0749
3 changed files with 71 additions and 56 deletions

st.c

@@ -1482,7 +1482,16 @@ st_update(st_table *tab, st_data_t key,
        value = entry->record;
    }
    old_key = key;

    unsigned int rebuilds_num = tab->rebuilds_num;

    retval = (*func)(&key, &value, arg, existing);

    // We need to make sure that the callback didn't cause a table rebuild
    // Ideally we would make sure no operations happened
    assert(rebuilds_num == tab->rebuilds_num);
    (void)rebuilds_num;

    switch (retval) {
      case ST_CONTINUE:
        if (! existing) {

test/ruby/test_variable.rb

@@ -393,20 +393,38 @@ class TestVariable < Test::Unit::TestCase
      @a = 1
      @b = 2
      @c = 3
      @d = 4
      @e = 5
      @f = 6
      @g = 7
      @h = 8
    end

    def ivars
      [@a, @b, @c]
      [@a, @b, @c, @d, @e, @f, @g, @h]
    end
  end

  def test_external_ivars
    3.times{
      # check inline cache for external ivar access
      assert_equal [1, 2, 3], ExIvar.new.ivars
      assert_equal [1, 2, 3, 4, 5, 6, 7, 8], ExIvar.new.ivars
    }
  end

  def test_exivar_resize_with_compaction_stress
    objs = 10_000.times.map do
      ExIvar.new
    end
    EnvUtil.under_gc_compact_stress do
      10.times do
        x = ExIvar.new
        x.instance_variable_set(:@resize, 1)
        x
      end
    end
  end

  def test_local_variables_with_kwarg
    bug11674 = '[ruby-core:71437] [Bug #11674]'
    v = with_kwargs_11(v1:1,v2:2,v3:3,v4:4,v5:5,v6:6,v7:7,v8:8,v9:9,v10:10,v11:11)

variable.c

@@ -1162,22 +1162,6 @@ gen_ivtbl_bytes(size_t n)
    return offsetof(struct gen_ivtbl, as.shape.ivptr) + n * sizeof(VALUE);
}

static struct gen_ivtbl *
gen_ivtbl_resize(struct gen_ivtbl *old, uint32_t n)
{
    RUBY_ASSERT(n > 0);

    uint32_t len = old ? old->as.shape.numiv : 0;
    struct gen_ivtbl *ivtbl = xrealloc(old, gen_ivtbl_bytes(n));

    ivtbl->as.shape.numiv = n;
    for (; len < n; len++) {
        ivtbl->as.shape.ivptr[len] = Qundef;
    }

    return ivtbl;
}

void
rb_mark_generic_ivar(VALUE obj)
{
@@ -1632,41 +1616,6 @@ struct gen_ivar_lookup_ensure_size {
    bool resize;
};

static int
generic_ivar_lookup_ensure_size(st_data_t *k, st_data_t *v, st_data_t u, int existing)
{
    ASSERT_vm_locking();

    struct gen_ivar_lookup_ensure_size *ivar_lookup = (struct gen_ivar_lookup_ensure_size *)u;
    struct gen_ivtbl *ivtbl = existing ? (struct gen_ivtbl *)*v : NULL;

    if (!existing || ivar_lookup->resize) {
        if (existing) {
            RUBY_ASSERT(ivar_lookup->shape->type == SHAPE_IVAR);
            RUBY_ASSERT(rb_shape_get_shape_by_id(ivar_lookup->shape->parent_id)->capacity < ivar_lookup->shape->capacity);
        }
        else {
            FL_SET_RAW((VALUE)*k, FL_EXIVAR);
        }

        ivtbl = gen_ivtbl_resize(ivtbl, ivar_lookup->shape->capacity);
        *v = (st_data_t)ivtbl;
    }

    RUBY_ASSERT(FL_TEST((VALUE)*k, FL_EXIVAR));

    ivar_lookup->ivtbl = ivtbl;
    if (ivar_lookup->shape) {
#if SHAPE_IN_BASIC_FLAGS
        rb_shape_set_shape(ivar_lookup->obj, ivar_lookup->shape);
#else
        ivtbl->shape_id = rb_shape_id(ivar_lookup->shape);
#endif
    }

    return ST_CONTINUE;
}

static VALUE *
generic_ivar_set_shape_ivptr(VALUE obj, void *data)
{
@@ -1674,9 +1623,48 @@ generic_ivar_set_shape_ivptr(VALUE obj, void *data)
    struct gen_ivar_lookup_ensure_size *ivar_lookup = data;

    // We can't use st_update, since when resizing the fields table GC can
    // happen, which will modify the st_table and may rebuild it
    RB_VM_LOCK_ENTER();
    {
        st_update(generic_ivtbl(obj, ivar_lookup->id, false), (st_data_t)obj, generic_ivar_lookup_ensure_size, (st_data_t)ivar_lookup);
        struct gen_ivtbl *ivtbl = NULL;

        st_table *tbl = generic_ivtbl(obj, ivar_lookup->id, false);
        int existing = st_lookup(tbl, (st_data_t)obj, (st_data_t *)&ivtbl);

        if (!existing || ivar_lookup->resize) {
            uint32_t new_capa = ivar_lookup->shape->capacity;
            uint32_t old_capa = rb_shape_get_shape_by_id(ivar_lookup->shape->parent_id)->capacity;
            if (existing) {
                RUBY_ASSERT(ivar_lookup->shape->type == SHAPE_IVAR);
                RUBY_ASSERT(old_capa < new_capa);
                RUBY_ASSERT(ivtbl);
            } else {
                RUBY_ASSERT(!ivtbl);
                RUBY_ASSERT(old_capa == 0);
            }
            RUBY_ASSERT(new_capa > 0);

            struct gen_ivtbl *old_ivtbl = ivtbl;
            ivtbl = xmalloc(gen_ivtbl_bytes(new_capa));
            if (old_ivtbl) {
                memcpy(ivtbl, old_ivtbl, gen_ivtbl_bytes(old_capa));
            }
            ivtbl->as.shape.numiv = new_capa;
            for (uint32_t i = old_capa; i < new_capa; i++) {
                ivtbl->as.shape.ivptr[i] = Qundef;
            }

            st_insert(tbl, (st_data_t)obj, (st_data_t)ivtbl);
            if (old_ivtbl) {
                xfree(old_ivtbl);
            }
        }

        ivar_lookup->ivtbl = ivtbl;
        if (ivar_lookup->shape) {
            rb_shape_set_shape(ivar_lookup->obj, ivar_lookup->shape);
        }
    }
    RB_VM_LOCK_LEAVE();
@@ -2134,8 +2122,8 @@ rb_copy_generic_ivar(VALUE clone, VALUE obj)
            new_ivtbl->as.complex.table = st_copy(obj_ivtbl->as.complex.table);
        }
        else {
            new_ivtbl = gen_ivtbl_resize(0, obj_ivtbl->as.shape.numiv);
            new_ivtbl = xmalloc(gen_ivtbl_bytes(obj_ivtbl->as.shape.numiv));
            new_ivtbl->as.shape.numiv = obj_ivtbl->as.shape.numiv;

            for (uint32_t i=0; i<obj_ivtbl->as.shape.numiv; i++) {
                RB_OBJ_WRITE(clone, &new_ivtbl->as.shape.ivptr[i], obj_ivtbl->as.shape.ivptr[i]);
            }