Skip to content

Commit 8581769

Browse files
jhawthorn authored and paracycle committed
Simplify iseq mark_and_move CC handling
We should special-case the empty CC cases in the parent, since we already had to check for that to avoid unnecessary writes hurting copy-on-write. This also changes reference updating to only update the CC reference rather than also checking for the CC being active. There's less advantage to removing a reference in that phase as the object still can't be freed until the next time mark is run, so we might as well just wait.
1 parent 2124c57 commit 8581769

File tree

2 files changed

+28
-25
lines changed

2 files changed

+28
-25
lines changed

internal/gc.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -226,6 +226,11 @@ void rb_gc_after_fork(rb_pid_t pid);
     if (_obj != (VALUE)*(ptr)) *(ptr) = (void *)_obj; \
 } while (0)
 
+#define rb_gc_move_ptr(ptr) do { \
+    VALUE _obj = rb_gc_location((VALUE)*(ptr)); \
+    if (_obj != (VALUE)*(ptr)) *(ptr) = (void *)_obj; \
+} while (0)
+
 RUBY_SYMBOL_EXPORT_BEGIN
 /* exports for objspace module */
 void rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data);

iseq.c

Lines changed: 23 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -340,28 +340,12 @@ rb_iseq_mark_and_move_each_body_value(const rb_iseq_t *iseq, VALUE *original_ise
 }
 
 static bool
-cc_is_active(const struct rb_callcache *cc, bool reference_updating)
+cc_is_active(const struct rb_callcache *cc)
 {
-    if (cc) {
-        if (cc == rb_vm_empty_cc() || cc == rb_vm_empty_cc_for_super()) {
-            return false;
-        }
-
-        if (reference_updating) {
-            cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
-        }
+    RUBY_ASSERT(cc);
+    RUBY_ASSERT(vm_cc_markable(cc));
 
-        if (vm_cc_markable(cc) && vm_cc_valid(cc)) {
-            const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
-            if (reference_updating) {
-                cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
-            }
-            if (!METHOD_ENTRY_INVALIDATED(cme)) {
-                return true;
-            }
-        }
-    }
-    return false;
+    return vm_cc_valid(cc) && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc));
 }
 
 void
@@ -385,16 +369,30 @@ rb_iseq_mark_and_move(rb_iseq_t *iseq, bool reference_updating)
     if (body->mandatory_only_iseq) rb_gc_mark_and_move_ptr(&body->mandatory_only_iseq);
 
     if (body->call_data) {
+        struct rb_call_data *cds = body->call_data;
         for (unsigned int i = 0; i < body->ci_size; i++) {
-            struct rb_call_data *cds = body->call_data;
 
             if (cds[i].ci) rb_gc_mark_and_move_ptr(&cds[i].ci);
 
-            if (cc_is_active(cds[i].cc, reference_updating)) {
-                rb_gc_mark_and_move_ptr(&cds[i].cc);
+            const struct rb_callcache *cc = cds[i].cc;
+            if (!cc || cc == rb_vm_empty_cc() || cc == rb_vm_empty_cc_for_super()) {
+                // No need for marking, reference updating, or clearing
+                // We also want to avoid reassigning to improve CoW
+                continue;
+            }
+
+            if (reference_updating) {
+                rb_gc_move_ptr(&cds[i].cc);
             }
-            else if (cds[i].cc != rb_vm_empty_cc()) {
-                cds[i].cc = rb_vm_empty_cc();
+            else {
+                if (cc_is_active(cc)) {
+                    rb_gc_mark_movable((VALUE)cc);
+                }
+                else {
+                    // Either the CC or CME has been invalidated. Replace
+                    // it with the empty CC so that it can be GC'd.
+                    cds[i].cc = rb_vm_empty_cc();
+                }
             }
         }
     }

0 commit comments

Comments
 (0)