#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                         int argc, const VALUE *argv, int priv);
ruby_vm_special_exception_copy(VALUE exc)
    rb_obj_copy_ivar(e, exc);
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);
NORETURN(static void vm_stackoverflow(void));
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
    ec_stack_overflow(GET_EC(), TRUE);

        rb_bug("system stack overflow during GC. Faulty native extension?");

    ec->raised_flag = RAISED_STACKOVERFLOW;
    ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
    EC_JUMP_TAG(ec, TAG_RAISE);

#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
callable_class_p(VALUE klass)
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));

    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env;

    cref_or_me_type = imemo_type(cref_or_me);
    if (type & VM_FRAME_FLAG_BMETHOD) {

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);

        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
            if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);

    if (cref_or_me_type == imemo_ment) {
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq)

        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \

    switch (given_magic) {
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);

        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;
MJIT_FUNC_EXPORTED void

    if (! LIKELY(vm_stack_canary_was_born)) {
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
    else if (! (iseq = GET_ISEQ())) {
    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

           "We are killing the stack canary set by %s, "
           "at %s@pc=%"PRIdPTR"\n"
           "watch out the C stack trace.\n"
           name, stri, pos, strd);
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
vm_push_frame_debug_counter_inc(

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

            RB_DEBUG_COUNTER_INC(frame_R2R);
            RB_DEBUG_COUNTER_INC(frame_R2C);
            RB_DEBUG_COUNTER_INC(frame_C2R);
            RB_DEBUG_COUNTER_INC(frame_C2C);

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;

#define vm_push_frame_debug_counter_inc(ec, cfp, t)
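/* Layout note (my reading of the asserts below): each frame's environment
 * keeps three bookkeeping slots around the EP -- ep[-2] holds the method
 * entry or CREF, ep[-1] the special value (block handler or previous EP),
 * and ep[0] the frame flags.  The STATIC_ASSERTs pin those offsets. */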
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {
#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);
    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();
    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,
rb_arity_error_new(int argc, int min, int max)
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
rb_error_arity(int argc, int min, int max)
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
vm_env_write(const VALUE *ep, int index, VALUE v)
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
        vm_env_write_slowpath(ep, index, v);
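/* vm_env_write takes the fast path while the environment still lives on the
 * VM stack (no write barrier needed); once the env has escaped to the heap
 * the VM_ENV_FLAG_WB_REQUIRED flag is set and writes go through the slow
 * path above, which re-registers the env object with the GC. */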
    if (block_handler == VM_BLOCK_HANDLER_NONE) {

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc:
        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
      case block_handler_type_symbol:
        return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
      case block_handler_type_proc:
        return VM_BH_TO_PROC(block_handler);

    VM_UNREACHABLE(rb_vm_bh_to_procval);
vm_svar_valid_p(VALUE svar)
    switch (imemo_type(svar)) {

    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
        svar = ec->root_svar;

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;
        return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

      case VM_SVAR_LASTLINE:
      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
        val = lep_svar_get(ec, lep, key);

        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

            rb_bug("unexpected back-ref");
check_method_entry(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;

    switch (imemo_type(obj)) {

    rb_bug("check_method_entry: svar should not be there:");
    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;

    switch (imemo_type(obj)) {

    rb_bug("check_method_entry: svar should not be there:");
vm_env_cref(const VALUE *ep)

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
is_cref(const VALUE v, int can_be_svar)
    switch (imemo_type(v)) {

vm_env_cref_by_cref(const VALUE *ep)
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);

    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
    const VALUE v = *vptr;

    switch (imemo_type(v)) {
        new_cref = vm_cref_dup(cref);
        VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

    rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
            ep = VM_ENV_PREV_EP(ep);

        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);

    rb_bug("vm_cref_dup: unreachable");
vm_get_cref(const VALUE *ep)
    rb_bug("vm_get_cref: unreachable");

rb_vm_get_cref(const VALUE *ep)
    return vm_get_cref(ep);

    return vm_get_cref(cfp->ep);
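/* A CREF records the lexical class/module nesting used for constant lookup
 * and the default definee.  Like the svar, it shares the ME_CREF env slot,
 * which is why the lookups above walk the EP chain and inspect the imemo
 * type of each entry. */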
vm_get_const_key_cref(const VALUE *ep)
        FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
        cref = CREF_NEXT(cref);
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;

        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;

    *new_cref_ptr = NULL;
        prev_cref = vm_env_cref(ep);

        prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
        return CREF_CLASS_FOR_DEFINITION(cref);

vm_get_const_base(const VALUE *ep)
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        cref = CREF_NEXT(cref);
vm_check_if_namespace(VALUE klass)

vm_ensure_not_refinement_module(VALUE self)
        rb_warn("not defined at the refinement, but at the outer class/module");
    if (NIL_P(orig_klass) && allow_nil) {
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = CREF_CLASS(cref);
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                        if (am == klass) break;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        goto search_continue;

                        if (UNLIKELY(!rb_ractor_main_p())) {
                                     "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                     rb_class_path(klass), rb_id2name(id));

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);
            return rb_public_const_defined_from(orig_klass, id);
            return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);

    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));

fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
        vm_cc_attr_index_set(cc, index, shape_id);
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

#define ATTR_INDEX_NOT_SET (attr_index_t)-1
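/* Instance-variable reads below are cached by object shape: the inline cache
 * (IVC, or the call cache when invoked via attr_reader) stores a (shape_id,
 * attr_index) pair.  If the receiver's current shape still matches the
 * cached one, the ivar loads straight from ivar_list[index] with no lookup. */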
    shape_id_t shape_id;

#if SHAPE_IN_BASIC_FLAGS
        shape_id = RBASIC_SHAPE_ID(obj);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);

        if (UNLIKELY(!rb_ractor_main_p())) {

        ivar_list = RCLASS_IVPTR(obj);
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = RCLASS_SHAPE_ID(obj);

        rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ivtbl->shape_id;
        ivar_list = ivtbl->ivptr;

    shape_id_t cached_id;

        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
        val = ivar_list[index];

            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);

        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            if (!st_lookup(ROBJECT_IV_HASH(obj), id, &val)) {

        if (rb_shape_get_iv_index(shape, id, &index)) {
            fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
            val = ivar_list[index];

            vm_cc_attr_index_initialize(cc, shape_id);
            vm_ic_attr_index_initialize(ic, shape_id);

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        vm_cc_attr_index_set(cc, index, next_shape_id);
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
        attr_index_t index = rb_obj_ivar_set(obj, id, val);

        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

        if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);

        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);

        shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
        rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);

        if (rb_shape_get_iv_index(next_shape, id, &index)) {
            if (index >= MAX_IVARS) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);

            rb_bug("didn't find the id\n");

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
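/* On an ivar-set cache miss the slow path performs the write, then records
 * the resulting shape transition (next_shape_id plus attr index) in the
 * inline cache or call cache, so subsequent writes that follow the same
 * transition stay on the fast path. */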
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
    shape_id_t shape_id = rb_generic_shape_id(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        rb_gen_ivtbl_get(obj, 0, &ivtbl);
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
        shape_id_t source_shape_id = dest_shape->parent_id;

        if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
            ivtbl = rb_ensure_generic_iv_list_size(obj, dest_shape, index + 1);
#if SHAPE_IN_BASIC_FLAGS
            RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

    VALUE *ptr = ivtbl->ivptr;

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
            shape_id_t source_shape_id = dest_shape->parent_id;

            if (shape_id == source_shape_id && dest_shape->edge_name == id) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    VALUE defined_class = 0;

        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);
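/* Class-variable accesses are cached against the global cvar state and the
 * current CREF: the cached entry is trusted only if neither has changed and
 * we are on the main ractor; otherwise the cache is refreshed through
 * update_classvariable_cache(). */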
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);

    shape_id_t dest_shape_id;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = FIX2INT(err);
        ec->tag->state = TAG_THROW;
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
        ec->tag->state = TAG_RAISE;
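/* vm_throw_start below resolves the destination frame for a non-local jump:
 * TAG_BREAK walks out to the frame that defined the block (lambdas break to
 * themselves), TAG_RETRY jumps to the enclosing frame, and TAG_RETURN scans
 * up to the method or lambda frame that owns the current environment,
 * raising LocalJumpError when no valid target remains on the stack. */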
               const int flag, const VALUE throwobj)

    else if (state == TAG_BREAK) {
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;

                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                        UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
            ep = VM_ENV_PREV_EP(ep);

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    if (in_class_frame) {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                if (tep == target_ep) {
                                    goto unexpected_return;
                            tep = VM_ENV_PREV_EP(tep);
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_MAIN:
                        if (in_class_frame) goto unexpected_return;
                        if (target_ep == NULL) {
                            goto unexpected_return;
                      case ISEQ_TYPE_EVAL:
                      case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto unexpected_return;

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

    rb_bug("isns(throw): unsupported throw type");

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
         rb_num_t throw_state, VALUE throwobj)
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
        return vm_throw_continue(ec, throwobj);
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE obj = ary;

    if (space_size == 0) {
    else if (flag & 0x02) {
        for (i=0; i<num-len; i++) {
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];

        VALUE *bptr = &base[space_size - 1];
        for (i=0; i<num; i++) {
        for (; i<num; i++) {
            *bptr = rb_ary_new();
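/* vm_expandarray above spreads an array across the stack for multiple
 * assignment: bit 0x01 of `flag` requests a trailing splat (rest) array and
 * bit 0x02 selects post-argument order, filling from the end.  When the
 * source array is shorter than requested, the missing slots are padded
 * (an empty array for the splat position). */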
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;

    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    if (! vm_cc_markable(cc)) {
    else if (! vm_ci_markable(ci)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
#if VM_CHECK_MODE > 0
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo *ci = ccs->entries[i].ci;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
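/* Call caches are grouped per class: RCLASS_CC_TBL(klass) maps a method id
 * to an rb_class_cc_entries (ccs) list holding one (ci, cc) pair per
 * call-site shape.  vm_ccs_verify above cross-checks that every cached
 * entry still agrees on the mid, the class and the method entry. */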
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);

        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);

                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    VM_ASSERT(cc_tbl != NULL);

    if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE

#if USE_DEBUG_COUNTER
    if (old_cc == empty_cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);

    return vm_search_method_fastpath(cd_owner, cd, klass);
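/* Inline method cache hit condition, as checked above: the cached cc must
 * have been filled for the same class and its method entry must not have
 * been invalidated (e.g. by method redefinition); anything else falls back
 * to vm_search_method_slowpath0. */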
#if __has_attribute(transparent_union)
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {

#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
        return me->def->body.cfunc.func == func;

    VM_ASSERT(iseq != NULL);
    return check_cfunc(vm_cc_cme(cc), func);
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)

opt_equality_specialized(VALUE recv, VALUE obj)
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)
        else if (isnan(b)) {
        return RBOOL(a == b);

        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
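/* opt_equality strategy, per the code above: take the specialized fast path
 * for Integer/Float/String receivers while BOP_EQ is unredefined; otherwise
 * only compare by identity when the resolved #== is still rb_obj_equal. */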
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
      case VM_CHECKMATCH_TYPE_WHEN:
      case VM_CHECKMATCH_TYPE_RESCUE:
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);

    rb_bug("check_match: unreachable");
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b)
#endif

double_cmp_lt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);

double_cmp_le(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);

double_cmp_gt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);

double_cmp_ge(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
MJIT_FUNC_EXPORTED bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
MJIT_FUNC_EXPORTED bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
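/* The three predicates above classify an iseq's parameter list so the call
 * setup code can pick a specialized path: strictly positional arguments,
 * positional plus optionals only, or keyword parameters only; anything more
 * complicated goes through the generic setup_parameters_complex. */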
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        vm_caller_setup_arg_splat(cfp, calling);
        if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
            calling->argc > 0 &&
            (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
            *(cfp->sp - 1) = rb_hash_dup(final_hash);
            calling->kw_splat = 1;

    if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
        if (IS_ARGS_KEYWORD(ci)) {
            vm_caller_setup_arg_kw(cfp, calling, ci);

            VALUE keyword_hash = cfp->sp[-1];
                cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
            else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
                cfp->sp[-1] = rb_hash_dup(keyword_hash);

    if (UNLIKELY(calling->kw_splat)) {
            calling->kw_splat = 0;
#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);
    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;

    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
                     const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)

    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);

            VM_ASSERT(ci == calling->ci);
            VM_ASSERT(cc == calling->cc);
            CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
        else if (rb_iseq_only_optparam_p(iseq)) {
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {

            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));
            else if (argc == lead_num) {
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
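/* vm_callee_setup_arg above dispatches between those tiers: each branch
 * both arranges the arguments for this call and installs a matching
 * specialized handler in the call cache (CC_SET_FASTPATH) so the next call
 * through the same site can skip these checks entirely. */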
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int param_size = ISEQ_BODY(iseq)->param.size;
    const int local_size = ISEQ_BODY(iseq)->local_table_size;
    const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
                      int opt_pc, int param_size, int local_size)

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
                           int opt_pc, int param_size, int local_size)
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
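/* Tailcall setup: the caller's frame is popped first, then the receiver and
 * arguments are copied down into the reclaimed stack slots and the callee
 * frame is pushed in its place, so the control-frame stack does not grow
 * across the call.  The FINISH flag of the replaced frame is preserved. */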
ractor_unsafe_check(void)
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
    return (*f)(argc, argv, recv);

    return (*f)(recv, argv[0]);

    return (*f)(recv, argv[0], argv[1]);

    return (*f)(recv, argv[0], argv[1], argv[2]);

    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
    const int ov_flags = RAISED_STACKOVERFLOW;

    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;

    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ATTRSET);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);

        rb_bug("wrong method type: %d", me->def->type);

    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);

    int len = cfunc->argc;

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    int argc = calling->argc;
    int orig_argc = argc;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    reg_cfp->sp -= orig_argc + 1;
    val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
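/* cfuncs get a frame with iseq == NULL (VM_FRAME_MAGIC_CFUNC) purely for
 * backtraces and consistency checking; sp is lowered past the receiver and
 * arguments before the C function runs, and CHECK_CFP_CONSISTENCY catches
 * callees that unbalance the frame stack. */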
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    RB_DEBUG_COUNTER_INC(ccf_ivar);

    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
    RB_DEBUG_COUNTER_INC(ccf_attrset);

    VALUE val = *(cfp->sp - 1);

    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;

    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
        res = vm_setivar_default(obj, id, val, dest_shape_id, index);
        if (!UNDEF_P(res)) {
    res = vm_setivar_slowpath_attr(obj, id, val, cc);

    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
rb_vm_call_ivar_attrset_p(const vm_call_handler ch)
    return (ch == vm_call_ivar || ch == vm_call_attrset);
    VALUE procv = cme->def->body.bmethod.proc;

        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {

    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    CALLER_SETUP_ARG(cfp, calling, ci);
    argc = calling->argc;

    cfp->sp += - argc - 1;

    return vm_call_bmethod_body(ec, calling, argv);
MJIT_FUNC_EXPORTED VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
    VALUE klass = current_class;

    while (RTEST(klass)) {
        if (owner == target_owner) {

    return current_class;
    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);

        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);

    VM_ASSERT(callable_method_entry_p(cme));

    return aliased_callable_method_entry(me);
3461 calling->cc = &VM_CC_ON_STACK(Qundef,
3464 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
3466 return vm_call_method_each_type(ec, cfp, calling);
3469static enum method_missing_reason
3472 enum method_missing_reason stat = MISSING_NOENTRY;
3473 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3474 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3475 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3485 ASSUME(calling->argc >= 0);
3488 enum method_missing_reason missing_reason = MISSING_NOENTRY;
3489 int argc = calling->argc;
3490 VALUE recv = calling->recv;
3493 flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3495 if (UNLIKELY(! mid)) {
3496 mid = idMethodMissing;
3497 missing_reason = ci_missing_reason(ci);
3498 ec->method_missing_reason = missing_reason;
3514 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3517 argc = ++calling->argc;
3519 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
3522 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3523 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
3524 VALUE exc = rb_make_no_method_exception(
3534 calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
3535 calling->cc = &VM_CC_ON_STACK(klass,
3537 { .method_missing_reason = missing_reason },
3538 rb_callable_method_entry_with_refinements(klass, mid, NULL));
3540 if (flags & VM_CALL_FCALL) {
3541 return vm_call_method(ec, reg_cfp, calling);
3545 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
3547 if (vm_cc_cme(cc) != NULL) {
3548 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
3549 case METHOD_VISI_PUBLIC:
3550 return vm_call_method_each_type(ec, reg_cfp, calling);
3551 case METHOD_VISI_PRIVATE:
3552 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
3554 case METHOD_VISI_PROTECTED:
3555 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
3558 VM_UNREACHABLE(vm_call_method);
3560 return vm_call_method_missing(ec, reg_cfp, calling);
3563 return vm_call_method_nome(ec, reg_cfp, calling);
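/* Editorial note (not in the original source): vm_call_opt_send below
 * implements the optimized `send`/`__send__` path: the first argument is
 * consumed as the method name and the call is re-dispatched through
 * vm_call_symbol as if it were a direct call, e.g. `obj.send(:foo, 1)`
 * becomes a call to `foo` with argc reduced by one. An unresolvable name
 * falls through to method_missing with the reason flags computed by
 * ci_missing_reason. */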
3569 RB_DEBUG_COUNTER_INC(ccf_opt_send);
3574 CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);
3576 i = calling->argc - 1;
3578 if (calling->argc == 0) {
3603 return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
3609 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
3611 RB_DEBUG_COUNTER_INC(ccf_method_missing);
3613 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
3616 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
3617 argc = calling->argc + 1;
3619 unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3620 calling->argc = argc;
3623 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3624 vm_check_canary(ec, reg_cfp->sp);
3628 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
3631 ec->method_missing_reason = reason;
3632 calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
3633 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
3634 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
3635 return vm_call_method(ec, reg_cfp, calling);
3641 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3652 return vm_call_method_nome(ec, cfp, calling);
3654 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
3655 cme->def->body.refined.orig_me) {
3656 cme = refined_method_callable_without_refinement(cme);
3659 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
3661 return vm_call_method_each_type(ec, cfp, calling);
3665find_refinement(VALUE refinements, VALUE klass)
3667 if (NIL_P(refinements)) {
3670 return rb_hash_lookup(refinements, klass);
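/* Editorial note (not in the original source): refinements are recorded per
 * CREF as a hash mapping each refined class to its refinement module;
 * find_refinement just looks the class up in that hash. The loop in
 * search_refined_method further below walks CREF_NEXT(cref) outward, so the
 * lexically innermost `using` wins. */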
3679 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
3680 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
3683 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
3684 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
3688 } while (cfp->iseq != local_iseq);
3699 if (orig_me->defined_class == 0) {
3707 VM_ASSERT(callable_method_entry_p(cme));
3709 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3719 ID mid = vm_ci_mid(calling->ci);
3720 const rb_cref_t *cref = vm_get_cref(cfp->ep);
3724 for (; cref; cref = CREF_NEXT(cref)) {
3725 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
3726 if (NIL_P(refinement)) continue;
3729 rb_callable_method_entry(refinement, mid);
3732 if (vm_cc_call(cc) == vm_call_super_method) {
3735 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
3740 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
3741 cme->def != ref_me->def) {
3744 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
3753 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
3754 return refined_method_callable_without_refinement(vm_cc_cme(cc));
3767 search_refined_method(ec, cfp, calling));
3769 if (vm_cc_cme(ref_cc)) {
3770 calling->cc = ref_cc;
3771 return vm_call_method(ec, cfp, calling);
3774 return vm_call_method_nome(ec, cfp, calling);
3780NOINLINE(static VALUE
3788 int argc = calling->argc;
3791 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
3794 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
3800 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3803 VALUE procval = calling->recv;
3804 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3810 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
3812 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
3815 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
3816 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
3819 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
3820 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
3821 return vm_call_general(ec, reg_cfp, calling);
3828 VALUE recv = calling->recv;
3831 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3832 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
3834 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3835 return internal_RSTRUCT_GET(recv, off);
3841 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3843 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3851 VALUE recv = calling->recv;
3854 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3855 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
3859 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3860 internal_RSTRUCT_SET(recv, off, val);
3868 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3870 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3882 switch (vm_cc_cme(cc)->def->body.optimized.type) {
3883 case OPTIMIZED_METHOD_TYPE_SEND:
3884 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
3885 return vm_call_opt_send(ec, cfp, calling);
3886 case OPTIMIZED_METHOD_TYPE_CALL:
3887 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
3888 return vm_call_opt_call(ec, cfp, calling);
3889 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
3890 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
3891 return vm_call_opt_block_call(ec, cfp, calling);
3892 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
3893 CALLER_SETUP_ARG(cfp, calling, ci);
3894 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3896 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3897 return vm_call_opt_struct_aref(ec, cfp, calling);
3899 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
3900 CALLER_SETUP_ARG(cfp, calling, ci);
3901 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3903 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3904 return vm_call_opt_struct_aset(ec, cfp, calling);
3906 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
3910#define VM_CALL_METHOD_ATTR(var, func, nohook) \
3911 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
3912 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
3913 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
3915 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
3916 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
3931 switch (cme->def->type) {
3932 case VM_METHOD_TYPE_ISEQ:
3933 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
3934 return vm_call_iseq_setup(ec, cfp, calling);
3936 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3937 case VM_METHOD_TYPE_CFUNC:
3938 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
3939 return vm_call_cfunc(ec, cfp, calling);
3941 case VM_METHOD_TYPE_ATTRSET:
3942 CALLER_SETUP_ARG(cfp, calling, ci);
3943 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3947 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
3949 if (vm_cc_markable(cc)) {
3950 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3951 VM_CALL_METHOD_ATTR(v,
3952 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3953 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3959 VM_CALLCACHE_UNMARKABLE |
3960 ((VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
3961 VM_CALLCACHE_ON_STACK,
3967 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
3972 VM_CALL_METHOD_ATTR(v,
3973 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3974 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3978 case VM_METHOD_TYPE_IVAR:
3979 CALLER_SETUP_ARG(cfp, calling, ci);
3980 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3982 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3983 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
3984 VM_CALL_METHOD_ATTR(v,
3985 vm_call_ivar(ec, cfp, calling),
3986 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
3989 case VM_METHOD_TYPE_MISSING:
3990 vm_cc_method_missing_reason_set(cc, 0);
3991 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3992 return vm_call_method_missing(ec, cfp, calling);
3994 case VM_METHOD_TYPE_BMETHOD:
3995 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
3996 return vm_call_bmethod(ec, cfp, calling);
3998 case VM_METHOD_TYPE_ALIAS:
3999 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4000 return vm_call_alias(ec, cfp, calling);
4002 case VM_METHOD_TYPE_OPTIMIZED:
4003 return vm_call_optimized(ec, cfp, calling, ci, cc);
4005 case VM_METHOD_TYPE_UNDEF:
4008 case VM_METHOD_TYPE_ZSUPER:
4009 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4011 case VM_METHOD_TYPE_REFINED:
4014 return vm_call_refined(ec, cfp, calling);
4017 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4027 const int stat = ci_missing_reason(ci);
4029 if (vm_ci_mid(ci) == idMethodMissing) {
4031 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4032 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4035 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4047 VALUE defined_class = me->defined_class;
4048 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4049 return NIL_P(refined_class) ? defined_class : refined_class;
4058 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4060 if (vm_cc_cme(cc) != NULL) {
4061 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4062 case METHOD_VISI_PUBLIC:
4063 return vm_call_method_each_type(ec, cfp, calling);
4065 case METHOD_VISI_PRIVATE:
4066 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4067 enum method_missing_reason stat = MISSING_PRIVATE;
4068 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4070 vm_cc_method_missing_reason_set(cc, stat);
4071 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4072 return vm_call_method_missing(ec, cfp, calling);
4074 return vm_call_method_each_type(ec, cfp, calling);
4076 case METHOD_VISI_PROTECTED:
4077 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4078 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4080 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4081 return vm_call_method_missing(ec, cfp, calling);
4085 VM_ASSERT(vm_cc_cme(cc) != NULL);
4088 calling->cc = &cc_on_stack;
4089 return vm_call_method_each_type(ec, cfp, calling);
4092 return vm_call_method_each_type(ec, cfp, calling);
4099 return vm_call_method_nome(ec, cfp, calling);
4106 RB_DEBUG_COUNTER_INC(ccf_general);
4107 return vm_call_method(ec, reg_cfp, calling);
4113 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4114 VM_ASSERT(cc != vm_cc_empty());
4116 *(vm_call_handler *)&cc->call_ = vm_call_general;
4122 RB_DEBUG_COUNTER_INC(ccf_super_method);
4127 if (ec == NULL) rb_bug("unreachable");
4130 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4131 return vm_call_method(ec, reg_cfp, calling);
4137vm_search_normal_superclass(VALUE klass)
4142 klass = RBASIC(klass)->klass;
4144 klass = RCLASS_ORIGIN(klass);
4148NORETURN(static void vm_super_outside(void));
4151vm_super_outside(void)
4157empty_cc_for_super(void)
4160 return rb_vm_empty_cc_for_super();
4162 return &vm_empty_cc_for_super;
4169 VALUE current_defined_class;
4176 current_defined_class = vm_defined_class_for_protected_call(me);
4179 reg_cfp->iseq != method_entry_iseqptr(me) &&
4182 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4186 "self has wrong type to call super in this context: "
4187 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
4192 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4194 "implicit argument passing of super from method defined"
4195 " by define_method() is not supported."
4196 " Specify all arguments explicitly.");
4199 ID mid = me->def->original_id;
4202 cd->ci = vm_ci_new_runtime(mid,
4205 vm_ci_kwarg(cd->ci));
4211 VALUE klass = vm_search_normal_superclass(me->defined_class);
4215 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
4219 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
4223 if (cached_cme == NULL) {
4225 cd->cc = empty_cc_for_super();
4227 else if (cached_cme->called_id != mid) {
4230 cc = vm_cc_new(klass, cme, vm_call_super_method);
4234 cd->cc = cc = empty_cc_for_super();
4238 switch (cached_cme->def->type) {
4240 case VM_METHOD_TYPE_REFINED:
4242 case VM_METHOD_TYPE_ATTRSET:
4243 case VM_METHOD_TYPE_IVAR:
4244 vm_cc_call_set(cc, vm_call_super_method);
4252 VM_ASSERT((vm_cc_cme(cc), true));
4260block_proc_is_lambda(const VALUE procval)
4265 GetProcPtr(procval, proc);
4266 return proc->is_lambda;
4276 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
4279 int is_lambda = FALSE;
4280 VALUE val, arg, blockarg;
4282 const struct vm_ifunc *ifunc = captured->code.ifunc;
4287 else if (argc == 0) {
4294 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4296 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4298 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4301 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
4304 VM_GUARDED_PREV_EP(captured->ep),
4306 0, ec->cfp->sp, 0, 0);
4307 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
4308 rb_vm_pop_frame(ec);
4316 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
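/* Editorial note (not in the original source): vm_yield_with_symbol handles
 * blocks created from symbols, as in `ary.map(&:to_s)`; the symbol is
 * forwarded to rb_sym_proc_call, which sends the named method to the first
 * yielded argument. */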
4325 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4327 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4335vm_callee_setup_block_arg_arg0_check(VALUE *argv)
4337 VALUE ary, arg0 = argv[0];
4338 ary = rb_check_array_type(arg0);
4342 VM_ASSERT(argv[0] == arg0);
4350 if (rb_simple_iseq_p(iseq)) {
4354 CALLER_SETUP_ARG(cfp, calling, ci);
4355 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
4357 if (arg_setup_type == arg_setup_block &&
4358 calling->argc == 1 &&
4359 ISEQ_BODY(iseq)->param.flags.has_lead &&
4360 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
4361 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4362 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4365 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
4366 if (arg_setup_type == arg_setup_block) {
4367 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
4369 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4370 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
4371 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4373 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
4374 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4378 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
4385 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4394 calling = &calling_entry;
4395 calling->argc = argc;
4396 calling->block_handler = block_handler;
4397 calling->kw_splat = kw_splat;
4399 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);
4401 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4409 bool is_lambda, VALUE block_handler)
4412 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4413 const int arg_size = ISEQ_BODY(iseq)->param.size;
4414 VALUE *const rsp = GET_SP() - calling->argc;
4415 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
4419 vm_push_frame(ec, iseq,
4420 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4422 VM_GUARDED_PREV_EP(captured->ep), 0,
4423 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4425 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
4433 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
4435 if (calling->argc < 1) {
4439 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4440 CALLER_SETUP_ARG(reg_cfp, calling, ci);
4441 calling->recv = TOPN(--calling->argc);
4442 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
4449 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
4454 CALLER_SETUP_ARG(ec->cfp, calling, ci);
4455 CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
4456 argc = calling->argc;
4457 val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
4463vm_proc_to_block_handler(VALUE procval)
4465 const struct rb_block *block = vm_proc_block(procval);
4467 switch (vm_block_type(block)) {
4468 case block_type_iseq:
4469 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4470 case block_type_ifunc:
4471 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4472 case block_type_symbol:
4473 return VM_BH_FROM_SYMBOL(block->as.symbol);
4474 case block_type_proc:
4475 return VM_BH_FROM_PROC(block->as.proc);
4477 VM_UNREACHABLE(vm_yield_with_proc);
4484 bool is_lambda, VALUE block_handler)
4486 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4487 VALUE proc = VM_BH_TO_PROC(block_handler);
4488 is_lambda = block_proc_is_lambda(proc);
4489 block_handler = vm_proc_to_block_handler(proc);
4492 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4498 bool is_lambda, VALUE block_handler)
4502 bool is_lambda, VALUE block_handler);
4504 switch (vm_block_handler_type(block_handler)) {
4505 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
4506 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
4507 case block_handler_type_proc: func = vm_invoke_proc_block; break;
4508 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
4509 default: rb_bug("vm_invoke_block: unreachable");
4512 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
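/* Editorial note (not in the original source): vm_invoke_block above picks
 * one of four handlers keyed on the block-handler tag: iseq blocks (plain
 * Ruby blocks), ifunc blocks (C-level blocks), symbol blocks (&:sym), and
 * proc blocks, which vm_invoke_proc_block first unwraps to their underlying
 * handler while tracking lambda-ness. */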
4516vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
4523 rb_bug("vm_make_proc_with_iseq: unreachable");
4526 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
4527 captured->code.iseq = blockiseq;
4529 return rb_vm_make_proc(ec, captured, rb_cProc);
4533vm_once_exec(VALUE iseq)
4540vm_once_clear(VALUE data)
4543 is->once.running_thread = NULL;
4555 args[0] = obj; args[1] = Qfalse;
4557 if (!UNDEF_P(r) && RTEST(r)) {
4569 enum defined_type type = (enum defined_type)op_type;
4576 return rb_gvar_defined(SYM2ID(obj));
4578 case DEFINED_CVAR: {
4579 const rb_cref_t *cref = vm_get_cref(GET_EP());
4580 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
4585 case DEFINED_CONST_FROM: {
4586 bool allow_nil = type == DEFINED_CONST;
4588 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
4593 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
4595 case DEFINED_METHOD:{
4600 switch (METHOD_ENTRY_VISI(me)) {
4601 case METHOD_VISI_PRIVATE:
4603 case METHOD_VISI_PROTECTED:
4607 case METHOD_VISI_PUBLIC:
4611 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
4615 return check_respond_to_missing(obj, v);
4620 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
4624 case DEFINED_ZSUPER:
4629 VALUE klass = vm_search_normal_superclass(me->defined_class);
4630 ID id = me->def->original_id;
4641 rb_bug("unimplemented defined? type (VM)");
4651 return vm_defined(ec, reg_cfp, op_type, obj, v);
4655vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
4658 const VALUE *ep = reg_ep;
4659 for (i = 0; i < lv; i++) {
4660 ep = GET_PREV_EP(ep);
4666vm_get_special_object(const VALUE *const reg_ep,
4667 enum vm_special_object_type type)
4670 case VM_SPECIAL_OBJECT_VMCORE:
4671 return rb_mRubyVMFrozenCore;
4672 case VM_SPECIAL_OBJECT_CBASE:
4673 return vm_get_cbase(reg_ep);
4674 case VM_SPECIAL_OBJECT_CONST_BASE:
4675 return vm_get_const_base(reg_ep);
4677 rb_bug("putspecialobject insn: unknown value_type %d", type);
4684 const VALUE ary2 = ary2st;
4685 VALUE tmp1 = rb_check_to_array(ary1);
4686 VALUE tmp2 = rb_check_to_array(ary2);
4697 tmp1 = rb_ary_dup(ary1);
4699 return rb_ary_concat(tmp1, tmp2);
4707 return vm_concat_array(ary1, ary2st);
4713 VALUE tmp = rb_check_to_array(ary);
4717 else if (RTEST(flag)) {
4718 return rb_ary_dup(tmp);
4730 return vm_splat_array(flag, ary);
4736 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4738 if (flag & VM_CHECKMATCH_ARRAY) {
4742 for (i = 0; i < n; i++) {
4744 VALUE c = check_match(ec, v, target, type);
4753 return check_match(ec, pattern, target, type);
4758vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
4760 const VALUE kw_bits = *(ep - bits);
4763 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
4764 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4777 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4778 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4779 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4780 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4784 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4787 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4790 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4793 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4800vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
4805 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4806 return rb_public_const_get_at(cbase, id);
4814vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
4819 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4824 "superclass mismatch for class %"PRIsVALUE
"",
4837vm_check_if_module(ID id, VALUE mod)
4856vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4859 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4867vm_declare_module(ID id, VALUE cbase)
4873NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
4877 VALUE name = rb_id2str(id);
4880 VALUE location = rb_const_source_location_at(cbase, id);
4881 if (!NIL_P(location)) {
4882 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
4883 " previous definition of %"PRIsVALUE" was here",
4884 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
4890vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4894 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
4896 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
4900 vm_check_if_namespace(cbase);
4904 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
4905 if (!vm_check_if_class(id, flags, super, klass))
4906 unmatched_redefinition("class", cbase, id, klass);
4910 return vm_declare_class(id, flags, cbase, super);
4915vm_define_module(ID id, rb_num_t flags, VALUE cbase)
4919 vm_check_if_namespace(cbase);
4920 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
4921 if (!vm_check_if_module(id, mod))
4922 unmatched_redefinition("module", cbase, id, mod);
4926 return vm_declare_module(id, cbase);
4931vm_find_or_create_class_by_id(ID id,
4936 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
4939 case VM_DEFINECLASS_TYPE_CLASS:
4941 return vm_define_class(id, flags, cbase, super);
4943 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
4947 case VM_DEFINECLASS_TYPE_MODULE:
4949 return vm_define_module(id, flags, cbase);
4952 rb_bug("unknown defineclass type: %d", (int)type);
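/* Editorial note (not in the original source): vm_find_or_create_class_by_id
 * backs the `defineclass` instruction. For `class Foo < Bar; end` it either
 * reopens an existing constant (after vm_check_if_class verifies it is a
 * class with a matching superclass) or declares a new one via
 * vm_declare_class, which fires Class#inherited. */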
4956static rb_method_visibility_t
4961 if (!vm_env_cref_by_cref(cfp->ep)) {
4962 return METHOD_VISI_PUBLIC;
4965 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4974 if (!vm_env_cref_by_cref(cfp->ep)) {
4978 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4986 rb_method_visibility_t visi;
4991 visi = METHOD_VISI_PUBLIC;
4994 klass = CREF_CLASS_FOR_DEFINITION(cref);
4995 visi = vm_scope_visibility_get(ec);
5002 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5006 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
5009 if (!is_singleton && vm_scope_module_func_check(ec)) {
5011 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5021 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5023 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5024 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5027 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5035 return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);
5042 .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
5045 .call_ = vm_invokeblock_i,
5051# define mexp_search_method vm_search_method_wrap
5052# define mexp_search_super vm_search_super_method
5053# define mexp_search_invokeblock vm_search_invokeblock
5055enum method_explorer_type {
5057 mexp_search_invokeblock,
5071 VALUE block_handler,
5075 enum method_explorer_type method_explorer
5081 int argc = vm_ci_argc(ci);
5082 VALUE recv = TOPN(argc);
5084 .block_handler = block_handler,
5085 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5093 calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
5094 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5096 switch (method_explorer) {
5097 case mexp_search_method:
5098 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
5099 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5101 case mexp_search_super:
5102 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5103 calling.ci = cd->ci;
5104 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5106 case mexp_search_invokeblock:
5107 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5112 if (!UNDEF_P(val)) {
5126 if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
5127 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5128 return vm_exec(ec, true);
5130 else if (UNDEF_P(val = jit_exec(ec))) {
5131 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5132 return vm_exec(ec, false);
5140 return jit_exec(ec);
5176 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5182 val = rb_mod_to_s(recv);
5188 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5189 return rb_nil_to_s(recv);
5193 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5194 return rb_true_to_s(recv);
5198 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5199 return rb_false_to_s(recv);
5203 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5204 return rb_fix_to_s(recv);
5212vm_opt_str_freeze(VALUE str, int bop, ID id)
5214 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5228 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5233 VALUE result = *ptr;
5234 rb_snum_t i = num - 1;
5236 const VALUE v = *++ptr;
5237 if (OPTIMIZED_CMP(v, result) > 0) {
5252 return vm_opt_newarray_max(ec, num, ptr);
5258 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5263 VALUE result = *ptr;
5264 rb_snum_t i = num - 1;
5266 const VALUE v = *++ptr;
5267 if (OPTIMIZED_CMP(v, result) < 0) {
5282 return vm_opt_newarray_min(ec, num, ptr);
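/* Editorial note (not in the original source): vm_opt_newarray_max/min let
 * literal-array expressions such as `[a, b, c].max` run without allocating
 * the array: as long as Array#max/#min are unredefined, the candidate values
 * are scanned directly on the VM stack with OPTIMIZED_CMP. */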
5287#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5290vm_track_constant_cache(ID id, void *ic)
5292 struct rb_id_table *const_cache = GET_VM()->constant_cache;
5293 VALUE lookup_result;
5296 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
5300 ics = st_init_numtable();
5301 rb_id_table_insert(const_cache, id, (VALUE)ics);
5304 st_insert(ics, (st_data_t) ic, (st_data_t)Qtrue);
5312 for (int i = 0; segments[i]; i++) {
5313 ID id = segments[i];
5314 if (id == idNULL) continue;
5315 vm_track_constant_cache(id, ic);
5325 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5326 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5328 return (ic_cref == NULL ||
5329 ic_cref == vm_get_cref(reg_ep));
5337 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
5338 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
5343rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
5345 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
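/* Editorial note (not in the original source): an inline constant cache (IC)
 * entry stays valid while its recorded CREF matches the current lexical
 * scope; rb_vm_ic_hit_p is the cheap guard the JITs reuse.
 * vm_track_constant_cache above registers each IC under every ID in the
 * constant path, so a cache for `Foo::Bar` can be invalidated when either
 * name is reassigned. */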
5351 if (ruby_vm_const_missing_count > 0) {
5352 ruby_vm_const_missing_count = 0;
5359 ice->ic_cref = vm_get_const_key_cref(reg_ep);
5364 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
5365 rb_yjit_constant_ic_update(iseq, ic, pos);
5366 rb_mjit_constant_ic_update(iseq, ic, pos);
5376 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5377 return is->once.value;
5379 else if (is->once.running_thread == NULL) {
5381 is->once.running_thread = th;
5385 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
5388 else if (is->once.running_thread == th) {
5390 return vm_once_exec((VALUE)iseq);
5394 RUBY_VM_CHECK_INTS(ec);
5401vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
5403 switch (OBJ_BUILTIN_TYPE(key)) {
5409 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5410 SYMBOL_REDEFINED_OP_FLAG |
5411 INTEGER_REDEFINED_OP_FLAG |
5412 FLOAT_REDEFINED_OP_FLAG |
5413 NIL_REDEFINED_OP_FLAG |
5414 TRUE_REDEFINED_OP_FLAG |
5415 FALSE_REDEFINED_OP_FLAG |
5416 STRING_REDEFINED_OP_FLAG)) {
5420 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5424 if (rb_hash_stlike_lookup(hash, key, &val)) {
5444 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5445 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5446 static const char stack_consistency_error[] =
5447 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5448#if defined RUBY_DEVEL
5454 rb_bug(stack_consistency_error, nsp, nbp);
5461 if (FIXNUM_2_P(recv, obj) &&
5462 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5463 return rb_fix_plus_fix(recv, obj);
5465 else if (FLONUM_2_P(recv, obj) &&
5466 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5474 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5479 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5480 return rb_str_opt_plus(recv, obj);
5484 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5485 return rb_ary_plus(recv, obj);
5495 if (FIXNUM_2_P(recv, obj) &&
5496 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5497 return rb_fix_minus_fix(recv, obj);
5499 else if (FLONUM_2_P(recv, obj) &&
5500 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5508 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5519 if (FIXNUM_2_P(recv, obj) &&
5520 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5521 return rb_fix_mul_fix(recv, obj);
5523 else if (FLONUM_2_P(recv, obj) &&
5524 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5532 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5543 if (FIXNUM_2_P(recv, obj) &&
5544 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5545 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
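/* Editorial note (not in the original source): returning Qundef from these
 * vm_opt_* helpers means "not optimizable here"; the interpreter then falls
 * back to a regular method call. Integer division by zero takes that route
 * so the ordinary Integer#/ raises ZeroDivisionError. */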
5547 else if (FLONUM_2_P(recv, obj) &&
5548 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5549 return rb_flo_div_flo(recv, obj);
5556 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5557 return rb_flo_div_flo(recv, obj);
5567 if (FIXNUM_2_P(recv, obj) &&
5568 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5569 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
5571 else if (FLONUM_2_P(recv, obj) &&
5572 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5580 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5591 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5592 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5594 if (!UNDEF_P(val)) {
5595 return RBOOL(!RTEST(val));
5605 if (FIXNUM_2_P(recv, obj) &&
5606 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5609 else if (FLONUM_2_P(recv, obj) &&
5610 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5618 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5630 if (FIXNUM_2_P(recv, obj) &&
5631 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5634 else if (FLONUM_2_P(recv, obj) &&
5635 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5643 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5655 if (FIXNUM_2_P(recv, obj) &&
5656 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5659 else if (FLONUM_2_P(recv, obj) &&
5660 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5668 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5680 if (FIXNUM_2_P(recv, obj) &&
5681 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5684 else if (FLONUM_2_P(recv, obj) &&
5685 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5693 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5710 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5719 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5720 return rb_ary_push(recv, obj);
5737 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5748 if (FIXNUM_2_P(recv, obj) &&
5749 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5761 if (FIXNUM_2_P(recv, obj) &&
5762 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5763 return rb_fix_aref(recv, obj);
5768 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5770 return rb_ary_entry_internal(recv, FIX2LONG(obj));
5773 return rb_ary_aref1(recv, obj);
5777 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5778 return rb_hash_aref(recv, obj);
5792 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5794 rb_ary_store(recv, FIX2LONG(obj), set);
5798 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5799 rb_hash_aset(recv, obj, set);
5811 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5812 rb_hash_compare_by_id_p(recv) == Qfalse &&
5813 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
5814 return rb_hash_aref(recv, key);
5825 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5826 rb_hash_compare_by_id_p(recv) == Qfalse) {
5827 return rb_hash_aset(recv, key, val);
5835vm_opt_length(VALUE recv, int bop)
5841 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5842 if (bop == BOP_EMPTY_P) {
5850 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5854 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5863vm_opt_empty_p(VALUE recv)
5865 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5878 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5881 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5897 case RSHIFT(~0UL, 1):
5900 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
5918vm_opt_succ(VALUE recv)
5921 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5922 return fix_succ(recv);
5928 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5939 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5940 return RBOOL(!RTEST(recv));
5955 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5959 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5977 VALUE self = GET_SELF();
5979 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
5981 if (event & global_hooks->events) {
5984 vm_dtrace(event, ec);
5985 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
5991 if (local_hooks != NULL) {
5992 if (event & local_hooks->events) {
5995 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
6007 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
6008 case BIN(opt_nil_p):
6009 return check_cfunc(vm_cc_cme(cc), rb_false);
6011 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
6017#define VM_TRACE_HOOK(target_event, val) do { \
6018 if ((pc_events & (target_event)) & enabled_flags) { \
6019 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6026 const VALUE *pc = reg_cfp->pc;
6027 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6030 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6036 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6039 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6040 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6044 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6045 enabled_flags |= iseq_local_events;
6047 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6049 if (bmethod_frame) {
6051 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6052 bmethod_local_hooks = me->def->body.bmethod.hooks;
6053 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6054 if (bmethod_local_hooks) {
6055 bmethod_local_events = bmethod_local_hooks->events;
6060 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6064 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6072 else if (ec->trace_arg != NULL) {
6080 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6083 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6087 (int)rb_iseq_line_no(iseq, pos),
6090 VM_ASSERT(reg_cfp->pc == pc);
6091 VM_ASSERT(pc_events != 0);
6100 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6101 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6119#if VM_CHECK_MODE > 0
6120NORETURN( NOINLINE( COLDFUNC
6121void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6124Init_vm_stack_canary(void)
6127 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
6128 vm_stack_canary |= 0x01;
6130 vm_stack_canary_was_born = true;
6135MJIT_FUNC_EXPORTED void
6136rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
6140 const char *insn = rb_insns_name(i);
6144 rb_bug("dead canary found at %s: %s", insn, str);
6149void Init_vm_stack_canary(void) { }
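/* Editorial note (not in the original source): under VM_CHECK_MODE the VM
 * seeds a randomized canary word (forced odd, so it can never be a valid
 * VALUE pointer) into unused stack slots; finding it overwritten at an
 * instruction boundary triggers rb_vm_canary_is_found_dead, turning silent
 * stack corruption into an immediate interpreter bug report. */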
6181 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
6188 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
6195 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
6202 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
6209 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
6216 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
6223 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
6230 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
6237 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
6243 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
6244 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
6250 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
6251 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
6257 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
6258 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
6264 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
6265 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
6271 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
6272 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
6278 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
6279 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
6285 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
6286 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6291static builtin_invoker
6292lookup_builtin_invoker(int argc)
6294 static const builtin_invoker invokers[] = {
6313 return invokers[argc];
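/* Editorial note (not in the original source): builtin functions (defined
 * with __builtin annotations in .rb sources) are invoked through this
 * arity-indexed table of trampolines, one per argc up to 15, so the call
 * avoids any varargs packing; each trampoline simply spreads argv into a
 * fixed-arity C call. */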
6319 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p;
6320 SETUP_CANARY(canary_p);
6321 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6322 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6329 return invoke_bf(ec, cfp, bf, argv);
6336 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
6337 for (int i=0; i<bf->argc; i++) {
6338 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6340 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6343 if (bf->argc == 0) {
6344 return invoke_bf(ec, cfp, bf, NULL);
6347 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6348 return invoke_bf(ec, cfp, bf, argv);
6358 return cfp->ep[index];