Ruby 2.7.6p219 (2022-04-12 revision c9c2245c0a25176072e02db9254f0e0c84c805cd)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11/* finish iseq array */
12#include "insns.inc"
13#ifndef MJIT_HEADER
14#include "insns_info.inc"
15#endif
16#include <math.h>
17#include "constant.h"
18#include "internal.h"
19#include "ruby/config.h"
20#include "debug_counter.h"
21#include "variable.h"
22
24extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
26extern VALUE rb_make_no_method_exception(VALUE exc, VALUE errmsg, VALUE obj,
27 int argc, const VALUE *argv, int priv);
28
29/* control stack frame */
30
31static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
32
33MJIT_STATIC VALUE
34ruby_vm_special_exception_copy(VALUE exc)
35{
36 VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
37 rb_obj_copy_ivar(e, exc);
38 return e;
39}
40
41NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
42static void
43ec_stack_overflow(rb_execution_context_t *ec, int setup)
44{
45 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
46 ec->raised_flag = RAISED_STACKOVERFLOW;
47 if (setup) {
48 VALUE at = rb_ec_backtrace_object(ec);
49 mesg = ruby_vm_special_exception_copy(mesg);
50 rb_ivar_set(mesg, idBt, at);
51 rb_ivar_set(mesg, idBt_locations, at);
52 }
53 ec->errinfo = mesg;
54 EC_JUMP_TAG(ec, TAG_RAISE);
55}
56
57NORETURN(static void vm_stackoverflow(void));
58
59static void
60vm_stackoverflow(void)
61{
62 ec_stack_overflow(GET_EC(), TRUE);
63}
64
66MJIT_STATIC void
67rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
68{
69 if (crit || rb_during_gc()) {
70 ec->raised_flag = RAISED_STACKOVERFLOW;
71 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
72 EC_JUMP_TAG(ec, TAG_RAISE);
73 }
74#ifdef USE_SIGALTSTACK
75 ec_stack_overflow(ec, TRUE);
76#else
77 ec_stack_overflow(ec, FALSE);
78#endif
79}
80
81
82#if VM_CHECK_MODE > 0
83static int
84callable_class_p(VALUE klass)
85{
86#if VM_CHECK_MODE >= 2
87 if (!klass) return FALSE;
88 switch (RB_BUILTIN_TYPE(klass)) {
89 case T_ICLASS:
90 if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
91 case T_MODULE:
92 return TRUE;
93 }
94 while (klass) {
95 if (klass == rb_cBasicObject) {
96 return TRUE;
97 }
98 klass = RCLASS_SUPER(klass);
99 }
100 return FALSE;
101#else
102 return klass != 0;
103#endif
104}
105
106static int
107callable_method_entry_p(const rb_callable_method_entry_t *me)
108{
109 if (me == NULL || callable_class_p(me->defined_class)) {
110 return TRUE;
111 }
112 else {
113 return FALSE;
114 }
115}
116
117static void
118vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
119{
120 unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
121 enum imemo_type cref_or_me_type = imemo_env; /* impossible value */
122
123 if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
124 cref_or_me_type = imemo_type(cref_or_me);
125 }
126 if (type & VM_FRAME_FLAG_BMETHOD) {
127 req_me = TRUE;
128 }
129
130 if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
131 rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
132 }
133 if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
134 rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
135 }
136
137 if (req_me) {
138 if (cref_or_me_type != imemo_ment) {
139 rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
140 }
141 }
142 else {
143 if (req_cref && cref_or_me_type != imemo_cref) {
144 rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
145 }
146 else { /* cref or Qfalse */
147 if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
148 if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
149 /* ignore */
150 }
151 else {
152 rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
153 }
154 }
155 }
156 }
157
158 if (cref_or_me_type == imemo_ment) {
159 const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;
160
161 if (!callable_method_entry_p(me)) {
162 rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
163 }
164 }
165
166 if (is_cframe) {
167 VM_ASSERT(iseq == NULL ||
168 RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error. it should be fixed */);
169 }
170 else {
171 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
172 }
173}
174
175static void
176vm_check_frame(VALUE type,
177 VALUE specval,
178 VALUE cref_or_me,
179 const rb_iseq_t *iseq)
180{
181 VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
183
184#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
185 case magic: \
186 vm_check_frame_detail(type, req_block, req_me, req_cref, \
187 specval, cref_or_me, is_cframe, iseq); \
188 break
189 switch (given_magic) {
190 /* BLK ME CREF CFRAME */
200 default:
201 rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
202 }
203#undef CHECK
204}
205
206static VALUE vm_stack_canary; /* Initialized later */
207static bool vm_stack_canary_was_born = false;
208
209static void
210vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
211{
212 const struct rb_control_frame_struct *reg_cfp = ec->cfp;
213 const struct rb_iseq_struct *iseq;
214
215 if (! LIKELY(vm_stack_canary_was_born)) {
216 return; /* :FIXME: isn't it rather fatal to enter this branch? */
217 }
218 else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
219 /* This is at the very beginning of a thread. cfp does not exist. */
220 return;
221 }
222 else if (! (iseq = GET_ISEQ())) {
223 return;
224 }
225 else if (LIKELY(sp[0] != vm_stack_canary)) {
226 return;
227 }
228 else {
229 /* we are going to call methods below; squash the canary to
230 * prevent infinite loop. */
231 sp[0] = Qundef;
232 }
233
234 const VALUE *orig = rb_iseq_original_iseq(iseq);
235 const VALUE *encoded = iseq->body->iseq_encoded;
236 const ptrdiff_t pos = GET_PC() - encoded;
237 const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
238 const char *name = insn_name(insn);
239 const VALUE iseqw = rb_iseqw_new(iseq);
240 const VALUE inspection = rb_inspect(iseqw);
241 const char *stri = rb_str_to_cstr(inspection);
242 const VALUE disasm = rb_iseq_disasm(iseq);
243 const char *strd = rb_str_to_cstr(disasm);
244
245 /* rb_bug() is not capable of outputting this large contents. It
246 is designed to run from a SIGSEGV handler, which tends to be
247 very restricted. */
248 fprintf(stderr,
249 "We are killing the stack canary set by %s, "
250 "at %s@pc=%"PRIdPTR"\n"
251 "watch out the C stack trace.\n"
252 "%s",
253 name, stri, pos, strd);
254 rb_bug("see above.");
255}
256#else
257#define vm_check_canary(ec, sp)
258#define vm_check_frame(a, b, c, d)
259#endif /* VM_CHECK_MODE > 0 */
260
261static inline rb_control_frame_t *
262vm_push_frame(rb_execution_context_t *ec,
263 const rb_iseq_t *iseq,
264 VALUE type,
265 VALUE self,
266 VALUE specval,
267 VALUE cref_or_me,
268 const VALUE *pc,
269 VALUE *sp,
270 int local_size,
271 int stack_max)
272{
273 rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);
274
275 vm_check_frame(type, specval, cref_or_me, iseq);
276 VM_ASSERT(local_size >= 0);
277
278 /* check stack overflow */
279 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
280 vm_check_canary(ec, sp);
281
282 ec->cfp = cfp;
283
284 /* setup new frame */
285 cfp->pc = (VALUE *)pc;
286 cfp->iseq = (rb_iseq_t *)iseq;
287 cfp->self = self;
288 cfp->block_code = NULL;
289
290 /* setup vm value stack */
291
292 /* initialize local variables */
293 for (int i=0; i < local_size; i++) {
294 *sp++ = Qnil;
295 }
296
297 /* setup ep with managing data */
301 *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
302 *sp++ = specval /* ep[-1] / block handler or prev env ptr */;
303 *sp = type; /* ep[-0] / ENV_FLAGS */
304
305 /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
306 cfp->ep = sp;
307 cfp->__bp__ = cfp->sp = sp + 1;
308
309#if VM_DEBUG_BP_CHECK
310 cfp->bp_check = sp + 1;
311#endif
312
313 if (VMDEBUG == 2) {
314 SDR();
315 }
316
317#if USE_DEBUG_COUNTER
318 RB_DEBUG_COUNTER_INC(frame_push);
319 switch (type & VM_FRAME_MAGIC_MASK) {
320 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); break;
321 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block); break;
322 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class); break;
323 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top); break;
324 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc); break;
325 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc); break;
326 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval); break;
327 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); break;
328 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy); break;
329 default: rb_bug("unreachable");
330 }
331 {
332 rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
333 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
334 int cur_ruby_frame = VM_FRAME_RUBYFRAME_P(cfp);
335 int pre_ruby_frame = VM_FRAME_RUBYFRAME_P(prev_cfp);
336
337 pre_ruby_frame ? (cur_ruby_frame ? RB_DEBUG_COUNTER_INC(frame_R2R) :
338 RB_DEBUG_COUNTER_INC(frame_R2C)):
339 (cur_ruby_frame ? RB_DEBUG_COUNTER_INC(frame_C2R) :
340 RB_DEBUG_COUNTER_INC(frame_C2C));
341 }
342 }
343#endif
344
345 return cfp;
346}
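A quick annotation of the frame layout produced above (a sketch derived from the push sequence, not an authoritative diagram):
/* low address ................................................. high address
 *   [local 0 .. local_size-1 = Qnil][cref_or_me][specval][type]
 *                                      ep[-2]     ep[-1]   ep[0]
 * with cfp->ep == sp and cfp->sp == cfp->__bp__ == ep + 1 right after the push.
 */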
347
348/* return TRUE if the frame is finished */
349static inline int
350vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
351{
352 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
353
354 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
355 if (VMDEBUG == 2) SDR();
356
357 RUBY_VM_CHECK_INTS(ec);
358 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
359
360 return flags & VM_FRAME_FLAG_FINISH;
361}
362
363MJIT_STATIC void
364rb_vm_pop_frame(rb_execution_context_t *ec)
365{
366 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
367}
368
369/* method dispatch */
370static inline VALUE
371rb_arity_error_new(int argc, int min, int max)
372{
373 VALUE err_mess = 0;
374 if (min == max) {
375 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);
376 }
377 else if (max == UNLIMITED_ARGUMENTS) {
378 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);
379 }
380 else {
381 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
382 }
383 return rb_exc_new3(rb_eArgError, err_mess);
384}
385
386MJIT_STATIC void
387rb_error_arity(int argc, int min, int max)
388{
389 rb_exc_raise(rb_arity_error_new(argc, min, max));
390}
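The three sprintf branches above map directly onto the familiar ArgumentError texts; a minimal sketch with illustrative values:
/* rb_arity_error_new(1, 2, 2)                   => "wrong number of arguments (given 1, expected 2)"    */
/* rb_arity_error_new(1, 2, UNLIMITED_ARGUMENTS) => "wrong number of arguments (given 1, expected 2+)"   */
/* rb_arity_error_new(5, 2, 3)                   => "wrong number of arguments (given 5, expected 2..3)" */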
391
392/* lvar */
393
394NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
395
396static void
397vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
398{
399 /* remember env value forcely */
400 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
401 VM_FORCE_WRITE(&ep[index], v);
402 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
403 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
404}
405
406static inline void
407vm_env_write(const VALUE *ep, int index, VALUE v)
408{
409 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
410 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
411 VM_STACK_ENV_WRITE(ep, index, v);
412 }
413 else {
414 vm_env_write_slowpath(ep, index, v);
415 }
416}
417
418MJIT_STATIC VALUE
419rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
420{
421 if (block_handler == VM_BLOCK_HANDLER_NONE) {
422 return Qnil;
423 }
424 else {
425 switch (vm_block_handler_type(block_handler)) {
426 case block_handler_type_iseq:
427 case block_handler_type_ifunc:
428 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
429 case block_handler_type_symbol:
430 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
431 case block_handler_type_proc:
432 return VM_BH_TO_PROC(block_handler);
433 default:
434 VM_UNREACHABLE(rb_vm_bh_to_procval);
435 }
436 }
437}
438
439/* svar */
440
441#if VM_CHECK_MODE > 0
442static int
443vm_svar_valid_p(VALUE svar)
444{
445 if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
446 switch (imemo_type(svar)) {
447 case imemo_svar:
448 case imemo_cref:
449 case imemo_ment:
450 return TRUE;
451 default:
452 break;
453 }
454 }
455 rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
456 return FALSE;
457}
458#endif
459
460static inline struct vm_svar *
461lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
462{
463 VALUE svar;
464
465 if (lep && (ec == NULL || ec->root_lep != lep)) {
466 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
467 }
468 else {
469 svar = ec->root_svar;
470 }
471
472 VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
473
474 return (struct vm_svar *)svar;
475}
476
477static inline void
478lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
479{
480 VM_ASSERT(vm_svar_valid_p((VALUE)svar));
481
482 if (lep && (ec == NULL || ec->root_lep != lep)) {
483 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
484 }
485 else {
486 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
487 }
488}
489
490static VALUE
491lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
492{
493 const struct vm_svar *svar = lep_svar(ec, lep);
494
495 if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
496
497 switch (key) {
498 case VM_SVAR_LASTLINE:
499 return svar->lastline;
500 case VM_SVAR_BACKREF:
501 return svar->backref;
502 default: {
503 const VALUE ary = svar->others;
504
505 if (NIL_P(ary)) {
506 return Qnil;
507 }
508 else {
509 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
510 }
511 }
512 }
513}
514
515static struct vm_svar *
516svar_new(VALUE obj)
517{
518 return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
519}
520
521static void
522lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
523{
524 struct vm_svar *svar = lep_svar(ec, lep);
525
526 if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
527 lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
528 }
529
530 switch (key) {
531 case VM_SVAR_LASTLINE:
532 RB_OBJ_WRITE(svar, &svar->lastline, val);
533 return;
534 case VM_SVAR_BACKREF:
535 RB_OBJ_WRITE(svar, &svar->backref, val);
536 return;
537 default: {
538 VALUE ary = svar->others;
539
540 if (NIL_P(ary)) {
541 RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
542 }
543 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
544 }
545 }
546}
547
548static inline VALUE
549vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
550{
551 VALUE val;
552
553 if (type == 0) {
554 val = lep_svar_get(ec, lep, key);
555 }
556 else {
557 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
558
559 if (type & 0x01) {
560 switch (type >> 1) {
561 case '&':
562 val = rb_reg_last_match(backref);
563 break;
564 case '`':
565 val = rb_reg_match_pre(backref);
566 break;
567 case '\'':
568 val = rb_reg_match_post(backref);
569 break;
570 case '+':
571 val = rb_reg_match_last(backref);
572 break;
573 default:
574 rb_bug("unexpected back-ref");
575 }
576 }
577 else {
578 val = rb_reg_nth_match((int)(type >> 1), backref);
579 }
580 }
581 return val;
582}
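How the key/type operands are decoded above, assuming the usual mapping to Ruby's regexp specials:
/* type == 0            : read an svar slot via lep_svar_get ($_, $~ and the numbered extras)       */
/* type & 0x01          : named back-ref; (type >> 1) is '&', '`', '\'' or '+', i.e. $&, $`, $', $+ */
/* type even, type != 0 : numbered group; e.g. (type >> 1) == 3 reads $3 through rb_reg_nth_match   */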
583
584PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
585static rb_callable_method_entry_t *
586check_method_entry(VALUE obj, int can_be_svar)
587{
588 if (obj == Qfalse) return NULL;
589
590#if VM_CHECK_MODE > 0
591 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
592#endif
593
594 switch (imemo_type(obj)) {
595 case imemo_ment:
596 return (rb_callable_method_entry_t *)obj;
597 case imemo_cref:
598 return NULL;
599 case imemo_svar:
600 if (can_be_svar) {
601 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
602 }
603 default:
604#if VM_CHECK_MODE > 0
605 rb_bug("check_method_entry: svar should not be there:");
606#endif
607 return NULL;
608 }
609}
610
611MJIT_STATIC const rb_callable_method_entry_t *
612rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
613{
614 const VALUE *ep = cfp->ep;
615 rb_callable_method_entry_t *me;
616
617 while (!VM_ENV_LOCAL_P(ep)) {
618 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
619 ep = VM_ENV_PREV_EP(ep);
620 }
621
622 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
623}
624
625static rb_cref_t *
626method_entry_cref(rb_callable_method_entry_t *me)
627{
628 switch (me->def->type) {
629 case VM_METHOD_TYPE_ISEQ:
630 return me->def->body.iseq.cref;
631 default:
632 return NULL;
633 }
634}
635
636#if VM_CHECK_MODE == 0
637PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
638#endif
639static rb_cref_t *
640check_cref(VALUE obj, int can_be_svar)
641{
642 if (obj == Qfalse) return NULL;
643
644#if VM_CHECK_MODE > 0
645 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
646#endif
647
648 switch (imemo_type(obj)) {
649 case imemo_ment:
650 return method_entry_cref((rb_callable_method_entry_t *)obj);
651 case imemo_cref:
652 return (rb_cref_t *)obj;
653 case imemo_svar:
654 if (can_be_svar) {
655 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
656 }
657 default:
658#if VM_CHECK_MODE > 0
659 rb_bug("check_method_entry: svar should not be there:");
660#endif
661 return NULL;
662 }
663}
664
665static inline rb_cref_t *
666vm_env_cref(const VALUE *ep)
667{
668 rb_cref_t *cref;
669
670 while (!VM_ENV_LOCAL_P(ep)) {
671 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
672 ep = VM_ENV_PREV_EP(ep);
673 }
674
675 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
676}
677
678static int
679is_cref(const VALUE v, int can_be_svar)
680{
681 if (RB_TYPE_P(v, T_IMEMO)) {
682 switch (imemo_type(v)) {
683 case imemo_cref:
684 return TRUE;
685 case imemo_svar:
686 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
687 default:
688 break;
689 }
690 }
691 return FALSE;
692}
693
694static int
695vm_env_cref_by_cref(const VALUE *ep)
696{
697 while (!VM_ENV_LOCAL_P(ep)) {
698 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
699 ep = VM_ENV_PREV_EP(ep);
700 }
701 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
702}
703
704static rb_cref_t *
705cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
706{
707 const VALUE v = *vptr;
708 rb_cref_t *cref, *new_cref;
709
710 if (RB_TYPE_P(v, T_IMEMO)) {
711 switch (imemo_type(v)) {
712 case imemo_cref:
713 cref = (rb_cref_t *)v;
714 new_cref = vm_cref_dup(cref);
715 if (parent) {
716 RB_OBJ_WRITE(parent, vptr, new_cref);
717 }
718 else {
719 VM_FORCE_WRITE(vptr, (VALUE)new_cref);
720 }
721 return (rb_cref_t *)new_cref;
722 case imemo_svar:
723 if (can_be_svar) {
724 return cref_replace_with_duplicated_cref_each_frame((const VALUE *)&((struct vm_svar *)v)->cref_or_me, FALSE, v);
725 }
726 /* fall through */
727 case imemo_ment:
728 rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
729 default:
730 break;
731 }
732 }
733 return FALSE;
734}
735
736static rb_cref_t *
737vm_cref_replace_with_duplicated_cref(const VALUE *ep)
738{
739 if (vm_env_cref_by_cref(ep)) {
740 rb_cref_t *cref;
741 VALUE envval;
742
743 while (!VM_ENV_LOCAL_P(ep)) {
744 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
745 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
746 return cref;
747 }
748 ep = VM_ENV_PREV_EP(ep);
749 }
750 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
751 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
752 }
753 else {
754 rb_bug("vm_cref_dup: unreachable");
755 }
756}
757
758static rb_cref_t *
759vm_get_cref(const VALUE *ep)
760{
761 rb_cref_t *cref = vm_env_cref(ep);
762
763 if (cref != NULL) {
764 return cref;
765 }
766 else {
767 rb_bug("vm_get_cref: unreachable");
768 }
769}
770
771static rb_cref_t *
772vm_ec_cref(const rb_execution_context_t *ec)
773{
774 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
775
776 if (cfp == NULL) {
777 return NULL;
778 }
779 return vm_get_cref(cfp->ep);
780}
781
782static const rb_cref_t *
783vm_get_const_key_cref(const VALUE *ep)
784{
785 const rb_cref_t *cref = vm_get_cref(ep);
786 const rb_cref_t *key_cref = cref;
787
788 while (cref) {
789 if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
790 FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
791 return key_cref;
792 }
793 cref = CREF_NEXT(cref);
794 }
795
796 /* does not include singleton class */
797 return NULL;
798}
799
800void
801rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
802{
803 rb_cref_t *new_cref;
804
805 while (cref) {
806 if (CREF_CLASS(cref) == old_klass) {
807 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
808 *new_cref_ptr = new_cref;
809 return;
810 }
811 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
812 cref = CREF_NEXT(cref);
813 *new_cref_ptr = new_cref;
814 new_cref_ptr = (rb_cref_t **)&new_cref->next;
815 }
816 *new_cref_ptr = NULL;
817}
818
819static rb_cref_t *
820vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval)
821{
822 rb_cref_t *prev_cref = NULL;
823
824 if (ep) {
825 prev_cref = vm_env_cref(ep);
826 }
827 else {
828 rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
829
830 if (cfp) {
831 prev_cref = vm_env_cref(cfp->ep);
832 }
833 }
834
835 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval);
836}
837
838static inline VALUE
839vm_get_cbase(const VALUE *ep)
840{
841 const rb_cref_t *cref = vm_get_cref(ep);
842 VALUE klass = Qundef;
843
844 while (cref) {
845 if ((klass = CREF_CLASS(cref)) != 0) {
846 break;
847 }
848 cref = CREF_NEXT(cref);
849 }
850
851 return klass;
852}
853
854static inline VALUE
855vm_get_const_base(const VALUE *ep)
856{
857 const rb_cref_t *cref = vm_get_cref(ep);
858 VALUE klass = Qundef;
859
860 while (cref) {
861 if (!CREF_PUSHED_BY_EVAL(cref) &&
862 (klass = CREF_CLASS(cref)) != 0) {
863 break;
864 }
865 cref = CREF_NEXT(cref);
866 }
867
868 return klass;
869}
870
871static inline void
872vm_check_if_namespace(VALUE klass)
873{
874 if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
875 rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
876 }
877}
878
879static inline void
880vm_ensure_not_refinement_module(VALUE self)
881{
882 if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
883 rb_warn("not defined at the refinement, but at the outer class/module");
884 }
885}
886
887static inline VALUE
888vm_get_iclass(rb_control_frame_t *cfp, VALUE klass)
889{
890 return klass;
891}
892
893static inline VALUE
894vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
895{
897 VALUE val;
898
899 if (orig_klass == Qnil && allow_nil) {
900 /* in current lexical scope */
901 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
902 const rb_cref_t *cref;
903 VALUE klass = Qnil;
904
905 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
906 root_cref = CREF_NEXT(root_cref);
907 }
908 cref = root_cref;
909 while (cref && CREF_NEXT(cref)) {
910 if (CREF_PUSHED_BY_EVAL(cref)) {
911 klass = Qnil;
912 }
913 else {
914 klass = CREF_CLASS(cref);
915 }
916 cref = CREF_NEXT(cref);
917
918 if (!NIL_P(klass)) {
919 VALUE av, am = 0;
920 rb_const_entry_t *ce;
921 search_continue:
922 if ((ce = rb_const_lookup(klass, id))) {
924 val = ce->value;
925 if (val == Qundef) {
926 if (am == klass) break;
927 am = klass;
928 if (is_defined) return 1;
929 if (rb_autoloading_value(klass, id, &av, NULL)) return av;
930 rb_autoload_load(klass, id);
931 goto search_continue;
932 }
933 else {
934 if (is_defined) {
935 return 1;
936 }
937 else {
938 return val;
939 }
940 }
941 }
942 }
943 }
944
945 /* search self */
946 if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
947 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
948 }
949 else {
950 klass = CLASS_OF(ec->cfp->self);
951 }
952
953 if (is_defined) {
954 return rb_const_defined(klass, id);
955 }
956 else {
957 return rb_const_get(klass, id);
958 }
959 }
960 else {
961 vm_check_if_namespace(orig_klass);
962 if (is_defined) {
963 return rb_public_const_defined_from(orig_klass, id);
964 }
965 else {
966 return rb_public_const_get_from(orig_klass, id);
967 }
968 }
969}
970
971static inline VALUE
972vm_get_cvar_base(const rb_cref_t *cref, rb_control_frame_t *cfp)
973{
974 VALUE klass;
975
976 if (!cref) {
977 rb_bug("vm_get_cvar_base: no cref");
978 }
979
980 while (CREF_NEXT(cref) &&
981 (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
982 CREF_PUSHED_BY_EVAL(cref))) {
983 cref = CREF_NEXT(cref);
984 }
985 if (!CREF_NEXT(cref)) {
986 rb_warn("class variable access from toplevel");
987 }
988
989 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
990
991 if (NIL_P(klass)) {
992 rb_raise(rb_eTypeError, "no class variables available");
993 }
994 return klass;
995}
996
997static VALUE
998vm_search_const_defined_class(const VALUE cbase, ID id)
999{
1000 if (rb_const_defined_at(cbase, id)) return cbase;
1001 if (cbase == rb_cObject) {
1002 VALUE tmp = RCLASS_SUPER(cbase);
1003 while (tmp) {
1004 if (rb_const_defined_at(tmp, id)) return tmp;
1005 tmp = RCLASS_SUPER(tmp);
1006 }
1007 }
1008 return 0;
1009}
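An illustration of the rb_cObject special case above (assumed behaviour): modules included into Object sit in its RCLASS_SUPER chain as iclasses, so their constants still count as defined at toplevel:
/* module M; X = 1; end; class Object; include M; end                                  */
/* vm_search_const_defined_class(rb_cObject, rb_intern("X")) returns M's iclass, not 0. */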
1010
1011ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, IVC, struct rb_call_cache *, int));
1012static inline VALUE
1013vm_getivar(VALUE obj, ID id, IVC ic, struct rb_call_cache *cc, int is_attr)
1014{
1015#if OPT_IC_FOR_IVAR
1016 VALUE val = Qundef;
1017
1018 if (SPECIAL_CONST_P(obj)) {
1019 // frozen?
1020 }
1021 else if (LIKELY(is_attr ?
1022 RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, cc->aux.index > 0) :
1023 RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
1024 ic->ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
1025 st_index_t index = !is_attr ? ic->index : (cc->aux.index - 1);
1026
1027 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1028
1029 if (LIKELY(BUILTIN_TYPE(obj) == T_OBJECT) &&
1030 LIKELY(index < ROBJECT_NUMIV(obj))) {
1031 val = ROBJECT_IVPTR(obj)[index];
1032 }
1033 else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
1034 struct gen_ivtbl *ivtbl;
1035
1037 LIKELY(index < ivtbl->numiv)) {
1038 val = ivtbl->ivptr[index];
1039 }
1040 }
1041 goto ret;
1042 }
1043 else {
1044 struct st_table *iv_index_tbl;
1045 st_index_t numiv;
1046 VALUE *ivptr;
1047
1048 st_data_t index;
1049
1050 if (BUILTIN_TYPE(obj) == T_OBJECT) {
1051 iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1052 numiv = ROBJECT_NUMIV(obj);
1053 ivptr = ROBJECT_IVPTR(obj);
1054
1055 fill:
1056 if (iv_index_tbl) {
1057 if (st_lookup(iv_index_tbl, id, &index)) {
1058 if (!is_attr) {
1059 ic->index = index;
1061 }
1062 else { /* call_info */
1063 cc->aux.index = (int)index + 1;
1064 }
1065
1066 if (index < numiv) {
1067 val = ivptr[index];
1068 }
1069 }
1070 }
1071 }
1072 else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
1073 struct gen_ivtbl *ivtbl;
1074
1076 numiv = ivtbl->numiv;
1077 ivptr = ivtbl->ivptr;
1078 iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
1079 goto fill;
1080 }
1081 }
1082 else {
1083 // T_CLASS / T_MODULE
1084 goto general_path;
1085 }
1086
1087 ret:
1088 if (LIKELY(val != Qundef)) {
1089 return val;
1090 }
1091 else {
1092 if (!is_attr && RTEST(ruby_verbose)) {
1093 rb_warning("instance variable %"PRIsVALUE" not initialized", QUOTE_ID(id));
1094 }
1095 return Qnil;
1096 }
1097 }
1098 general_path:
1099#endif /* OPT_IC_FOR_IVAR */
1100 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1101
1102 if (is_attr) {
1103 return rb_attr_get(obj, id);
1104 }
1105 else {
1106 return rb_ivar_get(obj, id);
1107 }
1108}
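In short, the inline-cache fast path above turns a repeated ivar read into one bounds-checked array load; a sketch of the cached state:
/* ic->ic_serial : class serial captured when the slot index was learned       */
/* ic->index     : slot of the ivar inside ROBJECT_IVPTR(obj)                  */
/* hit  : serial still matches             => val = ROBJECT_IVPTR(obj)[index]  */
/* miss : fall through to the general path => rb_ivar_get() / rb_attr_get()    */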
1109
1110static inline VALUE
1111vm_setivar(VALUE obj, ID id, VALUE val, IVC ic, struct rb_call_cache *cc, int is_attr)
1112{
1113#if OPT_IC_FOR_IVAR
1115
1116 if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) {
1117 VALUE klass = RBASIC(obj)->klass;
1118 st_data_t index;
1119
1120 if (LIKELY(
1121 (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->ic_serial == RCLASS_SERIAL(klass))) ||
1122 ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, cc->aux.index > 0)))) {
1123 VALUE *ptr = ROBJECT_IVPTR(obj);
1124 index = !is_attr ? ic->index : cc->aux.index-1;
1125
1126 if (RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_oorange, index < ROBJECT_NUMIV(obj))) {
1127 RB_OBJ_WRITE(obj, &ptr[index], val);
1128 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1129 return val; /* inline cache hit */
1130 }
1131 }
1132 else {
1133 struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1134
1135 if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) {
1136 if (!is_attr) {
1137 ic->index = index;
1139 }
1140 else if (index >= INT_MAX) {
1141 rb_raise(rb_eArgError, "too many instance variables");
1142 }
1143 else {
1144 cc->aux.index = (int)(index + 1);
1145 }
1146 }
1147 /* fall through */
1148 }
1149 }
1150 else {
1151 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1152 }
1153#endif /* OPT_IC_FOR_IVAR */
1154 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1155 return rb_ivar_set(obj, id, val);
1156}
1157
1158static inline VALUE
1159vm_getinstancevariable(VALUE obj, ID id, IVC ic)
1160{
1161 return vm_getivar(obj, id, ic, NULL, FALSE);
1162}
1163
1164static inline void
1165vm_setinstancevariable(VALUE obj, ID id, VALUE val, IVC ic)
1166{
1167 vm_setivar(obj, id, val, ic, 0, 0);
1168}
1169
1170static VALUE
1171vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
1172{
1173 /* continue throw */
1174
1175 if (FIXNUM_P(err)) {
1176 ec->tag->state = FIX2INT(err);
1177 }
1178 else if (SYMBOL_P(err)) {
1179 ec->tag->state = TAG_THROW;
1180 }
1181 else if (THROW_DATA_P(err)) {
1182 ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1183 }
1184 else {
1185 ec->tag->state = TAG_RAISE;
1186 }
1187 return err;
1188}
1189
1190static VALUE
1191vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
1192 const int flag, const VALUE throwobj)
1193{
1194 const rb_control_frame_t *escape_cfp = NULL;
1195 const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */
1196
1197 if (flag != 0) {
1198 /* do nothing */
1199 }
1200 else if (state == TAG_BREAK) {
1201 int is_orphan = 1;
1202 const VALUE *ep = GET_EP();
1203 const rb_iseq_t *base_iseq = GET_ISEQ();
1204 escape_cfp = reg_cfp;
1205
1206 while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
1207 if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1208 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1209 ep = escape_cfp->ep;
1210 base_iseq = escape_cfp->iseq;
1211 }
1212 else {
1213 ep = VM_ENV_PREV_EP(ep);
1214 base_iseq = base_iseq->body->parent_iseq;
1215 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1216 VM_ASSERT(escape_cfp->iseq == base_iseq);
1217 }
1218 }
1219
1220 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1221 /* lambda{... break ...} */
1222 is_orphan = 0;
1223 state = TAG_RETURN;
1224 }
1225 else {
1226 ep = VM_ENV_PREV_EP(ep);
1227
1228 while (escape_cfp < eocfp) {
1229 if (escape_cfp->ep == ep) {
1230 const rb_iseq_t *const iseq = escape_cfp->iseq;
1231 const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
1232 const struct iseq_catch_table *const ct = iseq->body->catch_table;
1233 unsigned int i;
1234
1235 if (!ct) break;
1236 for (i=0; i < ct->size; i++) {
1237 const struct iseq_catch_table_entry *const entry =
1238 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1239
1240 if (entry->type == CATCH_TYPE_BREAK &&
1241 entry->iseq == base_iseq &&
1242 entry->start < epc && entry->end >= epc) {
1243 if (entry->cont == epc) { /* found! */
1244 is_orphan = 0;
1245 }
1246 break;
1247 }
1248 }
1249 break;
1250 }
1251
1252 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1253 }
1254 }
1255
1256 if (is_orphan) {
1257 rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
1258 }
1259 }
1260 else if (state == TAG_RETRY) {
1261 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1262
1263 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1264 }
1265 else if (state == TAG_RETURN) {
1266 const VALUE *current_ep = GET_EP();
1267 const VALUE *target_lep = VM_EP_LEP(current_ep);
1268 int in_class_frame = 0;
1269 int toplevel = 1;
1270 escape_cfp = reg_cfp;
1271
1272 while (escape_cfp < eocfp) {
1273 const VALUE *lep = VM_CF_LEP(escape_cfp);
1274
1275 if (!target_lep) {
1276 target_lep = lep;
1277 }
1278
1279 if (lep == target_lep &&
1280 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1281 escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1282 in_class_frame = 1;
1283 target_lep = 0;
1284 }
1285
1286 if (lep == target_lep) {
1287 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1288 toplevel = 0;
1289 if (in_class_frame) {
1290 /* lambda {class A; ... return ...; end} */
1291 goto valid_return;
1292 }
1293 else {
1294 const VALUE *tep = current_ep;
1295
1296 while (target_lep != tep) {
1297 if (escape_cfp->ep == tep) {
1298 /* in lambda */
1299 goto valid_return;
1300 }
1301 tep = VM_ENV_PREV_EP(tep);
1302 }
1303 }
1304 }
1305 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1306 switch (escape_cfp->iseq->body->type) {
1307 case ISEQ_TYPE_TOP:
1308 case ISEQ_TYPE_MAIN:
1309 if (toplevel) {
1310 if (in_class_frame) goto unexpected_return;
1311 goto valid_return;
1312 }
1313 break;
1314 case ISEQ_TYPE_EVAL:
1315 case ISEQ_TYPE_CLASS:
1316 toplevel = 0;
1317 break;
1318 default:
1319 break;
1320 }
1321 }
1322 }
1323
1324 if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1325 goto valid_return;
1326 }
1327
1328 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1329 }
1330 unexpected_return:;
1331 rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
1332
1333 valid_return:;
1334 /* do nothing */
1335 }
1336 else {
1337 rb_bug("isns(throw): unsupported throw type");
1338 }
1339
1340 ec->tag->state = state;
1341 return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1342}
1343
1344static VALUE
1345vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1346 rb_num_t throw_state, VALUE throwobj)
1347{
1348 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1349 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1350
1351 if (state != 0) {
1352 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1353 }
1354 else {
1355 return vm_throw_continue(ec, throwobj);
1356 }
1357}
1358
1359static inline void
1360vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
1361{
1362 int is_splat = flag & 0x01;
1363 rb_num_t space_size = num + is_splat;
1364 VALUE *base = sp - 1;
1365 const VALUE *ptr;
1366 rb_num_t len;
1367 const VALUE obj = ary;
1368
1369 if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
1370 ary = obj;
1371 ptr = &ary;
1372 len = 1;
1373 }
1374 else {
1375 ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
1376 len = (rb_num_t)RARRAY_LEN(ary);
1377 }
1378
1379 if (space_size == 0) {
1380 /* no space left on stack */
1381 }
1382 else if (flag & 0x02) {
1383 /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
1384 rb_num_t i = 0, j;
1385
1386 if (len < num) {
1387 for (i=0; i<num-len; i++) {
1388 *base++ = Qnil;
1389 }
1390 }
1391 for (j=0; i<num; i++, j++) {
1392 VALUE v = ptr[len - j - 1];
1393 *base++ = v;
1394 }
1395 if (is_splat) {
1396 *base = rb_ary_new4(len - j, ptr);
1397 }
1398 }
1399 else {
1400 /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
1401 rb_num_t i;
1402 VALUE *bptr = &base[space_size - 1];
1403
1404 for (i=0; i<num; i++) {
1405 if (len <= i) {
1406 for (; i<num; i++) {
1407 *bptr-- = Qnil;
1408 }
1409 break;
1410 }
1411 *bptr-- = ptr[i];
1412 }
1413 if (is_splat) {
1414 if (num > len) {
1415 *bptr = rb_ary_new();
1416 }
1417 else {
1418 *bptr = rb_ary_new4(len - num, ptr + num);
1419 }
1420 }
1421 }
1422 RB_GC_GUARD(ary);
1423}
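A sketch of how multiple assignment reaches the branches above (assuming the usual compilation of masgn):
/* a, b = ary     : num = 2, flag = 0    -> "normal" branch, padded with Qnil when ary is short      */
/* a, *rest = ary : num = 1, flag = 0x01 -> the splat slot receives rb_ary_new4() with the leftovers */
/* flag & 0x02    : "post" layout, targets that follow a splat are filled from the array's tail      */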
1424
1425static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd);
1426
1427#ifdef __has_attribute
1428#if __has_attribute(artificial)
1429__attribute__((__artificial__))
1430#endif
1431#endif
1432static inline vm_call_handler
1433calccall(const struct rb_call_data *cd, const rb_callable_method_entry_t *me)
1434{
1435 const struct rb_call_info *ci = &cd->ci;
1436 const struct rb_call_cache *cc = &cd->cc;
1437
1438 if (UNLIKELY(!me)) {
1439 RB_DEBUG_COUNTER_INC(mc_miss_by_nome);
1440 return vm_call_general; /* vm_call_method_nome() situation */
1441 }
1442 else if (LIKELY(cc->me != me)) {
1443 RB_DEBUG_COUNTER_INC(mc_miss_by_distinct);
1444 return vm_call_general; /* normal cases */
1445 }
1446 else if (UNLIKELY(cc->method_serial != me->def->method_serial)) {
1447 RB_DEBUG_COUNTER_INC(mc_miss_by_refine);
1448 return vm_call_general; /* cc->me was refined elsewhere */
1449 }
1450 /* "Calling a formerly-public method, which is now privatised, with an
1451 * explicit receiver" is the only situation we have to check here. A
1452 * formerly-private method now publicised is an absolutely safe thing.
1453 * Calling a private method without specifying a receiver is also safe. */
1454 else if ((METHOD_ENTRY_VISI(cc->me) != METHOD_VISI_PUBLIC) &&
1455 !(ci->flag & VM_CALL_FCALL)) {
1456 RB_DEBUG_COUNTER_INC(mc_miss_by_visi);
1457 return vm_call_general;
1458 }
1459 else {
1460 RB_DEBUG_COUNTER_INC(mc_miss_spurious);
1461 (void)RB_DEBUG_COUNTER_INC_IF(mc_miss_reuse_call, cc->call != vm_call_general);
1462 return cc->call;
1463 }
1464}
1465
1466MJIT_FUNC_EXPORTED void
1467rb_vm_search_method_slowpath(struct rb_call_data *cd, VALUE klass)
1468{
1469 const struct rb_call_info *ci = &cd->ci;
1470 struct rb_call_cache *cc = &cd->cc;
1471 const rb_callable_method_entry_t *me =
1472 rb_callable_method_entry(klass, ci->mid);
1473 const vm_call_handler call = calccall(cd, me);
1474 struct rb_call_cache buf = {
1475 GET_GLOBAL_METHOD_STATE(),
1476 { RCLASS_SERIAL(klass) },
1477 me,
1478 me ? me->def->method_serial : 0,
1479 call,
1480 };
1481 if (call != vm_call_general) {
1482 for (int i = 0; i < numberof(cc->class_serial) - 1; i++) {
1483 buf.class_serial[i + 1] = cc->class_serial[i];
1484 }
1485 }
1486 MEMCPY(cc, &buf, struct rb_call_cache, 1);
1487 VM_ASSERT(callable_method_entry_p(cc->me));
1488}
1489
1490/* # Description of what `vm_cache_check_for_class_serial()` is doing #########
1491 *
1492 * - Let's assume a `struct rb_call_cache` has its `class_serial` as an array
1493 * of length 3 (typical situation for 64 bit environments):
1494 *
1495 * ```C
1496 * struct rb_call_cache {
1497 * rb_serial_t method_state;
1498 * rb_serial_t class_serial[3];
1499 * rb_callable_method_entry_t *me;
1500 * rb_method_definition_struct *def;
1501 * vm_call_handler call;
1502 * union { ... snip ... } aux;
1503 * };
1504 * ```
1505 *
1506 * - Initially, the `cc->class_serial` array is filled with zeros.
1507 *
1508 * - If the cache mishits, and if that was due to mc_miss_spurious situation,
1509 * `rb_vm_search_method_slowpath()` pushes the newest class serial at the
1510 * leftmost position of the `cc->class_serial`.
1511 *
1512 * ```
1513 * from: +--------------+-----+-----+-----+----+-----+------+-----+
1514 * | method_state | (x) | (y) | (z) | me | def | call | aux |
1515 * +--------------+-----+-----+-----+----+-----+------+-----+
1516 * \ \
1517 * \ \
1518 * \ \
1519 * \ \
1520 * \ \
1521 * v v
1522 * to: +--------------+-----+-----+-----+----+-----+------+-----+
1523 * | method_state | NEW | (x) | (y) | me | def | call | aux |
1524 * +--------------+-----+-----+-----+----+-----+------+-----+
1525 * ^^^
1526 * fill RCLASS_SERIAL(klass)
1527 * ```
1528 *
1529 * - Eventually, the `cc->class_serial` is filled with a series of classes that
1530 * share the same method entry for the same call site.
1531 *
1532 * - `vm_cache_check_for_class_serial()` can say that the cache now hits if
1533 * _any_ of the class serials stored inside of `cc->class_serial` is equal to
1534 * the given `class_serial` value.
1535 *
1536 * - It scans the array from left to right, looking for the expected class
1537 * serial. If it finds that at `cc->class_serial[0]` (this branch
1538 * probability is 98% according to @shyouhei's experiment), just returns
1539 * true. If it reaches the end of the array without finding anything,
1540 * returns false. This is done in the #1 loop below.
1541 *
1542 * - What needs to be complicated is when the class serial is found at either
1543 * `cc->class_serial[1]` or `cc->class_serial[2]`. When that happens, its
1544 * return value is true because `cc->me` and `cc->call` are valid. But
1545 * `cc->aux` might be invalid. Also the found class serial is expected to
1546 * hit next time. In this case we reorder the array and wipe out `cc->aux`.
1547 * This is done in the #2 loop below.
1548 *
1549 * ```
1550 * from: +--------------+-----+-----+-----+----+-----+------+-----+
1551 * | method_state | (x) | (y) | (z) | me | def | call | aux |
1552 * +--------------+-----+-----+-----+----+-----+------+-----+
1553 * \ \ |
1554 * \ \ |
1555 * +- \ --- \ -+
1556 * | \ \
1557 * | \ \
1558 * v v v
1559 * to: +--------------+-----+-----+-----+----+-----+------+-----+
1560 * | method_state | (z) | (x) | (y) | me | def | call | 000 |
1561 * +--------------+-----+-----+-----+----+-----+------+-----+
1562 * ^^^
1563 * wipe out
1564 * ```
1565 *
1566 */
1567static inline bool
1568vm_cache_check_for_class_serial(struct rb_call_cache *cc, rb_serial_t class_serial)
1569{
1570 int i;
1571 rb_serial_t j;
1572
1573 /* This is the loop #1 in above description. */
1574 for (i = 0; i < numberof(cc->class_serial); i++) {
1575 j = cc->class_serial[i];
1576
1577 if (! j) {
1578 break;
1579 }
1580 else if (j != class_serial) {
1581 continue;
1582 }
1583 else if (! i) {
1584 return true;
1585 }
1586 else {
1587 goto hit;
1588 }
1589 }
1590
1591 RB_DEBUG_COUNTER_INC(mc_class_serial_miss);
1592 return false;
1593
1594 hit:
1595 /* This is the loop #2 in above description. */
1596 for (; i > 0; i--) {
1597 cc->class_serial[i] = cc->class_serial[i - 1];
1598 }
1599
1600 cc->class_serial[0] = j;
1601 MEMZERO(&cc->aux, cc->aux, 1); /* cc->call is valid, but cc->aux might not. */
1602 return true;
1603}
1604
1605static void
1606vm_search_method_fastpath(struct rb_call_data *cd, VALUE klass)
1607{
1608 struct rb_call_cache *cc = &cd->cc;
1609
1610#if OPT_INLINE_METHOD_CACHE
1611 if (LIKELY(RB_DEBUG_COUNTER_INC_UNLESS(mc_global_state_miss,
1612 GET_GLOBAL_METHOD_STATE() == cc->method_state) &&
1613 vm_cache_check_for_class_serial(cc, RCLASS_SERIAL(klass)))) {
1614 /* cache hit! */
1615 VM_ASSERT(cc->call != NULL);
1616 RB_DEBUG_COUNTER_INC(mc_inline_hit);
1617 return;
1618 }
1619 RB_DEBUG_COUNTER_INC(mc_inline_miss);
1620#endif
1621 rb_vm_search_method_slowpath(cd, klass);
1622}
1623
1624static void
1625vm_search_method(struct rb_call_data *cd, VALUE recv)
1626{
1627 VALUE klass = CLASS_OF(recv);
1628
1631 vm_search_method_fastpath(cd, klass);
1632}
1633
1634static inline int
1635check_cfunc(const rb_callable_method_entry_t *me, VALUE (*func)())
1636{
1637 if (me && me->def->type == VM_METHOD_TYPE_CFUNC &&
1638 me->def->body.cfunc.func == func) {
1639 return 1;
1640 }
1641 else {
1642 return 0;
1643 }
1644}
1645
1646static inline int
1647vm_method_cfunc_is(CALL_DATA cd, VALUE recv, VALUE (*func)())
1648{
1649 vm_search_method(cd, recv);
1650 return check_cfunc(cd->cc.me, func);
1651}
1652
1653static VALUE
1654opt_equal_fallback(VALUE recv, VALUE obj, CALL_DATA cd)
1655{
1656 if (vm_method_cfunc_is(cd, recv, rb_obj_equal)) {
1657 return recv == obj ? Qtrue : Qfalse;
1658 }
1659
1660 return Qundef;
1661}
1662
1663#define BUILTIN_CLASS_P(x, k) (!SPECIAL_CONST_P(x) && RBASIC_CLASS(x) == k)
1664#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
1665
1666static inline bool
1667FIXNUM_2_P(VALUE a, VALUE b)
1668{
1669 /* FIXNUM_P(a) && FIXNUM_P(b)
1670 * == ((a & 1) && (b & 1))
1671 * == a & b & 1 */
1672 SIGNED_VALUE x = a;
1673 SIGNED_VALUE y = b;
1674 SIGNED_VALUE z = x & y & 1;
1675 return z == 1;
1676}
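A worked example of the tag-bit trick described in the comment above:
/* Fixnums carry tag bit 1: 2 is encoded as 0b101 and 3 as 0b111, so a & b & 1 == 1; */
/* any heap-object VALUE has tag bit 0 and forces the AND down to 0.                 */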
1677
1678static inline bool
1679FLONUM_2_P(VALUE a, VALUE b)
1680{
1681#if USE_FLONUM
1682 /* FLONUM_P(a) && FLONUM_P(b)
1683 * == ((a & 3) == 2) && ((b & 3) == 2)
1684 * == ! ((a ^ 2) | (b ^ 2) & 3)
1685 */
1686 SIGNED_VALUE x = a;
1687 SIGNED_VALUE y = b;
1688 SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
1689 return !z;
1690#else
1691 return false;
1692#endif
1693}
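And the flonum variant of the same idea (tag bits 0b10 when USE_FLONUM is set):
/* (x ^ 2) clears the tag only for a flonum, so ((x ^ 2) | (y ^ 2)) & 3 == 0 */
/* exactly when both operands are flonums.                                   */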
1694
1695/* 1: compare by identity, 0: not applicable, -1: redefined */
1696static inline int
1697comparable_by_identity(VALUE recv, VALUE obj)
1698{
1699 if (FIXNUM_2_P(recv, obj)) {
1700 return (EQ_UNREDEFINED_P(INTEGER) != 0) * 2 - 1;
1701 }
1702 if (FLONUM_2_P(recv, obj)) {
1703 return (EQ_UNREDEFINED_P(FLOAT) != 0) * 2 - 1;
1704 }
1705 if (SYMBOL_P(recv) && SYMBOL_P(obj)) {
1706 return (EQ_UNREDEFINED_P(SYMBOL) != 0) * 2 - 1;
1707 }
1708 return 0;
1709}
1710
1711static
1712#ifndef NO_BIG_INLINE
1713inline
1714#endif
1715VALUE
1716opt_eq_func(VALUE recv, VALUE obj, CALL_DATA cd)
1717{
1718 switch (comparable_by_identity(recv, obj)) {
1719 case 1:
1720 return (recv == obj) ? Qtrue : Qfalse;
1721 case -1:
1722 goto fallback;
1723 }
1724 if (0) {
1725 }
1726 else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1727 if (EQ_UNREDEFINED_P(FLOAT)) {
1728 return rb_float_equal(recv, obj);
1729 }
1730 }
1731 else if (BUILTIN_CLASS_P(recv, rb_cString) && EQ_UNREDEFINED_P(STRING)) {
1732 if (recv == obj) return Qtrue;
1733 if (RB_TYPE_P(obj, T_STRING)) {
1734 return rb_str_eql_internal(recv, obj);
1735 }
1736 }
1737
1738 fallback:
1739 return opt_equal_fallback(recv, obj, cd);
1740}
1741
1742static
1743#ifndef NO_BIG_INLINE
1744inline
1745#endif
1746VALUE
1747opt_eql_func(VALUE recv, VALUE obj, CALL_DATA cd)
1748{
1749 switch (comparable_by_identity(recv, obj)) {
1750 case 1:
1751 return (recv == obj) ? Qtrue : Qfalse;
1752 case -1:
1753 goto fallback;
1754 }
1755 if (0) {
1756 }
1757 else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1758 if (EQ_UNREDEFINED_P(FLOAT)) {
1759 return rb_float_eql(recv, obj);
1760 }
1761 }
1762 else if (BUILTIN_CLASS_P(recv, rb_cString)) {
1763 if (EQ_UNREDEFINED_P(STRING)) {
1764 return rb_str_eql(recv, obj);
1765 }
1766 }
1767
1768 fallback:
1769 return opt_equal_fallback(recv, obj, cd);
1770}
1771#undef BUILTIN_CLASS_P
1772#undef EQ_UNREDEFINED_P
1773
1774VALUE
1775rb_equal_opt(VALUE obj1, VALUE obj2)
1776{
1777 struct rb_call_data cd = { .ci = { .mid = idEq, }, };
1778
1779 return opt_eq_func(obj1, obj2, &cd);
1780}
1781
1782VALUE
1783rb_eql_opt(VALUE obj1, VALUE obj2)
1784{
1785 struct rb_call_data cd = { .ci = { .mid = idEqlP, }, };
1786
1787 return opt_eql_func(obj1, obj2, &cd);
1788}
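Both helpers return Qundef when the optimized comparison does not apply; callers (Array#include? and friends, as an assumed example) fall back to the generic protocol:
/* VALUE r = rb_equal_opt(a, b);        */
/* if (r == Qundef) r = rb_equal(a, b); */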
1789
1790extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
1791
1792static VALUE
1793check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
1794{
1795 switch (type) {
1796 case VM_CHECKMATCH_TYPE_WHEN:
1797 return pattern;
1798 case VM_CHECKMATCH_TYPE_RESCUE:
1799 if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
1800 rb_raise(rb_eTypeError, "class or module required for rescue clause");
1801 }
1802 /* fall through */
1803 case VM_CHECKMATCH_TYPE_CASE: {
1804 const rb_callable_method_entry_t *me =
1805 rb_callable_method_entry_with_refinements(CLASS_OF(pattern), idEqq, NULL);
1806 if (me) {
1807 return rb_vm_call0(ec, pattern, idEqq, 1, &target, me, RB_NO_KEYWORDS);
1808 }
1809 else {
1810 /* fallback to funcall (e.g. method_missing) */
1811 return rb_funcallv(pattern, idEqq, 1, &target);
1812 }
1813 }
1814 default:
1815 rb_bug("check_match: unreachable");
1816 }
1817}
1818
1819
1820#if defined(_MSC_VER) && _MSC_VER < 1300
1821#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
1822#else
1823#define CHECK_CMP_NAN(a, b) /* do nothing */
1824#endif
1825
1826static inline VALUE
1827double_cmp_lt(double a, double b)
1828{
1829 CHECK_CMP_NAN(a, b);
1830 return a < b ? Qtrue : Qfalse;
1831}
1832
1833static inline VALUE
1834double_cmp_le(double a, double b)
1835{
1836 CHECK_CMP_NAN(a, b);
1837 return a <= b ? Qtrue : Qfalse;
1838}
1839
1840static inline VALUE
1841double_cmp_gt(double a, double b)
1842{
1843 CHECK_CMP_NAN(a, b);
1844 return a > b ? Qtrue : Qfalse;
1845}
1846
1847static inline VALUE
1848double_cmp_ge(double a, double b)
1849{
1850 CHECK_CMP_NAN(a, b);
1851 return a >= b ? Qtrue : Qfalse;
1852}
1853
1854static inline VALUE *
1855vm_base_ptr(const rb_control_frame_t *cfp)
1856{
1857#if 0 // we may optimize and use this once we confirm it does not spoil performance on JIT.
1859
1860 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
1862 if (cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1863 /* adjust `self' */
1864 bp += 1;
1865 }
1866#if VM_DEBUG_BP_CHECK
1867 if (bp != cfp->bp_check) {
1868 fprintf(stderr, "bp_check: %ld, bp: %ld\n",
1869 (long)(cfp->bp_check - GET_EC()->vm_stack),
1870 (long)(bp - GET_EC()->vm_stack));
1871 rb_bug("vm_base_ptr: unreachable");
1872 }
1873#endif
1874 return bp;
1875 }
1876 else {
1877 return NULL;
1878 }
1879#else
1880 return cfp->__bp__;
1881#endif
1882}
1883
1884/* method call processes with call_info */
1885
1886#include "vm_args.c"
1887
1888static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd, int opt_pc, int param_size, int local_size);
1890static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd, int opt_pc);
1891static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd);
1892static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd);
1893static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd);
1894static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd);
1895
1896static vm_call_handler vm_call_iseq_setup_func(const struct rb_call_info *ci, const int param_size, const int local_size);
1897
1898static VALUE
1899vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
1900{
1901 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
1902
1903 return vm_call_iseq_setup_tailcall(ec, cfp, calling, cd, 0);
1904}
1905
1906static VALUE
1907vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
1908{
1909 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
1910
1911 struct rb_call_cache *cc = &cd->cc;
1912 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
1913 int param = iseq->body->param.size;
1914 int local = iseq->body->local_table_size;
1915 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, 0, param, local);
1916}
1917
1918MJIT_STATIC bool
1919rb_simple_iseq_p(const rb_iseq_t *iseq)
1920{
1921 return iseq->body->param.flags.has_opt == FALSE &&
1928}
1929
1930static bool
1931rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
1932{
1933 return iseq->body->param.flags.has_opt == TRUE &&
1940}
1941
1942static bool
1943rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
1944{
1945 return iseq->body->param.flags.has_opt == FALSE &&
1948 iseq->body->param.flags.has_kw == TRUE &&
1951}
1952
1953
1954static inline void
1955CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
1956 struct rb_calling_info *restrict calling,
1957 const struct rb_call_info *restrict ci)
1958{
1959 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
1960 VALUE final_hash;
1961 /* This expands the rest argument to the stack.
1962 * So, ci->flag & VM_CALL_ARGS_SPLAT is now inconsistent.
1963 */
1964 vm_caller_setup_arg_splat(cfp, calling);
1965 if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
1966 calling->argc > 0 &&
1967 RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&
1968 (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
1969 *(cfp->sp - 1) = rb_hash_dup(final_hash);
1970 calling->kw_splat = 1;
1971 }
1972 }
1973 if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
1974 /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
1975 * by creating a keyword hash.
1976 * So, ci->flag & VM_CALL_KWARG is now inconsistent.
1977 */
1978 vm_caller_setup_arg_kw(cfp, calling, ci);
1979 }
1980}
1981
1982static inline void
1983CALLER_REMOVE_EMPTY_KW_SPLAT(struct rb_control_frame_struct *restrict cfp,
1984 struct rb_calling_info *restrict calling,
1985 const struct rb_call_info *restrict ci)
1986{
1987 if (UNLIKELY(calling->kw_splat)) {
1988 /* This removes the last Hash object if it is empty.
1989 * So, ci->flag & VM_CALL_KW_SPLAT is now inconsistent.
1990 * However, you can use ci->flag & VM_CALL_KW_SPLAT to
1991 * determine whether a hash should be added back with
1992 * warning (for backwards compatibility in cases where
1993 * the method does not have the number of required
1994 * arguments).
1995 */
1996 if (RHASH_EMPTY_P(cfp->sp[-1])) {
1997 cfp->sp--;
1998 calling->argc--;
1999 calling->kw_splat = 0;
2000 }
2001 }
2002}
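Why the empty hash is dropped, as a sketch:
/* With `def foo(a); end` and opts = {}, a call like foo(*args, **opts) pushes an empty */
/* Hash for the kw splat; removing it here lets foo receive exactly one positional      */
/* argument instead of raising, modulo the ci->flag based compatibility warning above.  */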
2003
2004#define USE_OPT_HIST 0
2005
2006#if USE_OPT_HIST
2007#define OPT_HIST_MAX 64
2008static int opt_hist[OPT_HIST_MAX+1];
2009
2010__attribute__((destructor))
2011static void
2012opt_hist_show_results_at_exit(void)
2013{
2014 for (int i=0; i<OPT_HIST_MAX; i++) {
2015 fprintf(stderr, "opt_hist\t%d\t%d\n", i, opt_hist[i]);
2016 }
2017}
2018#endif
2019
2020static VALUE
2021vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2022 struct rb_calling_info *calling,
2023 struct rb_call_data *cd)
2024{
2025 const struct rb_call_cache *cc = &cd->cc;
2026 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
2027 const int lead_num = iseq->body->param.lead_num;
2028 const int opt = calling->argc - lead_num;
2029 const int opt_num = iseq->body->param.opt_num;
2030 const int opt_pc = (int)iseq->body->param.opt_table[opt];
2031 const int param = iseq->body->param.size;
2032 const int local = iseq->body->local_table_size;
2033 const int delta = opt_num - opt;
2034
2035 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2036
2037#if USE_OPT_HIST
2038 if (opt_pc < OPT_HIST_MAX) {
2039 opt_hist[opt]++;
2040 }
2041 else {
2042 opt_hist[OPT_HIST_MAX]++;
2043 }
2044#endif
2045
2046 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, opt_pc, param - delta, local);
2047}
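A sketch of the opt_table arithmetic used above, with an assumed method shape:
/* def m(a, b = 1, c = 2): lead_num == 1, opt_num == 2.                                 */
/* m(10, 20) gives argc == 2, opt == 1, so opt_table[1] is the entry pc that runs only  */
/* the initializer for `c`; delta == 1 trims the unsupplied slot from param_size.       */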
2048
2049static VALUE
2050vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2051 struct rb_calling_info *calling,
2052 struct rb_call_data *cd)
2053{
2054 const struct rb_call_cache *cc = &cd->cc;
2055 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
2056 const int lead_num = iseq->body->param.lead_num;
2057 const int opt = calling->argc - lead_num;
2058 const int opt_pc = (int)iseq->body->param.opt_table[opt];
2059
2060 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2061
2062#if USE_OPT_HIST
2063 if (opt_pc < OPT_HIST_MAX) {
2064 opt_hist[opt]++;
2065 }
2066 else {
2067 opt_hist[OPT_HIST_MAX]++;
2068 }
2069#endif
2070
2071 return vm_call_iseq_setup_tailcall(ec, cfp, calling, cd, opt_pc);
2072}
2073
2074static void
2075args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2076 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2077 VALUE *const locals);
2078
2079static VALUE
2080vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2081 struct rb_calling_info *calling,
2082 struct rb_call_data *cd)
2083{
2084 const struct rb_kwarg_call_data *kcd = (void *)cd;
2085 const struct rb_call_info_with_kwarg *ci_kw = &kcd->ci_kw;
2086 const struct rb_call_cache *cc = &kcd->cc;
2087
2088 VM_ASSERT(ci_kw->ci.flag & VM_CALL_KWARG);
2089 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2090
2091 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
2092 const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;
2093 const struct rb_call_info_kw_arg *kw_arg = ci_kw->kw_arg;
2094 const int ci_kw_len = kw_arg->keyword_len;
2095 const VALUE * const ci_keywords = kw_arg->keywords;
2096 VALUE *argv = cfp->sp - calling->argc;
2097 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
2098 const int lead_num = iseq->body->param.lead_num;
2099 VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
2100 MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
2101 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2102
2103 int param = iseq->body->param.size;
2104 int local = iseq->body->local_table_size;
2105 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, 0, param, local);
2106}
2107
2108static VALUE
2109vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2110 struct rb_calling_info *calling,
2111 struct rb_call_data *cd)
2112{
2113 const struct rb_call_info *MAYBE_UNUSED(ci) = &cd->ci;
2114 const struct rb_call_cache *cc = &cd->cc;
2115
2116 VM_ASSERT((ci->flag & VM_CALL_KWARG) == 0);
2117 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2118
2119 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
2120 const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;
2121 VALUE * const argv = cfp->sp - calling->argc;
2122 VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;
2123
2124 int i;
2125 for (i=0; i<kw_param->num; i++) {
2126 klocals[i] = kw_param->default_values[i];
2127 }
2128 klocals[i] = INT2FIX(0); // kw specify flag
2129 // NOTE:
2130 // nobody checks this value, but it should be cleared because it can
2131 // point to an invalid VALUE (T_NONE objects, raw pointers and so on).
2132
2133 int param = iseq->body->param.size;
2134 int local = iseq->body->local_table_size;
2135 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, 0, param, local);
2136}
2137
2138static inline int
2139vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, struct rb_call_data *cd,
2140 const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
2141{
2142 const struct rb_call_info *ci = &cd->ci;
2143 struct rb_call_cache *cc = &cd->cc;
2144
2145 if (LIKELY(!(ci->flag & VM_CALL_KW_SPLAT))) {
2146 if (LIKELY(rb_simple_iseq_p(iseq))) {
2147 rb_control_frame_t *cfp = ec->cfp;
2148 CALLER_SETUP_ARG(cfp, calling, ci);
2149 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2150
2151 if (calling->argc != iseq->body->param.lead_num) {
2152 argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
2153 }
2154
2155 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), vm_call_iseq_optimizable_p(&cd->ci, &cd->cc));
2156 return 0;
2157 }
2158 else if (rb_iseq_only_optparam_p(iseq)) {
2159 rb_control_frame_t *cfp = ec->cfp;
2160 CALLER_SETUP_ARG(cfp, calling, ci);
2161 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2162
2163 const int lead_num = iseq->body->param.lead_num;
2164 const int opt_num = iseq->body->param.opt_num;
2165 const int argc = calling->argc;
2166 const int opt = argc - lead_num;
2167
2168 if (opt < 0 || opt > opt_num) {
2169 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2170 }
2171
2172 if (LIKELY(!(ci->flag & VM_CALL_TAILCALL))) {
2173 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2176 }
2177 else {
2178 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
2181 }
2182
2183 /* initialize opt vars for self-references */
2184 VM_ASSERT((int)iseq->body->param.size == lead_num + opt_num);
2185 for (int i=argc; i<lead_num + opt_num; i++) {
2186 argv[i] = Qnil;
2187 }
2188 return (int)iseq->body->param.opt_table[opt];
2189 }
2190 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
2191 const int lead_num = iseq->body->param.lead_num;
2192 const int argc = calling->argc;
2193 const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;
2194
2195 if (ci->flag & VM_CALL_KWARG) {
2196 const struct rb_call_info_kw_arg *kw_arg = ((struct rb_call_info_with_kwarg *)ci)->kw_arg;
2197
2198 if (argc - kw_arg->keyword_len == lead_num) {
2199 const int ci_kw_len = kw_arg->keyword_len;
2200 const VALUE * const ci_keywords = kw_arg->keywords;
2201 VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
2202 MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
2203
2204 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
2205 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2206
2207 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
2209
2210 return 0;
2211 }
2212 }
2213 else if (argc == lead_num) {
2214 /* no kwarg */
2215 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
2216 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
2217
2218 if (klocals[kw_param->num] == INT2FIX(0)) {
2219 /* copy from default_values */
2220 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
2222 }
2223
2224 return 0;
2225 }
2226 }
2227 }
2228
2229 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
2230}
2231
2232static VALUE
2233vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2234{
2235 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2236
2237 const struct rb_call_cache *cc = &cd->cc;
2238 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
2239 const int param_size = iseq->body->param.size;
2240 const int local_size = iseq->body->local_table_size;
2241 const int opt_pc = vm_callee_setup_arg(ec, calling, cd, def_iseq_ptr(cc->me->def), cfp->sp - calling->argc, param_size, local_size);
2242 return vm_call_iseq_setup_2(ec, cfp, calling, cd, opt_pc, param_size, local_size);
2243}
2244
2245static inline VALUE
2246vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd,
2247 int opt_pc, int param_size, int local_size)
2248{
2249 const struct rb_call_info *ci = &cd->ci;
2250 const struct rb_call_cache *cc = &cd->cc;
2251
2252 if (LIKELY(!(ci->flag & VM_CALL_TAILCALL))) {
2253 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, opt_pc, param_size, local_size);
2254 }
2255 else {
2256 return vm_call_iseq_setup_tailcall(ec, cfp, calling, cd, opt_pc);
2257 }
2258}
2259
2260static inline VALUE
2261vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
2262 int opt_pc, int param_size, int local_size)
2263{
2264 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2265 VALUE *argv = cfp->sp - calling->argc;
2266 VALUE *sp = argv + param_size;
2267 cfp->sp = argv - 1 /* recv */;
2268
2269 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
2270 calling->block_handler, (VALUE)me,
2271 iseq->body->iseq_encoded + opt_pc, sp,
2272 local_size - param_size,
2273 iseq->body->stack_max);
2274 return Qundef;
2275}
2276
2277static inline VALUE
2278vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd,
2279 int opt_pc)
2280{
2281 const struct rb_call_cache *cc = &cd->cc;
2282 unsigned int i;
2283 VALUE *argv = cfp->sp - calling->argc;
2284 const rb_callable_method_entry_t *me = cc->me;
2285 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2286 VALUE *src_argv = argv;
2287 VALUE *sp_orig, *sp;
2288 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
2289
2290 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
2291 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
2292 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
2293 dst_captured->code.val = src_captured->code.val;
2294 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
2295 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
2296 }
2297 else {
2298 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
2299 }
2300 }
2301
2302 vm_pop_frame(ec, cfp, cfp->ep);
2303 cfp = ec->cfp;
2304
2305 sp_orig = sp = cfp->sp;
2306
2307 /* push self */
2308 sp[0] = calling->recv;
2309 sp++;
2310
2311 /* copy arguments */
2312 for (i=0; i < iseq->body->param.size; i++) {
2313 *sp++ = src_argv[i];
2314 }
2315
2316 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
2317 calling->recv, calling->block_handler, (VALUE)me,
2318 iseq->body->iseq_encoded + opt_pc, sp,
2319 iseq->body->local_table_size - iseq->body->param.size,
2320 iseq->body->stack_max);
2321
2322 cfp->sp = sp_orig;
2323
2324 return Qundef;
2325}
2326
2327static VALUE
2328call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2329{
2330 return (*func)(recv, rb_ary_new4(argc, argv));
2331}
2332
2333static VALUE
2334call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2335{
2336 return (*func)(argc, argv, recv);
2337}
2338
2339static VALUE
2340call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2341{
2342 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
2343 return (*f)(recv);
2344}
2345static VALUE
2346call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2347{
2348 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
2349 return (*f)(recv, argv[0]);
2350}
2351static VALUE
2352call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2353{
2354 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
2355 return (*f)(recv, argv[0], argv[1]);
2356}
2357static VALUE
2358call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2359{
2360 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
2361 return (*f)(recv, argv[0], argv[1], argv[2]);
2362}
2363static VALUE
2364call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2365{
2366 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
2367 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2368}
2369static VALUE
2370call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2371{
2372 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2373 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2374}
2375static VALUE
2376call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2377{
2378 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2379 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2380}
2381static VALUE
2382call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2383{
2384 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2385 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
2386}
2387static VALUE
2388call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2389{
2390 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2391 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
2392}
2393static VALUE
2394call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2395{
2396 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2397 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
2398}
2399static VALUE
2400call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2401{
2402 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2403 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
2404}
2405static VALUE
2406call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2407{
2408 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2409 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
2410}
2411static VALUE
2412call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2413{
2414 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2415 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
2416}
2417static VALUE
2418call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2419{
2420 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2421 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
2422}
2423static VALUE
2424call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2425{
2426 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2427 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
2428}
2429static VALUE
2430call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2431{
2432 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2433 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
2434}
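Each call_cfunc_N helper recovers the real arity of a registered C method by casting the type-erased function pointer back to a fixed-arity type before calling it; vm_call_cfunc_with_frame later invokes the right helper through cfunc->invoker. A minimal standalone sketch of the same cast-and-dispatch technique (hypothetical names, plain longs instead of VALUE):

/* Standalone sketch (not VM code): arity-based dispatch through a
 * type-erased function pointer that is cast back before the call. */
#include <stdio.h>

typedef long (*anyargs_fn)(void);            /* stand-in for VALUE (*)(ANYARGS) */

static long add1(long recv, long a)          { return recv + a; }
static long add2(long recv, long a, long b)  { return recv + a + b; }

static long call_1(long recv, const long *argv, anyargs_fn func)
{
    long (*f)(long, long) = (long (*)(long, long))func;
    return f(recv, argv[0]);
}

static long call_2(long recv, const long *argv, anyargs_fn func)
{
    long (*f)(long, long, long) = (long (*)(long, long, long))func;
    return f(recv, argv[0], argv[1]);
}

int main(void)
{
    const long argv[2] = {1, 2};
    printf("%ld\n", call_1(100, argv, (anyargs_fn)add1)); /* 101 */
    printf("%ld\n", call_2(100, argv, (anyargs_fn)add2)); /* 103 */
    return 0;
}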
2435
2436static inline int
2437vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
2438{
2439 const int ov_flags = RAISED_STACKOVERFLOW;
2440 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
2441 if (rb_ec_raised_p(ec, ov_flags)) {
2442 rb_ec_raised_reset(ec, ov_flags);
2443 return TRUE;
2444 }
2445 return FALSE;
2446}
2447
2448#define CHECK_CFP_CONSISTENCY(func) \
2449 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
2450 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
2451
2452static inline
2453const rb_method_cfunc_t *
2454vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
2455{
2456#if VM_DEBUG_VERIFY_METHOD_CACHE
2457 switch (me->def->type) {
2460 break;
2461# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
2462 METHOD_BUG(ISEQ);
2463 METHOD_BUG(ATTRSET);
2464 METHOD_BUG(IVAR);
2465 METHOD_BUG(BMETHOD);
2466 METHOD_BUG(ZSUPER);
2467 METHOD_BUG(UNDEF);
2468 METHOD_BUG(OPTIMIZED);
2469 METHOD_BUG(MISSING);
2470 METHOD_BUG(REFINED);
2471 METHOD_BUG(ALIAS);
2472# undef METHOD_BUG
2473 default:
2474 rb_bug("wrong method type: %d", me->def->type);
2475 }
2476#endif
2477 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
2478}
2479
2480/* -- Remove empty_kw_splat In 3.0 -- */
2481static VALUE
2482vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd, int empty_kw_splat)
2483{
2484 const struct rb_call_info *ci = &cd->ci;
2485 const struct rb_call_cache *cc = &cd->cc;
2486 VALUE val;
2487 const rb_callable_method_entry_t *me = cc->me;
2488 const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
2489 int len = cfunc->argc;
2490
2491 VALUE recv = calling->recv;
2492 VALUE block_handler = calling->block_handler;
2493 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
2494 int argc = calling->argc;
2495 int orig_argc = argc;
2496
2497 if (UNLIKELY(calling->kw_splat)) {
2498 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
2499 }
2500 else if (UNLIKELY(empty_kw_splat)) {
2501 frame_type |= VM_FRAME_FLAG_CFRAME_EMPTY_KW;
2502 }
2503
2506
2507 vm_push_frame(ec, NULL, frame_type, recv,
2508 block_handler, (VALUE)me,
2509 0, ec->cfp->sp, 0, 0);
2510
2511 if (len >= 0) rb_check_arity(argc, len, len);
2512
2513 reg_cfp->sp -= orig_argc + 1;
2514 val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);
2515
2516 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
2517
2518 rb_vm_pop_frame(ec);
2519
2520 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, ci->mid, me->owner, val);
2522
2523 return val;
2524}
2525
2526static VALUE
2527vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2528{
2529 const struct rb_call_info *ci = &cd->ci;
2530 int empty_kw_splat;
2531 RB_DEBUG_COUNTER_INC(ccf_cfunc);
2532
2533 CALLER_SETUP_ARG(reg_cfp, calling, ci);
2534 empty_kw_splat = calling->kw_splat;
2535 CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
2536 if (empty_kw_splat && calling->kw_splat) {
2537 empty_kw_splat = 0;
2538 }
2539 return vm_call_cfunc_with_frame(ec, reg_cfp, calling, cd, empty_kw_splat);
2540}
2541
2542static VALUE
2543vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2544{
2545 struct rb_call_cache *cc = &cd->cc;
2546 RB_DEBUG_COUNTER_INC(ccf_ivar);
2547 cfp->sp -= 1;
2548 return vm_getivar(calling->recv, cc->me->def->body.attr.id, NULL, cc, TRUE);
2549}
2550
2551static VALUE
2552vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2553{
2554 struct rb_call_cache *cc = &cd->cc;
2555 RB_DEBUG_COUNTER_INC(ccf_attrset);
2556 VALUE val = *(cfp->sp - 1);
2557 cfp->sp -= 2;
2558 return vm_setivar(calling->recv, cc->me->def->body.attr.id, val, NULL, cc, 1);
2559}
2560
2561static inline VALUE
2562vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, struct rb_call_data *cd, const VALUE *argv)
2563{
2564 rb_proc_t *proc;
2565 VALUE val;
2566 const struct rb_call_cache *cc = &cd->cc;
2567
2568 /* control block frame */
2569 GetProcPtr(cc->me->def->body.bmethod.proc, proc);
2570 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, cc->me);
2571
2572 return val;
2573}
2574
2575static VALUE
2576vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2577{
2578 RB_DEBUG_COUNTER_INC(ccf_bmethod);
2579
2580 VALUE *argv;
2581 int argc;
2582 const struct rb_call_info *ci = &cd->ci;
2583
2584 CALLER_SETUP_ARG(cfp, calling, ci);
2585 argc = calling->argc;
2586 argv = ALLOCA_N(VALUE, argc);
2587 MEMCPY(argv, cfp->sp - argc, VALUE, argc);
2588 cfp->sp += - argc - 1;
2589
2590 return vm_call_bmethod_body(ec, calling, cd, argv);
2591}
2592
2593static enum method_missing_reason
2594ci_missing_reason(const struct rb_call_info *ci)
2595{
2596 enum method_missing_reason stat = MISSING_NOENTRY;
2597 if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;
2598 if (ci->flag & VM_CALL_FCALL) stat |= MISSING_FCALL;
2599 if (ci->flag & VM_CALL_SUPER) stat |= MISSING_SUPER;
2600 return stat;
2601}
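ci_missing_reason condenses the call-site flags into an OR-ed bitmask that later tells method_missing why the original lookup failed. A standalone sketch of the same flag-accumulation pattern (the flag values below are made up for illustration):

/* Standalone sketch (not VM code): building a "missing reason" bitmask
 * from call-site flags, in the style of ci_missing_reason. */
#include <stdio.h>

enum { MISS_NOENTRY = 0x01, MISS_VCALL = 0x02, MISS_FCALL = 0x04, MISS_SUPER = 0x08 };
enum { CALL_VCALL   = 0x10, CALL_FCALL = 0x20, CALL_SUPER  = 0x40 };

static unsigned int missing_reason(unsigned int ci_flag)
{
    unsigned int stat = MISS_NOENTRY;
    if (ci_flag & CALL_VCALL) stat |= MISS_VCALL;
    if (ci_flag & CALL_FCALL) stat |= MISS_FCALL;
    if (ci_flag & CALL_SUPER) stat |= MISS_SUPER;
    return stat;
}

int main(void)
{
    printf("0x%x\n", missing_reason(CALL_VCALL | CALL_FCALL)); /* 0x7 */
    return 0;
}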
2602
2603static VALUE
2604vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *orig_cd)
2605{
2606 RB_DEBUG_COUNTER_INC(ccf_opt_send);
2607
2608 int i;
2609 VALUE sym;
2610 const struct rb_call_info *orig_ci = &orig_cd->ci;
2611 const struct rb_call_cache *orig_cc = &orig_cd->cc;
2612 struct rb_call_info *ci;
2613 struct rb_call_cache *cc;
2614 struct rb_kwarg_call_data cd;
2615
2616 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
2617
2618 i = calling->argc - 1;
2619
2620 if (calling->argc == 0) {
2621 rb_raise(rb_eArgError, "no method name given");
2622 }
2623
2624 /* setup new ci */
2625 if (orig_ci->flag & VM_CALL_KWARG) {
2626 const struct rb_kwarg_call_data *orig_kcd = (void *)orig_cd;
2627 cd = *orig_kcd;
2628 }
2629 else {
2630 cd.ci_kw.ci = *orig_ci;
2631 cd.cc = *orig_cc;
2632 }
2633 ci = &cd.ci_kw.ci;
2634 cc = &cd.cc;
2635
2636 sym = TOPN(i);
2637
2638 if (!(ci->mid = rb_check_id(&sym))) {
2640 VALUE exc =
2642 rb_long2int(calling->argc), &TOPN(i),
2643 ci->flag & (VM_CALL_FCALL|VM_CALL_VCALL));
2645 }
2646 TOPN(i) = rb_str_intern(sym);
2647 ci->mid = idMethodMissing;
2648 ec->method_missing_reason = cc->aux.method_missing_reason = ci_missing_reason(ci);
2649 }
2650 else {
2651 /* shift arguments */
2652 if (i > 0) {
2653 MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
2654 }
2655 calling->argc -= 1;
2656 DEC_SP(1);
2657 }
2658
2660 ci->flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
2661 return vm_call_method(ec, reg_cfp, calling, (CALL_DATA)&cd);
2662}
2663
2664static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler);
2665
2667 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
2668 struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler));
2669
2670static VALUE
2671vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
2672 struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler)
2673{
2674 int argc = calling->argc;
2675
2676 /* remove self */
2677 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
2678 DEC_SP(1);
2679
2680 return vm_invoke_block(ec, reg_cfp, calling, ci, block_handler);
2681}
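vm_invoke_block_opt_call turns `proc.call(args)` into a plain block invocation by dropping the receiver slot: the arguments are shifted with MEMMOVE and the stack pointer is decremented by one. The standalone sketch below shows the same shift on a flat array; note that the real VM addresses its slots from the top of the stack via TOPN rather than from index 0:

/* Standalone sketch (not VM code): remove the receiver by shifting the
 * arguments over it, then treat the last slot as unused. */
#include <stdio.h>
#include <string.h>

int main(void)
{
    long stack[4] = {111 /* recv */, 1, 2, 3};   /* receiver followed by 3 args */
    int argc = 3;

    memmove(&stack[0], &stack[1], sizeof(long) * argc);  /* args now start at slot 0 */

    for (int i = 0; i < argc; i++) printf("%ld ", stack[i]);   /* 1 2 3 */
    printf("\n");
    return 0;
}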
2682
2683static VALUE
2684vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2685{
2686 RB_DEBUG_COUNTER_INC(ccf_opt_call);
2687
2688 const struct rb_call_info *ci = &cd->ci;
2689 VALUE procval = calling->recv;
2690 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
2691}
2692
2693static VALUE
2694vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2695{
2696 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
2697 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
2698 const struct rb_call_info *ci = &cd->ci;
2699
2701 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
2702 }
2703 else {
2705 vm_search_method(cd, calling->recv);
2706 return vm_call_general(ec, reg_cfp, calling, cd);
2707 }
2708}
2709
2710static VALUE
2711vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *orig_cd)
2712{
2713 RB_DEBUG_COUNTER_INC(ccf_method_missing);
2714
2715 const struct rb_call_info *orig_ci = &orig_cd->ci;
2716 const struct rb_call_cache *orig_cc = &orig_cd->cc;
2718 struct rb_call_data cd = *orig_cd;
2719 unsigned int argc;
2720
2721 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
2722 argc = calling->argc+1;
2723
2724 cd.ci.flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
2725 cd.ci.mid = idMethodMissing;
2726 cd.ci.orig_argc = argc;
2727
2728 cd.cc.me =
2731
2732 calling->argc = argc;
2733
2734 /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
2737 if (argc > 1) {
2738 MEMMOVE(argv+1, argv, VALUE, argc-1);
2739 }
2740 argv[0] = ID2SYM(orig_ci->mid);
2741 INC_SP(1);
2742
2744 return vm_call_method(ec, reg_cfp, calling, &cd);
2745}
2746
2747static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
2748static VALUE
2749vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd, VALUE klass)
2750{
2751 RB_DEBUG_COUNTER_INC(ccf_method_missing);
2752
2753 const struct rb_call_info *ci = &cd->ci;
2754 struct rb_call_cache *cc = &cd->cc;
2756 CC_SET_ME(cc, klass ? rb_callable_method_entry(klass, ci->mid) : NULL);
2757
2758 if (!cc->me) {
2759 return vm_call_method_nome(ec, cfp, calling, cd);
2760 }
2761 if (cc->me->def->type == VM_METHOD_TYPE_REFINED &&
2762 cc->me->def->body.refined.orig_me) {
2763 CC_SET_ME(cc, refined_method_callable_without_refinement(cc->me));
2764 }
2765 return vm_call_method_each_type(ec, cfp, calling, cd);
2766}
2767
2768static inline VALUE
2769find_refinement(VALUE refinements, VALUE klass)
2770{
2771 if (NIL_P(refinements)) {
2772 return Qnil;
2773 }
2774 return rb_hash_lookup(refinements, klass);
2775}
2776
2778static rb_control_frame_t *
2779current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
2780{
2781 rb_control_frame_t *top_cfp = cfp;
2782
2783 if (cfp->iseq && cfp->iseq->body->type == ISEQ_TYPE_BLOCK) {
2784 const rb_iseq_t *local_iseq = cfp->iseq->body->local_iseq;
2785
2786 do {
2787 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
2788 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
2789 /* TODO: orphan block */
2790 return top_cfp;
2791 }
2792 } while (cfp->iseq != local_iseq);
2793 }
2794 return cfp;
2795}
2796
2799{
2801
2802 /* for a prepended Module, start from the origin (cover) class */
2804
2805 while (RTEST(klass)) {
2807 if (owner == target_owner) {
2808 return klass;
2809 }
2811 }
2812
2813 return current_class; /* maybe module function */
2814}
2815
2816static const rb_callable_method_entry_t *
2817aliased_callable_method_entry(const rb_callable_method_entry_t *me)
2818{
2819 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
2820 const rb_callable_method_entry_t *cme;
2821
2822 if (orig_me->defined_class == 0) {
2823 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
2824 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
2825 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
2826
2827 if (me->def->alias_count + me->def->complemented_count == 0) {
2829 }
2830 else {
2833 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
2834 }
2835 }
2836 else {
2837 cme = (const rb_callable_method_entry_t *)orig_me;
2838 }
2839
2840 VM_ASSERT(callable_method_entry_p(cme));
2841 return cme;
2842}
2843
2844static const rb_callable_method_entry_t *
2845refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
2846{
2847 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
2848 const rb_callable_method_entry_t *cme;
2849
2850 if (orig_me->defined_class == 0) {
2851 cme = NULL;
2853 }
2854 else {
2855 cme = (const rb_callable_method_entry_t *)orig_me;
2856 }
2857
2858 VM_ASSERT(callable_method_entry_p(cme));
2859
2860 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
2861 cme = NULL;
2862 }
2863
2864 return cme;
2865}
2866
2867static int
2868search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, ID mid, struct rb_call_cache *cc)
2869{
2870 const rb_cref_t *cref = vm_get_cref(cfp->ep);
2871
2872 for (; cref; cref = CREF_NEXT(cref)) {
2873 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), cc->me->owner);
2874 if (NIL_P(refinement)) continue;
2875
2876 const rb_callable_method_entry_t *const ref_me =
2877 rb_callable_method_entry(refinement, mid);
2878
2879 if (ref_me) {
2880 if (cc->call == vm_call_super_method) {
2881 const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
2883 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
2884 continue;
2885 }
2886 }
2887 if (cc->me->def->type != VM_METHOD_TYPE_REFINED ||
2888 cc->me->def != ref_me->def) {
2889 CC_SET_ME(cc, ref_me);
2890 }
2891 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
2892 return TRUE;
2893 }
2894 }
2895 else {
2896 CC_SET_ME(cc, NULL);
2897 return FALSE;
2898 }
2899 }
2900
2901 if (cc->me->def->body.refined.orig_me) {
2902 CC_SET_ME(cc, refined_method_callable_without_refinement(cc->me));
2903 }
2904 else {
2906 CC_SET_ME(cc, klass ? rb_callable_method_entry(klass, mid) : NULL);
2907 }
2908 return TRUE;
2909}
2910
2911static VALUE
2912vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2913{
2914 const struct rb_call_info *ci = &cd->ci;
2915 struct rb_call_cache *cc = &cd->cc;
2916
2917 switch (cc->me->def->type) {
2918 case VM_METHOD_TYPE_ISEQ:
2919 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
2920 return vm_call_iseq_setup(ec, cfp, calling, cd);
2921
2922 case VM_METHOD_TYPE_NOTIMPLEMENTED:
2923 case VM_METHOD_TYPE_CFUNC:
2924 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
2925 return vm_call_cfunc(ec, cfp, calling, cd);
2926
2927 case VM_METHOD_TYPE_ATTRSET:
2928 CALLER_SETUP_ARG(cfp, calling, ci);
2929 if (calling->argc == 1 && calling->kw_splat && RHASH_EMPTY_P(cfp->sp[-1])) {
2930 rb_warn_keyword_to_last_hash(ec, calling, ci, NULL);
2931 }
2932 else {
2933 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2934 }
2935
2936 rb_check_arity(calling->argc, 1, 1);
2937 cc->aux.index = 0;
2938 CC_SET_FASTPATH(cc, vm_call_attrset, !((ci->flag & VM_CALL_ARGS_SPLAT) || (ci->flag & VM_CALL_KWARG)));
2939 return vm_call_attrset(ec, cfp, calling, cd);
2940
2941 case VM_METHOD_TYPE_IVAR:
2942 CALLER_SETUP_ARG(cfp, calling, ci);
2943 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2944 rb_check_arity(calling->argc, 0, 0);
2945 cc->aux.index = 0;
2946 CC_SET_FASTPATH(cc, vm_call_ivar, !(ci->flag & VM_CALL_ARGS_SPLAT));
2947 return vm_call_ivar(ec, cfp, calling, cd);
2948
2949 case VM_METHOD_TYPE_MISSING:
2950 cc->aux.method_missing_reason = 0;
2951 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
2952 return vm_call_method_missing(ec, cfp, calling, cd);
2953
2954 case VM_METHOD_TYPE_BMETHOD:
2955 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
2956 return vm_call_bmethod(ec, cfp, calling, cd);
2957
2958 case VM_METHOD_TYPE_ALIAS:
2959 CC_SET_ME(cc, aliased_callable_method_entry(cc->me));
2960 VM_ASSERT(cc->me != NULL);
2961 return vm_call_method_each_type(ec, cfp, calling, cd);
2962
2963 case VM_METHOD_TYPE_OPTIMIZED:
2964 switch (cc->me->def->body.optimize_type) {
2965 case OPTIMIZED_METHOD_TYPE_SEND:
2966 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
2967 return vm_call_opt_send(ec, cfp, calling, cd);
2968 case OPTIMIZED_METHOD_TYPE_CALL:
2969 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
2970 return vm_call_opt_call(ec, cfp, calling, cd);
2971 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
2972 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
2973 return vm_call_opt_block_call(ec, cfp, calling, cd);
2974 default:
2975 rb_bug("vm_call_method: unsupported optimized method type (%d)",
2976 cc->me->def->body.optimize_type);
2977 }
2978
2979 case VM_METHOD_TYPE_UNDEF:
2980 break;
2981
2982 case VM_METHOD_TYPE_ZSUPER:
2983 return vm_call_zsuper(ec, cfp, calling, cd, RCLASS_ORIGIN(cc->me->defined_class));
2984
2985 case VM_METHOD_TYPE_REFINED:
2986 if (search_refined_method(ec, cfp, ci->mid, cc))
2987 return vm_call_method(ec, cfp, calling, cd);
2988 else
2989 return vm_call_method_nome(ec, cfp, calling, cd);
2990 }
2991
2992 rb_bug("vm_call_method: unsupported method type (%d)", cc->me->def->type);
2993}
2994
2995NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
2996
2997static VALUE
2998vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
2999{
3000 /* method missing */
3001 const struct rb_call_info *ci = &cd->ci;
3002 struct rb_call_cache *cc = &cd->cc;
3003 const int stat = ci_missing_reason(ci);
3004
3005 if (ci->mid == idMethodMissing) {
3008 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
3009 }
3010 else {
3012 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3013 return vm_call_method_missing(ec, cfp, calling, cd);
3014 }
3015}
3016
3017static inline VALUE
3018vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
3019{
3020 const struct rb_call_info *ci = &cd->ci;
3021 struct rb_call_cache *cc = &cd->cc;
3022
3023 VM_ASSERT(callable_method_entry_p(cc->me));
3024
3025 if (cc->me != NULL) {
3026 switch (METHOD_ENTRY_VISI(cc->me)) {
3027 case METHOD_VISI_PUBLIC: /* likely */
3028 return vm_call_method_each_type(ec, cfp, calling, cd);
3029
3030 case METHOD_VISI_PRIVATE:
3031 if (!(ci->flag & VM_CALL_FCALL)) {
3032 enum method_missing_reason stat = MISSING_PRIVATE;
3033 if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;
3034
3035 cc->aux.method_missing_reason = stat;
3036 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3037 return vm_call_method_missing(ec, cfp, calling, cd);
3038 }
3039 return vm_call_method_each_type(ec, cfp, calling, cd);
3040
3041 case METHOD_VISI_PROTECTED:
3042 if (!(ci->flag & VM_CALL_OPT_SEND)) {
3045 return vm_call_method_missing(ec, cfp, calling, cd);
3046 }
3047 else {
3048 /* caching method info to dummy cc */
3049 VM_ASSERT(cc->me != NULL);
3050 if (ci->flag & VM_CALL_KWARG) {
3051 struct rb_kwarg_call_data *kcd = (void *)cd;
3052 struct rb_kwarg_call_data cd_entry = *kcd;
3053 return vm_call_method_each_type(ec, cfp, calling, (void *)&cd_entry);
3054 }
3055 else {
3056 struct rb_call_data cd_entry = *cd;
3057 return vm_call_method_each_type(ec, cfp, calling, &cd_entry);
3058 }
3059 }
3060 }
3061 return vm_call_method_each_type(ec, cfp, calling, cd);
3062
3063 default:
3064 rb_bug("unreachable");
3065 }
3066 }
3067 else {
3068 return vm_call_method_nome(ec, cfp, calling, cd);
3069 }
3070}
3071
3072static VALUE
3073vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
3074{
3075 RB_DEBUG_COUNTER_INC(ccf_general);
3076 return vm_call_method(ec, reg_cfp, calling, cd);
3077}
3078
3079static VALUE
3080vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, struct rb_call_data *cd)
3081{
3082 RB_DEBUG_COUNTER_INC(ccf_super_method);
3083
3084 /* this check is required to distinguish this function from the others. */
3085 const struct rb_call_cache *cc = &cd->cc;
3086 if (cc->call != vm_call_super_method) rb_bug("bug");
3087 return vm_call_method(ec, reg_cfp, calling, cd);
3088}
3089
3090/* super */
3091
3092static inline VALUE
3093vm_search_normal_superclass(VALUE klass)
3094{
3095 if (BUILTIN_TYPE(klass) == T_ICLASS &&
3097 klass = RBASIC(klass)->klass;
3098 }
3100 return RCLASS_SUPER(klass);
3101}
3102
3103NORETURN(static void vm_super_outside(void));
3104
3105static void
3106vm_super_outside(void)
3107{
3108 rb_raise(rb_eNoMethodError, "super called outside of method");
3109}
3110
3111static void
3112vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
3113{
3114 VALUE current_defined_class, klass;
3115 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
3116 struct rb_call_info *ci = &cd->ci;
3117 struct rb_call_cache *cc = &cd->cc;
3118
3119 if (!me) {
3120 vm_super_outside();
3121 }
3122
3123 current_defined_class = me->defined_class;
3124
3125 if (!NIL_P(RCLASS_REFINED_CLASS(current_defined_class))) {
3126 current_defined_class = RCLASS_REFINED_CLASS(current_defined_class);
3127 }
3128
3129 if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
3130 !FL_TEST(current_defined_class, RMODULE_INCLUDED_INTO_REFINEMENT) &&
3131 !rb_obj_is_kind_of(recv, current_defined_class)) {
3132 VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
3133 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
3134
3135 if (m) { /* not bound UnboundMethod */
3137 "self has wrong type to call super in this context: "
3138 "%"PRIsVALUE" (expected %"PRIsVALUE")",
3139 rb_obj_class(recv), m);
3140 }
3141 }
3142
3143 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (ci->flag & VM_CALL_ZSUPER)) {
3145 "implicit argument passing of super from method defined"
3146 " by define_method() is not supported."
3147 " Specify all arguments explicitly.");
3148 }
3149
3150 ci->mid = me->def->original_id;
3151 klass = vm_search_normal_superclass(me->defined_class);
3152
3153 if (!klass) {
3154 /* bound instance method of module */
3156 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3157 }
3158 else {
3159 /* TODO: use inline cache */
3160 CC_SET_ME(cc, rb_callable_method_entry(klass, ci->mid));
3161 CC_SET_FASTPATH(cc, vm_call_super_method, TRUE);
3162 }
3163}
3164
3165/* yield */
3166
3167static inline int
3168block_proc_is_lambda(const VALUE procval)
3169{
3170 rb_proc_t *proc;
3171
3172 if (procval) {
3173 GetProcPtr(procval, proc);
3174 return proc->is_lambda;
3175 }
3176 else {
3177 return 0;
3178 }
3179}
3180
3181static VALUE
3182vm_yield_with_cfunc(rb_execution_context_t *ec,
3183 const struct rb_captured_block *captured,
3184 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
3186{
3187 int is_lambda = FALSE; /* TODO */
3188 VALUE val, arg, blockarg;
3189 int frame_flag;
3190 const struct vm_ifunc *ifunc = captured->code.ifunc;
3191
3192 if (is_lambda) {
3194 }
3195 else if (argc == 0) {
3196 arg = Qnil;
3197 }
3198 else {
3199 arg = argv[0];
3200 }
3201
3202 blockarg = rb_vm_bh_to_procval(ec, block_handler);
3203
3205 switch (kw_splat) {
3206 case 1:
3207 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
3208 break;
3209 case 2:
3210 frame_flag |= VM_FRAME_FLAG_CFRAME_EMPTY_KW;
3211 break;
3212 }
3213
3214 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
3215 frame_flag,
3216 self,
3217 VM_GUARDED_PREV_EP(captured->ep),
3218 (VALUE)me,
3219 0, ec->cfp->sp, 0, 0);
3220 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
3221 rb_vm_pop_frame(ec);
3222
3223 return val;
3224}
3225
3226static VALUE
3227vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
3228{
3229 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
3230}
3231
3232static inline int
3233vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
3234{
3235 int i;
3236 long len = RARRAY_LEN(ary);
3237
3239
3240 for (i=0; i<len && i<iseq->body->param.lead_num; i++) {
3241 argv[i] = RARRAY_AREF(ary, i);
3242 }
3243
3244 return i;
3245}
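vm_callee_setup_block_arg_arg0_splat implements the block auto-splat: when a block with multiple parameters receives a single array, up to lead_num leading elements are copied into the parameter slots and the new argc is returned. A standalone sketch with plain longs in place of VALUEs:

/* Standalone sketch (not VM code): splat one array argument across a
 * block's leading parameters, returning the resulting argc. */
#include <stdio.h>

static int splat_arg0(long *argv, const long *ary, long ary_len, int lead_num)
{
    int i;
    for (i = 0; i < ary_len && i < lead_num; i++) {
        argv[i] = ary[i];
    }
    return i;   /* new argc */
}

int main(void)
{
    long argv[3] = {0, 0, 0};
    const long ary[2] = {7, 8};            /* block called with one 2-element array */
    int argc = splat_arg0(argv, ary, 2, 3);
    printf("argc=%d argv={%ld, %ld, %ld}\n", argc, argv[0], argv[1], argv[2]);
    return 0;
}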
3246
3247static inline VALUE
3248vm_callee_setup_block_arg_arg0_check(VALUE *argv)
3249{
3250 VALUE ary, arg0 = argv[0];
3251 ary = rb_check_array_type(arg0);
3252#if 0
3253 argv[0] = arg0;
3254#else
3255 VM_ASSERT(argv[0] == arg0);
3256#endif
3257 return ary;
3258}
3259
3260static int
3261vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_call_info *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
3262{
3263 if (rb_simple_iseq_p(iseq)) {
3264 rb_control_frame_t *cfp = ec->cfp;
3265 VALUE arg0;
3266
3267 CALLER_SETUP_ARG(cfp, calling, ci);
3268 if (calling->kw_splat && calling->argc == iseq->body->param.lead_num + iseq->body->param.post_num && RHASH_EMPTY_P(cfp->sp[-1])) {
3269 rb_warn_keyword_to_last_hash(ec, calling, ci, iseq);
3270 }
3271 else {
3272 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3273 }
3274
3276 calling->argc == 1 &&
3279 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
3280 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
3281 }
3282
3283 if (calling->argc != iseq->body->param.lead_num) {
3285 if (calling->argc < iseq->body->param.lead_num) {
3286 int i;
3288 for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] = Qnil;
3289 calling->argc = iseq->body->param.lead_num; /* fill rest parameters */
3290 }
3291 else if (calling->argc > iseq->body->param.lead_num) {
3292 calling->argc = iseq->body->param.lead_num; /* simply truncate arguments */
3293 }
3294 }
3295 else {
3296 argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
3297 }
3298 }
3299
3300 return 0;
3301 }
3302 else {
3303 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
3304 }
3305}
3306
3307static int
3308vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int kw_splat, VALUE block_handler, enum arg_setup_type arg_setup_type)
3309{
3310 struct rb_calling_info calling_entry, *calling;
3311 struct rb_call_info ci_entry, *ci;
3312
3313 calling = &calling_entry;
3314 calling->argc = argc;
3315 calling->block_handler = block_handler;
3316 calling->kw_splat = kw_splat;
3317 calling->recv = Qundef;
3318
3319 ci_entry.flag = kw_splat ? VM_CALL_KW_SPLAT : 0;
3320 ci = &ci_entry;
3321
3322 return vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, arg_setup_type);
3323}
3324
3325/* ruby iseq -> ruby block */
3326
3327static VALUE
3328vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3329 struct rb_calling_info *calling, const struct rb_call_info *ci,
3330 int is_lambda, const struct rb_captured_block *captured)
3331{
3332 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
3333 const int arg_size = iseq->body->param.size;
3334 VALUE * const rsp = GET_SP() - calling->argc;
3335 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
3336
3337 SET_SP(rsp);
3338
3339 vm_push_frame(ec, iseq,
3340 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
3341 captured->self,
3342 VM_GUARDED_PREV_EP(captured->ep), 0,
3344 rsp + arg_size,
3345 iseq->body->local_table_size - arg_size, iseq->body->stack_max);
3346
3347 return Qundef;
3348}
3349
3350static VALUE
3351vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3352 struct rb_calling_info *calling, const struct rb_call_info *ci,
3353 VALUE symbol)
3354{
3355 VALUE val;
3356 int argc;
3357 CALLER_SETUP_ARG(ec->cfp, calling, ci);
3358 argc = calling->argc;
3359 val = vm_yield_with_symbol(ec, symbol, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler);
3360 POPN(argc);
3361 return val;
3362}
3363
3364static VALUE
3365vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3366 struct rb_calling_info *calling, const struct rb_call_info *ci,
3367 const struct rb_captured_block *captured)
3368{
3369 VALUE val;
3370 int argc;
3371 int kw_splat = calling->kw_splat;
3372 CALLER_SETUP_ARG(ec->cfp, calling, ci);
3373 CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
3374 if (kw_splat && !calling->kw_splat) {
3375 kw_splat = 2;
3376 }
3377 else {
3378 kw_splat = calling->kw_splat;
3379 }
3380 argc = calling->argc;
3381 val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), kw_splat, calling->block_handler, NULL);
3382 POPN(argc); /* TODO: should put before C/yield? */
3383 return val;
3384}
3385
3386static VALUE
3387vm_proc_to_block_handler(VALUE procval)
3388{
3389 const struct rb_block *block = vm_proc_block(procval);
3390
3391 switch (vm_block_type(block)) {
3392 case block_type_iseq:
3393 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
3394 case block_type_ifunc:
3395 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
3396 case block_type_symbol:
3397 return VM_BH_FROM_SYMBOL(block->as.symbol);
3398 case block_type_proc:
3399 return VM_BH_FROM_PROC(block->as.proc);
3400 }
3401 VM_UNREACHABLE(vm_yield_with_proc);
3402 return Qundef;
3403}
3404
3405static inline VALUE
3407 struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler)
3408{
3409 int is_lambda = FALSE;
3410
3411 again:
3412 switch (vm_block_handler_type(block_handler)) {
3413 case block_handler_type_iseq:
3414 {
3415 const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
3416 return vm_invoke_iseq_block(ec, reg_cfp, calling, ci, is_lambda, captured);
3417 }
3418 case block_handler_type_ifunc:
3419 {
3420 const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
3421 return vm_invoke_ifunc_block(ec, reg_cfp, calling, ci, captured);
3422 }
3423 case block_handler_type_proc:
3424 is_lambda = block_proc_is_lambda(VM_BH_TO_PROC(block_handler));
3425 block_handler = vm_proc_to_block_handler(VM_BH_TO_PROC(block_handler));
3426 goto again;
3427 case block_handler_type_symbol:
3428 return vm_invoke_symbol_block(ec, reg_cfp, calling, ci, VM_BH_TO_SYMBOL(block_handler));
3429 }
3430 VM_UNREACHABLE(vm_invoke_block: unreachable);
3431 return Qnil;
3432}
3433
3434static VALUE
3435vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
3436{
3437 const rb_execution_context_t *ec = GET_EC();
3439 struct rb_captured_block *captured;
3440
3441 if (cfp == 0) {
3442 rb_bug("vm_make_proc_with_iseq: unreachable");
3443 }
3444
3445 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
3446 captured->code.iseq = blockiseq;
3447
3448 return rb_vm_make_proc(ec, captured, rb_cProc);
3449}
3450
3451static VALUE
3452vm_once_exec(VALUE iseq)
3453{
3454 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
3455 return rb_proc_call_with_block(proc, 0, 0, Qnil);
3456}
3457
3458static VALUE
3459vm_once_clear(VALUE data)
3460{
3461 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
3462 is->once.running_thread = NULL;
3463 return Qnil;
3464}
3465
3468{
3469 TOPN(0) = rb_struct_aref(GET_SELF(), TOPN(0));
3470 return reg_cfp;
3471}
3472
3475{
3476 rb_struct_aset(GET_SELF(), TOPN(0), TOPN(1));
3477 return reg_cfp;
3478}
3479
3480/* defined insn */
3481
3482static enum defined_type
3483check_respond_to_missing(VALUE obj, VALUE v)
3484{
3485 VALUE args[2];
3486 VALUE r;
3487
3488 args[0] = obj; args[1] = Qfalse;
3489 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
3490 if (r != Qundef && RTEST(r)) {
3491 return DEFINED_METHOD;
3492 }
3493 else {
3494 return DEFINED_NOT_DEFINED;
3495 }
3496}
3497
3498static VALUE
3499vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE needstr, VALUE v)
3500{
3501 VALUE klass;
3502 enum defined_type expr_type = DEFINED_NOT_DEFINED;
3503 enum defined_type type = (enum defined_type)op_type;
3504
3505 switch (type) {
3506 case DEFINED_IVAR:
3508 expr_type = DEFINED_IVAR;
3509 }
3510 break;
3511 case DEFINED_IVAR2:
3512 klass = vm_get_cbase(GET_EP());
3513 break;
3514 case DEFINED_GVAR:
3516 expr_type = DEFINED_GVAR;
3517 }
3518 break;
3519 case DEFINED_CVAR: {
3520 const rb_cref_t *cref = vm_get_cref(GET_EP());
3521 klass = vm_get_cvar_base(cref, GET_CFP());
3523 expr_type = DEFINED_CVAR;
3524 }
3525 break;
3526 }
3527 case DEFINED_CONST:
3528 case DEFINED_CONST_FROM: {
3529 bool allow_nil = type == DEFINED_CONST;
3530 klass = v;
3531 if (vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true)) {
3532 expr_type = DEFINED_CONST;
3533 }
3534 break;
3535 }
3536 case DEFINED_FUNC:
3537 klass = CLASS_OF(v);
3538 if (rb_method_boundp(klass, SYM2ID(obj), 0)) {
3539 expr_type = DEFINED_METHOD;
3540 }
3541 else {
3542 expr_type = check_respond_to_missing(obj, v);
3543 }
3544 break;
3545 case DEFINED_METHOD:{
3546 VALUE klass = CLASS_OF(v);
3548
3549 if (me) {
3550 switch (METHOD_ENTRY_VISI(me)) {
3552 break;
3555 break;
3556 }
3557 case METHOD_VISI_PUBLIC:
3558 expr_type = DEFINED_METHOD;
3559 break;
3560 default:
3561 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
3562 }
3563 }
3564 else {
3565 expr_type = check_respond_to_missing(obj, v);
3566 }
3567 break;
3568 }
3569 case DEFINED_YIELD:
3571 expr_type = DEFINED_YIELD;
3572 }
3573 break;
3574 case DEFINED_ZSUPER:
3575 {
3577
3578 if (me) {
3579 VALUE klass = vm_search_normal_superclass(me->defined_class);
3580 ID id = me->def->original_id;
3581
3582 if (rb_method_boundp(klass, id, 0)) {
3583 expr_type = DEFINED_ZSUPER;
3584 }
3585 }
3586 }
3587 break;
3588 case DEFINED_REF:{
3589 if (vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil) {
3590 expr_type = DEFINED_GVAR;
3591 }
3592 break;
3593 }
3594 default:
3595 rb_bug("unimplemented defined? type (VM)");
3596 break;
3597 }
3598
3599 if (expr_type != 0) {
3600 if (needstr != Qfalse) {
3601 return rb_iseq_defined_string(expr_type);
3602 }
3603 else {
3604 return Qtrue;
3605 }
3606 }
3607 else {
3608 return Qnil;
3609 }
3610}
3611
3612static const VALUE *
3613vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
3614{
3615 rb_num_t i;
3616 const VALUE *ep = reg_ep;
3617 for (i = 0; i < lv; i++) {
3618 ep = GET_PREV_EP(ep);
3619 }
3620 return ep;
3621}
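vm_get_ep resolves a variable that lives `lv` lexical scopes out by following the previous-environment link that many times. A standalone sketch using an explicit parent pointer in place of GET_PREV_EP:

/* Standalone sketch (not VM code): walk up `lv` levels of enclosing
 * environments through a parent link, like vm_get_ep. */
#include <stdio.h>

struct env { int level; const struct env *prev; };

static const struct env *get_ep(const struct env *ep, unsigned long lv)
{
    unsigned long i;
    for (i = 0; i < lv; i++) {
        ep = ep->prev;
    }
    return ep;
}

int main(void)
{
    struct env outer = {0, NULL}, middle = {1, &outer}, inner = {2, &middle};
    printf("%d\n", get_ep(&inner, 2)->level);   /* 0: two scopes up */
    return 0;
}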
3622
3623static VALUE
3624vm_get_special_object(const VALUE *const reg_ep,
3626{
3627 switch (type) {
3628 case VM_SPECIAL_OBJECT_VMCORE:
3629 return rb_mRubyVMFrozenCore;
3630 case VM_SPECIAL_OBJECT_CBASE:
3631 return vm_get_cbase(reg_ep);
3632 case VM_SPECIAL_OBJECT_CONST_BASE:
3633 return vm_get_const_base(reg_ep);
3634 default:
3635 rb_bug("putspecialobject insn: unknown value_type %d", type);
3636 }
3637}
3638
3639static void
3640vm_freezestring(VALUE str, VALUE debug)
3641{
3642 if (!NIL_P(debug)) {
3644 }
3645 rb_str_freeze(str);
3646}
3647
3648static VALUE
3649vm_concat_array(VALUE ary1, VALUE ary2st)
3650{
3651 const VALUE ary2 = ary2st;
3652 VALUE tmp1 = rb_check_to_array(ary1);
3653 VALUE tmp2 = rb_check_to_array(ary2);
3654
3655 if (NIL_P(tmp1)) {
3656 tmp1 = rb_ary_new3(1, ary1);
3657 }
3658
3659 if (NIL_P(tmp2)) {
3660 tmp2 = rb_ary_new3(1, ary2);
3661 }
3662
3663 if (tmp1 == ary1) {
3664 tmp1 = rb_ary_dup(ary1);
3665 }
3666 return rb_ary_concat(tmp1, tmp2);
3667}
3668
3669static VALUE
3670vm_splat_array(VALUE flag, VALUE ary)
3671{
3672 VALUE tmp = rb_check_to_array(ary);
3673 if (NIL_P(tmp)) {
3674 return rb_ary_new3(1, ary);
3675 }
3676 else if (RTEST(flag)) {
3677 return rb_ary_dup(tmp);
3678 }
3679 else {
3680 return tmp;
3681 }
3682}
3683
3684static VALUE
3685vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
3686{
3688
3689 if (flag & VM_CHECKMATCH_ARRAY) {
3690 long i;
3691 const long n = RARRAY_LEN(pattern);
3692
3693 for (i = 0; i < n; i++) {
3694 VALUE v = RARRAY_AREF(pattern, i);
3695 VALUE c = check_match(ec, v, target, type);
3696
3697 if (RTEST(c)) {
3698 return c;
3699 }
3700 }
3701 return Qfalse;
3702 }
3703 else {
3704 return check_match(ec, pattern, target, type);
3705 }
3706}
3707
3708static VALUE
3709vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
3710{
3711 const VALUE kw_bits = *(ep - bits);
3712
3713 if (FIXNUM_P(kw_bits)) {
3714 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
3715 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
3716 return Qfalse;
3717 }
3718 else {
3719 VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
3720 if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
3721 }
3722 return Qtrue;
3723}
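vm_check_keyword returns Qtrue when the keyword at `idx` was *not* supplied, i.e. when the default value should be used; for small keyword counts the "which keywords were passed" record is a Fixnum bitmap and the test is a single bit probe. A standalone sketch of that bitmap test:

/* Standalone sketch (not VM code): test one bit of a keyword-supplied
 * bitmap; true means "not given, use the default". */
#include <stdio.h>
#include <stdbool.h>

static bool keyword_unspecified_p(unsigned long kw_bits, unsigned int idx)
{
    return (kw_bits & (1UL << idx)) == 0;
}

int main(void)
{
    unsigned long bits = 0;
    bits |= 1UL << 0;                                  /* caller passed keyword #0 */
    printf("%d\n", keyword_unspecified_p(bits, 0));    /* 0: was given   */
    printf("%d\n", keyword_unspecified_p(bits, 1));    /* 1: use default */
    return 0;
}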
3724
3725static void
3726vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
3727{
3732
3733 switch (flag) {
3734 case RUBY_EVENT_CALL:
3736 return;
3737 case RUBY_EVENT_C_CALL:
3739 return;
3740 case RUBY_EVENT_RETURN:
3742 return;
3745 return;
3746 }
3747 }
3748}
3749
3750static VALUE
3751vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
3752{
3753 VALUE ns;
3754
3755 if ((ns = vm_search_const_defined_class(cbase, id)) == 0) {
3756 return ns;
3757 }
3758 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
3759 return rb_public_const_get_at(ns, id);
3760 }
3761 else {
3762 return rb_const_get_at(ns, id);
3763 }
3764}
3765
3766static VALUE
3767vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
3768{
3769 if (!RB_TYPE_P(klass, T_CLASS)) {
3770 return 0;
3771 }
3772 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
3773 VALUE tmp = rb_class_real(RCLASS_SUPER(klass));
3774
3775 if (tmp != super) {
3777 "superclass mismatch for class %"PRIsVALUE"",
3778 rb_id2str(id));
3779 }
3780 else {
3781 return klass;
3782 }
3783 }
3784 else {
3785 return klass;
3786 }
3787}
3788
3789static VALUE
3790vm_check_if_module(ID id, VALUE mod)
3791{
3792 if (!RB_TYPE_P(mod, T_MODULE)) {
3793 return 0;
3794 }
3795 else {
3796 return mod;
3797 }
3798}
3799
3800static VALUE
3801declare_under(ID id, VALUE cbase, VALUE c)
3802{
3804 rb_const_set(cbase, id, c);
3805 return c;
3806}
3807
3808static VALUE
3809vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
3810{
3811 /* new class declaration */
3812 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
3813 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
3814 rb_class_inherited(s, c);
3815 return c;
3816}
3817
3818static VALUE
3819vm_declare_module(ID id, VALUE cbase)
3820{
3821 /* new module declaration */
3822 return declare_under(id, cbase, rb_define_module_id(id));
3823}
3824
3825NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
3826static void
3827unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
3828{
3829 VALUE name = rb_id2str(id);
3830 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
3831 name, type);
3832 VALUE location = rb_const_source_location_at(cbase, id);
3833 if (!NIL_P(location)) {
3834 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
3835 " previous definition of %"PRIsVALUE" was here",
3836 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
3837 }
3839}
3840
3841static VALUE
3842vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
3843{
3844 VALUE klass;
3845
3846 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
3848 "superclass must be a Class (%"PRIsVALUE" given)",
3849 rb_obj_class(super));
3850 }
3851
3852 vm_check_if_namespace(cbase);
3853
3854 /* find klass */
3856 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
3857 if (!vm_check_if_class(id, flags, super, klass))
3858 unmatched_redefinition("class", cbase, id, klass);
3859 return klass;
3860 }
3861 else {
3862 return vm_declare_class(id, flags, cbase, super);
3863 }
3864}
3865
3866static VALUE
3867vm_define_module(ID id, rb_num_t flags, VALUE cbase)
3868{
3869 VALUE mod;
3870
3871 vm_check_if_namespace(cbase);
3872 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
3873 if (!vm_check_if_module(id, mod))
3874 unmatched_redefinition("module", cbase, id, mod);
3875 return mod;
3876 }
3877 else {
3878 return vm_declare_module(id, cbase);
3879 }
3880}
3881
3882static VALUE
3883vm_find_or_create_class_by_id(ID id,
3884 rb_num_t flags,
3885 VALUE cbase,
3886 VALUE super)
3887{
3888 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
3889
3890 switch (type) {
3891 case VM_DEFINECLASS_TYPE_CLASS:
3892 /* classdef returns class scope value */
3893 return vm_define_class(id, flags, cbase, super);
3894
3895 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
3896 /* classdef returns class scope value */
3897 return rb_singleton_class(cbase);
3898
3899 case VM_DEFINECLASS_TYPE_MODULE:
3900 /* classdef returns class scope value */
3901 return vm_define_module(id, flags, cbase);
3902
3903 default:
3904 rb_bug("unknown defineclass type: %d", (int)type);
3905 }
3906}
3907
3908static rb_method_visibility_t
3909vm_scope_visibility_get(const rb_execution_context_t *ec)
3910{
3911 const rb_control_frame_t *cfp = ec->cfp;
3912
3913 if (!vm_env_cref_by_cref(cfp->ep)) {
3914 return METHOD_VISI_PUBLIC;
3915 }
3916 else {
3917 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
3918 }
3919}
3920
3921static int
3922vm_scope_module_func_check(const rb_execution_context_t *ec)
3923{
3924 const rb_control_frame_t *cfp = ec->cfp;
3925
3926 if (!vm_env_cref_by_cref(cfp->ep)) {
3927 return FALSE;
3928 }
3929 else {
3930 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
3931 }
3932}
3933
3934static void
3935vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
3936{
3937 VALUE klass;
3938 rb_method_visibility_t visi;
3939 rb_cref_t *cref = vm_ec_cref(ec);
3940
3941 if (!is_singleton) {
3942 klass = CREF_CLASS(cref);
3943 visi = vm_scope_visibility_get(ec);
3944 }
3945 else { /* singleton */
3946 klass = rb_singleton_class(obj); /* class and frozen checked in this API */
3947 visi = METHOD_VISI_PUBLIC;
3948 }
3949
3950 if (NIL_P(klass)) {
3951 rb_raise(rb_eTypeError, "no class/module to add method");
3952 }
3953
3954 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
3955
3956 if (!is_singleton && vm_scope_module_func_check(ec)) {
3957 klass = rb_singleton_class(klass);
3958 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
3959 }
3960}
3961
3962static void
3963vm_search_method_wrap(
3964 const struct rb_control_frame_struct *reg_cfp,
3965 struct rb_call_data *cd,
3966 VALUE recv)
3967{
3968 vm_search_method(cd, recv);
3969}
3970
3971static void
3972vm_search_invokeblock(
3973 const struct rb_control_frame_struct *reg_cfp,
3974 struct rb_call_data *cd,
3975 VALUE recv)
3976{
3977 /* Does nothing. */
3978}
3979
3980static VALUE
3981vm_invokeblock_i(
3982 struct rb_execution_context_struct *ec,
3983 struct rb_control_frame_struct *reg_cfp,
3984 struct rb_calling_info *calling,
3985 struct rb_call_data *cd)
3986{
3987 const struct rb_call_info *ci = &cd->ci;
3988 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
3989
3990 if (block_handler == VM_BLOCK_HANDLER_NONE) {
3991 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
3992 }
3993 else {
3994 return vm_invoke_block(ec, GET_CFP(), calling, ci, block_handler);
3995 }
3996}
3997
3998static VALUE
3999vm_sendish(
4000 struct rb_execution_context_struct *ec,
4001 struct rb_control_frame_struct *reg_cfp,
4002 struct rb_call_data *cd,
4003 VALUE block_handler,
4004 void (*method_explorer)(
4005 const struct rb_control_frame_struct *reg_cfp,
4006 struct rb_call_data *cd,
4007 VALUE recv))
4008{
4009 CALL_INFO ci = &cd->ci;
4010 CALL_CACHE cc = &cd->cc;
4011 VALUE val;
4012 int argc = ci->orig_argc;
4013 VALUE recv = TOPN(argc);
4014 struct rb_calling_info calling;
4015
4016 calling.block_handler = block_handler;
4017 calling.kw_splat = IS_ARGS_KW_SPLAT(ci) > 0;
4018 calling.recv = recv;
4019 calling.argc = argc;
4020
4021 method_explorer(GET_CFP(), cd, recv);
4022
4023 val = cc->call(ec, GET_CFP(), &calling, cd);
4024
4025 if (val != Qundef) {
4026 return val; /* CFUNC normal return */
4027 }
4028 else {
4029 RESTORE_REGS(); /* CFP pushed in cc->call() */
4030 }
4031
4032#ifdef MJIT_HEADER
4033 /* When calling an ISeq that may catch an exception from JIT-ed
4034 code, we should not call mjit_exec directly, to prevent the
4035 caller frame from being canceled. That's because the caller
4036 frame may have stack values in its local variables, and
4037 cancelling the caller frame would purge them. But directly
4038 calling mjit_exec is faster... */
4039 if (GET_ISEQ()->body->catch_except_p) {
4040 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
4041 return vm_exec(ec, true);
4042 }
4043 else if ((val = mjit_exec(ec)) == Qundef) {
4044 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
4045 return vm_exec(ec, false);
4046 }
4047 else {
4048 return val;
4049 }
4050#else
4051 /* When calling from the VM, a longjmp in the callee won't purge any
4052 JIT-ed caller frames. So it's safe to call mjit_exec
4053 directly. */
4054 return mjit_exec(ec);
4055#endif
4056}
4057
4058static VALUE
4059vm_opt_str_freeze(VALUE str, int bop, ID id)
4060{
4062 return str;
4063 }
4064 else {
4065 return Qundef;
4066 }
4067}
4068
4069/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
4070#define id_cmp idCmp
4071
4072static VALUE
4073vm_opt_newarray_max(rb_num_t num, const VALUE *ptr)
4074{
4076 if (num == 0) {
4077 return Qnil;
4078 }
4079 else {
4080 struct cmp_opt_data cmp_opt = { 0, 0 };
4081 VALUE result = *ptr;
4082 rb_snum_t i = num - 1;
4083 while (i-- > 0) {
4084 const VALUE v = *++ptr;
4085 if (OPTIMIZED_CMP(v, result, cmp_opt) > 0) {
4086 result = v;
4087 }
4088 }
4089 return result;
4090 }
4091 }
4092 else {
4093 VALUE ary = rb_ary_new4(num, ptr);
4094 return rb_funcall(ary, idMax, 0);
4095 }
4096}
4097
4098static VALUE
4099vm_opt_newarray_min(rb_num_t num, const VALUE *ptr)
4100{
4102 if (num == 0) {
4103 return Qnil;
4104 }
4105 else {
4106 struct cmp_opt_data cmp_opt = { 0, 0 };
4107 VALUE result = *ptr;
4108 rb_snum_t i = num - 1;
4109 while (i-- > 0) {
4110 const VALUE v = *++ptr;
4111 if (OPTIMIZED_CMP(v, result, cmp_opt) < 0) {
4112 result = v;
4113 }
4114 }
4115 return result;
4116 }
4117 }
4118 else {
4119 VALUE ary = rb_ary_new4(num, ptr);
4120 return rb_funcall(ary, idMin, 0);
4121 }
4122}
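Both newarray min/max optimizations avoid allocating the intermediate Array when Array#min/#max are unredefined: they scan the freshly pushed operands once, keeping the current best. A standalone sketch of the scan with plain longs in place of VALUEs and `<` in place of OPTIMIZED_CMP:

/* Standalone sketch (not VM code): single pass over the operands,
 * same loop shape as vm_opt_newarray_min. */
#include <stdio.h>

static long scan_min(long num, const long *ptr)
{
    long result = *ptr;
    long i = num - 1;
    while (i-- > 0) {
        const long v = *++ptr;
        if (v < result) result = v;
    }
    return result;
}

int main(void)
{
    const long values[] = {5, 2, 9, 4};
    printf("%ld\n", scan_min(4, values));   /* 2 */
    return 0;
}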
4123
4124#undef id_cmp
4125
4126static int
4127vm_ic_hit_p(IC ic, const VALUE *reg_ep)
4128{
4129 if (ic->ic_serial == GET_GLOBAL_CONSTANT_STATE()) {
4130 return (ic->ic_cref == NULL || // no need to check CREF
4131 ic->ic_cref == vm_get_cref(reg_ep));
4132 }
4133 return FALSE;
4134}
4135
4136static void
4137vm_ic_update(IC ic, VALUE val, const VALUE *reg_ep)
4138{
4139 VM_ASSERT(ic->value != Qundef);
4140 ic->value = val;
4142 ic->ic_cref = vm_get_const_key_cref(reg_ep);
4144}
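vm_ic_hit_p and vm_ic_update implement the constant inline cache: an entry filled under one global constant serial stays valid until a constant redefinition bumps the serial (and, where relevant, until the CREF changes). A standalone sketch of the serial-validated cache idea (all names illustrative):

/* Standalone sketch (not VM code): a cache entry validated by comparing a
 * stored serial number against a global one that grows on invalidation. */
#include <stdio.h>

static unsigned long long global_constant_serial = 1;   /* bumped on redefinition */

struct const_cache { unsigned long long serial; long value; };

static int cache_hit(const struct const_cache *ic)
{
    return ic->serial == global_constant_serial;
}

static void cache_update(struct const_cache *ic, long value)
{
    ic->value = value;
    ic->serial = global_constant_serial;
}

int main(void)
{
    struct const_cache ic = {0, 0};
    if (!cache_hit(&ic)) cache_update(&ic, 42);   /* slow path, then fill */
    printf("hit=%d value=%ld\n", cache_hit(&ic), ic.value);

    global_constant_serial++;                     /* a constant was redefined */
    printf("hit=%d\n", cache_hit(&ic));           /* 0: cache invalidated */
    return 0;
}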
4145
4146static VALUE
4147vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
4148{
4149 rb_thread_t *th = rb_ec_thread_ptr(ec);
4150 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
4151
4152 again:
4153 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
4154 return is->once.value;
4155 }
4156 else if (is->once.running_thread == NULL) {
4157 VALUE val;
4158 is->once.running_thread = th;
4159 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
4160 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
4161 /* is->once.running_thread is cleared by vm_once_clear() */
4162 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
4163 return val;
4164 }
4165 else if (is->once.running_thread == th) {
4166 /* recursive once */
4167 return vm_once_exec((VALUE)iseq);
4168 }
4169 else {
4170 /* waiting for finish */
4173 goto again;
4174 }
4175}
4176
4177static OFFSET
4178vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
4179{
4180 switch (OBJ_BUILTIN_TYPE(key)) {
4181 case -1:
4182 case T_FLOAT:
4183 case T_SYMBOL:
4184 case T_BIGNUM:
4185 case T_STRING:
4194 st_data_t val;
4195 if (RB_FLOAT_TYPE_P(key)) {
4196 double kval = RFLOAT_VALUE(key);
4197 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
4198 key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
4199 }
4200 }
4201 if (rb_hash_stlike_lookup(hash, key, &val)) {
4202 return FIX2LONG((VALUE)val);
4203 }
4204 else {
4205 return else_offset;
4206 }
4207 }
4208 }
4209 return 0;
4210}
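vm_case_dispatch can only use the precompiled CDHASH when the key hashes like the literal in the `when` clause, so an integral Float such as 3.0 is first converted to the corresponding Integer; isinf/modf decide whether the conversion is exact. A standalone sketch of that normalization:

/* Standalone sketch (not VM code): convert an integral Float key to an
 * integer, leaving non-integral and infinite keys untouched. */
#include <stdio.h>
#include <math.h>

int main(void)
{
    const double keys[] = {3.0, 3.5, INFINITY};
    for (int i = 0; i < 3; i++) {
        double k = keys[i], ipart = 0.0;
        if (!isinf(k) && modf(k, &ipart) == 0.0) {
            printf("%g -> integer key %ld\n", k, (long)ipart);
        }
        else {
            printf("%g -> kept as float key\n", k);
        }
    }
    return 0;
}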
4211
4212NORETURN(static void
4213 vm_stack_consistency_error(const rb_execution_context_t *ec,
4214 const rb_control_frame_t *,
4215 const VALUE *));
4216static void
4217vm_stack_consistency_error(const rb_execution_context_t *ec,
4218 const rb_control_frame_t *cfp,
4219 const VALUE *bp)
4220{
4221 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
4222 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
4223 static const char stack_consistency_error[] =
4224 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
4225#if defined RUBY_DEVEL
4226 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
4227 rb_str_cat_cstr(mesg, "\n");
4230#else
4231 rb_bug(stack_consistency_error, nsp, nbp);
4232#endif
4233}
4234
4235static VALUE
4236vm_opt_plus(VALUE recv, VALUE obj)
4237{
4238 if (FIXNUM_2_P(recv, obj) &&
4240 return rb_fix_plus_fix(recv, obj);
4241 }
4242 else if (FLONUM_2_P(recv, obj) &&
4244 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
4245 }
4246 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4247 return Qundef;
4248 }
4249 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4252 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
4253 }
4254 else if (RBASIC_CLASS(recv) == rb_cString &&
4257 return rb_str_opt_plus(recv, obj);
4258 }
4259 else if (RBASIC_CLASS(recv) == rb_cArray &&
4262 return rb_ary_plus(recv, obj);
4263 }
4264 else {
4265 return Qundef;
4266 }
4267}
4268
4269static VALUE
4270vm_opt_minus(VALUE recv, VALUE obj)
4271{
4272 if (FIXNUM_2_P(recv, obj) &&
4273 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
4274 return rb_fix_minus_fix(recv, obj);
4275 }
4276 else if (FLONUM_2_P(recv, obj) &&
4277 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
4278 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
4279 }
4280 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4281 return Qundef;
4282 }
4283 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4284 RBASIC_CLASS(obj) == rb_cFloat &&
4285 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
4286 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
4287 }
4288 else {
4289 return Qundef;
4290 }
4291}
4292
4293static VALUE
4294vm_opt_mult(VALUE recv, VALUE obj)
4295{
4296 if (FIXNUM_2_P(recv, obj) &&
4297 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
4298 return rb_fix_mul_fix(recv, obj);
4299 }
4300 else if (FLONUM_2_P(recv, obj) &&
4301 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
4302 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
4303 }
4304 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4305 return Qundef;
4306 }
4307 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4308 RBASIC_CLASS(obj) == rb_cFloat &&
4309 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
4310 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
4311 }
4312 else {
4313 return Qundef;
4314 }
4315}
4316
4317static VALUE
4318vm_opt_div(VALUE recv, VALUE obj)
4319{
4320 if (FIXNUM_2_P(recv, obj) &&
4321 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
4322 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
4323 }
4324 else if (FLONUM_2_P(recv, obj) &&
4325 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
4326 return rb_flo_div_flo(recv, obj);
4327 }
4328 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4329 return Qundef;
4330 }
4331 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4332 RBASIC_CLASS(obj) == rb_cFloat &&
4333 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
4334 return rb_flo_div_flo(recv, obj);
4335 }
4336 else {
4337 return Qundef;
4338 }
4339}
4340
4341static VALUE
4342vm_opt_mod(VALUE recv, VALUE obj)
4343{
4344 if (FIXNUM_2_P(recv, obj) &&
4345 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
4346 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
4347 }
4348 else if (FLONUM_2_P(recv, obj) &&
4349 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
4350 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
4351 }
4352 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4353 return Qundef;
4354 }
4355 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4356 RBASIC_CLASS(obj) == rb_cFloat &&
4357 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
4358 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
4359 }
4360 else {
4361 return Qundef;
4362 }
4363}
4364
4365static VALUE
4366vm_opt_neq(CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
4367{
4368 if (vm_method_cfunc_is(cd, recv, rb_obj_not_equal)) {
4369 VALUE val = opt_eq_func(recv, obj, cd_eq);
4370
4371 if (val != Qundef) {
4372 return RTEST(val) ? Qfalse : Qtrue;
4373 }
4374 }
4375
4376 return Qundef;
4377}
4378
4379static VALUE
4380vm_opt_lt(VALUE recv, VALUE obj)
4381{
4382 if (FIXNUM_2_P(recv, obj) &&
4383 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
4384 return (SIGNED_VALUE)recv < (SIGNED_VALUE)obj ? Qtrue : Qfalse;
4385 }
4386 else if (FLONUM_2_P(recv, obj) &&
4387 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
4388 return RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4389 }
4390 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4391 return Qundef;
4392 }
4393 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4394 RBASIC_CLASS(obj) == rb_cFloat &&
4395 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
4396 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
4397 return RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4398 }
4399 else {
4400 return Qundef;
4401 }
4402}
4403
4404static VALUE
4405vm_opt_le(VALUE recv, VALUE obj)
4406{
4407 if (FIXNUM_2_P(recv, obj) &&
4408 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
4409 return (SIGNED_VALUE)recv <= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
4410 }
4411 else if (FLONUM_2_P(recv, obj) &&
4412 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
4413 return RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4414 }
4415 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4416 return Qundef;
4417 }
4418 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4419 RBASIC_CLASS(obj) == rb_cFloat &&
4420 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
4421 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
4422 return RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4423 }
4424 else {
4425 return Qundef;
4426 }
4427}
4428
4429static VALUE
4430vm_opt_gt(VALUE recv, VALUE obj)
4431{
4432 if (FIXNUM_2_P(recv, obj) &&
4433 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
4434 return (SIGNED_VALUE)recv > (SIGNED_VALUE)obj ? Qtrue : Qfalse;
4435 }
4436 else if (FLONUM_2_P(recv, obj) &&
4437 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
4438 return RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4439 }
4440 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4441 return Qundef;
4442 }
4443 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4444 RBASIC_CLASS(obj) == rb_cFloat &&
4445 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
4446 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
4447 return RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4448 }
4449 else {
4450 return Qundef;
4451 }
4452}
4453
4454static VALUE
4455vm_opt_ge(VALUE recv, VALUE obj)
4456{
4457 if (FIXNUM_2_P(recv, obj) &&
4458 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
4459 return (SIGNED_VALUE)recv >= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
4460 }
4461 else if (FLONUM_2_P(recv, obj) &&
4462 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
4463 return RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4464 }
4465 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
4466 return Qundef;
4467 }
4468 else if (RBASIC_CLASS(recv) == rb_cFloat &&
4469 RBASIC_CLASS(obj) == rb_cFloat &&
4470 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
4471 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
4472 return RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
4473 }
4474 else {
4475 return Qundef;
4476 }
4477}
4478
4479
4480static VALUE
4481vm_opt_ltlt(VALUE recv, VALUE obj)
4482{
4483 if (SPECIAL_CONST_P(recv)) {
4484 return Qundef;
4485 }
4486 else if (RBASIC_CLASS(recv) == rb_cString &&
4487 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
4488 return rb_str_concat(recv, obj);
4489 }
4490 else if (RBASIC_CLASS(recv) == rb_cArray &&
4491 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
4492 return rb_ary_push(recv, obj);
4493 }
4494 else {
4495 return Qundef;
4496 }
4497}
4498
4499static VALUE
4500vm_opt_and(VALUE recv, VALUE obj)
4501{
4502 if (FIXNUM_2_P(recv, obj) &&
4503 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
4504 return (recv & obj) | 1;
4505 }
4506 else {
4507 return Qundef;
4508 }
4509}
4510
4511static VALUE
4512vm_opt_or(VALUE recv, VALUE obj)
4513{
4514 if (FIXNUM_2_P(recv, obj) &&
4515 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
4516 return recv | obj;
4517 }
4518 else {
4519 return Qundef;
4520 }
4521}
4522
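
vm_opt_and and vm_opt_or lean on the Fixnum tagging: an Integer n is carried in a VALUE as 2*n + 1, so both operands have the low tag bit set and the bitwise AND/OR of the tagged words is already the tagged AND/OR of the untagged values (the `| 1` in vm_opt_and merely re-asserts the tag). A stand-alone sketch with plain longs; to_tagged/from_tagged are simplified stand-ins, not the real LONG2FIX/FIX2LONG macros:

#include <stdio.h>

/* Simplified tagging helpers, for illustration only. */
static long to_tagged(long n)   { return (n << 1) | 1; }
static long from_tagged(long v) { return v >> 1; }

int main(void)
{
    long a = 0xC, b = 0xA;                  /* 12 and 10 */
    long ta = to_tagged(a), tb = to_tagged(b);

    /* (ta & tb) | 1 mirrors vm_opt_and; ta | tb mirrors vm_opt_or. */
    printf("and: %ld (expected %ld)\n", from_tagged((ta & tb) | 1), a & b);
    printf("or : %ld (expected %ld)\n", from_tagged(ta | tb), a | b);
    return 0;
}
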
4523static VALUE
4524vm_opt_aref(VALUE recv, VALUE obj)
4525{
4526 if (SPECIAL_CONST_P(recv)) {
4527 if (FIXNUM_2_P(recv, obj) &&
4528 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
4529 return rb_fix_aref(recv, obj);
4530 }
4531 return Qundef;
4532 }
4533 else if (RBASIC_CLASS(recv) == rb_cArray &&
4534 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
4535 if (FIXNUM_P(obj)) {
4536 return rb_ary_entry_internal(recv, FIX2LONG(obj));
4537 }
4538 else {
4539 return rb_ary_aref1(recv, obj);
4540 }
4541 }
4542 else if (RBASIC_CLASS(recv) == rb_cHash &&
4543 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
4544 return rb_hash_aref(recv, obj);
4545 }
4546 else {
4547 return Qundef;
4548 }
4549}
4550
4551static VALUE
4552vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
4553{
4554 if (SPECIAL_CONST_P(recv)) {
4555 return Qundef;
4556 }
4557 else if (RBASIC_CLASS(recv) == rb_cArray &&
4558 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
4559 FIXNUM_P(obj)) {
4560 rb_ary_store(recv, FIX2LONG(obj), set);
4561 return set;
4562 }
4563 else if (RBASIC_CLASS(recv) == rb_cHash &&
4564 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
4565 rb_hash_aset(recv, obj, set);
4566 return set;
4567 }
4568 else {
4569 return Qundef;
4570 }
4571}
4572
4573static VALUE
4574vm_opt_aref_with(VALUE recv, VALUE key)
4575{
4576 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
4577 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
4578 rb_hash_compare_by_id_p(recv) == Qfalse) {
4579 return rb_hash_aref(recv, key);
4580 }
4581 else {
4582 return Qundef;
4583 }
4584}
4585
4586static VALUE
4587vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
4588{
4589 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
4590 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
4591 rb_hash_compare_by_id_p(recv) == Qfalse) {
4592 return rb_hash_aset(recv, key, val);
4593 }
4594 else {
4595 return Qundef;
4596 }
4597}
4598
4599static VALUE
4600vm_opt_length(VALUE recv, int bop)
4601{
4602 if (SPECIAL_CONST_P(recv)) {
4603 return Qundef;
4604 }
4605 else if (RBASIC_CLASS(recv) == rb_cString &&
4606 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
4607 if (bop == BOP_EMPTY_P) {
4608 return LONG2NUM(RSTRING_LEN(recv));
4609 }
4610 else {
4611 return rb_str_length(recv);
4612 }
4613 }
4614 else if (RBASIC_CLASS(recv) == rb_cArray &&
4615 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
4616 return LONG2NUM(RARRAY_LEN(recv));
4617 }
4618 else if (RBASIC_CLASS(recv) == rb_cHash &&
4619 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
4620 return INT2FIX(RHASH_SIZE(recv));
4621 }
4622 else {
4623 return Qundef;
4624 }
4625}
4626
4627static VALUE
4628vm_opt_empty_p(VALUE recv)
4629{
4630 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
4631 case Qundef: return Qundef;
4632 case INT2FIX(0): return Qtrue;
4633 default: return Qfalse;
4634 }
4635}
4636
4637VALUE rb_false(VALUE obj);
4638
4639static VALUE
4640vm_opt_nil_p(CALL_DATA cd, VALUE recv)
4641{
4642 if (recv == Qnil &&
4643 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
4644 return Qtrue;
4645 }
4646 else if (vm_method_cfunc_is(cd, recv, rb_false)) {
4647 return Qfalse;
4648 }
4649 else {
4650 return Qundef;
4651 }
4652}
4653
4654static VALUE
4655fix_succ(VALUE x)
4656{
4657 switch (x) {
4658 case ~0UL:
4659 /* 0xFFFF_FFFF == INT2FIX(-1)
4660 * `-1.succ` is of course 0. */
4661 return INT2FIX(0);
4662 case RSHIFT(~0UL, 1):
4663 /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
4664 * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
4665 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
4666 default:
4667 /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
4668 * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
4669 * == lx*2 + ly*2 + 1
4670 * == (lx*2+1) + (ly*2+1) - 1
4671 * == x + y - 1
4672 *
4673 * Here, if we put y := INT2FIX(1):
4674 *
4675 * == x + INT2FIX(1) - 1
4676 * == x + 2 .
4677 */
4678 return x + 2;
4679 }
4680}
4681
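
The default arm of fix_succ relies on the same tagged encoding: with x == 2*n + 1, adding 2 to the tagged word is exactly n.succ, since (2*n + 1) + 2 == 2*(n + 1) + 1; the two explicit cases above handle -1 and the largest Fixnum. A stand-alone illustration; to_fix/from_fix are simplified stand-ins for LONG2FIX/FIX2LONG:

#include <stdio.h>

/* Simplified tagging helpers, for illustration only. */
static long to_fix(long n)   { return 2 * n + 1; }
static long from_fix(long v) { return (v - 1) / 2; }

int main(void)
{
    for (long n = -2; n <= 2; n++) {
        long succ = to_fix(n) + 2;             /* the `x + 2` fast path */
        printf("%ld.succ == %ld\n", n, from_fix(succ));
    }
    return 0;
}
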
4682static VALUE
4683vm_opt_succ(VALUE recv)
4684{
4685 if (FIXNUM_P(recv) &&
4686 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
4687 return fix_succ(recv);
4688 }
4689 else if (SPECIAL_CONST_P(recv)) {
4690 return Qundef;
4691 }
4692 else if (RBASIC_CLASS(recv) == rb_cString &&
4693 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
4694 return rb_str_succ(recv);
4695 }
4696 else {
4697 return Qundef;
4698 }
4699}
4700
4701static VALUE
4702vm_opt_not(CALL_DATA cd, VALUE recv)
4703{
4704 if (vm_method_cfunc_is(cd, recv, rb_obj_not)) {
4705 return RTEST(recv) ? Qfalse : Qtrue;
4706 }
4707 else {
4708 return Qundef;
4709 }
4710}
4711
4712static VALUE
4713vm_opt_regexpmatch2(VALUE recv, VALUE obj)
4714{
4715 if (SPECIAL_CONST_P(recv)) {
4716 return Qundef;
4717 }
4718 else if (RBASIC_CLASS(recv) == rb_cString &&
4719 CLASS_OF(obj) == rb_cRegexp &&
4720 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
4721 return rb_reg_match(obj, recv);
4722 }
4723 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
4724 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
4725 return rb_reg_match(recv, obj);
4726 }
4727 else {
4728 return Qundef;
4729 }
4730}
4731
4732rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
4733
4734NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc));
4735
4736static inline void
4737vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
4738 rb_event_flag_t pc_events, rb_event_flag_t target_event,
4739 rb_hook_list_t *global_hooks, rb_hook_list_t *local_hooks, VALUE val)
4740{
4741 rb_event_flag_t event = pc_events & target_event;
4742 VALUE self = GET_SELF();
4743
4744 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
4745
4746 if (event & global_hooks->events) {
4747 /* increment PC because source line is calculated with PC-1 */
4748 reg_cfp->pc++;
4749 vm_dtrace(event, ec);
4750 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
4751 reg_cfp->pc--;
4752 }
4753
4754 if (local_hooks != NULL) {
4755 if (event & local_hooks->events) {
4756 /* increment PC because source line is calculated with PC-1 */
4757 reg_cfp->pc++;
4758 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
4759 reg_cfp->pc--;
4760 }
4761 }
4762}
4763
4764#define VM_TRACE_HOOK(target_event, val) do { \
4765 if ((pc_events & (target_event)) & enabled_flags) { \
4766 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
4767 } \
4768} while (0)
4769
4770static void
4771vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc)
4772{
4773 const rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
4774
4775 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
4776 return;
4777 }
4778 else {
4779 const rb_iseq_t *iseq = reg_cfp->iseq;
4780 size_t pos = pc - iseq->body->iseq_encoded;
4781 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
4782 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
4783 rb_event_flag_t local_hook_events = local_hooks != NULL ? local_hooks->events : 0;
4784 enabled_flags |= local_hook_events;
4785
4786 VM_ASSERT((local_hook_events & ~ISEQ_TRACE_EVENTS) == 0);
4787
4788 if ((pc_events & enabled_flags) == 0) {
4789#if 0
4790 /* disable trace */
4791 /* TODO: incomplete */
4792 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
4793#else
4794 /* do not disable trace because of performance problem
4795 * (re-enable overhead)
4796 */
4797#endif
4798 return;
4799 }
4800 else if (ec->trace_arg != NULL) {
4801 /* already tracing */
4802 return;
4803 }
4804 else {
4805 rb_hook_list_t *global_hooks = rb_vm_global_hooks(ec);
4806
4807 if (0) {
4808 fprintf(stderr, "vm_trace>>%4d (%4x) - %s:%d %s\n",
4809 (int)pos,
4810 (int)pc_events,
4811 RSTRING_PTR(rb_iseq_path(iseq)),
4812 (int)rb_iseq_line_no(iseq, pos),
4813 RSTRING_PTR(rb_iseq_label(iseq)));
4814 }
4815 VM_ASSERT(reg_cfp->pc == pc);
4816 VM_ASSERT(pc_events != 0);
4817 VM_ASSERT(enabled_flags & pc_events);
4818
4819 /* check traces */
4820 VM_TRACE_HOOK(RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL, Qundef);
4821 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
4822 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
4823 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
4824 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
4825 }
4826 }
4827}
4828
4829#if VM_CHECK_MODE > 0
4830static NORETURN( NOINLINE( COLDFUNC
4831void vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
4832
4833void
4834Init_vm_stack_canary(void)
4835{
4836 /* This has to be called _after_ our PRNG is properly set up. */
4837 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
4838
4839 vm_stack_canary_was_born = true;
4840 VM_ASSERT(n == 0);
4841}
4842
4843static void
4844vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
4845{
4846 /* Because a method has already been called, why not call
4847 * another one. */
4848 const char *insn = rb_insns_name(i);
4849 VALUE inspection = rb_inspect(c);
4850 const char *str = StringValueCStr(inspection);
4851
4852 rb_bug("dead canary found at %s: %s", insn, str);
4853}
4854
4855#else
4856void Init_vm_stack_canary(void) { /* nothing to do */ }
4857#endif
4858
4859
4860/* a part of the following code is generated by this ruby script:
4861
486216.times{|i|
4863 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
4864 typedef_args.prepend(", ") if i != 0
4865 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
4866 call_args.prepend(", ") if i != 0
4867 puts %Q{
4868static VALUE
4869builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4870{
4871 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
4872 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
4873}}
4874}
4875
4876puts
4877puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
487816.times{|i|
4879 puts " builtin_invoker#{i},"
4880}
4881puts "};"
4882*/
4883
4884static VALUE
4885builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4886{
4887 typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
4888 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
4889}
4890
4891static VALUE
4892builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4893{
4894 typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
4895 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
4896}
4897
4898static VALUE
4899builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4900{
4901 typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
4902 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
4903}
4904
4905static VALUE
4906builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4907{
4908 typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
4909 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
4910}
4911
4912static VALUE
4913builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4914{
4915 typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
4916 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
4917}
4918
4919static VALUE
4920builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4921{
4922 typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
4923 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
4924}
4925
4926static VALUE
4927builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4928{
4929 typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
4930 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
4931}
4932
4933static VALUE
4934builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4935{
4936 typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
4937 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
4938}
4939
4940static VALUE
4941builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4942{
4943 typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
4944 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
4945}
4946
4947static VALUE
4948builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4949{
4950 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
4951 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
4952}
4953
4954static VALUE
4955builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4956{
4957 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
4958 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
4959}
4960
4961static VALUE
4962builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4963{
4964 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
4965 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
4966}
4967
4968static VALUE
4969builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4970{
4971 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
4972 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
4973}
4974
4975static VALUE
4976builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4977{
4978 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
4979 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
4980}
4981
4982static VALUE
4983builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4984{
4985 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
4986 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
4987}
4988
4989static VALUE
4990builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
4991{
4992 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
4993 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
4994}
4995
4996typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
4997
4998static builtin_invoker
4999lookup_builtin_invoker(int argc)
5000{
5001 static const builtin_invoker invokers[] = {
5002 builtin_invoker0,
5003 builtin_invoker1,
5004 builtin_invoker2,
5005 builtin_invoker3,
5006 builtin_invoker4,
5007 builtin_invoker5,
5008 builtin_invoker6,
5009 builtin_invoker7,
5010 builtin_invoker8,
5011 builtin_invoker9,
5012 builtin_invoker10,
5013 builtin_invoker11,
5014 builtin_invoker12,
5015 builtin_invoker13,
5016 builtin_invoker14,
5017 builtin_invoker15,
5018 };
5019
5020 return invokers[argc];
5021}
5022
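
lookup_builtin_invoker just indexes a table of arity-specific trampolines, each of which casts the stored function pointer back to its concrete prototype and spreads argv into positional arguments. A stand-alone sketch of that dispatch pattern; the long-based signatures and the invoke0/invoke2 names are illustrative, not the VM's:

#include <stdio.h>

typedef long (*generic_fn)(void);
typedef long (*invoker_fn)(const long *argv, generic_fn fn);

/* Two arity-specific trampolines: cast back to the real prototype and
 * spread argv into positional arguments. */
static long invoke0(const long *argv, generic_fn fn)
{
    (void)argv;
    return ((long (*)(void))fn)();
}

static long invoke2(const long *argv, generic_fn fn)
{
    return ((long (*)(long, long))fn)(argv[0], argv[1]);
}

static long zero(void)           { return 0; }
static long add2(long a, long b) { return a + b; }

int main(void)
{
    /* Indexed by arity, like lookup_builtin_invoker indexes invokers[]. */
    static const invoker_fn invokers[] = { invoke0, NULL, invoke2 };
    const long argv[] = { 40, 2 };

    printf("%ld\n", invokers[0](NULL, (generic_fn)zero));  /* 0  */
    printf("%ld\n", invokers[2](argv, (generic_fn)add2));  /* 42 */
    return 0;
}
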
5023static inline VALUE
5024invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
5025{
5026 VALUE self = cfp->self;
5027 return (*lookup_builtin_invoker(bf->argc))(ec, self, argv, (rb_insn_func_t)bf->func_ptr);
5028}
5029
5030static VALUE
5031vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
5032{
5033 return invoke_bf(ec, cfp, bf, argv);
5034}
5035
5036static VALUE
5037vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
5038{
5039 if (0) { // debug print
5040 fprintf(stderr, "vm_invoke_builtin_delegate: passing -> ");
5041 for (int i=0; i<bf->argc; i++) {
5042 fprintf(stderr, ":%s ", rb_id2name(cfp->iseq->body->local_table[i+start_index]));
5043 }
5044 fprintf(stderr, "\n");
5045 fprintf(stderr, "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
5046 }
5047
5048 if (bf->argc == 0) {
5049 return invoke_bf(ec, cfp, bf, NULL);
5050 }
5051 else {
5052 const VALUE *argv = cfp->ep - cfp->iseq->body->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
5053 return invoke_bf(ec, cfp, bf, argv);
5054 }
5055}
5056
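
In the delegate path above, argv is not a freshly built array: it points directly at the frame's locals, which live just below the ep-relative environment data (local_table_size slots plus VM_ENV_DATA_SIZE words), shifted by start_index. A toy sketch of that pointer arithmetic; TOY_ENV_DATA_SIZE stands in for VM_ENV_DATA_SIZE and the frame layout is simplified:

#include <stdio.h>

#define TOY_ENV_DATA_SIZE 3   /* stand-in for VM_ENV_DATA_SIZE, illustration only */

int main(void)
{
    /* Toy frame: locals sit just below the ep-relative environment data,
     * so the first local is at ep - local_table_size - TOY_ENV_DATA_SIZE + 1. */
    long stack[16] = {0};
    long *ep = &stack[10];
    unsigned int local_table_size = 4;

    long *locals = ep - local_table_size - TOY_ENV_DATA_SIZE + 1;
    for (unsigned int i = 0; i < local_table_size; i++) {
        locals[i] = (long)(i + 1) * 100;   /* pretend these are the method's locals */
    }

    /* Delegating with start_index == 1 passes locals[1..] as argv. */
    unsigned int start_index = 1;
    const long *argv = locals + start_index;
    printf("argv[0] = %ld, argv[1] = %ld\n", argv[0], argv[1]);  /* 200, 300 */
    return 0;
}
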
5057// for __builtin_inline!()
5058
5059VALUE
5060rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
5061{
5062 const rb_control_frame_t *cfp = ec->cfp;
5063 return cfp->ep[index];
5064}