#define MAX_EVENT_NUM 32

clean_hooks(GET_EC(), hooks);

if (new_iseq_events & ~enabled_iseq_events) {

update_global_event_hook(list->events);

/* event hook registration: allocate an rb_event_hook_t and connect it to a hook list */
rb_event_hook_t *hook = alloc_event_hook(func, events, data, hook_flags);
connect_event_hook(ec, hook);

connect_event_hook(GET_EC(), hook);

rb_threadptr_add_event_hook(GET_EC(), rb_thread_ptr(thval), func, events, data, hook_flags);

rb_event_hook_t *hook = alloc_event_hook(func, events, data, hook_flags);
connect_event_hook(GET_EC(), hook);
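/*
 * Hedged sketch (not part of this listing): registering a global event
 * hook through the public C API used here.  The callback shape follows
 * rb_event_hook_func_t; "count_calls_hook" and the counter are
 * illustrative names, not symbols from vm_trace.c.
 */
#include "ruby/ruby.h"
#include "ruby/debug.h"

static unsigned long call_count;

static void
count_calls_hook(rb_event_flag_t evflag, VALUE data, VALUE self, ID mid, VALUE klass)
{
    (void)data; (void)self; (void)mid; (void)klass;
    if (evflag & (RUBY_EVENT_CALL | RUBY_EVENT_C_CALL)) {
        call_count++;
    }
}

static void
install_call_counter(void)
{
    /* global hook: fires on every thread for the selected events */
    rb_add_event_hook(count_calls_hook, RUBY_EVENT_CALL | RUBY_EVENT_C_CALL, Qnil);
}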
/* hook list sweep: unlink hooks that have been flagged as deleted */
while ((hook = *nextp) != 0) {

if (list == rb_vm_global_hooks(ec)) {

update_global_event_hook(list->events);

if (list->running == 0) {
    clean_hooks(ec, list);

/* sentinel thread pointer: match hooks regardless of their thread filter */
#define MATCH_ANY_FILTER_TH ((rb_thread_t *)1)

rb_vm_t *vm = rb_ec_vm_ptr(ec);

if (func == 0 || hook->func == func) {

clean_hooks_check(ec, list);

return remove_event_hook(ec, filter_th, func, data);

return rb_threadptr_remove_event_hook(GET_EC(), rb_thread_ptr(thval), func, Qundef);

return rb_threadptr_remove_event_hook(GET_EC(), rb_thread_ptr(thval), func, data);

return remove_event_hook(GET_EC(), NULL, func, data);

rb_threadptr_remove_event_hook(ec, rb_ec_thread_ptr(ec), 0, Qundef);
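/*
 * Hedged sketch continuing the earlier example: removing the hook again.
 * rb_remove_event_hook() drops hooks registered with that function
 * pointer and returns how many were removed; the _with_data variant
 * (shown for contrast) also matches on the data argument.
 */
static void
remove_call_counter(void)
{
    int removed = rb_remove_event_hook(count_calls_hook);
    /* or, when the same func was registered with several data values:
     *   rb_remove_event_hook_with_data(count_calls_hook, Qnil);
     */
    (void)removed;
}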
for (hook = list->hooks; hook; hook = hook->next) {

clean_hooks_check(ec, list);

if (exec_hooks_precheck(ec, list, trace_arg) == 0) return;
exec_hooks_body(ec, list, trace_arg);
exec_hooks_postcheck(ec, list);

if (exec_hooks_precheck(ec, list, trace_arg) == 0) return 0;

exec_hooks_body(ec, list, trace_arg);

exec_hooks_postcheck(ec, list);

exec_hooks_unprotected(ec, rb_vm_global_hooks(ec), trace_arg);

if ((state = exec_hooks_protected(ec, hooks, trace_arg)) == TAG_NONE) {

if (VM_FRAME_FINISHED_P(ec->cfp)) {

rb_vm_t *const vm = rb_ec_vm_ptr(ec);

dummy_trace_arg.event = 0;

result = (*func)(arg);
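/*
 * Hedged sketch (not part of this listing): running a callback with
 * event hooks suppressed via rb_suppress_tracing(), whose signature
 * appears in this file.  Whether it is visible to extensions depends on
 * the Ruby version; "inspect_quietly" is an illustrative wrapper.
 */
static VALUE
inspect_without_hooks_body(VALUE obj)
{
    return rb_inspect(obj);   /* would normally fire c_call/c_return hooks */
}

static VALUE
inspect_quietly(VALUE obj)
{
    return rb_suppress_tracing(inspect_without_hooks_body, obj);
}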
#if defined RUBY_USE_SETJMPEX && RUBY_USE_SETJMPEX

thread_add_trace_func(GET_EC(), rb_thread_ptr(obj), trace);

thread_set_trace_func_m(VALUE target_thread, VALUE trace)

rb_thread_t *target_th = rb_thread_ptr(target_thread);

rb_threadptr_remove_event_hook(ec, target_th, call_trace_func, Qundef);

thread_add_trace_func(ec, target_th, trace);

/* map each RUBY_EVENT_* flag to its Symbol name (:call, :return, ...) */
#define C(name, NAME) case RUBY_EVENT_##NAME: CONST_ID(id, #name); return id;
    C(c_return, C_RETURN);
    C(b_return, B_RETURN);
    C(thread_begin, THREAD_BEGIN);
    C(thread_end, THREAD_END);
    C(fiber_switch, FIBER_SWITCH);
    C(script_compiled, SCRIPT_COMPILED);
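/*
 * Illustrative expansion only: with the C() macro above, an invocation
 * such as C(c_return, C_RETURN) expands to the case arm below.  The
 * surrounding switch over the event flag is assumed, not shown here.
 */
case RUBY_EVENT_C_RETURN: CONST_ID(id, "c_return"); return id;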
get_path_and_lineno(ec, ec->cfp, event, &filename, &line);

static VALUE rb_cTracePoint;

tp_memsize(const void *ptr)

/* reverse mapping: convert an event Symbol back to its RUBY_EVENT_* flag */
#define C(name, NAME) CONST_ID(id, #name); if (sym == ID2SYM(id)) return RUBY_EVENT_##NAME
    C(c_return, C_RETURN);
    C(b_return, B_RETURN);
    C(thread_begin, THREAD_BEGIN);
    C(thread_end, THREAD_END);
    C(fiber_switch, FIBER_SWITCH);
    C(script_compiled, SCRIPT_COMPILED);
    C(a_return, A_RETURN);
if (trace_arg == 0) {

return get_trace_arg();

return trace_arg->event;

get_path_and_lineno(trace_arg->ec, trace_arg->cfp, trace_arg->event, &trace_arg->path, &trace_arg->lineno);

fill_path_and_lineno(trace_arg);

fill_path_and_lineno(trace_arg);
return trace_arg->path;

if (!trace_arg->klass) {

if (trace_arg->klass) {

switch (trace_arg->event) {

fill_id_and_klass(trace_arg);
if (trace_arg->klass && trace_arg->id) {

fill_id_and_klass(trace_arg);

fill_id_and_klass(trace_arg);

fill_id_and_klass(trace_arg);
return trace_arg->klass;

return trace_arg->self;

rb_bug("rb_tracearg_return_value: unreachable");

return trace_arg->data;

rb_bug("rb_tracearg_raised_exception: unreachable");

return trace_arg->data;

rb_bug("rb_tracearg_raised_exception: unreachable");

if (rb_obj_is_iseq(data)) {

rb_bug("rb_tracearg_raised_exception: unreachable");

if (rb_obj_is_iseq(data)) {

rb_bug("rb_tracearg_object: unreachable");

return trace_arg->data;
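/*
 * Hedged sketch (not part of this listing): how a C-level TracePoint
 * callback could consume the rb_tracearg_* accessors above.  The
 * callback shape matches the func argument of rb_tracepoint_new();
 * "my_tp_callback" and the fprintf reporting are illustrative.
 */
#include "ruby/ruby.h"
#include "ruby/debug.h"
#include <stdio.h>

static void
my_tp_callback(VALUE tpval, void *data)
{
    rb_trace_arg_t *targ = rb_tracearg_from_tracepoint(tpval);
    VALUE path   = rb_tracearg_path(targ);      /* script path (String or nil) */
    VALUE lineno = rb_tracearg_lineno(targ);    /* line number (Integer) */
    VALUE event  = rb_tracearg_event(targ);     /* event name (Symbol) */
    VALUE mid    = rb_tracearg_method_id(targ); /* method name (Symbol or nil) */

    (void)data;
    fprintf(stderr, "%s:%d %s %s\n",
            NIL_P(path) ? "?" : RSTRING_PTR(path),
            NIL_P(lineno) ? 0 : NUM2INT(lineno),
            rb_id2name(SYM2ID(event)),
            NIL_P(mid) ? "-" : rb_id2name(SYM2ID(mid)));
}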
iseq_of(VALUE target)

rb_tracepoint_enable_for_target(VALUE tpval, VALUE target, VALUE target_line)

unsigned int line = 0;

if (!NIL_P(target_line)) {

hook_list_connect(target, list, hook, FALSE);

if (hook->data == tpval) {

list->events = events;

int previous_tracing = tp->tracing;

if (RTEST(target_thread)) {

tp->target_th = rb_thread_ptr(target_thread);

if (NIL_P(target)) {
    if (!NIL_P(target_line)) {

rb_tracepoint_enable_for_target(tpval, target, target_line);

int previous_tracing = tp->tracing;

if (RTEST(target_thval)) {
    target_th = rb_thread_ptr(target_thval);

return tracepoint_new(rb_cTracePoint, target_th, events, func, data, Qundef);

return tracepoint_new(self, 0, events, 0, 0, rb_block_proc());

VALUE trace = tracepoint_new_s(ec, self, args);

switch (trace_arg->event) {

int active = 0, deleted = 0;

#include "trace_point.rbinc"  /* Ruby-level TracePoint methods, generated from trace_point.rb at build time */
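/*
 * Hedged sketch (not part of this listing): creating and enabling a
 * TracePoint from C with the public debug API.  It reuses a callback
 * shaped like my_tp_callback() above; the event mask and the NULL data
 * pointer are illustrative choices.
 */
#include "ruby/ruby.h"
#include "ruby/debug.h"

static void my_tp_callback(VALUE tpval, void *data);  /* see earlier sketch */

static VALUE
install_line_tracer(void)
{
    /* first argument Qnil: do not restrict the tracepoint to one thread */
    VALUE tp = rb_tracepoint_new(Qnil, RUBY_EVENT_LINE | RUBY_EVENT_CALL,
                                 my_tp_callback, NULL);
    rb_tracepoint_enable(tp);   /* the Qtrue/Qfalse return gives the previous state */
    return tp;                  /* call rb_tracepoint_disable(tp) to stop tracing */
}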
/* capacity of the VM's postponed_job_buffer; the "special addition" is extra headroom used by rb_postponed_job_register_one() */
#define MAX_POSTPONED_JOB                  1000
#define MAX_POSTPONED_JOB_SPECIAL_ADDITION   24

if (expected_index >= max) return PJRR_FULL;

rb_vm_t *vm = rb_ec_vm_ptr(ec);

default:
    rb_bug("unreachable\n");

rb_vm_t *vm = rb_ec_vm_ptr(ec);

if (pjob->func == func) {

default:
    rb_bug("unreachable\n");

if (!wq_job) return FALSE;

list_head_init(&tmp);
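/*
 * Hedged sketch (not part of this listing): deferring work out of a
 * context where calling into Ruby is unsafe (e.g. a signal handler or
 * GC callback) via the public postponed-job API.  "flush_stats" and its
 * counter are illustrative; the flags argument is currently unused, so
 * 0 is passed.
 */
#include "ruby/ruby.h"
#include "ruby/debug.h"

static int pending_samples;

static void
flush_stats(void *data)
{
    /* runs later, at a safe point on a Ruby thread */
    int *counter = data;
    rb_funcall(rb_stderr, rb_intern("puts"),
               1, rb_sprintf("samples: %d", *counter));
    *counter = 0;
}

static void
record_sample_from_unsafe_context(void)
{
    pending_samples++;
    /* register_one coalesces duplicate (func, data) pairs while one is pending */
    rb_postponed_job_register_one(0, flush_stats, &pending_samples);
}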