// Drain the per-category sample counters (updated concurrently, hence the
// volatile qualifier and atomic ops) into a local snapshot and return it.
// NOTE(review): source shown with gaps -- the snapshot's constructor argument
// list and the trailing return/closing brace are elided from this view.
6 profiling_sample_count profiling_sample_count::record_counts() volatile
9 profiling_sample_count returned(
14 foreign_thread_sample_count);
// Subtract the snapshot from each counter instead of storing zero, so any
// increments that raced in between reading a counter and resetting it are
// preserved for the next drain rather than silently lost.
15 atomic::fetch_subtract(&sample_count, returned.sample_count);
16 atomic::fetch_subtract(&gc_sample_count, returned.gc_sample_count);
17 atomic::fetch_subtract(&jit_sample_count, returned.jit_sample_count);
18 atomic::fetch_subtract(&foreign_sample_count, returned.foreign_sample_count);
19 atomic::fetch_subtract(&foreign_thread_sample_count, returned.foreign_thread_sample_count);
// Reset all sample counters to zero. Only the two foreign counters are
// visible in this chunk; the remaining assignments are in elided lines.
// NOTE(review): these are plain (non-atomic) stores -- presumably only safe
// while the sampling timer is not running; confirm against callers
// (start_sampling_profiler calls this before enabling the profiler).
23 void profiling_sample_count::clear() volatile
28 foreign_sample_count = 0;
29 foreign_thread_sample_count = 0;
// Construct a single profiler sample from a drained counter snapshot.
// Records the current callstack into the VM-wide sample_callstacks buffer;
// record_callstack_sample fills callstack_begin/callstack_end with the
// [begin, end) range of that recording.
// NOTE(review): remaining parameters and the initializer list are elided
// from this view.
33 profiling_sample::profiling_sample(factor_vm *vm,
34 profiling_sample_count const &counts,
40 vm->record_callstack_sample(&callstack_begin, &callstack_end);
// Take one profiler sample: drain the counters accumulated at the safepoint
// since the previous sample, then append a profiling_sample tagged with the
// currently running Factor thread object.
43 void factor_vm::record_sample()
45 profiling_sample_count counts = safepoint.sample_counts.record_counts();
47 samples.push_back(profiling_sample(this,
48 counts, special_objects[OBJ_CURRENT_THREAD]));
// Append the owner of each code block on the current callstack to the shared
// sample_callstacks vector. *begin and *end receive the resulting [begin, end)
// range as *indices* into that vector (not pointers), so the range stays
// valid if sample_callstacks later reallocates.
51 void factor_vm::record_callstack_sample(cell *begin, cell *end)
53 *begin = sample_callstacks.size();
54 stack_frame *frame = ctx->bottom_frame();
// Walk from the bottom frame toward callstack_top via frame_successor; the
// >= comparison implies callstack_top is the lowest frame address in range.
56 while (frame >= ctx->callstack_top) {
57 sample_callstacks.push_back(frame_code(frame)->owner);
58 frame = frame_successor(frame);
61 *end = sample_callstacks.size();
// Enable or disable the sampling profiler: a nonzero rate starts it at that
// rate, zero stops it. When the requested state already matches
// sampling_profiler_p this is a no-op (the early return is in an elided line).
64 void factor_vm::set_sampling_profiler(fixnum rate)
66 bool sampling_p = !!rate;
67 if (sampling_p == !!atomic::load(&sampling_profiler_p))
71 start_sampling_profiler(rate);
73 end_sampling_profiler();
// Discard all recorded samples and their callstack storage, returning the
// memory to the allocator rather than keeping it cached in the vectors.
76 void factor_vm::clear_samples()
78 // Swapping into temporaries releases the vector's allocated storage,
79 // whereas clear() would leave the allocation as-is
80 std::vector<profiling_sample> sample_graveyard;
81 std::vector<cell> sample_callstack_graveyard;
82 samples.swap(sample_graveyard);
83 sample_callstacks.swap(sample_callstack_graveyard);
// Begin sampling at `rate` samples per second: record the rate, reset the
// shared counters, pre-allocate sample storage, then enable the flag and arm
// the timer.
86 void factor_vm::start_sampling_profiler(fixnum rate)
88 samples_per_second = rate;
89 safepoint.sample_counts.clear();
// Heuristic pre-allocation (10 seconds of samples; ~10 callstack cells per
// sample) so the sampling path avoids vector reallocation early on --
// TODO(review): confirm the intent behind the 10x/100x factors.
91 samples.reserve(10*rate);
92 sample_callstacks.reserve(100*rate);
// Set the enable flag before arming the timer so the first timer tick
// observes an enabled profiler.
93 atomic::store(&sampling_profiler_p, true);
94 start_sampling_profiler_timer();
// Stop sampling: clear the enable flag first, so a timer tick that races with
// shutdown sees the profiler disabled, then tear down the timer itself.
97 void factor_vm::end_sampling_profiler()
99 atomic::store(&sampling_profiler_p, false);
100 end_sampling_profiler_timer();
// Primitive: pop a fixnum rate from the data stack; nonzero starts the
// sampling profiler at that rate, zero stops it.
104 void factor_vm::primitive_sampling_profiler()
106 set_sampling_profiler(to_fixnum(ctx->pop()));
// Primitive: push an array of all recorded samples onto the data stack, or f
// when the profiler is still running or no samples exist. Each sample becomes
// a 7-element array: slots 0-4 are the five per-category counts, slot 5 the
// thread object, slot 6 the callstack array of code-block owners.
109 void factor_vm::primitive_get_samples()
111 if (atomic::load(&sampling_profiler_p) || samples.empty()) {
112 ctx->push(false_object);
// (else branch -- elided brace in this view)
114 data_root<array> samples_array(allot_array(samples.size(), false_object),this);
115 std::vector<profiling_sample>::const_iterator from_iter = samples.begin();
118 for (; from_iter != samples.end(); ++from_iter, ++to_i)
// allot_array can trigger GC, so every array is pinned through a data_root.
120 data_root<array> sample(allot_array(7, false_object),this);
// Slots 0-4: per-category sample counts, tagged as fixnums.
122 set_array_nth(sample.untagged(),0,tag_fixnum(from_iter->counts.sample_count));
123 set_array_nth(sample.untagged(),1,tag_fixnum(from_iter->counts.gc_sample_count));
124 set_array_nth(sample.untagged(),2,tag_fixnum(from_iter->counts.jit_sample_count));
125 set_array_nth(sample.untagged(),3,tag_fixnum(from_iter->counts.foreign_sample_count));
126 set_array_nth(sample.untagged(),4,tag_fixnum(from_iter->counts.foreign_thread_sample_count));
// Slot 5: the thread object captured when the sample was taken.
128 set_array_nth(sample.untagged(),5,from_iter->thread);
// Rebuild this sample's callstack from the shared sample_callstacks buffer
// using the sample's stored [begin, end) index range.
130 cell callstack_size = from_iter->callstack_end - from_iter->callstack_begin;
131 data_root<array> callstack(allot_array(callstack_size,false_object),this);
// Iterators are re-derived from begin() each iteration, so they remain valid
// even though the stored range is index-based.
133 std::vector<cell>::const_iterator
134 callstacks_begin = sample_callstacks.begin(),
135 c_from_iter = callstacks_begin + from_iter->callstack_begin,
136 c_from_iter_end = callstacks_begin + from_iter->callstack_end;
139 for (; c_from_iter != c_from_iter_end; ++c_from_iter, ++c_to_i)
140 set_array_nth(callstack.untagged(),c_to_i,*c_from_iter);
// Slot 6: the per-sample callstack array.
142 set_array_nth(sample.untagged(),6,callstack.value());
144 set_array_nth(samples_array.untagged(),to_i,sample.value());
146 ctx->push(samples_array.value());
150 void factor_vm::primitive_clear_samples()