// Atomically claim the counts accumulated since the last sample. We
// subtract exactly what we read rather than storing zero, so increments
// that race in between the read and the reset are never lost.
profiling_sample_count profiling_sample_count::record_counts() volatile {
  profiling_sample_count returned(sample_count, gc_sample_count,
                                  jit_sample_count, foreign_sample_count,
                                  foreign_thread_sample_count);
  atomic::fetch_subtract(&sample_count, returned.sample_count);
  atomic::fetch_subtract(&gc_sample_count, returned.gc_sample_count);
  atomic::fetch_subtract(&jit_sample_count, returned.jit_sample_count);
  atomic::fetch_subtract(&foreign_sample_count, returned.foreign_sample_count);
  atomic::fetch_subtract(&foreign_thread_sample_count,
                         returned.foreign_thread_sample_count);
  return returned;
}
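
// Reset all counters to zero ahead of a new profiling run.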
void profiling_sample_count::clear() volatile {
  sample_count = 0;
  gc_sample_count = 0;
  jit_sample_count = 0;
  foreign_sample_count = 0;
  foreign_thread_sample_count = 0;
  atomic::fence();
}
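
// A sample snapshots the counts accumulated since the previous sample, the
// Factor thread that was running, and a slice of the shared callstack
// buffer recorded by record_callstack_sample().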
profiling_sample::profiling_sample(factor_vm* vm, bool prolog_p,
                                   profiling_sample_count const& counts,
                                   cell thread)
    : counts(counts), thread(thread) {
  vm->record_callstack_sample(&callstack_begin, &callstack_end, prolog_p);
}
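
// Called from the sample safepoint to record one sample, tagged with the
// currently running Factor thread.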
void factor_vm::record_sample(bool prolog_p) {
  profiling_sample_count counts = safepoint.sample_counts.record_counts();
  if (!counts.empty())
    samples.push_back(profiling_sample(this, prolog_p, counts,
                                       special_objects[OBJ_CURRENT_THREAD]));
}
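
// Walk the callstack and append the owner of each frame's code block to the
// shared sample_callstacks buffer, returning the [begin, end) indices of
// this sample's slice through the out-parameters.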
void factor_vm::record_callstack_sample(cell* begin, cell* end, bool prolog_p) {
  *begin = sample_callstacks.size();

  // Skip the innermost frame if we sampled during a function prolog, since
  // that frame is not fully set up yet.
  bool skip_p = prolog_p;
  auto recorder = [&](cell frame_top, cell size, code_block* owner, cell addr) {
    if (skip_p)
      skip_p = false;
    else
      sample_callstacks.push_back(owner->owner);
  };
  iterate_callstack(ctx, recorder);

  *end = sample_callstacks.size();

  std::reverse(sample_callstacks.begin() + *begin, sample_callstacks.end());
}
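
// Toggle the sampling profiler on (rate > 0) or off (rate == 0); requests
// that match the current state are ignored.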
void factor_vm::set_sampling_profiler(fixnum rate) {
  bool sampling_p = !!rate;
  if (sampling_p == !!atomic::load(&sampling_profiler_p))
    return;

  if (sampling_p)
    start_sampling_profiler(rate);
  else
    end_sampling_profiler();
}
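
// Begin a profiling run: reset the counters, recycle the sample storage,
// and start the timer that triggers sample safepoints.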
void factor_vm::start_sampling_profiler(fixnum rate) {
  samples_per_second = rate;
  safepoint.sample_counts.clear();
  // Release the memory consumed by collecting samples.
  samples.clear();
  samples.shrink_to_fit();
  sample_callstacks.clear();
  sample_callstacks.shrink_to_fit();

  // Preallocate roughly ten seconds' worth of samples, presumably assuming
  // an average callstack depth of about ten frames.
  samples.reserve(10 * rate);
  sample_callstacks.reserve(100 * rate);
  atomic::store(&sampling_profiler_p, true);
  start_sampling_profiler_timer();
}
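
// Stop the timer, then flush any counts accumulated since the last tick
// into a final sample.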
void factor_vm::end_sampling_profiler() {
  atomic::store(&sampling_profiler_p, false);
  end_sampling_profiler_timer();
  record_sample(false);
}
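
// Primitive: pop the sample rate off the data stack and toggle the
// profiler accordingly.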
void factor_vm::primitive_sampling_profiler() {
  set_sampling_profiler(to_fixnum(ctx->pop()));
}
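
// Primitive: return the collected samples as an array of per-sample arrays,
// or f if the profiler is still running or no samples were recorded.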
/* Allocates memory */
void factor_vm::primitive_get_samples() {
  if (atomic::load(&sampling_profiler_p) || samples.empty()) {
    ctx->push(false_object);
  } else {
    data_root<array> samples_array(allot_array(samples.size(), false_object),
                                   this);
    std::vector<profiling_sample>::const_iterator from_iter = samples.begin();
    cell to_i = 0;

    for (; from_iter != samples.end(); ++from_iter, ++to_i) {
      // Each sample becomes a 7-element array: the five counters, the
      // sampled thread, and its callstack.
      data_root<array> sample(allot_array(7, false_object), this);

      set_array_nth(sample.untagged(), 0,
                    tag_fixnum(from_iter->counts.sample_count));
      set_array_nth(sample.untagged(), 1,
                    tag_fixnum(from_iter->counts.gc_sample_count));
      set_array_nth(sample.untagged(), 2,
                    tag_fixnum(from_iter->counts.jit_sample_count));
      set_array_nth(sample.untagged(), 3,
                    tag_fixnum(from_iter->counts.foreign_sample_count));
      set_array_nth(sample.untagged(), 4,
                    tag_fixnum(from_iter->counts.foreign_thread_sample_count));

      set_array_nth(sample.untagged(), 5, from_iter->thread);
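
      // Copy this sample's slice of the shared callstack buffer into its
      // own Factor array.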
      cell callstack_size =
          from_iter->callstack_end - from_iter->callstack_begin;
      data_root<array> callstack(allot_array(callstack_size, false_object),
                                 this);

      std::vector<cell>::const_iterator callstacks_begin =
                                            sample_callstacks.begin(),
                                        c_from_iter =
                                            callstacks_begin +
                                            from_iter->callstack_begin,
                                        c_from_iter_end =
                                            callstacks_begin +
                                            from_iter->callstack_end;
      cell c_to_i = 0;

      for (; c_from_iter != c_from_iter_end; ++c_from_iter, ++c_to_i)
        set_array_nth(callstack.untagged(), c_to_i, *c_from_iter);

      set_array_nth(sample.untagged(), 6, callstack.value());
      set_array_nth(samples_array.untagged(), to_i, sample.value());
    }
    ctx->push(samples_array.value());
  }
}