|
1 | 1 | // Rust cycle collector. Temporary, but will probably stick around for some
|
2 | 2 | // time until LLVM's GC infrastructure is more mature.
|
3 | 3 |
|
4 |
| -#include <cstdio> |
5 |
| -#include <cstdlib> |
6 |
| -#include <map> |
7 |
| -#include <vector> |
8 | 4 | #include "rust_gc.h"
|
9 | 5 | #include "rust_internal.h"
|
10 | 6 | #include "rust_shape.h"
|
11 | 7 | #include "rust_task.h"
|
| 8 | +#include <cstdio> |
| 9 | +#include <cstdlib> |
| 10 | +#include <map> |
| 11 | +#include <vector> |
| 12 | +#include <stdint.h> |
12 | 13 |
|
13 | 14 | #undef DPRINT
|
14 | 15 | #define DPRINT(fmt,...) fprintf(stderr, fmt, ##__VA_ARGS__)
|
15 | 16 |
|
16 | 17 | namespace cc {
|
17 | 18 |
|
| 19 | +// Internal reference count computation |
| 20 | + |
| 21 | +typedef std::map<void *,uintptr_t> irc_map; |
| 22 | + |
| 23 | +class irc : public shape::data<irc,shape::ptr> { |
| 24 | + friend class shape::data<irc,shape::ptr>; |
| 25 | + |
| 26 | + irc_map ircs; |
| 27 | + |
| 28 | + irc(const irc &other, const shape::ptr &in_dp) |
| 29 | + : shape::data<irc,shape::ptr>(other.task, other.align, other.sp, |
| 30 | + other.params, other.tables, in_dp), |
| 31 | + ircs(other.ircs) {} |
| 32 | + |
| 33 | + irc(const irc &other, |
| 34 | + const uint8_t *in_sp, |
| 35 | + const shape::type_param *in_params, |
| 36 | + const rust_shape_tables *in_tables = NULL) |
| 37 | + : shape::data<irc,shape::ptr>(other.task, |
| 38 | + other.align, |
| 39 | + in_sp, |
| 40 | + in_params, |
| 41 | + in_tables ? in_tables : other.tables, |
| 42 | + other.dp), |
| 43 | + ircs(other.ircs) {} |
| 44 | + |
| 45 | + irc(const irc &other, |
| 46 | + const uint8_t *in_sp, |
| 47 | + const shape::type_param *in_params, |
| 48 | + const rust_shape_tables *in_tables, |
| 49 | + shape::ptr in_dp) |
| 50 | + : shape::data<irc,shape::ptr>(other.task, |
| 51 | + other.align, |
| 52 | + in_sp, |
| 53 | + in_params, |
| 54 | + in_tables, |
| 55 | + in_dp), |
| 56 | + ircs(other.ircs) {} |
| 57 | + |
| 58 | + irc(rust_task *in_task, |
| 59 | + bool in_align, |
| 60 | + const uint8_t *in_sp, |
| 61 | + const shape::type_param *in_params, |
| 62 | + const rust_shape_tables *in_tables, |
| 63 | + uint8_t *in_data, |
| 64 | + irc_map &in_ircs) |
| 65 | + : shape::data<irc,shape::ptr>(in_task, in_align, in_sp, in_params, |
| 66 | + in_tables, in_data), |
| 67 | + ircs(in_ircs) {} |
| 68 | + |
| 69 | + void walk_vec(bool is_pod, uint16_t sp_size) { |
| 70 | + if (is_pod || shape::get_dp<void *>(dp) == NULL) |
| 71 | + return; // There can't be any outbound pointers from this. |
| 72 | + |
| 73 | + std::pair<uint8_t *,uint8_t *> data_range(get_vec_data_range(dp)); |
| 74 | + if (data_range.second - data_range.first > 10000) |
| 75 | + abort(); // FIXME: Temporary sanity check. |
| 76 | + |
| 77 | + irc sub(*this, data_range.first); |
| 78 | + shape::ptr data_end = sub.end_dp = data_range.second; |
| 79 | + while (sub.dp < data_end) { |
| 80 | + sub.walk_reset(); |
| 81 | + align = true; |
| 82 | + } |
| 83 | + } |
| 84 | + |
| 85 | + void walk_tag(shape::tag_info &tinfo, uint32_t tag_variant) { |
| 86 | + shape::data<irc,shape::ptr>::walk_variant(tinfo, tag_variant); |
| 87 | + } |
| 88 | + |
| 89 | + void walk_box() { |
| 90 | + shape::data<irc,shape::ptr>::walk_box_contents(); |
| 91 | + } |
| 92 | + |
| 93 | + void walk_fn() { |
| 94 | + shape::data<irc,shape::ptr>::walk_fn_contents(dp); |
| 95 | + } |
| 96 | + |
| 97 | + void walk_obj() { |
| 98 | + shape::data<irc,shape::ptr>::walk_obj_contents(dp); |
| 99 | + } |
| 100 | + |
| 101 | + void walk_res(const shape::rust_fn *dtor, unsigned n_params, |
| 102 | + const shape::type_param *params, const uint8_t *end_sp, |
| 103 | + bool live) { |
| 104 | + while (this->sp != end_sp) { |
| 105 | + this->walk(); |
| 106 | + align = true; |
| 107 | + } |
| 108 | + } |
| 109 | + |
| 110 | + void walk_subcontext(irc &sub) { sub.walk(); } |
| 111 | + |
| 112 | + void walk_box_contents(irc &sub, shape::ptr &ref_count_dp) { |
| 113 | + if (!ref_count_dp) |
| 114 | + return; |
| 115 | + |
| 116 | + // Bump the internal reference count of the box. |
| 117 | + if (ircs.find((void *)dp) == ircs.end()) |
| 118 | + ircs[(void *)dp] = 1; |
| 119 | + else |
| 120 | + ++ircs[(void *)dp]; |
| 121 | + |
| 122 | + // Do not traverse the contents of this box; it's in the allocation |
| 123 | + // somewhere, so we're guaranteed to come back to it (if we haven't |
| 124 | + // traversed it already). |
| 125 | + } |
| 126 | + |
| 127 | + void walk_struct(const uint8_t *end_sp) { |
| 128 | + while (this->sp != end_sp) { |
| 129 | + this->walk(); |
| 130 | + align = true; |
| 131 | + } |
| 132 | + } |
| 133 | + |
| 134 | + void walk_variant(shape::tag_info &tinfo, uint32_t variant_id, |
| 135 | + const std::pair<const uint8_t *,const uint8_t *> |
| 136 | + variant_ptr_and_end); |
| 137 | + |
| 138 | + template<typename T> |
| 139 | + inline void walk_number() { /* no-op */ } |
| 140 | + |
| 141 | +public: |
| 142 | + static void compute_ircs(rust_task *task, irc_map &ircs); |
| 143 | +}; |
| 144 | + |
// Walks the fields of a single tag variant by interpreting the shape
// bytecode in [variant_ptr_and_end.first, variant_ptr_and_end.second)
// in a sub-context that carries the variant's type parameters.
void
irc::walk_variant(shape::tag_info &tinfo, uint32_t variant_id,
                  const std::pair<const uint8_t *,const uint8_t *>
                  variant_ptr_and_end) {
    irc sub(*this, variant_ptr_and_end.first, tinfo.params);

    assert(variant_id < 256);   // FIXME: Temporary sanity check.

    const uint8_t *variant_end = variant_ptr_and_end.second;
    while (sub.sp < variant_end) {
        sub.walk();
        // NOTE(review): this sets the *parent's* align flag while the
        // sub-context does the walking -- confirm `sub.align` wasn't
        // intended here (walk_struct sets align on the same object it
        // walks with).
        align = true;
    }
}
| 159 | + |
| 160 | +void |
| 161 | +irc::compute_ircs(rust_task *task, irc_map &ircs) { |
| 162 | + std::map<void *,type_desc *>::iterator begin(task->local_allocs.begin()), |
| 163 | + end(task->local_allocs.end()); |
22 | 164 | while (begin != end) {
|
23 |
| - void *p = begin->first; |
| 165 | + uint8_t *p = reinterpret_cast<uint8_t *>(begin->first); |
24 | 166 | type_desc *tydesc = begin->second;
|
25 | 167 |
|
26 |
| - DPRINT("marking allocation: %p, tydesc=%p\n", p, tydesc); |
| 168 | + DPRINT("determining internal ref counts: %p, tydesc=%p\n", p, tydesc); |
27 | 169 |
|
28 | 170 | // Prevents warnings for now
|
29 |
| - (void)p; |
30 |
| - (void)tydesc; |
31 |
| -#if 0 |
32 | 171 | shape::arena arena;
|
33 | 172 | shape::type_param *params =
|
34 | 173 | shape::type_param::from_tydesc(tydesc, arena);
|
35 |
| - mark mark(task, true, tydesc->shape, params, tydesc->shape_tables, p); |
36 |
| - mark.walk(); |
37 |
| -#endif |
| 174 | + irc irc(task, true, tydesc->shape, params, tydesc->shape_tables, p, |
| 175 | + ircs); |
| 176 | + irc.walk(); |
38 | 177 |
|
39 | 178 | ++begin;
|
40 | 179 | }
|
41 | 180 | }
|
42 | 181 |
|
| 182 | + |
| 183 | +void |
| 184 | +do_cc(rust_task *task) { |
| 185 | + irc_map ircs; |
| 186 | + irc::compute_ircs(task, ircs); |
| 187 | +} |
| 188 | + |
43 | 189 | void
|
44 | 190 | maybe_cc(rust_task *task) {
|
45 | 191 | // FIXME: We ought to lock this.
|
|
0 commit comments