/* OpenXM_contrib/gc/nursery.c, Revision 1.1.1.1 */
/*
 * Copyright (c) 1999 by Silicon Graphics.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */

#ifdef NURSERY
??? This implementation is incomplete.  If you are trying to
??? compile this you are doing something wrong.

#include "nursery.h"

#define SCAN_STATICS_FOR_NURSERY
	/* If this is not defined, the collector will not see	*/
	/* references from static data areas to the nursery.	*/
struct copy_obj {
    ptr_t forward;		/* Forwarding link for copied objects.	*/
    GC_copy_descriptor descr;	/* Object descriptor			*/
    word data[1];
};

ptr_t GC_nursery_start;		/* Start of nursery area.		*/
				/* Must be NURSERY_BLOCK_SIZE aligned.	*/
ptr_t GC_nursery_end;		/* End of nursery area.			*/
unsigned char * GC_nursery_map;
				/* GC_nursery_map[i] != 0 if an object	*/
				/* starts on the ith 64-bit "word" of	*/
				/* the nursery.  This simple structure	*/
				/* has the advantage that allocation is	*/
				/* cheap.  Lookup is cheap for pointers	*/
				/* to the head of an object, which	*/
				/* should be the usual case.		*/
# define NURSERY_MAP_NOT_START	0	/* Not the start of an object.	*/
# define NURSERY_MAP_START	1	/* Start of an object.		*/
# define NURSERY_MAP_PINNED	2	/* Start of a pinned object.	*/

# ifdef ALIGN_DOUBLE
#   define NURSERY_WORD_SIZE (2 * sizeof(word))
# else
#   define NURSERY_WORD_SIZE sizeof(word)
# endif
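
/* The helper below is not part of the original file: it is a minimal	*/
/* sketch of how a nursery address maps to an index in GC_nursery_map,	*/
/* following the layout described above.  The name is hypothetical.	*/
unsigned GC_nursery_map_index(ptr_t p)
{
    /* Each map entry covers one NURSERY_WORD_SIZE-byte "word" of the	*/
    /* nursery; p is assumed to lie inside the nursery.			*/
    return (unsigned)((p - GC_nursery_start) / NURSERY_WORD_SIZE);
}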

# define NURSERY_BLOCK_SIZE (HBLKSIZE/2)
	/* HBLKSIZE must be a multiple of NURSERY_BLOCK_SIZE.	*/
# define NURSERY_SIZE (1024 * NURSERY_BLOCK_SIZE)

size_t GC_nursery_size = NURSERY_SIZE;
	/* Must be a multiple of NURSERY_BLOCK_SIZE.	*/

size_t GC_nursery_blocks;	/* Number of blocks in the nursery.	*/

unsigned GC_next_nursery_block;	/* Index of the next block we will	*/
				/* attempt to allocate from during	*/
				/* this cycle.  If it is pinned, we	*/
				/* won't actually use it.		*/

unsigned short *GC_pinned;	/* GC_pinned[i] is the number of pinned	*/
				/* objects in the ith nursery block.	*/
				/* GC_pinned[i] != 0 if the ith nursery	*/
				/* block is pinned, and thus not used	*/
				/* for allocation.			*/

GC_copy_alloc_state global_alloc_state = (ptr_t)(-1);	/* Will overflow.	*/

/* Array of known rescuing pointers from the heap to the nursery.	*/
ptr_t ** nursery_rescuers;
/* Pointer to one past the last slot in the rescuer table.		*/
ptr_t ** nursery_rescuers_end;
/* Maximum number of known rescuing pointers.				*/
# define MAX_NURSERY_RESCUERS 32*1024
/* Add a rescuer to the list.	*/
# define ADD_NURSERY_RESCUER(p) \
    if (nursery_rescuers_end >= nursery_rescuers + MAX_NURSERY_RESCUERS) { \
      ABORT("Nursery rescuers overflow"); /* Fix later !!! */ \
    } else { \
      *nursery_rescuers_end++ = p; \
    }
/* Remove rescuer at the given position in the table.	*/
# define REMOVE_RESCUER(p) \
    *p = *--nursery_rescuers_end

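/* The function below is not part of the original file: it is a hedged	*/
/* sketch of how a write barrier might keep the rescuer table current,	*/
/* recording old-heap slots that are updated to point into the nursery.	*/
/* The function name is hypothetical; only the macros above are real.	*/
void GC_note_old_to_young_store(ptr_t *slot, ptr_t val)
{
    /* Register the slot only if the stored value is a nursery pointer	*/
    /* and the slot itself lies outside the nursery.			*/
    if (val >= GC_nursery_start && val < GC_nursery_end
        && ((ptr_t)slot < GC_nursery_start || (ptr_t)slot >= GC_nursery_end)) {
        ADD_NURSERY_RESCUER(slot);
    }
    *slot = val;
}
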
/* Should be called with the allocator lock held.	*/
void GC_nursery_init(void) {
    GC_nursery_start = GET_MEM(GC_nursery_size);
    GC_nursery_end = GC_nursery_start + GC_nursery_size;
    GC_next_nursery_block = 0;
    if (GC_nursery_start < GC_least_plausible_heap_addr) {
        GC_least_plausible_heap_addr = GC_nursery_start;
    }
    if (GC_nursery_end > GC_greatest_plausible_heap_addr) {
        GC_greatest_plausible_heap_addr = GC_nursery_end;
    }
    if ((word)GC_nursery_start & (NURSERY_BLOCK_SIZE-1)) {
        GC_err_printf0("Nursery area is misaligned!!");
        /* This should be impossible, since GET_MEM returns HBLKSIZE-	*/
        /* aligned chunks, and that should be a multiple of		*/
        /* NURSERY_BLOCK_SIZE.						*/
        ABORT("misaligned nursery");
    }
    GC_nursery_map = GET_MEM(GC_nursery_size/NURSERY_WORD_SIZE);
    /* The map is cleared a block at a time when we allocate from the block. */
    /* BZERO(GC_nursery_map, GC_nursery_size/NURSERY_WORD_SIZE); */
    GC_nursery_blocks = GC_nursery_size/NURSERY_BLOCK_SIZE;
    GC_pinned = GC_scratch_alloc(GC_nursery_blocks * sizeof(unsigned short));
    BZERO(GC_pinned, GC_nursery_blocks * sizeof(unsigned short));
    nursery_rescuers = GET_MEM(MAX_NURSERY_RESCUERS * sizeof(ptr_t *));
    nursery_rescuers_end = nursery_rescuers;
    if (0 == GC_nursery_start || 0 == GC_nursery_map || 0 == nursery_rescuers)
        ABORT("Insufficient memory for nursery");
}

#define PIN_OBJ(p) \
    if (p >= GC_nursery_start && p < GC_nursery_end) { GC_pin_obj_checked(p); }

/* Pin the object at p, if it's in the nursery.	*/
void GC_pin_obj(ptr_t p) {
    PIN_OBJ(p);
}

void (*GC_push_proc)(ptr_t) = 0;

/* Pin the object at p, which is known to be in the nursery.	*/
void GC_pin_obj_checked(ptr_t p) {
    unsigned offset = p - GC_nursery_start;
    unsigned word_offset = BYTES_TO_WORDS(offset);
    unsigned blockno = (p - GC_nursery_start)/NURSERY_BLOCK_SIZE;
    while (GC_nursery_map[word_offset] == NURSERY_MAP_NOT_START) {
        --word_offset;
    }
    if (GC_nursery_map[word_offset] != NURSERY_MAP_PINNED) {
        GC_nursery_map[word_offset] = NURSERY_MAP_PINNED;
        ++GC_pinned[blockno];
        ??Push object at GC_nursery_start + WORDS_TO_BYTES(word_offset)
        ??onto mark stack.
    }
}

void GC_scan_region_for_nursery(ptr_t low, ptr_t high) {
#   if CPP_WORDSZ/8 != ALIGNMENT
      --> fix this
#   endif
    word * l = (word *)(((word)low + ALIGNMENT - 1) & ~(word)(ALIGNMENT - 1));
    word * h = (word *)((word)high & ~(word)(ALIGNMENT - 1));
    word * p;
    for (p = l; p < h; ++p) {
        PIN_OBJ((ptr_t)(*p));	/* Pin the referent of each candidate pointer. */
    }
}

/* Invoke GC_scan_region_for_nursery on ranges that are not excluded.	*/
void GC_scan_region_for_nursery_with_exclusions(ptr_t bottom, ptr_t top)
{
    struct exclusion * next;
    ptr_t excl_start;

    while (bottom < top) {
        next = GC_next_exclusion(bottom);
        if (0 == next || (excl_start = next -> e_start) >= top) {
            GC_scan_region_for_nursery(bottom, top);
            return;
        }
        if (excl_start > bottom)
            GC_scan_region_for_nursery(bottom, excl_start);
        bottom = next -> e_end;
    }
}


void GC_scan_stacks_for_nursery(void) {
#   ifdef THREADS
      --> fix this
#   endif
#   ifdef STACK_GROWS_DOWN
      ptr_t stack_low = GC_approx_sp();
      ptr_t stack_high = GC_stackbottom;
#   else
      ptr_t stack_low = GC_stackbottom;
      ptr_t stack_high = GC_approx_sp();
#   endif
    GC_scan_region_for_nursery(stack_low, stack_high);
#   ifdef IA64
      GC_scan_region_for_nursery(BACKING_STORE_BASE,
                                 (ptr_t) GC_save_regs_ret_val);
#   endif
}

void GC_scan_roots_for_nursery(void) {
    /* Scan registers.						*/
    /* Direct GC_push_one to call GC_pin_obj instead of marking	*/
    /* and pushing objects.					*/
    /* This is a bit ugly, but we don't have to touch the	*/
    /* platform-dependent code.					*/

    int i;
    void (*old_push_proc)(ptr_t) = GC_push_proc;
    GC_push_proc = GC_pin_obj;
    GC_push_regs();
    GC_push_proc = old_push_proc;
    GC_scan_stacks_for_nursery();
#   ifdef SCAN_STATICS_FOR_NURSERY
#     if (defined(DYNAMIC_LOADING) || defined(MSWIN32) || defined(PCR)) \
         && !defined(SRC_M3)
        GC_remove_tmp_roots();
        GC_register_dynamic_libraries();
#     endif
      /* Mark everything in static data areas.	*/
      for (i = 0; i < n_root_sets; i++) {
        GC_scan_region_for_nursery_with_exclusions (
            GC_static_roots[i].r_start,
            GC_static_roots[i].r_end);
      }
#   endif
}

/* Caller holds the allocation lock.	*/
void GC_collect_nursery(void) {
    int i;
    ptr_t scan_ptr = 0;
    STOP_WORLD;
    for (i = 0; i < GC_nursery_blocks; ++i) GC_pinned[i] = 0;
    GC_scan_roots_for_nursery();
    /* All objects referenced by roots are now pinned.		*/
    /* Their contents are described by mark stack entries.	*/

    /* Pin blocks corresponding to valid allocation states.	*/
    /* That probably happens automagically if the allocation	*/
    /* states are kept where we can see them.			*/
    /* It will take work if static roots are not scanned.	*/
    /* We want to do this both for correctness and to avoid	*/
    /* promoting very young objects.				*/

    /* Somehow capture dirty bits.  Update the rescuers array	*/
    /* to reflect newly valid and invalid references from	*/
    /* dirty pages.  Other references should remain valid,	*/
    /* since the referents should have been pinned.		*/

    /* Traverse the old object heap.  Pin objects in the	*/
    /* nursery that are ambiguously referenced, copy those	*/
    /* that are unambiguously referenced.			*/

    /* Traverse objects in the mark stack.			*/
    /* If a referenced object is in a pinned block, add its	*/
    /* contents to the mark stack.  If it is forwarded, update	*/
    /* the pointer.  Otherwise reallocate the object in the	*/
    /* old heap, copy its contents, and then enqueue its	*/
    /* contents in the mark stack.				*/
    START_WORLD;
}
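
/* The code below is not part of the original file: it is a hedged sketch of	*/
/* the copy-or-forward step outlined in the comments above.  It assumes, for	*/
/* illustration only, that *ref points at the struct copy_obj header of a	*/
/* nursery object.  GC_old_heap_copy and GC_push_copied_contents are		*/
/* hypothetical helpers that exist nowhere in the collector.			*/
ptr_t GC_old_heap_copy(struct copy_obj *co);		/* Hypothetical.	*/
void GC_push_copied_contents(struct copy_obj *co);	/* Hypothetical.	*/

void GC_process_one_young_ref(ptr_t *ref)
{
    struct copy_obj *co = (struct copy_obj *)(*ref);
    unsigned blockno;

    if ((ptr_t)co < GC_nursery_start || (ptr_t)co >= GC_nursery_end) return;
    blockno = (unsigned)(((ptr_t)co - GC_nursery_start)/NURSERY_BLOCK_SIZE);
    if (GC_pinned[blockno] != 0) {
        /* The object lives in a pinned block: trace it in place.	*/
        GC_push_copied_contents(co);
    } else if (co -> forward != 0) {
        /* Already copied during this collection: just update the reference. */
        *ref = co -> forward;
    } else {
        /* Reallocate in the old heap, install a forwarding link, update the */
        /* reference, and enqueue the copy's contents for scanning.	      */
        ptr_t new_copy = GC_old_heap_copy(co);
        co -> forward = new_copy;
        *ref = new_copy;
        GC_push_copied_contents((struct copy_obj *)new_copy);
    }
}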

/* Initialize an allocation state so that it can be used for	*/
/* allocation.  This implicitly reserves a small section of the	*/
/* nursery for use with this allocator.				*/
/* Also called to replenish an allocator that has been		*/
/* exhausted.							*/
void GC_init_copy_alloc_state(GC_copy_alloc_state *s)
{
    unsigned next_block;
    ptr_t block_addr;
    LOCK();
    next_block = GC_next_nursery_block;
    while(next_block < GC_nursery_blocks && GC_pinned[next_block] != 0) {
        ++next_block;
    }
    if (next_block < GC_nursery_blocks) {
        block_addr = GC_nursery_start + NURSERY_BLOCK_SIZE * next_block;
        GC_next_nursery_block = next_block + 1;
        BZERO(GC_nursery_map + next_block *
                 (NURSERY_BLOCK_SIZE/NURSERY_WORD_SIZE),
              NURSERY_BLOCK_SIZE/NURSERY_WORD_SIZE);
        *s = block_addr;
        UNLOCK();
    } else {
        GC_collect_nursery();
        GC_next_nursery_block = 0;
        UNLOCK();
        /* Retry now that the nursery has been collected.	*/
        GC_init_copy_alloc_state(s);
    }
}

GC_PTR GC_copying_malloc2(GC_copy_descriptor *d, GC_copy_alloc_state *s) {
    size_t sz = GC_SIZE_FROM_DESCRIPTOR(d);
    ptrdiff_t offset;
    ptr_t result = *s;
    ptr_t new = result + sz;
    if (((word)new & COPY_BLOCK_MASK) <= ((word)result & COPY_BLOCK_MASK)) {
        /* The allocation would cross a block boundary: get a fresh block. */
        GC_init_copy_alloc_state(s);
        result = *s;
        new = result + sz;
        GC_ASSERT(((word)new & COPY_BLOCK_MASK) > ((word)result & COPY_BLOCK_MASK));
    }
    ((struct copy_obj *)result) -> descr = d;
    ((struct copy_obj *)result) -> forward = 0;
    offset = (result - GC_nursery_start)/NURSERY_WORD_SIZE;
    GC_nursery_map[offset] = NURSERY_MAP_START;
    		/* Record the object start so that pinning can find it.	*/
    *s = new;	/* Advance the bump pointer; missing in the original stub.	*/
    return((GC_PTR)result);
}
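
/* Usage sketch, not part of the original file: allocating through a	*/
/* private allocation state, e.g. one kept per thread.  Only		*/
/* GC_init_copy_alloc_state and GC_copying_malloc2 above are real; the	*/
/* example function and the descriptor argument are placeholders.	*/
void GC_copying_alloc_example(GC_copy_descriptor *d)
{
    GC_copy_alloc_state my_state;
    GC_PTR obj1, obj2;

    GC_init_copy_alloc_state(&my_state);	/* Reserve a nursery block.	*/
    obj1 = GC_copying_malloc2(d, &my_state);	/* Bump-allocate from it.	*/
    obj2 = GC_copying_malloc2(d, &my_state);
    /* Successive objects come from the same block until it is exhausted,	*/
    /* at which point GC_copying_malloc2 replenishes the state.		*/
}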

GC_PTR GC_copying_malloc(GC_copy_descriptor *d) {
}

#endif /* NURSERY */