
ruby-changes:31271

From: ko1 <ko1@a...>
Date: Fri, 18 Oct 2013 15:33:45 +0900 (JST)
Subject: [ruby-changes:31271] ko1:r43350 (trunk): * gc.c: change terminology of heap.

ko1	2013-10-18 15:33:36 +0900 (Fri, 18 Oct 2013)

  New Revision: 43350

  http://svn.ruby-lang.org/cgi-bin/viewvc.cgi?view=rev&revision=43350

  Log:
    * gc.c: change terminology of heap.
      Rename "slot" to "page"; a "slot" now means the space for a single RVALUE.
      1. The "heap" consists of a set of "heap_page"s (pages).
      2. Each "heap_page" has a "heap_page_body".
      3. A "heap_page_body" contains the RVALUE (a.k.a. "slot") spaces.
      4. "sorted" is an array of "heap_page"s, sorted by the address of
         their heap_page_body (used by "is_pointer_to_heap").
      See https://bugs.ruby-lang.org/attachments/4008/data-heap_structure.png.
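
  A minimal C sketch of the layout described above (illustrative only; the
  demo_* names and the use of C11 aligned_alloc in place of gc.c's
  aligned_malloc are assumptions made here, not part of the commit):
  because each heap_page_body is allocated on a HEAP_ALIGN boundary,
  masking the low bits of any object pointer recovers its page body, whose
  header points back at the owning heap_page (cf. GET_PAGE_BODY,
  GET_PAGE_HEADER and GET_HEAP_PAGE in the diff below).

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    #define DEMO_ALIGN_LOG  14                      /* like HEAP_ALIGN_LOG: 16KB pages */
    #define DEMO_ALIGN      ((size_t)1 << DEMO_ALIGN_LOG)
    #define DEMO_ALIGN_MASK ((uintptr_t)DEMO_ALIGN - 1)

    struct demo_page;

    struct demo_page_header {                       /* cf. struct heap_page_header */
        struct demo_page *page;
    };

    struct demo_page_body {                         /* cf. struct heap_page_body */
        struct demo_page_header header;
        /* RVALUE slots follow in the real layout */
    };

    struct demo_page {                              /* cf. struct heap_page */
        struct demo_page_body *body;
    };

    /* cf. GET_PAGE_BODY / GET_HEAP_PAGE: mask the low bits of an object
     * pointer to find the aligned page body, then follow its header. */
    #define DEMO_GET_PAGE_BODY(x) ((struct demo_page_body *)((uintptr_t)(x) & ~DEMO_ALIGN_MASK))
    #define DEMO_GET_PAGE(x)      (DEMO_GET_PAGE_BODY(x)->header.page)

    int main(void)
    {
        struct demo_page page;
        struct demo_page_body *body;

        /* C11 aligned_alloc stands in for aligned_malloc(HEAP_ALIGN, HEAP_SIZE) */
        body = aligned_alloc(DEMO_ALIGN, DEMO_ALIGN);
        if (body == NULL) return 1;

        body->header.page = &page;   /* the body's header points back at its page */
        page.body = body;

        /* any address inside the aligned body maps back to the same page */
        char *obj = (char *)body + 200;
        printf("owning page recovered: %s\n",
               DEMO_GET_PAGE(obj) == &page ? "yes" : "no");

        free(body);
        return 0;
    }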

  Modified files:
    trunk/ChangeLog
    trunk/gc.c
Index: ChangeLog
===================================================================
--- ChangeLog	(revision 43349)
+++ ChangeLog	(revision 43350)
@@ -1,3 +1,15 @@ https://github.com/ruby/ruby/blob/trunk/ChangeLog#L1
+Fri Oct 18 15:23:34 2013  Koichi Sasada  <ko1@a...>
+
+	* gc.c: change terminology of heap.
+	  Change "slot" to "page". "Slot" is a space of RVALUE.
+	  1. "Heap" consists of a set of "heap_page"s (pages).
+	  2. Each "heap_page" has "heap_page_body".
+	  3. "heap_page_body" has RVALUE (a.k.a. "slot") spaces.
+	  4. "sorted" is a sorted array of "heap_page"s, sorted
+	     by address of heap_page_body (for "is_pointer_to_heap").
+
+	  See https://bugs.ruby-lang.org/attachments/4008/data-heap_structure.png.
+
 Fri Oct 18 09:40:43 2013  Eric Hodel  <drbrain@s...>
 
 	* lib/rubygems:  Update to RubyGems master cee6788.  Changes:
Index: gc.c
===================================================================
--- gc.c	(revision 43349)
+++ gc.c	(revision 43350)
@@ -214,7 +214,7 @@ typedef struct gc_profile_record { https://github.com/ruby/ruby/blob/trunk/gc.c#L214
     double gc_mark_time;
     double gc_sweep_time;
 
-    size_t heap_use_slots;
+    size_t heap_use_pages;
     size_t heap_live_objects;
     size_t heap_free_objects;
 
@@ -287,12 +287,12 @@ enum { https://github.com/ruby/ruby/blob/trunk/gc.c#L287
     BITS_BITLENGTH = ( BITS_SIZE * CHAR_BIT )
 };
 
-struct heap_slot_header {
-    struct heap_slot *slot;
+struct heap_page_header {
+    struct heap_page *page;
 };
 
-struct heap_slot_body {
-    struct heap_slot_header header;
+struct heap_page_body {
+    struct heap_page_header header;
     /* char gap[];      */
     /* RVALUE values[]; */
 };
@@ -329,11 +329,11 @@ typedef struct rb_objspace { https://github.com/ruby/ruby/blob/trunk/gc.c#L329
     } malloc_params;
     struct {
 	size_t increment;
-	struct heap_slot *slots;
-	struct heap_slot *sweep_slots;
-	struct heap_slot *free_slots;
-	struct heap_slot *using_slot;
-	struct heap_slot **sorted;
+	struct heap_page *pages;
+	struct heap_page *sweep_pages;
+	struct heap_page *free_pages;
+	struct heap_page *using_page;
+	struct heap_page **sorted;
 	size_t length;
 	size_t used;
 	size_t limit;
@@ -439,21 +439,21 @@ enum { https://github.com/ruby/ruby/blob/trunk/gc.c#L439
     HEAP_ALIGN_MASK = (~(~0UL << HEAP_ALIGN_LOG)),
     REQUIRED_SIZE_BY_MALLOC = (sizeof(size_t) * 5),
     HEAP_SIZE = (HEAP_ALIGN - REQUIRED_SIZE_BY_MALLOC),
-    HEAP_OBJ_LIMIT = (unsigned int)((HEAP_SIZE - sizeof(struct heap_slot_header))/sizeof(struct RVALUE)),
+    HEAP_OBJ_LIMIT = (unsigned int)((HEAP_SIZE - sizeof(struct heap_page_header))/sizeof(struct RVALUE)),
     HEAP_BITMAP_LIMIT = CEILDIV(CEILDIV(HEAP_SIZE, sizeof(struct RVALUE)), BITS_BITLENGTH),
     HEAP_BITMAP_SIZE = ( BITS_SIZE * HEAP_BITMAP_LIMIT),
     HEAP_BITMAP_PLANES = USE_RGENGC ? 3 : 1 /* RGENGC: mark bits, rememberset bits and oldgen bits */
 };
 
-struct heap_slot {
-    struct heap_slot_body *body;
+struct heap_page {
+    struct heap_page_body *body;
     RVALUE *start;
     size_t limit;
 
     RVALUE *freelist;
-    struct heap_slot *next;
-    struct heap_slot *prev;
-    struct heap_slot *free_next;
+    struct heap_page *next;
+    struct heap_page *prev;
+    struct heap_page *free_next;
 
     bits_t mark_bits[HEAP_BITMAP_LIMIT];
 #if USE_RGENGC
@@ -467,15 +467,15 @@ struct heap_slot { https://github.com/ruby/ruby/blob/trunk/gc.c#L467
 #endif
 };
 
-#define GET_SLOT_BODY(x)             ((struct heap_slot_body *)((bits_t)(x) & ~(HEAP_ALIGN_MASK)))
-#define GET_SLOT_HEADER(x)           (&GET_SLOT_BODY(x)->header)
-#define GET_HEAP_SLOT(x)             (GET_SLOT_HEADER(x)->slot)
-#define GET_HEAP_MARK_BITS(x)        (&GET_HEAP_SLOT(x)->mark_bits[0])
-#define GET_HEAP_REMEMBERSET_BITS(x) (&GET_HEAP_SLOT(x)->rememberset_bits[0])
-#define GET_HEAP_OLDGEN_BITS(x)      (&GET_HEAP_SLOT(x)->oldgen_bits[0])
-#define NUM_IN_SLOT(p)               (((bits_t)(p) & HEAP_ALIGN_MASK)/sizeof(RVALUE))
-#define BITMAP_INDEX(p)              (NUM_IN_SLOT(p) / BITS_BITLENGTH )
-#define BITMAP_OFFSET(p)             (NUM_IN_SLOT(p) & (BITS_BITLENGTH-1))
+#define GET_PAGE_BODY(x)             ((struct heap_page_body *)((bits_t)(x) & ~(HEAP_ALIGN_MASK)))
+#define GET_PAGE_HEADER(x)           (&GET_PAGE_BODY(x)->header)
+#define GET_HEAP_PAGE(x)             (GET_PAGE_HEADER(x)->page)
+#define GET_HEAP_MARK_BITS(x)        (&GET_HEAP_PAGE(x)->mark_bits[0])
+#define GET_HEAP_REMEMBERSET_BITS(x) (&GET_HEAP_PAGE(x)->rememberset_bits[0])
+#define GET_HEAP_OLDGEN_BITS(x)      (&GET_HEAP_PAGE(x)->oldgen_bits[0])
+#define NUM_IN_PAGE(p)               (((bits_t)(p) & HEAP_ALIGN_MASK)/sizeof(RVALUE))
+#define BITMAP_INDEX(p)              (NUM_IN_PAGE(p) / BITS_BITLENGTH )
+#define BITMAP_OFFSET(p)             (NUM_IN_PAGE(p) & (BITS_BITLENGTH-1))
 #define BITMAP_BIT(p)                ((bits_t)1 << BITMAP_OFFSET(p))
 /* Bitmap Operations */
 #define MARKED_IN_BITMAP(bits, p)    ((bits)[BITMAP_INDEX(p)] & BITMAP_BIT(p))
@@ -495,7 +495,7 @@ VALUE *ruby_initial_gc_stress_ptr = &rb_ https://github.com/ruby/ruby/blob/trunk/gc.c#L495
 #define malloc_limit		objspace->malloc_params.limit
 #define malloc_increase 	objspace->malloc_params.increase
 #define malloc_allocated_size 	objspace->malloc_params.allocated_size
-#define heap_slots		objspace->heap.slots
+#define heap_pages		objspace->heap.pages
 #define heap_length		objspace->heap.length
 #define heap_used		objspace->heap.used
 #define heap_limit		objspace->heap.limit
@@ -519,7 +519,7 @@ VALUE *ruby_initial_gc_stress_ptr = &rb_ https://github.com/ruby/ruby/blob/trunk/gc.c#L519
 #define initial_free_min	           initial_params.initial_free_min
 #define initial_growth_factor	           initial_params.initial_growth_factor
 
-#define is_lazy_sweeping(objspace) ((objspace)->heap.sweep_slots != 0)
+#define is_lazy_sweeping(objspace) ((objspace)->heap.sweep_pages != 0)
 
 #if SIZEOF_LONG == SIZEOF_VOIDP
 # define nonspecial_obj_id(obj) (VALUE)((SIGNED_VALUE)(obj)|FIXNUM_FLAG)
@@ -643,15 +643,15 @@ RVALUE_PROMOTE(VALUE obj) https://github.com/ruby/ruby/blob/trunk/gc.c#L643
 static inline int
 is_before_sweep(VALUE obj)
 {
-    struct heap_slot *slot;
+    struct heap_page *page;
     rb_objspace_t *objspace = &rb_objspace;
     if (is_lazy_sweeping(objspace)) {
-	slot = objspace->heap.sweep_slots;
-	while (slot) {
-	    if (slot->body == GET_SLOT_BODY(obj)) {
+	page = objspace->heap.sweep_pages;
+	while (page) {
+	    if (page->body == GET_PAGE_BODY(obj)) {
 		return TRUE;
 	    }
-	    slot = slot->next;
+	    page = page->next;
 	}
     }
     return FALSE;
@@ -685,7 +685,7 @@ rb_objspace_alloc(void) https://github.com/ruby/ruby/blob/trunk/gc.c#L685
 
 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
 static void free_stack_chunks(mark_stack_t *);
-static void free_heap_slot(rb_objspace_t *objspace, struct heap_slot *slot);
+static void free_heap_page(rb_objspace_t *objspace, struct heap_page *page);
 
 void
 rb_objspace_free(rb_objspace_t *objspace)
@@ -706,12 +706,12 @@ rb_objspace_free(rb_objspace_t *objspace https://github.com/ruby/ruby/blob/trunk/gc.c#L706
     if (objspace->heap.sorted) {
 	size_t i;
 	for (i = 0; i < heap_used; ++i) {
-	    free_heap_slot(objspace, objspace->heap.sorted[i]);
+	    free_heap_page(objspace, objspace->heap.sorted[i]);
 	}
 	free(objspace->heap.sorted);
 	heap_used = 0;
 	heap_limit = 0;
-	heap_slots = 0;
+	heap_pages = 0;
     }
     free_stack_chunks(&objspace->mark_stack);
     free(objspace);
@@ -721,17 +721,17 @@ rb_objspace_free(rb_objspace_t *objspace https://github.com/ruby/ruby/blob/trunk/gc.c#L721
 static void
 heap_allocate_sorted_array(rb_objspace_t *objspace, size_t next_heap_length)
 {
-    struct heap_slot **p;
+    struct heap_page **p;
     size_t size;
 
-    size = next_heap_length * sizeof(struct heap_slot *);
+    size = next_heap_length * sizeof(struct heap_page *);
 
     if (heap_used > 0) {
-	p = (struct heap_slot **)realloc(objspace->heap.sorted, size);
+	p = (struct heap_page **)realloc(objspace->heap.sorted, size);
 	if (p) objspace->heap.sorted = p;
     }
     else {
-	p = objspace->heap.sorted = (struct heap_slot **)malloc(size);
+	p = objspace->heap.sorted = (struct heap_page **)malloc(size);
     }
 
     if (p == 0) {
@@ -741,61 +741,61 @@ heap_allocate_sorted_array(rb_objspace_t https://github.com/ruby/ruby/blob/trunk/gc.c#L741
 }
 
 static inline void
-heap_slot_add_freeobj(rb_objspace_t *objspace, struct heap_slot *slot, VALUE obj)
+heap_page_add_freeobj(rb_objspace_t *objspace, struct heap_page *page, VALUE obj)
 {
     RVALUE *p = (RVALUE *)obj;
     p->as.free.flags = 0;
-    p->as.free.next = slot->freelist;
-    slot->freelist = p;
-    rgengc_report(3, objspace, "heap_slot_add_freeobj: %p (%s) is added to freelist\n", p, obj_type_name(obj));
+    p->as.free.next = page->freelist;
+    page->freelist = p;
+    rgengc_report(3, objspace, "heap_page_add_freeobj: %p (%s) is added to freelist\n", p, obj_type_name(obj));
 }
 
 static inline void
-heap_add_freeslot(rb_objspace_t *objspace, struct heap_slot *slot)
+heap_add_freepage(rb_objspace_t *objspace, struct heap_page *page)
 {
-    if (slot->freelist) {
-	slot->free_next = objspace->heap.free_slots;
-	objspace->heap.free_slots = slot;
+    if (page->freelist) {
+	page->free_next = objspace->heap.free_pages;
+	objspace->heap.free_pages = page;
     }
 }
 
 static void
-heap_assign_slot(rb_objspace_t *objspace)
+heap_assign_page(rb_objspace_t *objspace)
 {
     RVALUE *start, *end, *p;
-    struct heap_slot *slot;
-    struct heap_slot_body *slot_body = 0;
+    struct heap_page *page;
+    struct heap_page_body *page_body = 0;
     size_t hi, lo, mid;
     size_t limit = HEAP_OBJ_LIMIT;
 
-    /* assign heap_slot body (contains heap_slot_header and RVALUEs) */
-    slot_body = (struct heap_slot_body *)aligned_malloc(HEAP_ALIGN, HEAP_SIZE);
-    if (slot_body == 0) {
+    /* assign heap_page body (contains heap_page_header and RVALUEs) */
+    page_body = (struct heap_page_body *)aligned_malloc(HEAP_ALIGN, HEAP_SIZE);
+    if (page_body == 0) {
 	during_gc = 0;
 	rb_memerror();
     }
 
-    /* assign heap_slot entry */
-    slot = (struct heap_slot *)malloc(sizeof(struct heap_slot));
-    if (slot == 0) {
-	aligned_free(slot_body);
+    /* assign heap_page entry */
+    page = (struct heap_page *)malloc(sizeof(struct heap_page));
+    if (page == 0) {
+	aligned_free(page_body);
 	during_gc = 0;
 	rb_memerror();
     }
-    MEMZERO((void*)slot, struct heap_slot, 1);
+    MEMZERO((void*)page, struct heap_page, 1);
 
-    slot->body = slot_body;
+    page->body = page_body;
 
-    slot->next = objspace->heap.slots;
-    if (objspace->heap.slots) objspace->heap.slots->prev = slot;
-    objspace->heap.slots = slot;
+    page->next = objspace->heap.pages;
+    if (objspace->heap.pages) objspace->heap.pages->prev = page;
+    objspace->heap.pages = page;
 
-    /* adjust obj_limit (object number available in this slot) */
-    start = (RVALUE*)((VALUE)slot_body + sizeof(struct heap_slot_header));
+    /* adjust obj_limit (object number available in this page) */
+    start = (RVALUE*)((VALUE)page_body + sizeof(struct heap_page_header));
     if ((VALUE)start % sizeof(RVALUE) != 0) {
 	int delta = (int)(sizeof(RVALUE) - ((VALUE)start % sizeof(RVALUE)));
 	start = (RVALUE*)((VALUE)start + delta);
-	limit = (HEAP_SIZE - (size_t)((VALUE)start - (VALUE)slot_body))/sizeof(RVALUE);
+	limit = (HEAP_SIZE - (size_t)((VALUE)start - (VALUE)page_body))/sizeof(RVALUE);
     }
     end = start + limit;
 
@@ -803,28 +803,28 @@ heap_assign_slot(rb_objspace_t *objspace https://github.com/ruby/ruby/blob/trunk/gc.c#L803
     lo = 0;
     hi = heap_used;
     while (lo < hi) {
-	struct heap_slot *mid_slot;
+	struct heap_page *mid_page;
 
 	mid = (lo + hi) / 2;
-	mid_slot = objspace->heap.sorted[mid];
-	if (mid_slot->body < slot_body) {
+	mid_page = objspace->heap.sorted[mid];
+	if (mid_page->body < page_body) {
 	    lo = mid + 1;
 	}
-	else if (mid_slot->body > slot_body) {
+	else if (mid_page->body > page_body) {
 	    hi = mid;
 	}
 	else {
-	    rb_bug("same heap slot is allocated: %p at %"PRIuVALUE, (void *)slot_body, (VALUE)mid);
+	    rb_bug("same heap page is allocated: %p at %"PRIuVALUE, (void *)page_body, (VALUE)mid);
 	}
     }
     if (hi < heap_used) {
-	MEMMOVE(&objspace->heap.sorted[hi+1], &objspace->heap.sorted[hi], struct heap_slot_header*, heap_used - hi);
+	MEMMOVE(&objspace->heap.sorted[hi+1], &objspace->heap.sorted[hi], struct heap_page_header*, heap_used - hi);
     }
 
-    /* setup slot */
-    slot->start = start;
-    slot->limit = limit;
-    slot_body->header.slot = objspace->heap.sorted[hi] = slot;
+    /* setup page */
+    page->start = start;
+    page->limit = limit;
+    page_body->header.page = objspace->heap.sorted[hi] = page;
 
     if (lomem == 0 || lomem > start) lomem = start;
     if (himem < end) himem = end;
@@ -832,15 +832,15 @@ heap_assign_slot(rb_objspace_t *objspace https://github.com/ruby/ruby/blob/trunk/gc.c#L832
     heap_limit += limit;
 
     for (p = start; p != end; p++) {
-	rgengc_report(3, objspace, "assign_heap_slot: %p is added to freelist\n");
-	heap_slot_add_freeobj(objspace, slot, (VALUE)p);
+	rgengc_report(3, objspace, "assign_heap_page: %p is added to freelist\n");
+	heap_page_add_freeobj(objspace, page, (VALUE)p);
     }
 
-    heap_add_freeslot(objspace, slot);
+    heap_add_freepage(objspace, page);
 }
 
 static void
-heap_add_slots(rb_objspace_t *objspace, size_t add)
+heap_add_pages(rb_objspace_t *objspace, size_t add)
 {
     size_t i;
     size_t next_heap_length;
@@ -853,7 +853,7 @@ heap_add_slots(rb_objspace_t *objspace, https://github.com/ruby/ruby/blob/trunk/gc.c#L853
     }
 
     for (i = 0; i < add; i++) {
-	heap_assign_slot(objspace);
+	heap_assign_page(objspace);
     }
     heap_inc = 0;
 }
@@ -861,7 +861,7 @@ heap_add_slots(rb_objspace_t *objspace, https://github.com/ruby/ruby/blob/trunk/gc.c#L861
 static void
 heap_init(rb_objspace_t *objspace)
 {
-    heap_add_slots(objspace, initial_heap_min_slots / HEAP_OBJ_LIMIT);
+    heap_add_pages(objspace, initial_heap_min_slots / HEAP_OBJ_LIMIT);
     init_mark_stack(&objspace->mark_stack);
 
 #ifdef USE_SIGALTSTACK
@@ -904,15 +904,15 @@ heap_increment(rb_objspace_t *objspace) https://github.com/ruby/ruby/blob/trunk/gc.c#L904
     rgengc_report(5, objspace, "heap_increment: heap_inc: %d\n", heap_inc);
 
     if (heap_inc > 0) {
-	heap_assign_slot(objspace);
+	heap_assign_page(objspace);
 	heap_inc--;
 	return TRUE;
     }
     return FALSE;
 }
 
-static struct heap_slot *
-heap_prepare_freeslot(rb_objspace_t *objspace)
+static struct heap_page *
+heap_prepare_freepage(rb_objspace_t *objspace)
 {
     if (!GC_ENABLE_LAZY_SWEEP && objspace->flags.dont_lazy_sweep) {
 	if (heap_increment(objspace) == 0 &&
@@ -922,7 +922,7 @@ heap_prepare_freeslot(rb_objspace_t *obj https://github.com/ruby/ruby/blob/trunk/gc.c#L922
 	goto ok;
     }
 
-    if (!ready_to_gc(objspace)) return objspace->heap.free_slots;
+    if (!ready_to_gc(objspace)) return objspace->heap.free_pages;
 
     during_gc++;
 
@@ -942,21 +942,21 @@ heap_prepare_freeslot(rb_objspace_t *obj https://github.com/ruby/ruby/blob/trunk/gc.c#L942
     }
   ok:
     during_gc = 0;
-    return objspace->heap.free_slots;
+    return objspace->heap.free_pages;
 }
 
-static inline struct heap_slot *
-heap_get_freeslot(rb_objspace_t *objspace)
+static inline struct heap_page *
+heap_get_freepage(rb_objspace_t *objspace)
 {
-    struct heap_slot *slot;
+    struct heap_page *page;
 
-    slot = objspace->heap.free_slots;
-    while (slot == NULL) {
-	slot = heap_prepare_freeslot(objspace);
+    page = objspace->heap.free_pages;
+    while (page == NULL) {
+	page = heap_prepare_freepage(objspace);
     }
-    objspace->heap.free_slots = slot->free_next;
+    objspace->heap.free_pages = page->free_next;
 
-    return slot;
+    return page;
 }
 
 static inline VALUE
@@ -965,10 +965,10 @@ heap_get_freeobj(rb_objspace_t *objspace https://github.com/ruby/ruby/blob/trunk/gc.c#L965
     RVALUE *p = objspace->freelist;
 
     while (UNLIKELY(p == NULL)) {
-	struct heap_slot *slot = heap_get_freeslot(objspace);
-	objspace->heap.using_slot = slot;
-	p = objspace->freelist = slot->freelist;
-	slot->freelist = NULL;
+	struct heap_page *page = heap_get_freepage(objspace);
+	objspace->heap.using_page = page;
+	p = objspace->freelist = page->freelist;
+	page->freelist = NULL;
     }
     objspace->freelist = p->as.free.next;
 
@@ -1118,7 +1118,7 @@ static inline int https://github.com/ruby/ruby/blob/trunk/gc.c#L1118
 is_pointer_to_heap(rb_objspace_t *objspace, void *ptr)
 {
     register RVALUE *p = RANY(ptr);
-    register struct heap_slot *slot;
+    register struct heap_page *page;
     register size_t hi, lo, mid;
 
     if (p < lomem || p > himem) return FALSE;
@@ -1129,9 +1129,9 @@ is_pointer_to_heap(rb_objspace_t *objspa https://github.com/ruby/ruby/blob/trunk/gc.c#L1129
     hi = heap_used;
     while (lo < hi) {
 	mid = (lo + hi) / 2;
-	slot = objspace->heap.sorted[mid];
-	if (slot->start <= p) {
-	    if (p < slot->start + slot->limit) {
+	page = objspace->heap.sorted[mid];
+	if (page->start <= p) {
+	    if (p < page->start + page->limit) {
 		return TRUE;
 	    }
 	    lo = mid + 1;
@@ -1174,42 +1174,42 @@ rb_free_const_table(st_table *tbl) https://github.com/ruby/ruby/blob/trunk/gc.c#L1174
 }
 
 static void
-unlink_heap_slot(rb_objspace_t *objspace, struct heap_slot *slot)
+unlink_heap_page(rb_objspace_t *objspace, struct heap_page *page)
 {
-    if (slot->prev)
-        slot->prev->next = slot->next;
-    if (slot->next)
-        slot->next->prev = slot->prev;
-    if (heap_slots == slot)
-        heap_slots = slot->next;
-    if (objspace->heap.sweep_slots == slot)
-        objspace->heap.sweep_slots = slot->next;
-    slot->prev = NULL;
-    slot->next = NULL;
+    if (page->prev)
+        page->prev->next = page->next;
+    if (page->next)
+        page->next->prev = page->prev;
+    if (heap_pages == page)
+        heap_pages = page->next;
+    if (objspace->heap.sweep_pages == page)
+        objspace->heap.sweep_pages = page->next;
+    page->prev = NULL;
+    page->next = NULL;
 }
 
 static void
-free_heap_slot(rb_objspace_t *objspace, struct heap_slot *slot)
+free_heap_page(rb_objspace_t *objspace, struct heap_page *page)
 {
-    aligned_free(slot->body);
-    free(slot);
+    aligned_free(page->body);
+    free(page);
 }
 
 static void
-free_unused_slots(rb_objspace_t *objspace)
+free_unused_pages(rb_objspace_t *objspace)
 {
     size_t i, j;
 
     for (i = j = 1; j < heap_used; i++) {
-	struct heap_slot *slot = objspace->heap.sorted[i];
+	struct heap_page *page = objspace->heap.sorted[i];
 
-	if (slot->limit == 0) {
-	    free_heap_slot(objspace, slot);
+	if (page->limit == 0) {
+	    free_heap_page(objspace, page);
 	    heap_used--;
 	}
 	else {
 	    if (i != j) {
-		objspace->heap.sorted[j] = slot;
+		objspace->heap.sorted[j] = page;
 	    }
 	    j++;
 	}
@@ -1409,8 +1409,8 @@ static VALUE https://github.com/ruby/ruby/blob/trunk/gc.c#L1409
 objspace_each_objects(VALUE arg)
 {
     size_t i;
-    struct heap_slot_body *last_body = 0;
-    struct heap_slot *slot;
+    struct heap_page_body *last_body = 0;
+    struct heap_page *page;
     RVALUE *pstart, *pend;
     rb_objspace_t *objspace = &rb_objspace;
     struct each_obj_args *args = (struct each_obj_args *)arg;
@@ -1421,11 +1421,11 @@ objspace_each_objects(VALUE arg) https://github.com/ruby/ruby/blob/trunk/gc.c#L1421
 	while (i < heap_used && objspace->heap.sorted[i]->body <= last_body) i++;
 	if (heap_used <= i) break;
 
-	slot = objspace->heap.sorted[i];
-	last_body = slot->body;
+	page = objspace->heap.sorted[i];
+	last_body = page->body;
 
-	pstart = slot->start;
-	pend = pstart + slot->limit;
+	pstart = page->start;
+	pend = pstart + page->limit;
 
 	if ((*args->callback)(pstart, pend, sizeof(RVALUE), args->data)) {
 	    break (... truncated)

--
ML: ruby-changes@q...
Info: http://www.atdot.net/~ko1/quickml/
