/****************************************************************************
**
** This file is part of GAP, a system for computational discrete algebra.
**
** Copyright of GAP belongs to its developers, whose names are too numerous
** to list here. Please refer to the COPYRIGHT file for details.
**
** SPDX-License-Identifier: GPL-2.0-or-later
**
** This file contains code that is only required by the Julia garbage collector
**
** The declarations of the functions in this file can be found in gasman.h,
** where the non-Julia versions of these functions live. See also boehm_gc.c
** and gasman.c for two other garbage collector implementations.
**/
#include "julia_gc.h"
#include "common.h"
#include "fibhash.h"
#include "funcs.h"
#include "gap.h"
#include "gapstate.h"
#include "gasman.h"
#include "objects.h"
#include "plist.h"
#include "sysmem.h"
#include "vars.h"
#include "bags.inc"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <julia.h>
#include <julia_gcext.h>
// import jl_get_current_task from julia_internal.h, which unfortunately
// isn't installed as part of a typical Julia installation
JL_DLLEXPORT jl_value_t *jl_get_current_task(void);
/****************************************************************************
**
** Various options controlling special features of the Julia GC code follow
*/
// if SCAN_STACK_FOR_MPTRS_ONLY is defined, stack scanning will only
// look for references to master pointers, but not bags themselves. This
// should be safe, as GASMAN uses the same mechanism. It is also faster
// and avoids certain complicated issues that can lead to crashes, and
// is therefore the default. The option to scan for all pointers remains
// available for the time being but should be considered deprecated.
#define SCAN_STACK_FOR_MPTRS_ONLY
// if REQUIRE_PRECISE_MARKING is defined, we assume that all marking
// functions are precise, i.e., they only invoke MarkBag on valid bags,
// immediate objects or NULL pointers, but not on any other random data
// #define REQUIRE_PRECISE_MARKING
// if COLLECT_MARK_CACHE_STATS is defined, we track some statistics about the
// usage of the MarkCache
// #define COLLECT_MARK_CACHE_STATS
// if MARKING_STRESS_TEST is defined, we stress test the TryMark code
// #define MARKING_STRESS_TEST
// if VALIDATE_MARKING is defined, the program is aborted if we ever
// encounter a reference during marking that does not meet additional
// validation criteria. These tests are comparatively expensive and
// should not be enabled by default.
// #define VALIDATE_MARKING
// Comparing pointers in C without triggering undefined behavior
// can be difficult. As the GC already assumes that the memory
// range goes from 0 to 2^k-1 (region tables), we simply convert
// to uintptr_t and compare those.
static inline int cmp_ptr(void * p, void * q)
{
uintptr_t paddr = (uintptr_t)p;
uintptr_t qaddr = (uintptr_t)q;
if (paddr < qaddr)
return -1;
else if (paddr > qaddr)
return 1;
else
return 0;
}
static inline int lt_ptr(void * a, void * b)
{
return (uintptr_t)a < (uintptr_t)b;
}
#if 0
static inline int gt_ptr(void * a, void * b)
{
return (uintptr_t)a > (uintptr_t)b;
}
static inline void *max_ptr(void *a, void *b)
{
if ((uintptr_t) a > (uintptr_t) b)
return a;
else
return b;
}
static inline void *min_ptr(void *a, void *b)
{
if ((uintptr_t) a < (uintptr_t) b)
return a;
else
return b;
}
#endif
/* align pointer to full word if mis-aligned */
static inline void * align_ptr(void * p)
{
uintptr_t u = (uintptr_t)p;
u &= ~(sizeof(p) - 1);
return (void *)u;
}
#ifndef REQUIRE_PRECISE_MARKING
#define MARK_CACHE_BITS 16
#define MARK_CACHE_SIZE (1 << MARK_CACHE_BITS)
#define MARK_HASH(x) (FibHash((x), MARK_CACHE_BITS))
// The MarkCache exists to speed up the conservative tracing of
// objects. While its performance benefit is minimal with the current
// API functionality, it can significantly reduce overhead if a slower
// conservative mechanism is used. It should be disabled for precise
// object tracing, however. The cache does not affect conservative
// *stack* tracing at all, only conservative tracing of objects.
//
// It functions by remembering valid object references in a (lossy)
// hash table. If we find an object reference in that table, we no
// longer need to verify that it is accurate, which is a potentially
// expensive call to the Julia runtime.
static Bag MarkCache[MARK_CACHE_SIZE];
#ifdef COLLECT_MARK_CACHE_STATS
static UInt MarkCacheHits, MarkCacheAttempts, MarkCacheCollisions;
#endif
#endif
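// For illustration, a lookup in this cache might proceed roughly as
// follows (a hedged sketch, not the exact code used by the marking
// functions in this file):
//
//     UInt hash = MARK_HASH((UInt)bag);
//     if (MarkCache[hash] != bag) {
//         // cache miss: fall back to the expensive Julia runtime check
//         if (jl_gc_internal_obj_base_ptr(bag) != (jl_value_t *)bag)
//             return;    // not a valid object reference
//         MarkCache[hash] = bag;    // remember it (lossy overwrite)
//     }
//     // `bag` is a verified reference and can be marked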
static inline Bag * DATA(BagHeader * bag)
{
return (Bag *)(((char *)bag) + sizeof(BagHeader));
}
static TNumExtraMarkFuncBags ExtraMarkFuncBags;
void SetExtraMarkFuncBags(TNumExtraMarkFuncBags func)
{
ExtraMarkFuncBags = func;
}
/****************************************************************************
**
*F InitFreeFuncBag(<type>,<free-func>)
*/
static TNumFreeFuncBags TabFreeFuncBags[NUM_TYPES];
void InitFreeFuncBag(UInt type, TNumFreeFuncBags finalizer_func)
{
TabFreeFuncBags[type] = finalizer_func;
}
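// A minimal usage sketch (T_MYOBJ and FreeMyObj are hypothetical names,
// for illustration only):
//
//     static void FreeMyObj(Bag bag)
//     {
//         // release any external resources held by the bag contents
//     }
//     ...
//     InitFreeFuncBag(T_MYOBJ, FreeMyObj);
//
// JFinalizer below then invokes the registered callback when the Julia
// GC sweeps the corresponding bag object.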
static void JFinalizer(jl_value_t * obj)
{
BagHeader * hdr = (BagHeader *)obj;
Bag contents = (Bag)(hdr + 1);
UInt tnum = hdr->type;
// if a bag needing a finalizer is retyped to a new tnum which no longer
// needs one, it may happen that JFinalizer is called even though
// TabFreeFuncBags[tnum] is NULL
if (TabFreeFuncBags[tnum])
TabFreeFuncBags[tnum]((Bag)&contents);
}
static jl_module_t * Module;
static jl_datatype_t * datatype_mptr;
static jl_datatype_t * datatype_bag;
static jl_datatype_t * datatype_largebag;
static UInt StackAlignBags;
static Bag * GapStackBottom;
static jl_ptls_t JuliaTLS, SaveTLS;
static size_t max_pool_obj_size;
static UInt YoungRef;
static int FullGC;
#if !defined(SCAN_STACK_FOR_MPTRS_ONLY)
typedef struct {
void * addr;
size_t size;
} MemBlock;
static inline int CmpMemBlock(MemBlock m1, MemBlock m2)
{
char * l1 = (char *)m1.addr;
char * r1 = l1 + m1.size;
char * l2 = (char *)m2.addr;
char * r2 = l2 + m2.size;
if (lt_ptr(r1, l1))
return -1;
if (!lt_ptr(l2, r2))
return 1;
return 0;
}
#define ELEM_TYPE MemBlock
#define COMPARE CmpMemBlock
#include "baltree.h"
static size_t bigval_startoffset;
static MemBlockTree * bigvals;
void alloc_bigval(void * addr, size_t size)
{
MemBlock mem = { addr, size };
MemBlockTreeInsert(bigvals, mem);
}
void free_bigval(void * addr)
{
MemBlock mem = { addr, 0 };
MemBlockTreeRemove(bigvals, mem);
}
#endif
typedef void * Ptr;
#define ELEM_TYPE Ptr
#define COMPARE cmp_ptr
#include "dynarray.h"
#ifndef NR_GLOBAL_BAGS
#define NR_GLOBAL_BAGS 20000L
#endif
static Bag * GlobalAddr[NR_GLOBAL_BAGS];
static const Char * GlobalCookie[NR_GLOBAL_BAGS];
static Int GlobalCount;
/****************************************************************************
**
*F AllocateBagMemory( <type>, <size> )
**
** Allocate memory for a new bag.
**/
static void * AllocateBagMemory(UInt type, UInt size)
{
// HOOK: return `size` bytes of memory for a bag of TNUM `type`.
void * result;
if (size <= max_pool_obj_size) {
result = (void *)jl_gc_alloc_typed(JuliaTLS, size, datatype_bag);
}
else {
result = (void *)jl_gc_alloc_typed(JuliaTLS, size, datatype_largebag);
}
memset(result, 0, size);
if (TabFreeFuncBags[type])
jl_gc_schedule_foreign_sweepfunc(JuliaTLS, (jl_value_t *)result);
return result;
}
/****************************************************************************
**
*F MarkAllSubBagsDefault(<bag>) . . . marking function that marks everything
**
** 'MarkAllSubBagsDefault' is the same as 'MarkAllSubBags' but is used as
** the initial default marking function. This allows us to catch cases where
** 'InitMarkFuncBags' is called twice for the same type: the first time is
** accepted because the marking function is still 'MarkAllSubBagsDefault';
** the second time raises a warning, because a non-default marking function
** is being replaced.
*/
static void MarkAllSubBagsDefault(Bag bag)
{
MarkArrayOfBags(CONST_PTR_BAG(bag), SIZE_BAG(bag) / sizeof(Bag));
}
TNumMarkFuncBags TabMarkFuncBags[NUM_TYPES];
void InitMarkFuncBags(UInt type, TNumMarkFuncBags mark_func)
{
// HOOK: set mark function for type `type`.
GAP_ASSERT(TabMarkFuncBags[type] == MarkAllSubBagsDefault);
TabMarkFuncBags[type] = mark_func;
}
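// A minimal usage sketch (T_MYOBJ and MarkMyObj are hypothetical names,
// for illustration only): a type whose bags store one subbag in slot 0
// followed by raw binary data would register a marking function that
// only traverses that first slot:
//
//     static void MarkMyObj(Bag bag)
//     {
//         MarkBag(CONST_PTR_BAG(bag)[0]);    // only slot 0 is a subbag
//     }
//     ...
//     InitMarkFuncBags(T_MYOBJ, MarkMyObj);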
static inline int JMarkTyped(void * obj, jl_datatype_t * ty)
{
// only traverse objects internally used by GAP
if (!jl_typeis(obj, ty))
return 0;
return jl_gc_mark_queue_obj(JuliaTLS, (jl_value_t *)obj);
}
static inline int JMark(void * obj)
{
#ifdef VALIDATE_MARKING
// Validate that `obj` is still allocated and not on a
// free list already. We verify this by checking that the
// type is a pool object of type `jl_datatype_type`.
jl_value_t * ty = jl_typeof(obj);
if (jl_gc_internal_obj_base_ptr(ty) != ty)
abort();
if (!jl_typeis(ty, jl_datatype_type))
abort();
#endif
return jl_gc_mark_queue_obj(JuliaTLS, (jl_value_t *)obj);
}
void MarkJuliaObjSafe(void * obj)
{
if (!obj)
return;
// Validate that `obj` is still allocated and not on a
// free list already. We verify this by checking that the
// type is a pool object of type `jl_datatype_type`.
jl_value_t * ty = jl_typeof(obj);
if (jl_gc_internal_obj_base_ptr(ty) != ty)
return;
if (!jl_typeis(ty, jl_datatype_type))
return;
if (jl_gc_mark_queue_obj(JuliaTLS, (jl_value_t *)obj))
YoungRef++;
}
void MarkJuliaObj(void * obj)
{
if (!obj)
return;
if (JMark(obj))
YoungRef++;
}
// Overview of conservative stack scanning
//
// A key aspect of conservative marking is that (1) we need to determine
// whether a machine word is a pointer to a live object and (2) if it points
// to the interior of the object, to determine its base address.
//
// For Julia's internal objects, we call back to Julia to find out the
// necessary information. For external objects that we allocate ourselves in
// `alloc_bigval()`, we use balanced binary trees (treaps) to determine that
// information. Each node in such a tree contains an (address, size) pair,
// and we use the usual binary tree search to figure out whether there is a
// node whose address range contains that address; if so, we return the
// `address` part of the pair.
//
// While at the C level we will generally have a reference to the
// master pointer, the presence of optimizing compilers, multiple threads, or
// Julia tasks (= coroutines) means that we cannot necessarily rely on this
// information; also, the `NewBag()` implementation may trigger a GC after
// allocating the bag contents, but before allocating the master pointer.
//
// As a consequence, we play it safe and assume that any word anywhere on
// the stack (including Julia task stacks) that points to a master pointer
// or the contents of a bag (including a location after the start of the
// bag) indicates a valid reference that needs to be marked.
//
// One additional concern is that Julia may opportunistically free a subset
// of unreachable objects. Thus, with conservative stack scanning, it is
// possible for a pointer to resurrect a previously unreachable object,
// from which freed objects are then marked. Hence, when traversing GAP
// master pointer and bag objects, we add additional checks to ensure
// that we only mark live objects.
static void TryMark(void * p)
{
jl_value_t * p2 = jl_gc_internal_obj_base_ptr(p);
if (!p2) {
#if !defined(SCAN_STACK_FOR_MPTRS_ONLY)
// It is possible for p to point past the end of
// the object, so we subtract one byte from the
// address. This is safe, as the object is preceded
// by a larger header.
MemBlock tmp = { (char *)p - 1, 0 };
MemBlock * found = MemBlockTreeFind(bigvals, tmp);
if (found) {
p2 = (jl_value_t *)found->addr;
// It is possible for types to not be valid objects.
// Objects with such types are not normally made visible
// to the mark loop, so we need to avoid marking them
// during conservative stack scanning.
// While jl_gc_internal_obj_base_ptr(p) already eliminates this
// case, it can still happen for bigval_t objects, so
// we run an explicit check that the type is a valid
// object for these.
p2 = (jl_value_t *)((char *)p2 + bigval_startoffset);
jl_taggedvalue_t * hdr = jl_astaggedvalue(p2);
if (hdr->type != jl_gc_internal_obj_base_ptr(hdr->type))
return;
}
#endif
}
else {
// Prepopulate the mark cache with references we know
// are valid and in current use.
#ifndef REQUIRE_PRECISE_MARKING
if (jl_typeis(p2, datatype_mptr))
MarkCache[MARK_HASH((UInt)p2)] = (Bag)p2;
#endif
}
if (p2) {
#ifdef SCAN_STACK_FOR_MPTRS_ONLY
if (jl_typeis(p2, datatype_mptr))
JMark(p2);
#else
void * ty = jl_typeof(p2);
if (ty != datatype_mptr && ty != datatype_bag &&
ty != datatype_largebag && ty != jl_weakref_type)
return;
JMark(p2);
#endif
}
}
static void FindLiveRangeReverse(PtrArray * arr, void * start, void * end)
{
if (lt_ptr(end, start)) {
SWAP(void *, start, end);
}
char * p = (char *)(align_ptr(start));
char * q = (char *)end - sizeof(void *);
while (!lt_ptr(q, p)) {
void * addr = *(void **)q;
if (addr && jl_gc_internal_obj_base_ptr(addr) == addr &&
jl_typeis(addr, datatype_mptr)) {
PtrArrayAdd(arr, addr);
}
q -= StackAlignBags;
}
}
typedef struct {
jl_task_t * task;
PtrArray * stack;
} TaskInfo;
static int CmpTaskInfo(TaskInfo i1, TaskInfo i2)
{
return cmp_ptr(i1.task, i2.task);
}
static void MarkFromList(PtrArray * arr)
{
for (Int i = 0; i < arr->len; i++) {
JMark(arr->items[i]);
}
}
#define ELEM_TYPE TaskInfo
#define COMPARE CmpTaskInfo
#include "baltree.h"
static TaskInfoTree * task_stacks = NULL;
static void SafeScanTaskStack(PtrArray * stack, void * start, void * end)
{
volatile jl_jmp_buf * old_safe_restore =
(volatile jl_jmp_buf *)JuliaTLS->safe_restore;
jl_jmp_buf exc_buf;
if (!jl_setjmp(exc_buf, 0)) {
// The bottom of the stack may be protected with
// guard pages; accessing these results in segmentation
// faults. Julia catches those segmentation faults and
// longjmps to JuliaTLS->safe_restore; we use this
// mechanism to abort stack scanning when a protected
// page is hit. For this to work, we must scan the stack
// from top to bottom, so we see any guard pages last.
JuliaTLS->safe_restore = &exc_buf;
FindLiveRangeReverse(stack, start, end);
}
JuliaTLS->safe_restore = (jl_jmp_buf *)old_safe_restore;
}
static void
ScanTaskStack(int rescan, jl_task_t * task, void * start, void * end)
{
if (!task_stacks) {
task_stacks = TaskInfoTreeMake();
}
TaskInfo tmp = { task, NULL };
TaskInfo * taskinfo = TaskInfoTreeFind(task_stacks, tmp);
PtrArray * stack;
if (taskinfo != NULL) {
stack = taskinfo->stack;
if (rescan)
PtrArraySetLen(stack, 0);
}
else {
tmp.stack = PtrArrayMake(1024);
stack = tmp.stack;
TaskInfoTreeInsert(task_stacks, tmp);
}
if (rescan) {
SafeScanTaskStack(stack, start, end);
// Remove duplicates
if (stack->len > 0) {
PtrArraySort(stack);
Int p = 0;
for (Int i = 1; i < stack->len; i++) {
if (stack->items[i] != stack->items[p]) {
p++;
stack->items[p] = stack->items[i];
}
}
PtrArraySetLen(stack, p + 1);
}
}
MarkFromList(stack);
}
static NOINLINE void TryMarkRange(void * start, void * end)
{
if (lt_ptr(end, start)) {
SWAP(void *, start, end);
}
char * p = (char *)align_ptr(start);
char * q = (char *)end - sizeof(void *) + StackAlignBags;
while (lt_ptr(p, q)) {
void * addr = *(void **)p;
if (addr) {
TryMark(addr);
#ifdef MARKING_STRESS_TEST
for (int j = 0; j < 1000; ++j) {
UInt val = (UInt)addr + rand() - rand();
TryMark((void *)val);
}
#endif
}
p += StackAlignBags;
}
}
BOOL IsGapObj(void * p)
{
return jl_typeis(p, datatype_mptr);
}
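// Write barrier: CHANGED_BAG() must be called after the contents of a bag
// have been modified, so that the generational GC re-scans the bag and
// does not miss references from old objects to young ones.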
void CHANGED_BAG(Bag bag)
{
jl_gc_wb_back(BAG_HEADER(bag));
}
static void GapRootScanner(int full)
{
// Mark our Julia module (this contains references to our custom data
// types, which thus also will not be collected prematurely).
// We call jl_gc_mark_queue_obj() directly here, because we know that
// Module is a valid Julia object at this point, so further checks
// in JMark() can be skipped.
jl_gc_mark_queue_obj(JuliaTLS, (jl_value_t *)Module);
jl_task_t * task = (jl_task_t *)jl_get_current_task();
size_t size;
int tid; // unused
// We figure out the end of the stack from the current task. While
// `stack_bottom` is passed to InitBags(), we cannot use that if
// current_task != root_task.
char * stackend = (char *)jl_task_stack_buffer(task, &size, &tid);
stackend += size;
// The following test overrides the stackend if the following two
// conditions hold:
//
// 1. GAP is not being used as a library, but is the main program
// and in charge of the main() function.
// 2. The stack of the current task is that of the root task of the
// main thread (which has thread id 0).
//
// The reason is that if Julia is being initialized from GAP, it
// cannot always reliably find the top of the stack for that task,
// so we have to fall back to GAP for that.
if (!IsUsingLibGap() && jl_threadid() == 0 &&
JuliaTLS->root_task == task) {
stackend = (char *)GapStackBottom;
}
// allow installing a custom marking function. This is used for
// integrating GAP (possibly linked as a shared library) with other code
// bases which use their own form of garbage collection. For example,
// with Python (for SageMath).
if (ExtraMarkFuncBags)
(*ExtraMarkFuncBags)();
// scan the stack for further object references, and mark them
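// The setjmp() call spills the callee-saved registers into `registers`,
// so scanning the jmp_buf first ensures that object references held
// only in CPU registers are also seen by the conservative scan.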
jmp_buf registers;
setjmp(registers);
TryMarkRange(registers, (char *)registers + sizeof(jmp_buf));
TryMarkRange((char *)registers + sizeof(jmp_buf), stackend);
// mark all global objects
for (Int i = 0; i < GlobalCount; i++) {
Bag p = *GlobalAddr[i];
if (IS_BAG_REF(p)) {
JMark(p);
}
}
}
static void GapTaskScanner(jl_task_t * task, int root_task)
{
size_t size;
int tid;
char * stack = (char *)jl_task_stack_buffer(task, &size, &tid);
// If it is the current task, it has been scanned by GapRootScanner()
// already.
if (task == (jl_task_t *)jl_get_current_task())
return;
int rescan = 1;
if (!FullGC) {
// This is a temp hack to work around a problem with the
// generational GC. Basically, task stacks are treated as roots
// and are therefore being scanned regardless of whether they
// are old or new, which can be expensive in the conservative
// case. In order to avoid that, we're manually checking whether
// the old flag is set for a task.
//
// This works specifically for task stacks as the current task
// is being scanned regardless and a write barrier will flip the
// age bit back to new if tasks are being switched.
jl_taggedvalue_t * tag = jl_astaggedvalue(task);
if (tag->bits.gc & 2)
rescan = 0;
}
if (stack && tid < 0) {
if (task->copy_stack) {
// We know which part of the task stack is actually used,
// so we shorten the range we have to scan.
stack = stack + size - task->copy_stack;
size = task->copy_stack;
}
ScanTaskStack(rescan, task, stack, stack + size);
}
}
static void PreGCHook(int full)
{
// It is possible for the garbage collector to be invoked from a
// thread other than the main thread that is running GAP. So we
// save the TLS pointer temporarily and restore it
// afterwards. In the long run, JuliaTLS needs to simply become
// a thread-local variable.
FullGC = full;
SaveTLS = JuliaTLS;
JuliaTLS = jl_get_ptls_states();
// This is the same code as in VarsBeforeCollectBags() for GASMAN.
// It is necessary because ASS_LVAR() and related functionality
// does not call CHANGED_BAG() for performance reasons. CHANGED_BAG()
// is only called when the current lvars bag is being changed. Thus,
// we have to add a write barrier at the start of the GC, too.
if (STATE(CurrLVars))
CHANGED_BAG(STATE(CurrLVars));
/* information at the beginning of garbage collections */
SyMsgsBags(full, 0, 0);
#ifndef REQUIRE_PRECISE_MARKING
memset(MarkCache, 0, sizeof(MarkCache));
#ifdef COLLECT_MARK_CACHE_STATS
MarkCacheHits = MarkCacheAttempts = MarkCacheCollisions = 0;
#endif
#endif
}
static void PostGCHook(int full)
{
JuliaTLS = SaveTLS;
/* information at the end of garbage collections */
UInt totalAlloc = 0; // FIXME -- is this data even available?
SyMsgsBags(full, 6, totalAlloc);
#ifdef COLLECT_MARK_CACHE_STATS
/* printf("\n>>>Attempts: %ld\nHit rate: %lf\nCollision rate: %lf\n",
(long) MarkCacheAttempts,
(double) MarkCacheHits/(double)MarkCacheAttempts,
(double) MarkCacheCollisions/(double)MarkCacheAttempts
); */
#endif
}
// the Julia marking function for master pointer objects (i.e., this function
// is called by the Julia GC whenever it marks a GAP master pointer object)
static uintptr_t MPtrMarkFunc(jl_ptls_t ptls, jl_value_t * obj)
{
if (!*(void **)obj)
return 0;
void * header = BAG_HEADER((Bag)obj);
// The following check ensures that the master pointer does
// indeed reference a bag that has not yet been freed. See
// the comments on conservative stack scanning for an in-depth
// explanation.
void * ty = jl_typeof(header);
if (ty != datatype_bag && ty != datatype_largebag)
return 0;
if (JMark(header))
return 1;
return 0;
}
// the Julia marking function for bags (i.e., this function is called by the
// Julia GC whenever it marks a GAP bag object)
static uintptr_t BagMarkFunc(jl_ptls_t ptls, jl_value_t * obj)
{
BagHeader * hdr = (BagHeader *)obj;
Bag contents = (Bag)(hdr + 1);
UInt tnum = hdr->type;
YoungRef = 0;
TabMarkFuncBags[tnum]((Bag)&contents);
return YoungRef;
}
//
// This function is called from GAP.jl to set the supertype of our custom
// Julia types to GAP.GapObj
//
void GAP_register_GapObj(jl_datatype_t * gapobj_type)
{
if (!gapobj_type || !jl_is_datatype(gapobj_type)) {
Panic("GAP_register_GapObj: GapObj is not a datatype");
}
// DO THE GREAT MAGIC HACK (yes we are bad citizens :/) and
// change the super type of ForeignGAP.MPtr to GapObj
datatype_mptr->super = gapobj_type;
jl_gc_wb(datatype_mptr, datatype_mptr->super);
}
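// From the Julia side, GAP.jl would invoke this roughly as follows
// (an illustrative sketch; `libgap` stands for whatever library handle
// GAP.jl actually uses):
//
//     ccall((:GAP_register_GapObj, libgap), Cvoid, (Any,), GapObj)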
void InitBags(UInt initial_size, Bag * stack_bottom, UInt stack_align)
{
// HOOK: initialization happens here.
GapStackBottom = stack_bottom;
for (UInt i = 0; i < NUM_TYPES; i++) {
TabMarkFuncBags[i] = MarkAllSubBagsDefault;
}
// These callbacks need to be set before initialization so
// that we can track objects allocated during `jl_init()`.
#if !defined(SCAN_STACK_FOR_MPTRS_ONLY)
bigvals = MemBlockTreeMake();
jl_gc_set_cb_notify_external_alloc(alloc_bigval, 1);
jl_gc_set_cb_notify_external_free(free_bigval, 1);
bigval_startoffset = jl_gc_external_obj_hdr_size();
#endif
max_pool_obj_size = jl_gc_max_internal_obj_size();
jl_gc_enable_conservative_gc_support();
jl_init();
Module = jl_new_module(jl_symbol("ForeignGAP"));
Module->parent = jl_main_module;
// If we were loaded from GAP.jl, then try to retrieve GapObj from it.
// We do this by looking up the GAP.jl module via the global variable
// `__JULIAGAPMODULE` (if present) and extracting GapObj from it.
//
// Newer versions of GAP.jl do not need this mechanism and instead call
// GAP_register_GapObj.
jl_module_t * parent_module;
jl_datatype_t * gapobj_type;
parent_module = (jl_module_t *)jl_get_global(
jl_main_module, jl_symbol("__JULIAGAPMODULE"));
if (parent_module) {
if (!jl_is_module(parent_module)) {
Panic("__JULIAGAPMODULE is set in julia main module, but does "
"not point to a module");
}
// retrieve GapObj abstract julia type
gapobj_type =
(jl_datatype_t *)jl_get_global(parent_module, jl_symbol("GapObj"));
if (!gapobj_type) {
Panic("GapObj type is not bound in GAP module");
}
if (!jl_is_datatype(gapobj_type)) {
Panic("GapObj in the GAP module is not a datatype");
}
}
else {
gapobj_type = jl_any_type;
}
JuliaTLS = jl_get_ptls_states();
// These callbacks potentially require access to the Julia
// TLS and thus need to be installed after initialization.
jl_gc_set_cb_root_scanner(GapRootScanner, 1);
jl_gc_set_cb_task_scanner(GapTaskScanner, 1);
jl_gc_set_cb_pre_gc(PreGCHook, 1);
jl_gc_set_cb_post_gc(PostGCHook, 1);
// jl_gc_enable(0); /// DEBUGGING
jl_set_const(jl_main_module, jl_symbol("ForeignGAP"),
(jl_value_t *)Module);
datatype_mptr = jl_new_foreign_type(
jl_symbol("MPtr"), Module, gapobj_type, MPtrMarkFunc, NULL, 1, 0);
datatype_bag = jl_new_foreign_type(jl_symbol("Bag"), Module, jl_any_type,
BagMarkFunc, JFinalizer, 1, 0);
datatype_largebag =
jl_new_foreign_type(jl_symbol("LargeBag"), Module, jl_any_type,
BagMarkFunc, JFinalizer, 1, 1);
// export datatypes to Julia level
jl_set_const(Module, jl_symbol("MPtr"), (jl_value_t *)datatype_mptr);
jl_set_const(Module, jl_symbol("Bag"), (jl_value_t *)datatype_bag);
jl_set_const(Module, jl_symbol("LargeBag"),
(jl_value_t *)datatype_largebag);
GAP_ASSERT(jl_is_datatype(datatype_mptr));
GAP_ASSERT(jl_is_datatype(datatype_bag));
GAP_ASSERT(jl_is_datatype(datatype_largebag));
StackAlignBags = stack_align;
}
UInt CollectBags(UInt size, UInt full)
{
// HOOK: perform a garbage collection
jl_gc_collect(full);
return 1;
}
void RetypeBagIntern(Bag bag, UInt new_type)
{
BagHeader * header = BAG_HEADER(bag);
UInt old_type = header->type;
// exit early if nothing is to be done
if (old_type == new_type)
return;
#ifdef COUNT_BAGS
/* update the statistics */
{
UInt size;
size = header->size;
InfoBags[old_type].nrLive -= 1;
InfoBags[new_type].nrLive += 1;
InfoBags[old_type].nrAll -= 1;
InfoBags[new_type].nrAll += 1;
InfoBags[old_type].sizeLive -= size;
InfoBags[new_type].sizeLive += size;
InfoBags[old_type].sizeAll -= size;
InfoBags[new_type].sizeAll += size;
}
#endif
if (!TabFreeFuncBags[old_type] && TabFreeFuncBags[new_type]) {
// Retyping a bag can change whether a finalizer needs to be run for
// it or not, depending on whether TabFreeFuncBags[tnum] is NULL or
// not. While JFinalizer checks for this, there is a deeper problem:
// jl_gc_schedule_foreign_sweepfunc is not idempotent, and we must not
// call it more than once for any given bag. But this could happen if
// a bag changed its tnum multiple times between one needing and one
// not needing a finalizer. To avoid this, we only allow changing from
// needing a finalizer to not needing one, but not the other way
// around.
//
// The alternative would be to write code which tracks whether
// jl_gc_schedule_foreign_sweepfunc was already called for an object
// (e.g. by using an object flag). But right now no GAP code needs to
// do this, and changing the type of an object to a completely
// different type is something better to be avoided anyway. So instead
// of supporting a feature nobody uses right now, we error out and
// wait to see if somebody complains.
Panic("cannot change bag type to one requiring a 'free' callback");
}
header->type = new_type;
}
Bag NewBag(UInt type, UInt size)
{
Bag bag; /* identifier of the new bag */
UInt alloc_size;
alloc_size = sizeof(BagHeader) + size;
SizeAllBags += size;
/* If the size of an object is zero (such as an empty permutation),
* and the header size is a multiple of twice the word size of the
* architecture, then the master pointer will actually point past
* the allocated area. Because this would result in the object
* being freed prematurely, we will allocate at least one extra
* byte so that the master pointer actually points to within an
* allocated memory area.
*/
if (size == 0)
alloc_size++;
#if defined(SCAN_STACK_FOR_MPTRS_ONLY)
bag = jl_gc_alloc_typed(JuliaTLS, sizeof(void *), datatype_mptr);
SET_PTR_BAG(bag, 0);
#endif
BagHeader * header = AllocateBagMemory(type, alloc_size);
header->type = type;
header->flags = 0;
header->size = size;
#if !defined(SCAN_STACK_FOR_MPTRS_ONLY)
// allocate the new masterpointer
bag = jl_gc_alloc_typed(JuliaTLS, sizeof(void *), datatype_mptr);
SET_PTR_BAG(bag, DATA(header));
#else
// change the masterpointer to reference the new bag memory
SET_PTR_BAG(bag, DATA(header));
jl_gc_wb_back((void *)bag);
#endif
// return the identifier of the new bag
return bag;
}
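// The resulting two-object layout is:
//
//     master pointer (MPtr)              bag object (Bag / LargeBag)
//     +--------------+                   +-----------+----------------+
//     | PTR_BAG(bag) |-----------------> | BagHeader | bag data ...   |
//     +--------------+                   +-----------+----------------+
//                                                    ^ DATA(header)
//
// i.e. the master pointer references the first data word just past the
// BagHeader, matching DATA() above.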
UInt ResizeBag(Bag bag, UInt new_size)
{
BagHeader * header = BAG_HEADER(bag);
UInt old_size = header->size;
#ifdef COUNT_BAGS
// update the statistics
InfoBags[header->type].sizeLive += new_size - old_size;
InfoBags[header->type].sizeAll += new_size - old_size;
#endif
// if the bag is enlarged
if (new_size > old_size) {
SizeAllBags += new_size;
UInt alloc_size = sizeof(BagHeader) + new_size;
// if size is zero, increment by 1; see NewBag for an explanation
if (new_size == 0)
alloc_size++;
// allocate new bag
header = AllocateBagMemory(header->type, alloc_size);
// copy bag header and data, and update size
memcpy(header, BAG_HEADER(bag), sizeof(BagHeader) + old_size);
// update the master pointer
SET_PTR_BAG(bag, DATA(header));
jl_gc_wb_back((void *)bag);
}
// update the size
header->size = new_size;
return 1;
}
void InitGlobalBag(Bag * addr, const Char * cookie)
{
// HOOK: Register global root.
GAP_ASSERT(GlobalCount < NR_GLOBAL_BAGS);
GlobalAddr[GlobalCount] = addr;
GlobalCookie[GlobalCount] = cookie;
GlobalCount++;
}
void SwapMasterPoint(Bag bag1, Bag bag2)
{
Obj * ptr1 = PTR_BAG(bag1);
Obj * ptr2 = PTR_BAG(bag2);
SET_PTR_BAG(bag1, ptr2);
SET_PTR_BAG(bag2, ptr1);
jl_gc_wb((void *)bag1, BAG_HEADER(bag1));
jl_gc_wb((void *)bag2, BAG_HEADER(bag2));
}
// HOOK: mark functions
inline void MarkBag(Bag bag)
{
if (!IS_BAG_REF(bag))
return;
jl_value_t * p = (jl_value_t *)bag;