|
7 | 7 | # with older compiler releases. It is executed only once at the beginning of the |
8 | 8 | # program and, for the legacy implementation, the result is passed on each call |
9 | 9 | # to `__crystal_once`. |
10 | | -# |
11 | | -# In multithread mode a mutex is used to avoid race conditions between threads. |
12 | | -# |
13 | | -# On Win32, `Crystal::System::FileDescriptor#@@reader_thread` spawns a new |
14 | | -# thread even without the `preview_mt` flag, and the thread can also reference |
15 | | -# Crystal constants, leading to race conditions, so we always enable the mutex. |
16 | 10 |
|
17 | | -{% if compare_versions(Crystal::VERSION, "1.16.0-dev") >= 0 %} |
18 | | - # This implementation uses an enum over the initialization flag pointer for |
19 | | - # each value to find infinite loops and raise an error. |
20 | | - |
21 | | - module Crystal |
22 | | - # :nodoc: |
23 | | - enum OnceState : Int8 |
24 | | - Processing = -1 |
25 | | - Uninitialized = 0 |
26 | | - Initialized = 1 |
| 11 | +require "crystal/pointer_linked_list" |
| 12 | +require "crystal/spin_lock" |
| 13 | + |
| 14 | +module Crystal |
| 15 | + # :nodoc: |
| 16 | + module Once |
    # Tracks one in-flight initialization of a constant or class variable.
    #
    # The struct is allocated on the initializing fiber's stack (see
    # `run_initializer`, which pushes `pointerof(operation)`) and linked into
    # `@@operations` only for the duration of the initializer run.
    struct Operation
      include PointerLinkedList::Node

      # Fiber currently executing the initializer.
      getter fiber : Fiber

      # Pointer to the lazy-init flag this operation protects.
      getter flag : Bool*

      def initialize(@flag : Bool*, @fiber : Fiber)
        # Fibers blocked until this initializer completes.
        @waiting = PointerLinkedList(Fiber::PointerLinkedListNode).new
      end

      # Registers *node* so its fiber is resumed once the initializer is done.
      def add_waiter(node) : Nil
        @waiting.push(node)
      end

      # Re-enqueues every fiber that was waiting on this initialization.
      def resume_all : Nil
        @waiting.each(&.value.enqueue)
      end
    end
28 | 35 |
|
29 | | - {% if flag?(:preview_mt) || flag?(:win32) %} |
30 | | - @@once_mutex = uninitialized Mutex |
31 | | - {% end %} |
    # Protects `@@operations`. Declared `uninitialized` because this code runs
    # before constant initializers do — nothing may be lazily allocated here.
    @@spin = uninitialized SpinLock
    @@operations = uninitialized PointerLinkedList(Operation)

    # Explicit bootstrap, called once at program start before any `once` call
    # can happen (replaces the `uninitialized` placeholders above).
    def self.init : Nil
      @@spin = SpinLock.new
      @@operations = PointerLinkedList(Operation).new
    end
39 | 43 |
|
40 | | - # :nodoc: |
41 | | - # Using @[NoInline] so LLVM optimizes for the hot path (var already |
42 | | - # initialized). |
43 | | - @[NoInline] |
44 | | - def self.once(flag : OnceState*, initializer : Void*) : Nil |
45 | | - {% if flag?(:preview_mt) || flag?(:win32) %} |
46 | | - @@once_mutex.synchronize { once_exec(flag, initializer) } |
47 | | - {% else %} |
48 | | - once_exec(flag, initializer) |
49 | | - {% end %} |
    # Runs the initializer (the block) for *flag* exactly once, cooperating
    # with other fibers that may race on the same flag.
    #
    # NOTE: `@@spin` is acquired here, but each branch below is responsible
    # for releasing it (directly or inside the called helper).
    protected def self.exec(flag : Bool*, &)
      @@spin.lock

      if flag.value
        # Already initialized: another fiber completed in the meantime.
        @@spin.unlock
      elsif operation = processing?(flag)
        # Another fiber is running this initializer: detect reentrancy from
        # the current fiber (which would wait on itself), otherwise wait.
        check_reentrancy(operation)
        wait_initializer(operation)
      else
        # First arrival: run the initializer ourselves.
        run_initializer(flag) { yield }
      end

      # safety check, and allows to safely call `Intrinsics.unreachable` in
      # `__crystal_once`
      return if flag.value

      System.print_error "BUG: failed to initialize class variable or constant\n"
      LibC._exit(1)
    end
| 63 | + |
| 64 | + private def self.processing?(flag) |
| 65 | + @@operations.each do |operation| |
| 66 | + return operation if operation.value.flag == flag |
56 | 67 | end |
57 | 68 | end |
58 | 69 |
|
59 | | - private def self.once_exec(flag : OnceState*, initializer : Void*) : Nil |
60 | | - case flag.value |
61 | | - in .initialized? |
62 | | - return |
63 | | - in .uninitialized? |
64 | | - flag.value = :processing |
65 | | - Proc(Nil).new(initializer, Pointer(Void).null).call |
66 | | - flag.value = :initialized |
67 | | - in .processing? |
    # Raises when the current fiber is the one already running this very
    # initializer (e.g. a constant whose initializer references itself), which
    # would otherwise wait on itself forever. Must be called with `@@spin`
    # held; releases it before raising.
    private def self.check_reentrancy(operation)
      if operation.value.fiber == Fiber.current
        @@spin.unlock
        raise "Recursion while initializing class variables and/or constants"
      end
    end
| 76 | + |
    # Suspends the current fiber until the in-flight initializer completes.
    # Must be called with `@@spin` held; releases it before suspending.
    private def self.wait_initializer(operation)
      # The wait node lives on this fiber's stack; that is safe because the
      # fiber (and thus its stack) stays suspended until `resume_all`
      # re-enqueues it.
      waiting = Fiber::PointerLinkedListNode.new(Fiber.current)
      operation.value.add_waiter(pointerof(waiting))
      @@spin.unlock
      Fiber.suspend
    end
| 83 | + |
    # Registers an `Operation` for *flag*, runs the initializer block, then
    # marks the flag set and wakes every waiting fiber. Must be called with
    # `@@spin` held; the lock is dropped while the block runs so unrelated
    # initializers can proceed concurrently.
    private def self.run_initializer(flag, &)
      # Stack-allocated; safe to link by pointer since it is removed from
      # `@@operations` below, before this method returns.
      operation = Operation.new(flag, Fiber.current)
      @@operations.push pointerof(operation)
      @@spin.unlock

      yield

      @@spin.lock
      flag.value = true
      @@operations.delete pointerof(operation)
      @@spin.unlock

      # Resume waiters outside the lock: no new waiter can register anymore
      # because the operation was unlinked and the flag is already set.
      operation.resume_all
    end
71 | 98 | end |
72 | 99 |
|
  # :nodoc:
  #
  # Entry point used by `__crystal_once`: *initializer* is a raw function
  # pointer emitted by the compiler for the constant/class variable.
  #
  # Never inlined to avoid bloating the call site with the slow-path that should
  # usually not be taken.
  @[NoInline]
  def self.once(flag : Bool*, initializer : Void*)
    # Wrap the raw function pointer (no closure data) in a Proc and pass it
    # as the block to `Once.exec`.
    Once.exec(flag, &Proc(Nil).new(initializer, Pointer(Void).null))
  end
83 | 108 |
|
84 | | - # tell LLVM that it can optimize away repeated `__crystal_once` calls for |
85 | | - # this global (e.g. repeated access to constant in a single funtion); |
86 | | - # this is truly unreachable otherwise `Crystal.once` would have panicked |
87 | | - Intrinsics.unreachable unless flag.value.initialized? |
  # :nodoc:
  #
  # Block-taking variant of `.once` for use from Crystal code.
  #
  # NOTE: should also never be inlined, but that would capture the block, which
  # would be a breaking change when we use this method to protect class getter
  # and class property macros with lazy initialization (the block may return or
  # break).
  #
  # TODO: consider a compile time flag to enable/disable the capture? returning
  # from the block is unexpected behavior: the returned value won't be saved in
  # the class variable.
  def self.once(flag : Bool*, &)
    # Fast path: skip the slow path entirely when already initialized.
    Once.exec(flag) { yield } unless flag.value
  end
89 | | -{% else %} |
90 | | - # This implementation uses a global array to store the initialization flag |
91 | | - # pointers for each value to find infinite loops and raise an error. |
92 | | - |
93 | | - module Crystal |
94 | | - # :nodoc: |
95 | | - class OnceState |
96 | | - @rec = [] of Bool* |
97 | | - |
98 | | - @[NoInline] |
99 | | - def once(flag : Bool*, initializer : Void*) |
100 | | - unless flag.value |
101 | | - if @rec.includes?(flag) |
102 | | - raise "Recursion while initializing class variables and/or constants" |
103 | | - end |
104 | | - @rec << flag |
105 | | - |
106 | | - Proc(Nil).new(initializer, Pointer(Void).null).call |
107 | | - flag.value = true |
108 | | - |
109 | | - @rec.pop |
110 | | - end |
111 | | - end |
| 122 | +end |
112 | 123 |
|
113 | | - {% if flag?(:preview_mt) || flag?(:win32) %} |
114 | | - @mutex = Mutex.new(:reentrant) |
115 | | - |
116 | | - @[NoInline] |
117 | | - def once(flag : Bool*, initializer : Void*) |
118 | | - unless flag.value |
119 | | - @mutex.synchronize do |
120 | | - previous_def |
121 | | - end |
122 | | - end |
123 | | - end |
124 | | - {% end %} |
125 | | - end |
| 124 | +{% if compare_versions(Crystal::VERSION, "1.16.0-dev") >= 0 %} |
  # :nodoc:
  #
  # Compiler-emitted entry point guarding every lazy constant/class variable
  # access (Crystal >= 1.16 ABI: no state argument).
  #
  # We always inline this accessor to optimize for the fast-path (already
  # initialized).
  @[AlwaysInline]
  fun __crystal_once(flag : Bool*, initializer : Void*)
    return if flag.value
    Crystal.once(flag, initializer)

    # tells LLVM to assume that the flag is true; this avoids repeated access
    # to the same constant or class variable to check the flag and try to run
    # the initializer (only the first access will)
    Intrinsics.unreachable unless flag.value
  end
131 | | - |
| 139 | +{% else %} |
  # :nodoc:
  #
  # Unused. Kept for backward compatibility with older compilers.
  fun __crystal_once_init : Void*
    Pointer(Void).null
  end

  # :nodoc:
  #
  # Legacy ABI (pre-1.16 compilers): receives the *state* returned by
  # `__crystal_once_init`, which is now ignored.
  @[AlwaysInline]
  fun __crystal_once(state : Void*, flag : Bool*, initializer : Void*)
    return if flag.value
    Crystal.once(flag, initializer)
    # tells LLVM to assume the flag is set after this call, so repeated
    # accesses to the same value are optimized away
    Intrinsics.unreachable unless flag.value
  end
144 | 154 | {% end %} |
0 commit comments