Source file
src/runtime/pinner.go
1
2
3
4
5 package runtime
6
7 import (
8 "internal/abi"
9 "internal/runtime/atomic"
10 "unsafe"
11 )
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
// A Pinner is a set of Go objects pinned in memory so they are not moved or
// freed by the garbage collector until Unpin is called. The embedded *pinner
// is allocated lazily by Pin (possibly reused from a per-P cache), so the
// zero Pinner is ready to use.
type Pinner struct {
	*pinner
}
37
38
39
40
41
42
43
44
45
46
47
48
// Pin pins the object that pointer points to, recording it in p so that a
// later Unpin can release it. pointer must hold a pointer or unsafe.Pointer
// (pinnerGetPtr panics otherwise). Pointers outside the Go heap are accepted
// but need no bookkeeping (setPinned returns false for them).
func (p *Pinner) Pin(pointer any) {
	if p.pinner == nil {
		// Lazily obtain the backing pinner: first try to reuse one cached
		// on the current P (stashed there by Unpin) to avoid a fresh
		// allocation and finalizer.
		mp := acquirem()
		if pp := mp.p.ptr(); pp != nil {
			p.pinner = pp.pinnerCache
			pp.pinnerCache = nil
		}
		releasem(mp)

		if p.pinner == nil {
			// Nothing cached: allocate a new pinner and point refs at its
			// inline store so small pin sets allocate nothing further.
			p.pinner = new(pinner)
			p.refs = p.refStore[:0]

			// The finalizer catches a pinner that becomes unreachable while
			// still holding pins, i.e. Unpin was never called. It releases
			// the leaked pins and then panics; pinnerLeakPanic is a variable
			// so the panic behavior can be overridden (e.g. by tests).
			SetFinalizer(p.pinner, func(i *pinner) {
				if len(i.refs) != 0 {
					i.unpin()
					pinnerLeakPanic()
				}
			})
		}
	}
	ptr := pinnerGetPtr(&pointer)
	if setPinned(ptr, true) {
		// Only heap pointers are recorded; they are the only ones that
		// must be unpinned later.
		p.refs = append(p.refs, ptr)
	}
}
83
84
85
// Unpin unpins all pinned objects of the Pinner and offers the now-empty
// backing pinner to the current P's one-element cache for reuse by a future
// Pin call.
func (p *Pinner) Unpin() {
	p.pinner.unpin()

	// Cache the emptied pinner on the current P if its cache slot is free.
	// The cached object keeps its finalizer, which is harmless because its
	// refs list is now empty.
	mp := acquirem()
	if pp := mp.p.ptr(); pp != nil && pp.pinnerCache == nil {
		pp.pinnerCache = p.pinner
		p.pinner = nil
	}
	releasem(mp)
}
100
const (
	// pinnerSize is the target total size of a pinner in bytes.
	pinnerSize = 64
	// pinnerRefStoreSize is the number of unsafe.Pointer slots that fit in
	// the pinnerSize bytes left over after the refs slice header.
	pinnerRefStoreSize = (pinnerSize - unsafe.Sizeof([]unsafe.Pointer{})) / unsafe.Sizeof(unsafe.Pointer(nil))
)
105
// pinner is the backing store of a Pinner: the list of currently pinned
// pointers plus a small inline array that refs initially aliases, so small
// pin sets need no separate allocation.
type pinner struct {
	refs     []unsafe.Pointer
	refStore [pinnerRefStoreSize]unsafe.Pointer
}
110
111 func (p *pinner) unpin() {
112 if p == nil || p.refs == nil {
113 return
114 }
115 for i := range p.refs {
116 setPinned(p.refs[i], false)
117 }
118
119
120
121 p.refStore = [pinnerRefStoreSize]unsafe.Pointer{}
122 p.refs = p.refStore[:0]
123 }
124
125 func pinnerGetPtr(i *any) unsafe.Pointer {
126 e := efaceOf(i)
127 etyp := e._type
128 if etyp == nil {
129 panic(errorString("runtime.Pinner: argument is nil"))
130 }
131 if kind := etyp.Kind(); kind != abi.Pointer && kind != abi.UnsafePointer {
132 panic(errorString("runtime.Pinner: argument is not a pointer: " + toRType(etyp).string()))
133 }
134 if inUserArenaChunk(uintptr(e.data)) {
135
136 panic(errorString("runtime.Pinner: object was allocated into an arena"))
137 }
138 return e.data
139 }
140
141
142
143
144
// isPinned reports whether the object at ptr is pinned. Pointers that are
// not into the Go heap are reported as pinned, since they cannot move.
func isPinned(ptr unsafe.Pointer) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		// Not a heap pointer (e.g. a linker-allocated global): it can
		// never move, so treat it as pinned.
		return true
	}
	pinnerBits := span.getPinnerBits()
	// pinnerBits may be unlinked by a concurrent sweep after this load;
	// reading a stale (but not yet recycled) bitmap is still safe —
	// TODO confirm against the gcBits lifetime rules.
	if pinnerBits == nil {
		return false
	}
	objIndex := span.objIndex(uintptr(ptr))
	pinState := pinnerBits.ofObject(objIndex)
	// Keep ptr (and therefore the span) alive until the bits were read.
	KeepAlive(ptr)
	return pinState.isPinned()
}
164
165
166
167
168
// setPinned marks (pin == true) or unmarks the object at ptr as pinned and
// reports whether ptr was a Go heap pointer. Pinning a non-heap pointer is
// a no-op returning false; unpinning one panics, since callers only unpin
// pointers they previously pinned successfully.
func setPinned(ptr unsafe.Pointer, pin bool) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		if !pin {
			panic(errorString("tried to unpin non-Go pointer"))
		}
		// Not a heap object (e.g. a global); it cannot move, so there is
		// nothing to record.
		return false
	}

	// Disable preemption while manipulating the span's pinner state, and
	// make sure the span has been swept so its bits are current.
	mp := acquirem()
	span.ensureSwept()
	// Keep ptr alive across the sweep so the span cannot be freed.
	KeepAlive(ptr)

	objIndex := span.objIndex(uintptr(ptr))

	// speciallock guards both the pinnerBits and the specials list used
	// for multi-pin counters below.
	lock(&span.speciallock)

	pinnerBits := span.getPinnerBits()
	if pinnerBits == nil {
		// First pin in this span: allocate and publish its bitmap.
		pinnerBits = span.newPinnerBits()
		span.setPinnerBits(pinnerBits)
	}
	pinState := pinnerBits.ofObject(objIndex)
	if pin {
		if pinState.isPinned() {
			// Already pinned: flag the object as multi-pinned and count
			// the additional pin in a specialPinCounter attached at the
			// object's offset.
			pinState.setMultiPinned(true)
			// systemstack because the special allocation below must not
			// run on a user goroutine stack — TODO confirm rationale.
			systemstack(func() {
				offset := objIndex * span.elemsize
				span.incPinCounter(offset)
			})
		} else {
			// First pin of this object: just set the pin bit.
			pinState.setPinned(true)
		}
	} else {
		// Unpin path.
		if pinState.isPinned() {
			if pinState.isMultiPinned() {
				var exists bool
				// Drop one extra pin; decPinCounter reports whether any
				// extra pins remain.
				systemstack(func() {
					offset := objIndex * span.elemsize
					exists = span.decPinCounter(offset)
				})
				if !exists {
					// Counter is gone: exactly one pin remains, so the
					// object is no longer multi-pinned.
					pinState.setMultiPinned(false)
				}
			} else {
				// Last pin: clear the pin bit.
				pinState.setPinned(false)
			}
		} else {
			// Unpinning an object that is not pinned is a fatal misuse.
			throw("runtime.Pinner: object already unpinned")
		}
	}
	unlock(&span.speciallock)
	releasem(mp)
	return true
}
237
// pinState is a snapshot of one object's two pinner bits: bytep points at
// the byte holding them, byteVal is the value sampled when the state was
// created (see pinnerBits.ofObject), and mask selects the pin bit; mask<<1
// selects the multi-pin bit. Reads use the snapshot; writes go through
// atomics on *bytep.
type pinState struct {
	bytep   *uint8
	byteVal uint8
	mask    uint8
}
243
244
245
246
247 func (v *pinState) isPinned() bool {
248 return (v.byteVal & v.mask) != 0
249 }
250
251 func (v *pinState) isMultiPinned() bool {
252 return (v.byteVal & (v.mask << 1)) != 0
253 }
254
// setPinned atomically sets (val == true) or clears the object's pin bit.
func (v *pinState) setPinned(val bool) {
	v.set(val, false)
}
258
// setMultiPinned atomically sets (val == true) or clears the object's
// multi-pin bit.
func (v *pinState) setMultiPinned(val bool) {
	v.set(val, true)
}
262
263
264
265 func (v *pinState) set(val bool, multipin bool) {
266 mask := v.mask
267 if multipin {
268 mask <<= 1
269 }
270 if val {
271 atomic.Or8(v.bytep, mask)
272 } else {
273 atomic.And8(v.bytep, ^mask)
274 }
275 }
276
277
// pinnerBits reuses the gcBits storage but holds two bits per object: a pin
// bit and, directly above it, a multi-pin bit (see pinnerBits.ofObject and
// mspan.newPinnerBits, which both scale indices/sizes by 2).
type pinnerBits gcBits
279
280
281
282
283
284 func (p *pinnerBits) ofObject(n uintptr) pinState {
285 bytep, mask := (*gcBits)(p).bitp(n * 2)
286 byteVal := atomic.Load8(bytep)
287 return pinState{bytep, byteVal, mask}
288 }
289
290 func (s *mspan) pinnerBitSize() uintptr {
291 return divRoundUp(uintptr(s.nelems)*2, 8)
292 }
293
294
295
296
297 func (s *mspan) newPinnerBits() *pinnerBits {
298 return (*pinnerBits)(newMarkBits(uintptr(s.nelems) * 2))
299 }
300
301
302
303
// getPinnerBits atomically loads the span's pinner bitmap; nil means no
// object in the span is currently pinned.
func (s *mspan) getPinnerBits() *pinnerBits {
	return (*pinnerBits)(atomic.Loadp(unsafe.Pointer(&s.pinnerBits)))
}
307
// setPinnerBits atomically publishes p as the span's pinner bitmap so
// concurrent readers (getPinnerBits) see either the old or the new value.
func (s *mspan) setPinnerBits(p *pinnerBits) {
	atomicstorep(unsafe.Pointer(&s.pinnerBits), unsafe.Pointer(p))
}
311
312
313
314
// refreshPinnerBits replaces the span's pinnerBits with a freshly allocated
// copy so the pins survive into the next GC cycle, or clears them to nil if
// no object in the span is pinned anymore.
func (s *mspan) refreshPinnerBits() {
	p := s.getPinnerBits()
	if p == nil {
		return
	}

	hasPins := false
	bytes := alignUp(s.pinnerBitSize(), 8)

	// Scan the bitmap one uint64 at a time looking for any set bit. The
	// size is aligned up to 8 bytes so the slice covers whole words;
	// trailing bits beyond the real bitmap are zero and harmless —
	// presumably guaranteed by the mark-bits allocator; verify.
	for _, x := range unsafe.Slice((*uint64)(unsafe.Pointer(&p.x)), bytes/8) {
		if x != 0 {
			hasPins = true
			break
		}
	}

	if hasPins {
		// Copy the live bits into a new bitmap allocated for the next
		// cycle and publish it.
		newPinnerBits := s.newPinnerBits()
		memmove(unsafe.Pointer(&newPinnerBits.x), unsafe.Pointer(&p.x), bytes)
		s.setPinnerBits(newPinnerBits)
	} else {
		s.setPinnerBits(nil)
	}
}
343
344
345
// incPinCounter records one additional pin of an already-pinned object by
// incrementing (or first creating) the specialPinCounter at the object's
// offset in the span. The caller holds span.speciallock (see setPinned).
func (span *mspan) incPinCounter(offset uintptr) {
	var rec *specialPinCounter
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		// No counter special at this offset yet: allocate one from the
		// heap's fixed allocator...
		lock(&mheap_.speciallock)
		rec = (*specialPinCounter)(mheap_.specialPinCounterAlloc.alloc())
		unlock(&mheap_.speciallock)
		// ...and splice it into the span's specials list at the insertion
		// point that specialFindSplicePoint returned.
		rec.special.offset = offset
		rec.special.kind = _KindSpecialPinCounter
		rec.special.next = *ref
		*ref = (*special)(unsafe.Pointer(rec))
		spanHasSpecials(span)
	} else {
		rec = (*specialPinCounter)(unsafe.Pointer(*ref))
	}
	rec.counter++
}
364
365
366
// decPinCounter decrements the pin counter special at offset. If the
// counter reaches 0 the special is unlinked and freed and false is
// returned; otherwise true is returned. The caller holds span.speciallock
// (see setPinned). Decrementing a non-existing counter is a fatal error.
func (span *mspan) decPinCounter(offset uintptr) bool {
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		throw("runtime.Pinner: decreased non-existing pin counter")
	}
	counter := (*specialPinCounter)(unsafe.Pointer(*ref))
	counter.counter--
	if counter.counter == 0 {
		// Unlink the special; if the span now has no specials at all,
		// clear its has-specials marker.
		*ref = counter.special.next
		if span.specials == nil {
			spanHasNoSpecials(span)
		}
		// Return the special's memory to the heap's fixed allocator.
		lock(&mheap_.speciallock)
		mheap_.specialPinCounterAlloc.free(unsafe.Pointer(counter))
		unlock(&mheap_.speciallock)
		return false
	}
	return true
}
386
387
// pinnerGetPinCounter returns a pointer to the extra-pin counter of the
// object containing addr, or nil if no counter special exists for it.
// NOTE(review): it has no callers in this file — presumably a test hook;
// confirm before removing.
func pinnerGetPinCounter(addr unsafe.Pointer) *uintptr {
	_, span, objIndex := findObject(uintptr(addr), 0, 0)
	offset := objIndex * span.elemsize
	t, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		return nil
	}
	counter := (*specialPinCounter)(unsafe.Pointer(*t))
	return &counter.counter
}
398
399
400
// pinnerLeakPanic panics when a Pinner with live pins becomes unreachable
// without Unpin having been called (invoked from the finalizer installed by
// Pin). It is a variable so the behavior can be overridden, e.g. by tests.
var pinnerLeakPanic = func() {
	panic(errorString("runtime.Pinner: found leaking pinned pointer; forgot to call Unpin()?"))
}
404
View as plain text