Source file
src/runtime/pinner.go
1
2
3
4
5 package runtime
6
7 import (
8 "internal/abi"
9 "internal/runtime/atomic"
10 "unsafe"
11 )
12
13
14
15
// A Pinner is a set of Go objects each pinned to a fixed location in memory.
// The Pin method pins one object, while Unpin unpins all pinned objects of
// the Pinner.
type Pinner struct {
	*pinner
}
19
20
21
22
23
24
25
26
27
28
29
// Pin pins a Go object, preventing it from being moved or freed by the
// garbage collector until the Unpin method has been called.
//
// The argument must be a pointer of any type or an unsafe.Pointer.
// Non-heap pointers are silently ignored by setPinned, so Pin records only
// pointers that were actually pinned.
func (p *Pinner) Pin(pointer any) {
	if p.pinner == nil {
		// Check the pinner cache of the current P first.
		mp := acquirem()
		if pp := mp.p.ptr(); pp != nil {
			p.pinner = pp.pinnerCache
			pp.pinnerCache = nil
		}
		releasem(mp)

		if p.pinner == nil {
			// Didn't get anything from the pinner cache; allocate fresh.
			p.pinner = new(pinner)
			p.refs = p.refStore[:0]

			// This finalizer is set once and never cleared, so a cached
			// pinner is reused together with its finalizer, avoiding a
			// SetFinalizer call on every reuse. The finalizer therefore has
			// to tolerate being run on an empty (fully unpinned) pinner,
			// which is handled by checking len(i.refs).
			SetFinalizer(p.pinner, func(i *pinner) {
				if len(i.refs) != 0 {
					// Unpin the leaked references before panicking so state
					// is consistent even if the panic is recovered.
					i.unpin()
					pinnerLeakPanic()
				}
			})
		}
	}
	ptr := pinnerGetPtr(&pointer)
	if setPinned(ptr, true) {
		p.refs = append(p.refs, ptr)
	}
}
64
65
// Unpin unpins all pinned objects of the Pinner.
func (p *Pinner) Unpin() {
	p.pinner.unpin()

	// Try to return the now-empty pinner to the current P's single-slot
	// cache so a later Pin on this P can reuse it (and its finalizer).
	mp := acquirem()
	if pp := mp.p.ptr(); pp != nil && pp.pinnerCache == nil {
		pp.pinnerCache = p.pinner
		p.pinner = nil
	}
	releasem(mp)
}
80
const (
	// pinnerSize is the target size in bytes of the pinner struct: the refs
	// slice header plus the inline refStore array together fill it.
	pinnerSize = 64
	// pinnerRefStoreSize is the number of pointers that fit inline in a
	// pinner after accounting for the slice header.
	pinnerRefStoreSize = (pinnerSize - unsafe.Sizeof([]unsafe.Pointer{})) / unsafe.Sizeof(unsafe.Pointer(nil))
)
85
// pinner is the backing store of a Pinner. It records every pointer that has
// been pinned so unpin can later unpin them all.
type pinner struct {
	refs []unsafe.Pointer
	// refStore is inline storage that backs refs while the number of pinned
	// pointers is small, avoiding a separate allocation.
	refStore [pinnerRefStoreSize]unsafe.Pointer
}
90
// unpin unpins every pointer recorded in p.refs and resets the pinner to its
// empty, reusable state. It is a no-op on a nil or never-used pinner.
func (p *pinner) unpin() {
	if p == nil || p.refs == nil {
		return
	}
	for i := range p.refs {
		setPinned(p.refs[i], false)
	}
	// The following two lines make all pointers to references in p.refs
	// unreachable, either by zeroing refStore or by dropping refs' backing
	// store (if it had grown beyond refStore).
	p.refStore = [pinnerRefStoreSize]unsafe.Pointer{}
	p.refs = p.refStore[:0]
}
104
105 func pinnerGetPtr(i *any) unsafe.Pointer {
106 e := efaceOf(i)
107 etyp := e._type
108 if etyp == nil {
109 panic(errorString("runtime.Pinner: argument is nil"))
110 }
111 if kind := etyp.Kind_ & abi.KindMask; kind != abi.Pointer && kind != abi.UnsafePointer {
112 panic(errorString("runtime.Pinner: argument is not a pointer: " + toRType(etyp).string()))
113 }
114 if inUserArenaChunk(uintptr(e.data)) {
115
116 panic(errorString("runtime.Pinner: object was allocated into an arena"))
117 }
118 return e.data
119 }
120
121
122
123
124
// isPinned reports whether the Go object at ptr is currently pinned.
func isPinned(ptr unsafe.Pointer) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		// Not a heap object — presumably a linker-allocated global, which
		// never moves, so it is treated as always pinned.
		return true
	}
	pinnerBits := span.getPinnerBits()
	// pinnerBits might get unlinked by a concurrently running sweep, but
	// reading a stale pointer is harmless here since the bits themselves
	// remain valid for the rest of the cycle.
	if pinnerBits == nil {
		return false
	}
	objIndex := span.objIndex(uintptr(ptr))
	pinState := pinnerBits.ofObject(objIndex)
	// Keep ptr alive until after the bit is read so the span can't be freed.
	KeepAlive(ptr)
	return pinState.isPinned()
}
144
145
146
147
148
// setPinned marks or unmarks the object at ptr as pinned. Pinning a non-heap
// pointer is silently ignored (returns false); unpinning one panics, which
// should not happen in normal usage. Returns true if the pin state was
// actually changed.
func setPinned(ptr unsafe.Pointer, pin bool) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		if !pin {
			panic(errorString("tried to unpin non-Go pointer"))
		}
		// Linker-allocated or otherwise non-heap object: nothing to do,
		// silently ignore it.
		return false
	}

	// Ensure the span is swept, because sweeping accesses the specials list
	// without locks. acquirem prevents preemption across the sweep and the
	// locked section below.
	mp := acquirem()
	span.ensureSwept()
	KeepAlive(ptr) // make sure ptr is still alive after the span is swept

	objIndex := span.objIndex(uintptr(ptr))

	// Guard against concurrent setPinned calls on the same span.
	lock(&span.speciallock)

	pinnerBits := span.getPinnerBits()
	if pinnerBits == nil {
		// Lazily allocate the span's pin bits on first pin.
		pinnerBits = span.newPinnerBits()
		span.setPinnerBits(pinnerBits)
	}
	pinState := pinnerBits.ofObject(objIndex)
	if pin {
		if pinState.isPinned() {
			// Multiple pins on the same object: set the multipin bit and
			// track the extra pins in a pin-counter special.
			pinState.setMultiPinned(true)
			systemstack(func() {
				offset := objIndex * span.elemsize
				span.incPinCounter(offset)
			})
		} else {
			// First pin of this object: just set the pin bit.
			pinState.setPinned(true)
		}
	} else {
		// Unpin path.
		if pinState.isPinned() {
			if pinState.isMultiPinned() {
				var exists bool
				systemstack(func() {
					offset := objIndex * span.elemsize
					exists = span.decPinCounter(offset)
				})
				if !exists {
					// Counter dropped to zero: clear the multipin bit; the
					// pin bit still records the one remaining pin.
					pinState.setMultiPinned(false)
				}
			} else {
				// Single pin recorded: clear the pin bit.
				pinState.setPinned(false)
			}
		} else {
			// Unpinning an object that was never pinned is a fatal misuse.
			throw("runtime.Pinner: object already unpinned")
		}
	}
	unlock(&span.speciallock)
	releasem(mp)
	return true
}
217
// pinState is a snapshot of a single object's pin bits: a pointer to the byte
// holding them, the value loaded from that byte, and the mask selecting the
// pin bit (mask<<1 selects the multipin bit).
type pinState struct {
	bytep   *uint8
	byteVal uint8
	mask    uint8
}
223
224
225
226
227 func (v *pinState) isPinned() bool {
228 return (v.byteVal & v.mask) != 0
229 }
230
231 func (v *pinState) isMultiPinned() bool {
232 return (v.byteVal & (v.mask << 1)) != 0
233 }
234
// setPinned sets (val=true) or clears (val=false) the object's pin bit.
func (v *pinState) setPinned(val bool) {
	v.set(val, false)
}
238
// setMultiPinned sets (val=true) or clears (val=false) the object's multipin
// bit.
func (v *pinState) setMultiPinned(val bool) {
	v.set(val, true)
}
242
243
244
245 func (v *pinState) set(val bool, multipin bool) {
246 mask := v.mask
247 if multipin {
248 mask <<= 1
249 }
250 if val {
251 atomic.Or8(v.bytep, mask)
252 } else {
253 atomic.And8(v.bytep, ^mask)
254 }
255 }
256
257
258 type pinnerBits gcBits
259
260
261
262
263
264 func (p *pinnerBits) ofObject(n uintptr) pinState {
265 bytep, mask := (*gcBits)(p).bitp(n * 2)
266 byteVal := atomic.Load8(bytep)
267 return pinState{bytep, byteVal, mask}
268 }
269
270 func (s *mspan) pinnerBitSize() uintptr {
271 return divRoundUp(uintptr(s.nelems)*2, 8)
272 }
273
274
275
276
277 func (s *mspan) newPinnerBits() *pinnerBits {
278 return (*pinnerBits)(newMarkBits(uintptr(s.nelems) * 2))
279 }
280
281
282
283
284 func (s *mspan) getPinnerBits() *pinnerBits {
285 return (*pinnerBits)(atomic.Loadp(unsafe.Pointer(&s.pinnerBits)))
286 }
287
288 func (s *mspan) setPinnerBits(p *pinnerBits) {
289 atomicstorep(unsafe.Pointer(&s.pinnerBits), unsafe.Pointer(p))
290 }
291
292
293
294
// refreshPinnerBits replaces the span's pinnerBits with a fresh copy for the
// next GC cycle. If the bitmap contains no pinned objects at all, the span's
// pinnerBits is set to nil instead.
func (s *mspan) refreshPinnerBits() {
	p := s.getPinnerBits()
	if p == nil {
		return
	}

	hasPins := false
	bytes := alignUp(s.pinnerBitSize(), 8)

	// Scan the bitmap 8 bytes at a time looking for any set bit.
	// NOTE(review): this relies on the bitmap storage being 8-byte aligned
	// and any trailing padding bits being zero — presumably guaranteed by
	// newMarkBits; confirm against its allocator.
	for _, x := range unsafe.Slice((*uint64)(unsafe.Pointer(&p.x)), bytes/8) {
		if x != 0 {
			hasPins = true
			break
		}
	}

	if hasPins {
		// Copy the live pin bits into a freshly allocated bitmap and publish
		// it; the old one is left for the GC's mark-bits recycling.
		newPinnerBits := s.newPinnerBits()
		memmove(unsafe.Pointer(&newPinnerBits.x), unsafe.Pointer(&p.x), bytes)
		s.setPinnerBits(newPinnerBits)
	} else {
		s.setPinnerBits(nil)
	}
}
323
324
325
// incPinCounter records an additional pin of an already-pinned object by
// incrementing (creating if necessary) a _KindSpecialPinCounter special at
// the given offset in the span's specials list.
// Caller must hold span.speciallock (see setPinned).
func (span *mspan) incPinCounter(offset uintptr) {
	var rec *specialPinCounter
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		// No counter yet: allocate one from the heap's fixed allocator and
		// splice it into the sorted specials list at the found point.
		lock(&mheap_.speciallock)
		rec = (*specialPinCounter)(mheap_.specialPinCounterAlloc.alloc())
		unlock(&mheap_.speciallock)

		rec.special.offset = uint16(offset)
		rec.special.kind = _KindSpecialPinCounter
		rec.special.next = *ref
		*ref = (*special)(unsafe.Pointer(rec))
		spanHasSpecials(span)
	} else {
		rec = (*specialPinCounter)(unsafe.Pointer(*ref))
	}
	rec.counter++
}
344
345
346
// decPinCounter decrements the pin counter special at the given offset. If
// the counter reaches zero, the special is unlinked and freed and false is
// returned; otherwise true is returned. Throws if no counter exists.
// Caller must hold span.speciallock (see setPinned).
func (span *mspan) decPinCounter(offset uintptr) bool {
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		throw("runtime.Pinner: decreased non-existing pin counter")
	}
	counter := (*specialPinCounter)(unsafe.Pointer(*ref))
	counter.counter--
	if counter.counter == 0 {
		// Unlink from the specials list before freeing, and clear the
		// span's has-specials flag if this was the last special.
		*ref = counter.special.next
		if span.specials == nil {
			spanHasNoSpecials(span)
		}
		lock(&mheap_.speciallock)
		mheap_.specialPinCounterAlloc.free(unsafe.Pointer(counter))
		unlock(&mheap_.speciallock)
		return false
	}
	return true
}
366
367
368 func pinnerGetPinCounter(addr unsafe.Pointer) *uintptr {
369 _, span, objIndex := findObject(uintptr(addr), 0, 0)
370 offset := objIndex * span.elemsize
371 t, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
372 if !exists {
373 return nil
374 }
375 counter := (*specialPinCounter)(unsafe.Pointer(*t))
376 return &counter.counter
377 }
378
379
380
// pinnerLeakPanic is invoked by the pinner finalizer when a Pinner with
// pinned objects is garbage collected without Unpin having been called.
// Declared as a variable — presumably so tests can substitute it; confirm
// against the package's tests.
var pinnerLeakPanic = func() {
	panic(errorString("runtime.Pinner: found leaking pinned pointer; forgot to call Unpin()?"))
}
384
View as plain text