// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package maps

import (
	"internal/abi"
	"internal/race"
	"internal/runtime/sys"
	"unsafe"
)

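// runtime_mapaccess1_fast32 is the lookup fast path for maps with 32-bit
// keys. It returns a pointer to the element for key, or a pointer to the
// shared zero value if the key is not present.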
//go:linkname runtime_mapaccess1_fast32 runtime.mapaccess1_fast32
func runtime_mapaccess1_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil
	}

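	// A map with dirLen == 0 is in "small" form: a single group reachable
	// from dirPtr, with no directory. Scan the group's full slots directly,
	// without computing the key's hash.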
	if m.dirLen == 0 {
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

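	// Hash through a stack copy of the key; abi.NoEscape hides the pointer
	// from escape analysis so the copy does not escape to the heap.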
	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
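	// Probe group by group: within each group, compare only the slots whose
	// control byte matches h2(hash); an empty control byte anywhere in the
	// group ends the probe sequence.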
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end of the
			// probe sequence: the key is not in the map.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}

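// runtime_mapaccess2_fast32 is like runtime_mapaccess1_fast32, but it also
// reports whether the key was present.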
//go:linkname runtime_mapaccess2_fast32 runtime.mapaccess2_fast32
func runtime_mapaccess2_fast32(typ *abi.MapType, m *Map, key uint32) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false
	}

	if m.dirLen == 0 {
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end of the
			// probe sequence: the key is not in the map.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}

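// putSlotSmallFast32 returns a pointer to the element slot for key in a map
// in small form (single group, no directory), inserting the key if it is not
// already present. The caller must have checked that there is room for
// another entry.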
func (m *Map) putSlotSmallFast32(typ *abi.MapType, hash uintptr, key uint32) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint32)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// Small maps can't contain deleted slots, so any empty-or-deleted match
	// is an empty slot. matchEmptyOrDeleted is used because it is a bit
	// more efficient than matchEmpty.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	slotKey := g.key(typ, i)
	*(*uint32)(slotKey) = key

	slotElem := g.elem(typ, i)

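	// Publish the slot by recording the key's h2 control byte, then bump the
	// element count.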
	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}

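// runtime_mapassign_fast32 returns a pointer to the element slot for key,
// inserting the key if it is not already present. The caller stores the new
// value through the returned pointer.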
//go:linkname runtime_mapassign_fast32 runtime.mapassign_fast32
func runtime_mapassign_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle the writing flag

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

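	// While there is no directory, the map stays in small form: a single
	// group holding at most abi.MapGroupSlots entries.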
	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			elem := m.putSlotSmallFast32(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry, grow to a full-size map.
		m.growToTable(typ)
	}

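	// Full-size map: locate the table for this hash and probe it. The outer
	// loop restarts after a rehash, which may move the key's group to a
	// different table.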
	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot we
		// find, which we'll use to insert the new entry if necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint32)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the end
			// of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but full slots, keep probing
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember the first one and
				// keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached the end
			// of the probe sequence.

			// If we found a deleted slot along the way, we can replace it
			// without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op
			}

			// If the table has no growth budget left, try to reclaim space
			// by pruning tombstones before resorting to a rehash.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint32)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

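			// No room in this table: rehash (grow or split it) and retry
			// the insert from the top.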
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

// runtime_mapassign_fast32ptr handles maps whose keys are 32-bit pointers
// (it is only used on 32-bit platforms); the key is compared and stored as
// an unsafe.Pointer, but otherwise it follows the same insertion path as
// runtime_mapassign_fast32.
//
//go:linkname runtime_mapassign_fast32ptr runtime.mapassign_fast32ptr
func runtime_mapassign_fast32ptr(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32ptr)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle the writing flag

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry, grow to a full-size map.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot we
		// find, which we'll use to insert the new entry if necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the end
			// of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but full slots, keep probing
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember the first one and
				// keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached the end
			// of the probe sequence.

			// If we found a deleted slot along the way, we can replace it
			// without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

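			// No room in this table: rehash (grow or split it) and retry
			// the insert from the top.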
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

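// runtime_mapdelete_fast32 removes key from the map. Deleting from a nil or
// empty map is a no-op.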
//go:linkname runtime_mapdelete_fast32 runtime.mapdelete_fast32
func runtime_mapdelete_fast32(typ *abi.MapType, m *Map, key uint32) {
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapdelete_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return
	}

	m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
}