// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
7 package maps
8
9 import (
10 "internal/abi"
11 "internal/race"
12 "internal/runtime/sys"
13 "unsafe"
14 )
15
16
// runtime_mapaccess1_fast64 is the fast-path lookup for maps with 8-byte
// keys. It returns a pointer to the element for key; on a miss it returns
// a pointer to the shared zero value — never nil — so callers may always
// dereference the result.
//
// NOTE(review): upstream pairs this function with a go:linkname directive
// binding it to runtime.mapaccess1_fast64 — confirm the directive was not
// lost from this copy.
func runtime_mapaccess1_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	// Reads of a nil or empty map yield the zero value.
	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil // unreachable; fatal does not return
	}

	if m.dirLen == 0 {
		// Small map: every entry lives in the single group at dirPtr, so
		// scan full slots linearly instead of hashing.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			// full's low bit tracks whether the slot currently at slotKey
			// is occupied (bits are shifted out as we advance), so the key
			// comparison only counts for occupied slots.
			if key == *(*uint64)(slotKey) && full.lowestSet() {
				// The element is stored immediately after the 8-byte key.
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8)
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select the table for this hash, then probe its groups.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots whose control byte matches the hash's h2.
		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint64)(slotKey) {
				// Element immediately follows the 8-byte key.
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8)
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// An empty slot ends the probe sequence: the key could never
			// have been displaced past this group, so it is absent.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}
84
85
86 func runtime_mapaccess2_fast64(typ *abi.SwissMapType, m *Map, key uint64) (unsafe.Pointer, bool) {
87 if race.Enabled && m != nil {
88 callerpc := sys.GetCallerPC()
89 pc := abi.FuncPCABIInternal(runtime_mapaccess1)
90 race.ReadPC(unsafe.Pointer(m), callerpc, pc)
91 }
92
93 if m == nil || m.Used() == 0 {
94 return unsafe.Pointer(&zeroVal[0]), false
95 }
96
97 if m.writing != 0 {
98 fatal("concurrent map read and map write")
99 return nil, false
100 }
101
102 if m.dirLen == 0 {
103 g := groupReference{
104 data: m.dirPtr,
105 }
106 full := g.ctrls().matchFull()
107 slotKey := g.key(typ, 0)
108 slotSize := typ.SlotSize
109 for full != 0 {
110 if key == *(*uint64)(slotKey) && full.lowestSet() {
111 slotElem := unsafe.Pointer(uintptr(slotKey) + 8)
112 return slotElem, true
113 }
114 slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
115 full = full.shiftOutLowest()
116 }
117 return unsafe.Pointer(&zeroVal[0]), false
118 }
119
120 k := key
121 hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)
122
123
124 idx := m.directoryIndex(hash)
125 t := m.directoryAt(idx)
126
127
128 seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
129 for ; ; seq = seq.next() {
130 g := t.groups.group(typ, seq.offset)
131
132 match := g.ctrls().matchH2(h2(hash))
133
134 for match != 0 {
135 i := match.first()
136
137 slotKey := g.key(typ, i)
138 if key == *(*uint64)(slotKey) {
139 slotElem := unsafe.Pointer(uintptr(slotKey) + 8)
140 return slotElem, true
141 }
142 match = match.removeFirst()
143 }
144
145 match = g.ctrls().matchEmpty()
146 if match != 0 {
147
148
149 return unsafe.Pointer(&zeroVal[0]), false
150 }
151 }
152 }
153
// putSlotSmallFast64 returns a pointer to the element slot for an 8-byte
// key in a small (single-group, directory-less) map, inserting the key if
// it is not already present. The caller is responsible for having set the
// write flag and for ensuring a free slot exists (used <
// abi.SwissMapGroupSlots); if no empty or deleted slot remains this
// fatals, since that implies concurrent writers corrupted the map.
func (m *Map) putSlotSmallFast64(typ *abi.SwissMapType, hash uintptr, key uint64) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	// Candidate slots whose control byte matches the hash's h2.
	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key; if found, hand back
	// its element without consuming a new slot.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint64)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// No existing entry: claim the first empty or deleted slot.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	// Write the key, then publish the slot via its control byte and
	// account for the new entry.
	slotKey := g.key(typ, i)
	*(*uint64)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}
193
194
// runtime_mapassign_fast64 is the fast-path insert/update for maps with
// 8-byte keys. It returns a pointer to the element slot for key,
// inserting the key first if it is not already present; the caller stores
// the value through the returned pointer. Panics on a nil map and fatals
// on detected concurrent writes.
//
// NOTE(review): upstream pairs this function with a go:linkname directive
// binding it to runtime.mapassign_fast64 — confirm the directive was not
// lost from this copy.
func runtime_mapassign_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Toggle the write flag only after hashing: if Hasher panics, no write
	// has actually happened yet.
	m.writing ^= 1

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		// Small map (single group, no directory).
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFast64(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1 // clear the write flag

			return elem
		}

		// The small map is full; grow to a full table and fall through to
		// the table insertion path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	// Retry loop: restarted after a rehash moves entries to new groups.
	for {
		// Select the table for this hash.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// While probing, remember the first deleted (tombstone) slot seen
		// so a new entry can reuse it instead of consuming an empty slot.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint64)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing entry in this group. Is this the end of the
			// probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // group is entirely full; keep probing
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: remember the first one we see, but a deleted
				// slot does not end the probe sequence — keep going.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// Found an empty slot, so the probe sequence ends here and the
			// key is definitely absent: insert it.

			// Prefer reusing an earlier tombstone. The growthLeft++ here
			// cancels the growthLeft-- below, so reusing a deleted slot
			// does not consume growth capacity.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			// If there is room left to grow, insert into slot i.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint64)(slotKey) = key

				slotElem = g.elem(typ, i)

				// Publish the slot, then update the counters.
				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// No growth capacity: rehash this table and retry from the top.
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1 // clear the write flag

	return slotElem
}
323
// putSlotSmallFastPtr is the pointer-key counterpart of
// putSlotSmallFast64: it returns a pointer to the element slot for key in
// a small (single-group, directory-less) map, inserting the key if it is
// not already present. The caller must have set the write flag and
// ensured a free slot exists; if no empty or deleted slot remains this
// fatals, since that implies concurrent writers corrupted the map.
func (m *Map) putSlotSmallFastPtr(typ *abi.SwissMapType, hash uintptr, key unsafe.Pointer) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	// Candidate slots whose control byte matches the hash's h2.
	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key (pointer identity
	// comparison).
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*unsafe.Pointer)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// No existing entry: claim the first empty or deleted slot.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	// Write the key, then publish the slot via its control byte and
	// account for the new entry.
	slotKey := g.key(typ, i)
	*(*unsafe.Pointer)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}
363
364
365
366
// runtime_mapassign_fast64ptr is the insert/update fast path for maps
// whose key is a single pointer occupying 8 bytes (the "fast64ptr"
// variant). It mirrors runtime_mapassign_fast64 but compares and stores
// keys as unsafe.Pointer. Returns a pointer to the element slot for key,
// inserting the key first if absent; panics on a nil map and fatals on
// detected concurrent writes.
//
// NOTE(review): upstream pairs this function with a go:linkname directive
// binding it to runtime.mapassign_fast64ptr — confirm the directive was
// not lost from this copy.
func runtime_mapassign_fast64ptr(typ *abi.SwissMapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Toggle the write flag only after hashing: if Hasher panics, no write
	// has actually happened yet.
	m.writing ^= 1

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		// Small map (single group, no directory).
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1 // clear the write flag

			return elem
		}

		// The small map is full; grow to a full table and fall through to
		// the table insertion path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	// Retry loop: restarted after a rehash moves entries to new groups.
	for {
		// Select the table for this hash.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// While probing, remember the first deleted (tombstone) slot seen
		// so a new entry can reuse it instead of consuming an empty slot.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing entry in this group. Is this the end of the
			// probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // group is entirely full; keep probing
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: remember the first one we see, but a deleted
				// slot does not end the probe sequence — keep going.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// Found an empty slot, so the probe sequence ends here and the
			// key is definitely absent: insert it.

			// Prefer reusing an earlier tombstone. The growthLeft++ here
			// cancels the growthLeft-- below, so reusing a deleted slot
			// does not consume growth capacity.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			// If there is room left to grow, insert into slot i.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				// Publish the slot, then update the counters.
				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// No growth capacity: rehash this table and retry from the top.
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1 // clear the write flag

	return slotElem
}
495
496
// runtime_mapdelete_fast64 is the fast-path delete for maps with 8-byte
// keys. Deleting from a nil or empty map is a no-op; otherwise the
// removal is delegated to the generic Map.Delete.
//
// NOTE(review): upstream pairs this function with a go:linkname directive
// binding it to runtime.mapdelete_fast64 — confirm the directive was not
// lost from this copy.
func runtime_mapdelete_fast64(typ *abi.SwissMapType, m *Map, key uint64) {
	if race.Enabled {
		// NOTE(review): the race PC used here is runtime_mapassign rather
		// than a delete-specific function — presumably deliberate reuse
		// for write attribution; confirm against upstream. Also note the
		// race hook runs before the nil check below, so m may be nil here.
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return
	}

	m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
}
510