| |
| |
| |
|
|
| package maps |
|
|
| import ( |
| "internal/abi" |
| "internal/asan" |
| "internal/msan" |
| "internal/race" |
| "internal/runtime/sys" |
| "unsafe" |
| ) |
|
|
| |
|
|
| |
// fatal aborts the program with the given message (used below to report
// concurrent map access). Body provided elsewhere — presumably by the
// runtime via go:linkname; the directive is not visible in this chunk.
func fatal(s string)
|
|
| |
// rand returns a random uint64. Body provided elsewhere — presumably the
// runtime's fast random source via go:linkname; directive not visible here.
func rand() uint64
|
|
| |
// typedmemmove copies a value of type typ from src to dst with any write
// barriers the GC requires. Body provided by the runtime (linkname directive
// not visible in this chunk).
func typedmemmove(typ *abi.Type, dst, src unsafe.Pointer)
|
|
| |
// typedmemclr zeroes the value of type typ at ptr with any write barriers
// the GC requires. Body provided by the runtime (linkname directive not
// visible in this chunk).
func typedmemclr(typ *abi.Type, ptr unsafe.Pointer)
|
|
| |
// newarray allocates a zeroed array of n values of type typ. Body provided
// by the runtime (linkname directive not visible in this chunk).
func newarray(typ *abi.Type, n int) unsafe.Pointer
|
|
| |
// newobject allocates a zeroed value of type typ; used below to create
// backing storage for indirect keys and elements. Body provided by the
// runtime (linkname directive not visible in this chunk).
func newobject(typ *abi.Type) unsafe.Pointer
|
|
| |
| |
| |
// errNilAssign is the value panicked by runtime_mapassign when the map is
// nil. Its initialization is not visible in this chunk — presumably set up
// elsewhere (e.g. by the runtime); confirm.
var errNilAssign error
|
|
| |
| |
| |
| |
| |
| |
// zeroVal is a shared all-zero buffer. Lookups that miss return a pointer
// into it (&zeroVal[0]) instead of nil, so callers can always dereference
// the result. abi.ZeroValSize presumably bounds the element sizes served
// this way — confirm against package abi.
var zeroVal [abi.ZeroValSize]byte
|
|
| |
| |
| |
| |
| |
| |
| |
// runtime_mapaccess1 implements a map read (v := m[k]): it returns a pointer
// to the element for key, or a pointer into the shared zeroVal buffer when
// the key is absent. It never returns nil.
//
// NOTE(review): the runtime_ prefix suggests this is exposed to package
// runtime via go:linkname; the directive is outside this view — confirm.
func runtime_mapaccess1(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	// Report the map read and the key-object read to the sanitizers.
	// A nil map is not reported. GetCallerPC must run in this frame.
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
		race.ReadObjectPC(typ.Key, key, callerpc, pc)
	}
	if msan.Enabled && m != nil {
		msan.Read(key, typ.Key.Size_)
	}
	if asan.Enabled && m != nil {
		asan.Read(key, typ.Key.Size_)
	}

	if m == nil || m.Used() == 0 {
		// Even a trivial miss must still panic for invalid (unhashable)
		// key types, matching the behavior of a populated map.
		if err := mapKeyError(typ, key); err != nil {
			panic(err)
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	// Reads are not allowed while a write is in progress.
	if m.writing != 0 {
		fatal("concurrent map read and map write")
	}

	hash := typ.Hasher(key, m.seed)

	// Small-map fast path: no directory of tables allocated yet.
	// NOTE(review): this tests <= 0 while runtime_mapaccess2 and
	// runtime_mapassign test == 0 — presumably equivalent (is dirLen ever
	// negative?); confirm and unify.
	if m.dirLen <= 0 {
		_, elem, ok := m.getWithKeySmall(typ, hash, key)
		if !ok {
			return unsafe.Pointer(&zeroVal[0])
		}
		return elem
	}

	// Select the table responsible for this hash.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe the table group by group along the probe sequence.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	h2Hash := h2(hash)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots: control bytes matching h2 of the hash.
		match := g.ctrls().matchH2(h2Hash)

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			slotKeyOrig := slotKey
			if typ.IndirectKey() {
				// Key is stored indirectly; follow the pointer.
				slotKey = *((*unsafe.Pointer)(slotKey))
			}
			if typ.Key.Equal(key, slotKey) {
				// The element lives at a fixed offset from the original
				// (pre-indirection) key slot.
				slotElem := unsafe.Pointer(uintptr(slotKeyOrig) + typ.ElemOff)
				if typ.IndirectElem() {
					slotElem = *((*unsafe.Pointer)(slotElem))
				}
				return slotElem
			}
			match = match.removeFirst()
		}

		// An empty slot in this group means the key cannot appear in any
		// later group of the probe sequence: definite miss.
		match = g.ctrls().matchEmpty()
		if match != 0 {
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}
|
|
| |
| func runtime_mapaccess2(typ *abi.MapType, m *Map, key unsafe.Pointer) (unsafe.Pointer, bool) { |
| if race.Enabled && m != nil { |
| callerpc := sys.GetCallerPC() |
| pc := abi.FuncPCABIInternal(runtime_mapaccess1) |
| race.ReadPC(unsafe.Pointer(m), callerpc, pc) |
| race.ReadObjectPC(typ.Key, key, callerpc, pc) |
| } |
| if msan.Enabled && m != nil { |
| msan.Read(key, typ.Key.Size_) |
| } |
| if asan.Enabled && m != nil { |
| asan.Read(key, typ.Key.Size_) |
| } |
|
|
| if m == nil || m.Used() == 0 { |
| if err := mapKeyError(typ, key); err != nil { |
| panic(err) |
| } |
| return unsafe.Pointer(&zeroVal[0]), false |
| } |
|
|
| if m.writing != 0 { |
| fatal("concurrent map read and map write") |
| } |
|
|
| hash := typ.Hasher(key, m.seed) |
|
|
| if m.dirLen == 0 { |
| _, elem, ok := m.getWithKeySmall(typ, hash, key) |
| if !ok { |
| return unsafe.Pointer(&zeroVal[0]), false |
| } |
| return elem, true |
| } |
|
|
| |
| idx := m.directoryIndex(hash) |
| t := m.directoryAt(idx) |
|
|
| |
| seq := makeProbeSeq(h1(hash), t.groups.lengthMask) |
| h2Hash := h2(hash) |
| for ; ; seq = seq.next() { |
| g := t.groups.group(typ, seq.offset) |
|
|
| match := g.ctrls().matchH2(h2Hash) |
|
|
| for match != 0 { |
| i := match.first() |
|
|
| slotKey := g.key(typ, i) |
| slotKeyOrig := slotKey |
| if typ.IndirectKey() { |
| slotKey = *((*unsafe.Pointer)(slotKey)) |
| } |
| if typ.Key.Equal(key, slotKey) { |
| slotElem := unsafe.Pointer(uintptr(slotKeyOrig) + typ.ElemOff) |
| if typ.IndirectElem() { |
| slotElem = *((*unsafe.Pointer)(slotElem)) |
| } |
| return slotElem, true |
| } |
| match = match.removeFirst() |
| } |
|
|
| match = g.ctrls().matchEmpty() |
| if match != 0 { |
| |
| |
| return unsafe.Pointer(&zeroVal[0]), false |
| } |
| } |
| } |
|
|
| |
// runtime_mapassign implements map assignment (m[k] = v): it returns a
// pointer to the element slot for key, inserting the key first if it is not
// already present. The caller stores the value through the returned pointer.
// Panics with errNilAssign when m is nil.
//
// NOTE(review): the runtime_ prefix suggests this is exposed to package
// runtime via go:linkname; the directive is outside this view — confirm.
func runtime_mapassign(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	// Report the map write and the key-object read to the sanitizers.
	// GetCallerPC must run in this frame.
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
		race.ReadObjectPC(typ.Key, key, callerpc, pc)
	}
	if msan.Enabled {
		msan.Read(key, typ.Key.Size_)
	}
	if asan.Enabled {
		asan.Read(key, typ.Key.Size_)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	hash := typ.Hasher(key, m.seed)

	// Set the write-in-progress flag with XOR so the matching XOR at each
	// exit also detects a concurrent writer that flipped it meanwhile.
	m.writing ^= 1

	if m.dirPtr == nil {
		// First ever insertion: allocate the small-map representation.
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		// Small map (no directory of tables yet).
		if m.used < abi.MapGroupSlots {
			elem := m.putSlotSmall(typ, hash, key)

			// Clear the write flag, checking it was not clobbered.
			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Small map is full: promote to the full table layout, then fall
		// through to the general insert path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select the table for this hash. Recomputed on every retry,
		// since rehash below may reorganize the directory.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// First deleted (tombstone) slot seen along the probe sequence,
		// if any; preferred over a trailing empty slot so tombstones get
		// reused.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		h2Hash := h2(hash)
		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2Hash)

			// Look for an existing entry with this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				slotKeyOrig := slotKey
				if typ.IndirectKey() {
					// Key is stored indirectly; follow the pointer.
					slotKey = *((*unsafe.Pointer)(slotKey))
				}
				if typ.Key.Equal(key, slotKey) {
					// Key already present. Optionally refresh the stored
					// key (for key types where equal keys may still
					// differ), then hand back its element slot.
					if typ.NeedKeyUpdate() {
						typedmemmove(typ.Key, slotKey, key)
					}

					// Element lives at a fixed offset from the original
					// (pre-indirection) key slot.
					slotElem = unsafe.Pointer(uintptr(slotKeyOrig) + typ.ElemOff)
					if typ.IndirectElem() {
						slotElem = *((*unsafe.Pointer)(slotElem))
					}

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No matching key in this group. An empty slot terminates the
			// probe sequence, so the key is not in the map: insert it.
			match = g.ctrls().matchEmpty()
			if match != 0 {
				var i uintptr

				// Prefer reusing an earlier tombstone over consuming a
				// fresh empty slot; the ++ restores the growth budget the
				// deleted slot once consumed (re-spent by the -- below).
				if firstDeletedGroup.data != nil {
					g = firstDeletedGroup
					i = firstDeletedSlot
					t.growthLeft++
				} else {
					i = match.first()
				}

				// Insert only if the table still has growth budget;
				// otherwise rehash and retry the whole probe.
				if t.growthLeft > 0 {
					slotKey := g.key(typ, i)
					slotKeyOrig := slotKey
					if typ.IndirectKey() {
						// Allocate backing storage for an indirect key.
						kmem := newobject(typ.Key)
						*(*unsafe.Pointer)(slotKey) = kmem
						slotKey = kmem
					}
					typedmemmove(typ.Key, slotKey, key)

					slotElem = unsafe.Pointer(uintptr(slotKeyOrig) + typ.ElemOff)
					if typ.IndirectElem() {
						// Allocate backing storage for an indirect elem.
						emem := newobject(typ.Elem)
						*(*unsafe.Pointer)(slotElem) = emem
						slotElem = emem
					}

					// Publish the slot via its control byte, then update
					// the counters.
					g.ctrls().set(i, ctrl(h2Hash))
					t.growthLeft--
					t.used++
					m.used++

					t.checkInvariants(typ, m)
					break outer
				}

				t.rehash(typ, m)
				continue outer
			}

			// Remember the first group containing a deleted slot so a
			// later miss can reuse it. Since matchEmpty above found
			// nothing, any match here is a deleted (tombstone) slot.
			if firstDeletedGroup.data == nil {
				match = g.ctrls().matchEmptyOrDeleted()
				if match != 0 {
					firstDeletedGroup = g
					firstDeletedSlot = match.first()
				}
			}
		}
	}

	// Clear the write flag, checking it was not clobbered concurrently.
	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
|
|