internal/sync: fix typo in panic messages

Change-Id: I3e7a8498514da1d278acd566d526fdf6278f7d41
Reviewed-on: https://go-review.googlesource.com/c/go/+/638916
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Ian Lance Taylor <iant@google.com>
Auto-Submit: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
Koichi Shiraishi authored 2024-12-28 13:56:04 +09:00, committed by Gopher Robot
parent 39f2032c17
commit 1e9835f5b1


@@ -79,7 +79,7 @@ func (ht *HashTrieMap[K, V]) Load(key K) (value V, ok bool) {
 		}
 		i = n.indirect()
 	}
-	panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+	panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 }
 
 // LoadOrStore returns the existing value for the key if present.
@@ -120,7 +120,7 @@ func (ht *HashTrieMap[K, V]) LoadOrStore(key K, value V) (result V, loaded bool)
 			i = n.indirect()
 		}
 		if !haveInsertPoint {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 		}
 		// Grab the lock and double-check what we saw.
@@ -178,7 +178,7 @@ func (ht *HashTrieMap[K, V]) expand(oldEntry, newEntry *entry[K, V], newHash uin
 	top := newIndirect
 	for {
 		if hashShift == 0 {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while inserting")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while inserting")
 		}
 		hashShift -= nChildrenLog2 // hashShift is for the level parent is at. We need to go deeper.
 		oi := (oldHash >> hashShift) & nChildrenMask
@@ -228,7 +228,7 @@ func (ht *HashTrieMap[K, V]) Swap(key K, new V) (previous V, loaded bool) {
 			i = n.indirect()
 		}
 		if !haveInsertPoint {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 		}
 		// Grab the lock and double-check what we saw.
@@ -339,7 +339,7 @@ func (ht *HashTrieMap[K, V]) LoadAndDelete(key K) (value V, loaded bool) {
 	// Check if the node is now empty (and isn't the root), and delete it if able.
 	for i.parent != nil && i.empty() {
 		if hashShift == 8*goarch.PtrSize {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 		}
 		hashShift += nChildrenLog2
@@ -401,7 +401,7 @@ func (ht *HashTrieMap[K, V]) CompareAndDelete(key K, old V) (deleted bool) {
 	// Check if the node is now empty (and isn't the root), and delete it if able.
 	for i.parent != nil && i.empty() {
 		if hashShift == 8*goarch.PtrSize {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 		}
 		hashShift += nChildrenLog2
@@ -454,7 +454,7 @@ func (ht *HashTrieMap[K, V]) find(key K, hash uintptr, valEqual equalFunc, value
 			i = n.indirect()
 		}
 		if !found {
-			panic("internal/concurrent.HashMapTrie: ran out of hash bits while iterating")
+			panic("internal/sync.HashTrieMap: ran out of hash bits while iterating")
 		}
 		// Grab the lock and double-check what we saw.