mirror of
https://github.com/tailscale/tailscale.git
synced 2024-11-29 04:55:31 +00:00
util/deephash: expand fast-path capabilities (#5404)
Add support for maps and interfaces to the fast path. Add cycle-detection to the pointer handling logic. This logic is mostly copied from the slow path. A future commit will delete the slow path once the fast path never falls back to the slow path. Performance: name old time/op new time/op delta Hash-24 18.5µs ± 1% 14.9µs ± 2% -19.52% (p=0.000 n=10+10) HashPacketFilter-24 2.54µs ± 1% 2.60µs ± 1% +2.19% (p=0.000 n=10+10) HashMapAcyclic-24 31.6µs ± 1% 30.5µs ± 1% -3.42% (p=0.000 n=9+8) TailcfgNode-24 1.44µs ± 2% 1.43µs ± 1% ~ (p=0.171 n=10+10) HashArray-24 324ns ± 1% 324ns ± 2% ~ (p=0.425 n=9+9) The additional cycle detection logic doesn't incur much of a slowdown since it only activates if a type is recursive, which does not apply for any of the types that we care about. There is a notable performance boost since we switch from the fast path to the slow path less often. Most notably, a struct with a field that could not be handled by the fast path would previously cause the entire struct to go through the slow path. Signed-off-by: Joe Tsai <joetsai@digital-static.net>
This commit is contained in:
parent
78b90c3685
commit
3fc8683585
@ -25,7 +25,6 @@
|
||||
"encoding/binary"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"log"
|
||||
"math"
|
||||
"net/netip"
|
||||
"reflect"
|
||||
@ -246,7 +245,7 @@ func (ti *typeInfo) hasher() typeHasherFunc {
|
||||
}
|
||||
|
||||
func (ti *typeInfo) buildHashFuncOnce() {
|
||||
ti.hashFuncLazy = genTypeHasher(ti.rtype)
|
||||
ti.hashFuncLazy = genTypeHasher(ti)
|
||||
}
|
||||
|
||||
func (h *hasher) hashBoolv(v addressableValue) bool {
|
||||
@ -380,13 +379,8 @@ func genHashPtrToMemoryRange(eleType reflect.Type) typeHasherFunc {
|
||||
}
|
||||
}
|
||||
|
||||
const debug = false
|
||||
|
||||
func genTypeHasher(t reflect.Type) typeHasherFunc {
|
||||
if debug {
|
||||
log.Printf("generating func for %v", t)
|
||||
}
|
||||
|
||||
func genTypeHasher(ti *typeInfo) typeHasherFunc {
|
||||
t := ti.rtype
|
||||
switch t.Kind() {
|
||||
case reflect.Bool:
|
||||
return (*hasher).hashBoolv
|
||||
@ -436,30 +430,67 @@ func genTypeHasher(t reflect.Type) typeHasherFunc {
|
||||
default:
|
||||
return genHashStructFields(t)
|
||||
}
|
||||
case reflect.Map:
|
||||
return func(h *hasher, v addressableValue) bool {
|
||||
if v.IsNil() {
|
||||
h.HashUint8(0) // indicates nil
|
||||
return true
|
||||
}
|
||||
if ti.isRecursive {
|
||||
ptr := pointerOf(v)
|
||||
if idx, ok := h.visitStack.seen(ptr); ok {
|
||||
h.HashUint8(2) // indicates cycle
|
||||
h.HashUint64(uint64(idx))
|
||||
return true
|
||||
}
|
||||
h.visitStack.push(ptr)
|
||||
defer h.visitStack.pop(ptr)
|
||||
}
|
||||
h.HashUint8(1) // indicates visiting a map
|
||||
h.hashMap(v, ti, ti.isRecursive)
|
||||
return true
|
||||
}
|
||||
case reflect.Pointer:
|
||||
et := t.Elem()
|
||||
if typeIsMemHashable(et) {
|
||||
return genHashPtrToMemoryRange(et)
|
||||
}
|
||||
if !typeIsRecursive(t) {
|
||||
eti := getTypeInfo(et)
|
||||
return func(h *hasher, v addressableValue) bool {
|
||||
if v.IsNil() {
|
||||
h.HashUint8(0) // indicates nil
|
||||
eti := getTypeInfo(et)
|
||||
return func(h *hasher, v addressableValue) bool {
|
||||
if v.IsNil() {
|
||||
h.HashUint8(0) // indicates nil
|
||||
return true
|
||||
}
|
||||
if ti.isRecursive {
|
||||
ptr := pointerOf(v)
|
||||
if idx, ok := h.visitStack.seen(ptr); ok {
|
||||
h.HashUint8(2) // indicates cycle
|
||||
h.HashUint64(uint64(idx))
|
||||
return true
|
||||
}
|
||||
h.HashUint8(1) // indicates visiting a pointer
|
||||
va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
|
||||
return eti.hasher()(h, va)
|
||||
h.visitStack.push(ptr)
|
||||
defer h.visitStack.pop(ptr)
|
||||
}
|
||||
h.HashUint8(1) // indicates visiting a pointer
|
||||
va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
|
||||
return eti.hasher()(h, va)
|
||||
}
|
||||
}
|
||||
case reflect.Interface:
|
||||
return func(h *hasher, v addressableValue) bool {
|
||||
if v.IsNil() {
|
||||
h.HashUint8(0) // indicates nil
|
||||
return true
|
||||
}
|
||||
va := newAddressableValue(v.Elem().Type())
|
||||
va.Set(v.Elem())
|
||||
|
||||
return func(h *hasher, v addressableValue) bool {
|
||||
if debug {
|
||||
log.Printf("unhandled type %v", v.Type())
|
||||
h.HashUint8(1) // indicates visiting interface value
|
||||
h.hashType(va.Type())
|
||||
h.hashValue(va, true)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
default: // Func, Chan, UnsafePointer
|
||||
return noopHasherFunc
|
||||
}
|
||||
}
|
||||
|
||||
@ -646,11 +677,8 @@ func (h *hasher) hashValue(v addressableValue, forceCycleChecking bool) {
|
||||
func (h *hasher) hashValueWithType(v addressableValue, ti *typeInfo, forceCycleChecking bool) {
|
||||
doCheckCycles := forceCycleChecking || ti.isRecursive
|
||||
|
||||
if !doCheckCycles {
|
||||
hf := ti.hasher()
|
||||
if hf(h, v) {
|
||||
return
|
||||
}
|
||||
if ti.hasher()(h, v) {
|
||||
return
|
||||
}
|
||||
|
||||
// Generic handling.
|
||||
|
Loading…
Reference in New Issue
Block a user