util/deephash: expand fast-path capabilities (#5404)

Add support for maps and interfaces to the fast path.
Add cycle-detection to the pointer handling logic.
This logic is mostly copied from the slow path.
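
For illustration only (not code from this commit), the following
standalone sketch shows the tagging scheme that the copied
cycle-detection logic emits: 0 for nil, 2 plus a stack index for a
value already being visited (a cycle), and 1 for a first visit.
The node type, hashNode function, and emit callback are all
invented for the example.

	package main

	import "fmt"

	type node struct {
		name string
		next *node
	}

	// hashNode mimics the fast path's scheme: emit 0 for nil, emit 2 and the
	// index of an already-visited pointer (a cycle), otherwise emit 1 and
	// descend into the fields.
	func hashNode(n *node, seen map[*node]int, emit func(v uint64)) {
		if n == nil {
			emit(0) // indicates nil
			return
		}
		if idx, ok := seen[n]; ok {
			emit(2) // indicates cycle
			emit(uint64(idx))
			return
		}
		seen[n] = len(seen)
		defer delete(seen, n)     // the real code pops its visit stack on the way out
		emit(1)                   // indicates visiting a pointer
		emit(uint64(len(n.name))) // stand-in for hashing the remaining fields
		hashNode(n.next, seen, emit)
	}

	func main() {
		n := &node{name: "root"}
		n.next = n // a self-cycle that would otherwise recurse forever
		hashNode(n, map[*node]int{}, func(v uint64) { fmt.Println(v) })
	}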

A future commit will delete the slow path once
the fast path never falls back to the slow path.

Performance:

	name                 old time/op    new time/op    delta
	Hash-24                18.5µs ± 1%    14.9µs ± 2%  -19.52%  (p=0.000 n=10+10)
	HashPacketFilter-24    2.54µs ± 1%    2.60µs ± 1%   +2.19%  (p=0.000 n=10+10)
	HashMapAcyclic-24      31.6µs ± 1%    30.5µs ± 1%   -3.42%  (p=0.000 n=9+8)
	TailcfgNode-24         1.44µs ± 2%    1.43µs ± 1%     ~     (p=0.171 n=10+10)
	HashArray-24            324ns ± 1%     324ns ± 2%     ~     (p=0.425 n=9+9)

The additional cycle-detection logic doesn't incur much of a
slowdown, since it only activates when a type is recursive,
which is not the case for any of the types that we care about.
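
As a hypothetical illustration of that distinction (both types are
invented, not taken from the codebase):

	package example

	// linkedList is recursive: its definition reaches itself through
	// *linkedList, so its typeInfo would be marked recursive
	// (ti.isRecursive) and its hasher pays for the visit-stack
	// bookkeeping.
	type linkedList struct {
		value int
		next  *linkedList
	}

	// config is not recursive: no chain of fields can lead back to a
	// config value, so the "if ti.isRecursive" branch is never taken
	// and hashing stays on the cheap path.
	type config struct {
		name  string
		ports map[string]uint16
		tags  []string
	}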

There is a notable performance boost because we fall back from
the fast path to the slow path less often. In particular, a struct
with a field that could not be handled by the fast path would
previously cause the entire struct to go through the slow path.
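
As a hedged example of the kind of struct that benefits (the type
is made up for illustration): before this change, the map field
alone would have pushed the whole value, cheap fields included,
onto the reflection-based slow path; with maps supported, the
generated hasher now handles every field inline.

	package example

	type peerInfo struct {
		ID        uint64            // fixed-size, memory-hashable
		Name      string            // already handled by the fast path
		Endpoints map[string]uint16 // previously forced the slow path
	}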

Signed-off-by: Joe Tsai <joetsai@digital-static.net>
Joe Tsai 2022-08-24 01:31:01 -07:00 committed by GitHub
parent 78b90c3685
commit 3fc8683585

@@ -14,7 +14,7 @@
 //   - time.Time are compared based on whether they are the same instant in time
 //     and also in the same zone offset. Monotonic measurements and zone names
 //     are ignored as part of the hash.
 //   - netip.Addr are compared based on a shallow comparison of the struct.
 //
 // WARNING: This package, like most of the tailscale.com Go module,
 // should be considered Tailscale-internal; we make no API promises.
@@ -25,7 +25,6 @@
 	"encoding/binary"
 	"encoding/hex"
 	"fmt"
-	"log"
 	"math"
 	"net/netip"
 	"reflect"
@@ -246,7 +245,7 @@ func (ti *typeInfo) hasher() typeHasherFunc {
 }
 
 func (ti *typeInfo) buildHashFuncOnce() {
-	ti.hashFuncLazy = genTypeHasher(ti.rtype)
+	ti.hashFuncLazy = genTypeHasher(ti)
 }
 
 func (h *hasher) hashBoolv(v addressableValue) bool {
@@ -380,13 +379,8 @@ func genHashPtrToMemoryRange(eleType reflect.Type) typeHasherFunc {
 	}
 }
 
-const debug = false
-
-func genTypeHasher(t reflect.Type) typeHasherFunc {
-	if debug {
-		log.Printf("generating func for %v", t)
-	}
+func genTypeHasher(ti *typeInfo) typeHasherFunc {
+	t := ti.rtype
 	switch t.Kind() {
 	case reflect.Bool:
 		return (*hasher).hashBoolv
@@ -436,30 +430,67 @@ func genTypeHasher(t reflect.Type) typeHasherFunc {
 		default:
 			return genHashStructFields(t)
 		}
+	case reflect.Map:
+		return func(h *hasher, v addressableValue) bool {
+			if v.IsNil() {
+				h.HashUint8(0) // indicates nil
+				return true
+			}
+			if ti.isRecursive {
+				ptr := pointerOf(v)
+				if idx, ok := h.visitStack.seen(ptr); ok {
+					h.HashUint8(2) // indicates cycle
+					h.HashUint64(uint64(idx))
+					return true
+				}
+				h.visitStack.push(ptr)
+				defer h.visitStack.pop(ptr)
+			}
+			h.HashUint8(1) // indicates visiting a map
+			h.hashMap(v, ti, ti.isRecursive)
+			return true
+		}
 	case reflect.Pointer:
 		et := t.Elem()
 		if typeIsMemHashable(et) {
 			return genHashPtrToMemoryRange(et)
 		}
-		if !typeIsRecursive(t) {
-			eti := getTypeInfo(et)
-			return func(h *hasher, v addressableValue) bool {
-				if v.IsNil() {
-					h.HashUint8(0) // indicates nil
-					return true
-				}
-				h.HashUint8(1) // indicates visiting a pointer
-				va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
-				return eti.hasher()(h, va)
-			}
-		}
-	}
-
-	return func(h *hasher, v addressableValue) bool {
-		if debug {
-			log.Printf("unhandled type %v", v.Type())
-		}
-		return false
+		eti := getTypeInfo(et)
+		return func(h *hasher, v addressableValue) bool {
+			if v.IsNil() {
+				h.HashUint8(0) // indicates nil
+				return true
+			}
+			if ti.isRecursive {
+				ptr := pointerOf(v)
+				if idx, ok := h.visitStack.seen(ptr); ok {
+					h.HashUint8(2) // indicates cycle
+					h.HashUint64(uint64(idx))
+					return true
+				}
+				h.visitStack.push(ptr)
+				defer h.visitStack.pop(ptr)
+			}
+			h.HashUint8(1) // indicates visiting a pointer
+			va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
+			return eti.hasher()(h, va)
+		}
+	case reflect.Interface:
+		return func(h *hasher, v addressableValue) bool {
+			if v.IsNil() {
+				h.HashUint8(0) // indicates nil
+				return true
+			}
+			va := newAddressableValue(v.Elem().Type())
+			va.Set(v.Elem())
+
+			h.HashUint8(1) // indicates visiting interface value
+			h.hashType(va.Type())
+			h.hashValue(va, true)
+			return true
+		}
+	default: // Func, Chan, UnsafePointer
+		return noopHasherFunc
 	}
 }
@@ -646,11 +677,8 @@ func (h *hasher) hashValue(v addressableValue, forceCycleChecking bool) {
 func (h *hasher) hashValueWithType(v addressableValue, ti *typeInfo, forceCycleChecking bool) {
 	doCheckCycles := forceCycleChecking || ti.isRecursive
 
-	if !doCheckCycles {
-		hf := ti.hasher()
-		if hf(h, v) {
-			return
-		}
+	if ti.hasher()(h, v) {
+		return
 	}
 
 	// Generic handling.