util/deephash: use sha256x (#5339)

Switch deephash to use sha256x.Hash.

We add sha256x.HashString to efficiently hash a string.
It uses unsafe under the hood to convert a string to a []byte.
We also modify sha256x.Hash to export the underlying hash.Hash
for testing purposes so that we can intercept all hash.Hash calls.
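As an aside, the zero-copy string-to-[]byte conversion that HashString relies on looks roughly like the standalone sketch below (mirroring the stringHeader/unsafe.Slice code added in the diff; not part of this commit). The resulting slice aliases the string's memory, so it must only ever be read:

	// Standalone sketch of the zero-copy string -> []byte trick used by HashString.
	package main

	import (
		"fmt"
		"unsafe"
	)

	func stringBytes(s string) []byte {
		type stringHeader struct {
			p unsafe.Pointer
			n int
		}
		p := (*stringHeader)(unsafe.Pointer(&s))
		// The slice aliases the string's memory and must never be written to.
		return unsafe.Slice((*byte)(p.p), p.n)
	}

	func main() {
		fmt.Println(stringBytes("hi")) // [104 105]
	}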

Performance:

	name                 old time/op    new time/op    delta
	Hash-24                19.8µs ± 1%    19.2µs ± 1%  -3.01%  (p=0.000 n=10+10)
	HashPacketFilter-24    2.61µs ± 0%    2.53µs ± 1%  -3.01%  (p=0.000 n=8+10)
	HashMapAcyclic-24      31.3µs ± 1%    29.8µs ± 0%  -4.80%  (p=0.000 n=10+9)
	TailcfgNode-24         1.83µs ± 1%    1.82µs ± 2%    ~     (p=0.305 n=10+10)
	HashArray-24            344ns ± 2%     323ns ± 1%  -6.02%  (p=0.000 n=9+10)

The performance gain is not as dramatic as sha256x over sha256 because:
1. most of the hashing already occurs through the direct memory hashing logic (see the sketch after this list), and
2. what does not go through direct memory hashing is slowed down by reflect.
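The sketch below is a rough, hypothetical illustration of the "direct memory hashing" idea (it is not deephash's actual code): when a value's type contains no pointers, its whole memory can be hashed as one contiguous byte slice instead of being walked field by field with reflect:

	package main

	import (
		"crypto/sha256"
		"fmt"
		"unsafe"
	)

	// point contains no pointers, so its in-memory representation fully
	// describes its value and can be hashed with a single Write.
	type point struct{ X, Y, Z int64 }

	func main() {
		p := point{1, 2, 3}
		b := unsafe.Slice((*byte)(unsafe.Pointer(&p)), unsafe.Sizeof(p))
		h := sha256.New()
		h.Write(b) // one call instead of reflecting over each field
		fmt.Printf("%x\n", h.Sum(nil))
	}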

Signed-off-by: Joe Tsai <joetsai@digital-static.net>
Joe Tsai, 2022-08-11 17:44:09 -07:00, committed by GitHub
commit 1f7479466e (parent d942a2ff56)
5 changed files with 192 additions and 160 deletions

@@ -11,6 +11,7 @@ import (
"crypto/sha256"
"encoding/binary"
"hash"
"unsafe"
)
var _ hash.Hash = (*Hash)(nil)
@@ -24,13 +25,16 @@ type Hash struct {
 	// However, it does mean that sha256.digest.x goes unused,
 	// which is a waste of 64B.
-	h hash.Hash // always *sha256.digest
+	// H is the underlying hash.Hash.
+	// The hash.Hash.BlockSize must be equal to sha256.BlockSize.
+	// It is exported only for testing purposes.
+	H hash.Hash // usually a *sha256.digest
 	x [sha256.BlockSize]byte // equivalent to sha256.digest.x
 	nx int // equivalent to sha256.digest.nx
 }
 func New() *Hash {
-	return &Hash{h: sha256.New()}
+	return &Hash{H: sha256.New()}
 }
 func (h *Hash) Write(b []byte) (int, error) {
@@ -42,32 +46,32 @@ func (h *Hash) Sum(b []byte) []byte {
 	if h.nx > 0 {
 		// This causes block mis-alignment. Future operations will be correct,
 		// but are less efficient until Reset is called.
-		h.h.Write(h.x[:h.nx])
+		h.H.Write(h.x[:h.nx])
 		h.nx = 0
 	}
 	// Unfortunately hash.Hash.Sum always causes the input to escape since
 	// escape analysis cannot prove anything past an interface method call.
 	// Assuming h already escapes, we call Sum with h.x first,
-	// and then the copy the result to b.
-	sum := h.h.Sum(h.x[:0])
+	// and then copy the result to b.
+	sum := h.H.Sum(h.x[:0])
 	return append(b, sum...)
 }
 func (h *Hash) Reset() {
-	if h.h == nil {
-		h.h = sha256.New()
+	if h.H == nil {
+		h.H = sha256.New()
 	}
-	h.h.Reset()
+	h.H.Reset()
 	h.nx = 0
 }
 func (h *Hash) Size() int {
-	return h.h.Size()
+	return h.H.Size()
 }
 func (h *Hash) BlockSize() int {
-	return h.h.BlockSize()
+	return h.H.BlockSize()
 }
 func (h *Hash) HashUint8(n uint8) {
@@ -125,7 +129,7 @@ func (h *Hash) hashUint64Slow(n uint64) { h.hashUint(uint64(n), 8) }
 func (h *Hash) hashUint(n uint64, i int) {
 	for ; i > 0; i-- {
 		if h.nx == len(h.x) {
-			h.h.Write(h.x[:])
+			h.H.Write(h.x[:])
 			h.nx = 0
 		}
 		h.x[h.nx] = byte(n)
@@ -140,14 +144,14 @@ func (h *Hash) HashBytes(b []byte) {
 		n := copy(h.x[h.nx:], b)
 		h.nx += n
 		if h.nx == len(h.x) {
-			h.h.Write(h.x[:])
+			h.H.Write(h.x[:])
 			h.nx = 0
 		}
 		b = b[n:]
 	}
 	if len(b) >= len(h.x) {
 		n := len(b) &^ (len(h.x) - 1) // n is a multiple of len(h.x)
-		h.h.Write(b[:n])
+		h.H.Write(b[:n])
 		b = b[n:]
 	}
 	if len(b) > 0 {
@@ -155,4 +159,14 @@ func (h *Hash) HashBytes(b []byte) {
 	}
 }
+func (h *Hash) HashString(s string) {
+	type stringHeader struct {
+		p unsafe.Pointer
+		n int
+	}
+	p := (*stringHeader)(unsafe.Pointer(&s))
+	b := unsafe.Slice((*byte)(p.p), p.n)
+	h.HashBytes(b)
+}
 // TODO: Add Hash.MarshalBinary and Hash.UnmarshalBinary?
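For reference, a minimal usage sketch of the Hash API shown above (the tailscale.com/util/sha256x import path is assumed here, not taken from this diff):

	package main

	import (
		"fmt"

		"tailscale.com/util/sha256x" // assumed import path
	)

	func main() {
		h := sha256x.New()
		h.HashUint64(0x0123456789abcdef) // buffered in h.x and flushed per block
		h.HashString("hello, world")     // no []byte allocation for the string
		h.HashBytes([]byte{0xde, 0xad})
		fmt.Printf("%x\n", h.Sum(nil))
	}

The remaining hunks below are from the package's tests, which cross-check Hash against a naive reference implementation and assert that each Hash* method stays allocation-free.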

@@ -17,7 +17,7 @@ import (
 // naiveHash is an obviously correct implementation of Hash.
 type naiveHash struct {
 	hash.Hash
-	scratch [8]byte
+	scratch [256]byte
 }
 func newNaive() *naiveHash { return &naiveHash{Hash: sha256.New()} }
@@ -26,6 +26,7 @@ func (h *naiveHash) HashUint16(n uint16) { h.Write(binary.LittleEndian.AppendUin
 func (h *naiveHash) HashUint32(n uint32) { h.Write(binary.LittleEndian.AppendUint32(h.scratch[:0], n)) }
 func (h *naiveHash) HashUint64(n uint64) { h.Write(binary.LittleEndian.AppendUint64(h.scratch[:0], n)) }
 func (h *naiveHash) HashBytes(b []byte) { h.Write(b) }
+func (h *naiveHash) HashString(s string) { h.Write(append(h.scratch[:0], s...)) }
 var bytes = func() (out []byte) {
 	out = make([]byte, 130)
@@ -41,6 +42,7 @@ type hasher interface {
 	HashUint32(uint32)
 	HashUint64(uint64)
 	HashBytes([]byte)
+	HashString(string)
 }
 func hashSuite(h hasher) {
@@ -61,7 +63,12 @@ func hashSuite(h hasher) {
 			h.HashUint16(0x89ab)
 			h.HashUint8(0xcd)
 		}
-		h.HashBytes(bytes[:(i+1)*13])
+		b := bytes[:(i+1)*13]
+		if i%2 == 0 {
+			h.HashBytes(b)
+		} else {
+			h.HashString(string(b))
+		}
 	}
 }
@@ -74,14 +81,51 @@ func Test(t *testing.T) {
 	c.Assert(h1.Sum(nil), qt.DeepEquals, h2.Sum(nil))
 }
-func TestSumAllocations(t *testing.T) {
+func TestAllocations(t *testing.T) {
 	c := qt.New(t)
-	h := New()
-	n := testing.AllocsPerRun(100, func() {
-		var a [sha256.Size]byte
-		h.Sum(a[:0])
+	c.Run("Sum", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			var a [sha256.Size]byte
+			h.Sum(a[:0])
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashUint8", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashUint8(0x01)
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashUint16", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashUint16(0x0123)
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashUint32", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashUint32(0x01234567)
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashUint64", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashUint64(0x0123456789abcdef)
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashBytes", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashBytes(bytes)
+		}), qt.Equals, 0.0)
+	})
+	c.Run("HashString", func(c *qt.C) {
+		h := New()
+		c.Assert(testing.AllocsPerRun(100, func() {
+			h.HashString("abcdefghijklmnopqrstuvwxyz")
+		}), qt.Equals, 0.0)
 	})
-	c.Assert(n, qt.Equals, 0.0)
 }
 func Fuzz(f *testing.F) {