func (d *decoder) parsePLTE(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	np := int(length / 3) // The number of palette entries.
	if length%3 != 0 || np <= 0 || np > 256 {
		return FormatError("bad PLTE length")
	}
	n, err := io.ReadFull(r, d.tmp[0:3*np])
	if err != nil {
		return err
	}
	crc.Write(d.tmp[0:n])
	switch d.colorType {
	case ctPaletted:
		palette := make([]image.Color, np)
		for i := 0; i < np; i++ {
			palette[i] = image.RGBAColor{d.tmp[3*i+0], d.tmp[3*i+1], d.tmp[3*i+2], 0xff}
		}
		d.image.(*image.Paletted).Palette = image.PalettedColorModel(palette)
	case ctTrueColor, ctTrueColorAlpha:
		// As per the PNG spec, a PLTE chunk is optional (and for practical purposes,
		// ignorable) for the ctTrueColor and ctTrueColorAlpha color types (section 4.1.2).
		return nil
	default:
		return FormatError("PLTE, color type mismatch")
	}
	return nil
}
func (d *decoder) parsetRNS(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	if length > 256 {
		return FormatError("bad tRNS length")
	}
	n, err := io.ReadFull(r, d.tmp[0:length])
	if err != nil {
		return err
	}
	crc.Write(d.tmp[0:n])
	switch d.cb {
	case cbG8, cbG16:
		return UnsupportedError("grayscale transparency")
	case cbTC8, cbTC16:
		return UnsupportedError("truecolor transparency")
	case cbP8:
		if n > len(d.palette) {
			return FormatError("bad tRNS length")
		}
		for i := 0; i < n; i++ {
			rgba := d.palette[i].(image.RGBAColor)
			d.palette[i] = image.RGBAColor{rgba.R, rgba.G, rgba.B, d.tmp[i]}
		}
	case cbTCA8, cbTCA16:
		return FormatError("tRNS, color type mismatch")
	}
	return nil
}
func (d *decoder) parsetRNS(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	if length > 256 {
		return FormatError("bad tRNS length")
	}
	n, err := io.ReadFull(r, d.tmp[0:length])
	if err != nil {
		return err
	}
	crc.Write(d.tmp[0:n])
	switch d.colorType {
	case ctTrueColor:
		return UnsupportedError("TrueColor transparency")
	case ctPaletted:
		p := d.image.(*image.Paletted).Palette
		if n > len(p) {
			return FormatError("bad tRNS length")
		}
		for i := 0; i < n; i++ {
			rgba := p[i].(image.RGBAColor)
			p[i] = image.RGBAColor{rgba.R, rgba.G, rgba.B, d.tmp[i]}
		}
	case ctTrueColorAlpha:
		return FormatError("tRNS, color type mismatch")
	}
	return nil
}
func TestRef(t *testing.T) {
	for _, elem := range data {
		var h32 hash.Hash32 = New32()
		h32.Write([]byte(elem.s))
		if v := h32.Sum32(); v != elem.h32 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h32)
		}

		if v := Sum32([]byte(elem.s)); v != elem.h32 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h32)
		}

		var h64 hash.Hash64 = New64()
		h64.Write([]byte(elem.s))
		if v := h64.Sum64(); v != elem.h64_1 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h64_1)
		}

		var h128 Hash128 = New128()
		h128.Write([]byte(elem.s))
		if v1, v2 := h128.Sum128(); v1 != elem.h64_1 || v2 != elem.h64_2 {
			t.Errorf("'%s': 0x%x-0x%x (want 0x%x-0x%x)", elem.s, v1, v2, elem.h64_1, elem.h64_2)
		}

		if v1, v2 := Sum128([]byte(elem.s)); v1 != elem.h64_1 || v2 != elem.h64_2 {
			t.Errorf("'%s': 0x%x-0x%x (want 0x%x-0x%x)", elem.s, v1, v2, elem.h64_1, elem.h64_2)
		}
	}
}
/* Helper function for copyUpwards. Recursive. */
func (node *SpecializationPathNode) copyUpwards1(downNode *SpecializationPathNode, h hash.Hash32) *SpecializationPathNode {
	h.Write([]byte(node.Type.Name))
	copiedNode := &SpecializationPathNode{Up: nil, Down: downNode, Level: node.Level, Type: node.Type}
	if node.Up != nil {
		copiedUpNode := node.Up.copyUpwards1(copiedNode, h)
		copiedNode.Up = copiedUpNode
	}
	return copiedNode
}
func imageHash(h hash.Hash32, dir *Directory, img *Image) int {
	h.Reset()
	h.Write([]byte(dir.RelPat()))
	h.Write([]byte(img.Name()))
	bytes, err := img.FileTime().MarshalBinary()
	if err == nil {
		h.Write(bytes)
	}
	return int(h.Sum32())
}
func TestRef(t *testing.T) {
	for _, elem := range data {
		var h32 hash.Hash32 = New32()
		h32.Write([]byte(elem.s))
		if v := h32.Sum32(); v != elem.h32 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h32)
		}

		var h32_byte hash.Hash32 = New32()
		h32_byte.Write([]byte(elem.s))
		target := fmt.Sprintf("%08x", elem.h32)
		if p := fmt.Sprintf("%x", h32_byte.Sum(nil)); p != target {
			t.Errorf("'%s': %s (want %s)", elem.s, p, target)
		}

		if v := Sum32([]byte(elem.s)); v != elem.h32 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h32)
		}

		var h64 hash.Hash64 = New64()
		h64.Write([]byte(elem.s))
		if v := h64.Sum64(); v != elem.h64_1 {
			t.Errorf("'%s': 0x%x (want 0x%x)", elem.s, v, elem.h64_1)
		}

		var h64_byte hash.Hash64 = New64()
		h64_byte.Write([]byte(elem.s))
		target = fmt.Sprintf("%016x", elem.h64_1)
		if p := fmt.Sprintf("%x", h64_byte.Sum(nil)); p != target {
			t.Errorf("Sum64: '%s': %s (want %s)", elem.s, p, target)
		}

		if v := Sum64([]byte(elem.s)); v != elem.h64_1 {
			t.Errorf("Sum64: '%s': 0x%x (want 0x%x)", elem.s, v, elem.h64_1)
		}

		var h128 Hash128 = New128()
		h128.Write([]byte(elem.s))
		if v1, v2 := h128.Sum128(); v1 != elem.h64_1 || v2 != elem.h64_2 {
			t.Errorf("New128: '%s': 0x%x-0x%x (want 0x%x-0x%x)", elem.s, v1, v2, elem.h64_1, elem.h64_2)
		}

		var h128_byte Hash128 = New128()
		h128_byte.Write([]byte(elem.s))
		target = fmt.Sprintf("%016x%016x", elem.h64_1, elem.h64_2)
		if p := fmt.Sprintf("%x", h128_byte.Sum(nil)); p != target {
			t.Errorf("New128: '%s': %s (want %s)", elem.s, p, target)
		}

		if v1, v2 := Sum128([]byte(elem.s)); v1 != elem.h64_1 || v2 != elem.h64_2 {
			t.Errorf("Sum128: '%s': 0x%x-0x%x (want 0x%x-0x%x)", elem.s, v1, v2, elem.h64_1, elem.h64_2)
		}
	}
}
// FilesPathToPartition hashes a file path to a partition.
func FilesPathToPartition(h hash.Hash32, partitions []string, path string) string {
	if len(partitions) <= 0 {
		return ""
	}
	h.Reset()
	io.WriteString(h, path)
	i := h.Sum32() % uint32(len(partitions))
	return partitions[i]
}
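A minimal usage sketch for FilesPathToPartition, assuming an FNV-1a hasher from the standard library; the partition names and path below are illustrative, not from the original package:

	// Needs "fmt" and "hash/fnv"; partition names and path are made up.
	h := fnv.New32a()
	partitions := []string{"part-0", "part-1", "part-2"}
	fmt.Println(FilesPathToPartition(h, partitions, "photos/2021/img_001.jpg"))
	// The same path always maps to the same partition as long as len(partitions) is unchanged.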
func (d *decoder) parseIHDR(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	if length != 13 {
		return FormatError("bad IHDR length")
	}
	_, err := io.ReadFull(r, d.tmp[0:13])
	if err != nil {
		return err
	}
	crc.Write(d.tmp[0:13])
	if d.tmp[10] != 0 || d.tmp[11] != 0 || d.tmp[12] != 0 {
		return UnsupportedError("compression, filter or interlace method")
	}
	w := int32(parseUint32(d.tmp[0:4]))
	h := int32(parseUint32(d.tmp[4:8]))
	if w < 0 || h < 0 {
		return FormatError("negative dimension")
	}
	nPixels := int64(w) * int64(h)
	if nPixels != int64(int(nPixels)) {
		return UnsupportedError("dimension overflow")
	}
	d.cb = cbInvalid
	switch d.tmp[8] {
	case 8:
		switch d.tmp[9] {
		case ctGrayscale:
			d.cb = cbG8
		case ctTrueColor:
			d.cb = cbTC8
		case ctPaletted:
			d.cb = cbP8
		case ctTrueColorAlpha:
			d.cb = cbTCA8
		}
	case 16:
		switch d.tmp[9] {
		case ctGrayscale:
			d.cb = cbG16
		case ctTrueColor:
			d.cb = cbTC16
		case ctTrueColorAlpha:
			d.cb = cbTCA16
		}
	}
	if d.cb == cbInvalid {
		return UnsupportedError(fmt.Sprintf("bit depth %d, color type %d", d.tmp[8], d.tmp[9]))
	}
	d.width, d.height = int(w), int(h)
	return nil
}
// SeriesHash returns a hash of the metric, tags pair. The hash falls in uint16
// range.
func (c cmd) SeriesHash(hash hash.Hash32) int {
	hash.Reset()
	buf := c.Point()

	// include Metric
	i := bytes.IndexByte(buf, ' ')
	hash.Write(buf[:i])
	buf = buf[i+1:]

	// exclude Time
	i = bytes.IndexByte(buf, ' ')
	buf = buf[i+1:]

	// exclude Value
	i = bytes.IndexByte(buf, ' ')

	// include Tags
	hash.Write(buf[i:])

	// Sum
	sum := hash.Sum32()
	return int(uint16(sum))
}
func hashFieldValue(h hash.Hash32, event common.MapStr, field string) error {
	type stringer interface {
		String() string
	}

	type hashable interface {
		Hash32(h hash.Hash32) error
	}

	v, err := event.GetValue(field)
	if err != nil {
		return err
	}

	switch s := v.(type) {
	case hashable:
		err = s.Hash32(h)
	case string:
		_, err = h.Write([]byte(s))
	case []byte:
		_, err = h.Write(s)
	case stringer:
		_, err = h.Write([]byte(s.String()))
	case int8, int16, int32, int64, int,
		uint8, uint16, uint32, uint64, uint:
		err = binary.Write(h, binary.LittleEndian, v)
	case float32:
		tmp := strconv.FormatFloat(float64(s), 'g', -1, 32)
		_, err = h.Write([]byte(tmp))
	case float64:
		tmp := strconv.FormatFloat(s, 'g', -1, 32)
		_, err = h.Write([]byte(tmp))
	default:
		// try to hash using reflection:
		err = binary.Write(h, binary.LittleEndian, v)
		if err != nil {
			err = fmt.Errorf("can not hash key '%v' of unknown type", field)
		}
	}
	return err
}
func testGolden(t *testing.T, h hash.Hash32, golden []_Golden, which string) {
	for _, g := range golden {
		h.Reset()
		h.Write([]byte(g.in))
		sum := h.Sum32()
		if sum != g.out {
			t.Errorf("%s(%s) = 0x%x want 0x%x", which, g.in, sum, g.out)
		}

		bsum := h.Sum(nil)
		if len(bsum) != 4 {
			t.Errorf("%s Sum(nil) returned %d bytes, wanted 4: %s\n", which, len(bsum), bsum)
		}
		s := binary.BigEndian.Uint32(bsum)
		if s != sum {
			t.Errorf("%s(%s).Sum(nil) = 0x%x want 0x%x", which, g.in, s, sum)
		}

		bsum = h.Sum([]byte{0x01, 0x02, 0x03, 0x04})
		if len(bsum) != 8 {
			t.Errorf("%s Sum(bsum) returned %d bytes, wanted 8: %x\n", which, len(bsum), bsum)
		}
		s = binary.BigEndian.Uint32(bsum[0:])
		s2 := binary.BigEndian.Uint32(bsum[4:])
		if s != 0x01020304 || s2 != sum {
			t.Errorf("%s(%s).Sum(bsum) = %x (expected 0x01020304 %x)", which, g.in, bsum, sum)
		}
	}
}
// benchmark hashes n bytes of generated data b.N times with h.
func benchmark(b *testing.B, h hash.Hash32, n int64) {
	b.SetBytes(n)
	data := make([]byte, n)
	for i := range data {
		data[i] = byte(i)
	}
	in := make([]byte, 0, h.Size())
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		h.Reset()
		h.Write(data)
		h.Sum(in)
	}
}
func (d *decoder) parseIHDR(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	if length != 13 {
		return FormatError("bad IHDR length")
	}
	_, err := io.ReadFull(r, d.tmp[0:13])
	if err != nil {
		return err
	}
	crc.Write(d.tmp[0:13])
	if d.tmp[8] != 8 {
		return UnsupportedError("bit depth")
	}
	if d.tmp[10] != 0 || d.tmp[11] != 0 || d.tmp[12] != 0 {
		return UnsupportedError("compression, filter or interlace method")
	}
	w := int32(parseUint32(d.tmp[0:4]))
	h := int32(parseUint32(d.tmp[4:8]))
	if w < 0 || h < 0 {
		return FormatError("negative dimension")
	}
	nPixels := int64(w) * int64(h)
	if nPixels != int64(int(nPixels)) {
		return UnsupportedError("dimension overflow")
	}
	d.colorType = d.tmp[9]
	switch d.colorType {
	case ctTrueColor:
		d.image = image.NewRGBA(int(w), int(h))
	case ctPaletted:
		d.image = image.NewPaletted(int(w), int(h), nil)
	case ctTrueColorAlpha:
		d.image = image.NewNRGBA(int(w), int(h))
	default:
		return UnsupportedError("color type")
	}
	d.width, d.height = int(w), int(h)
	return nil
}
func commonBench(b *testing.B, h hash.Hash32, golden []_Golden) {
	for i := 0; i < b.N; i++ {
		for _, g := range golden {
			h.Reset()
			h.Write([]byte(g.in))
			h.Sum32()
		}
	}
}
func (d *decoder) parseIDAT(r io.Reader, crc hash.Hash32, length uint32) os.Error {
	// There may be more than one IDAT chunk, but their contents must be
	// treated as if they were one continuous stream (to the zlib decoder).
	// We bring up an io.Pipe and write the IDAT chunks into the pipe as
	// we see them, and decode the stream in a separate go-routine, which
	// signals its completion (successful or not) via a channel.
	if d.idatWriter == nil {
		pr, pw := io.Pipe()
		d.idatWriter = pw
		d.idatDone = make(chan os.Error)
		go func() {
			err := d.idatReader(pr)
			if err == os.EOF {
				err = FormatError("too little IDAT")
			}
			pr.CloseWithError(FormatError("too much IDAT"))
			d.idatDone <- err
		}()
	}
	var buf [4096]byte
	for length > 0 {
		n, err1 := r.Read(buf[0:min(len(buf), int(length))])
		// We delay checking err1. It is possible to get n bytes and an error,
		// but if the n bytes themselves contain a FormatError, for example, we
		// want to report that error, and not the one that made the Read stop.
		n, err2 := d.idatWriter.Write(buf[0:n])
		if err2 != nil {
			return err2
		}
		if err1 != nil {
			return err1
		}
		crc.Write(buf[0:n])
		length -= uint32(n)
	}
	return nil
}
// Returns shard under given key
func (m ConcurrentMap) GetShard(key string) *ConcurrentMapShared {
	hasherAsInterface := hashPool.Get()
	var hasher hash.Hash32
	if hasherAsInterface == nil {
		hasher = fnv.New32()
	} else {
		hasher = hasherAsInterface.(hash.Hash32)
		hasher.Reset()
	}
	hasher.Write([]byte(key))
	sum := hasher.Sum32()
	hashPool.Put(hasher)
	return m[uint(sum)%uint(SHARD_COUNT)]
}
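GetShard relies on a package-level hashPool that is not part of the snippet. Because it falls back to fnv.New32() when Get returns nil, the pool is presumably declared without a New function; the declaration below is an assumption for illustration, not taken from the original package:

	// Assumed declaration (needs "sync"); GetShard's nil check only makes sense
	// if the pool has no New function, so Get can return nil on an empty pool.
	var hashPool sync.Pool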
func (p *xxhPool) Put(h hash.Hash32) {
	h.Reset()
	p.Pool.Put(h)
}
// ReleaseCRC32Checksum releases a CRC32 back to the allocation pool.
func ReleaseCRC32Checksum(crc hash.Hash32) {
	crc.Reset()
	crc32Pool.Put(crc)
}
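The matching pool and acquire side are not shown in the snippet; the sketch below gives one plausible shape using hash/crc32 from the standard library. Both the crc32Pool initializer and the AcquireCRC32Checksum name are assumptions for illustration only.

	// Assumed counterpart to ReleaseCRC32Checksum (needs "hash", "hash/crc32", "sync").
	var crc32Pool = sync.Pool{
		New: func() interface{} { return crc32.NewIEEE() },
	}

	// AcquireCRC32Checksum (hypothetical name) takes a CRC-32 hasher from the pool.
	func AcquireCRC32Checksum() hash.Hash32 {
		return crc32Pool.Get().(hash.Hash32)
	}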
// Hash32 is a convenience method for hashing a string against a hash.Hash32
func Hash32(s string, h hash.Hash32) uint32 {
	h.Reset()
	h.Write([]byte(s))
	return h.Sum32()
}
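A quick usage sketch for the Hash32 helper above, pairing it with two standard-library hashers; the FNV-1a and CRC-32 choices and the key string are arbitrary examples, not from the original:

	// Needs "fmt", "hash/crc32", and "hash/fnv".
	key := "example.com/user/42"
	fmt.Printf("fnv=0x%08x crc32=0x%08x\n",
		Hash32(key, fnv.New32a()),
		Hash32(key, crc32.NewIEEE()))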
func benchmark(b *testing.B, h hash.Hash32, n, alignment int64) {
	b.SetBytes(n)
	data := make([]byte, n+alignment)
	data = data[alignment:]
	for i := range data {
		data[i] = byte(i)
	}
	in := make([]byte, 0, h.Size())

	// Warm up
	h.Reset()
	h.Write(data)
	h.Sum(in)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		h.Reset()
		h.Write(data)
		h.Sum(in)
	}
}
func testIncremental(t *testing.T, h hash.Hash32, result uint32, which string) {
	h.Reset()
	var parts = []string{
		"h", "ell", "o", "he", "llo", "hellohello",
	}
	for _, p := range parts {
		l, _ := h.Write([]byte(p))
		if l != len(p) {
			t.Errorf("Write(%d bytes) = %d, want %d\n", len(p), l, len(p))
		}
	}
	h32 := h.Sum32()
	if h32 != result {
		t.Errorf("%s: incremental failed: got %08x", which, h32)
	}

	h.Reset()
	h.Write([]byte("hellohellohellohello"))
	h32 = h.Sum32()
	if h32 != result {
		t.Errorf("%s: failed: got %08x", which, h32)
	}
}
func testIncremental(t *testing.T, h hash.Hash32, result uint32, which string) {
	h.Reset()
	h.Write([]byte("hello"))
	h.Write([]byte("h"))
	h.Write([]byte("e"))
	h.Write([]byte("l"))
	h.Write([]byte("l"))
	h.Write([]byte("o"))
	h.Write([]byte("hellohello"))
	h32 := h.Sum32()
	if h32 != result {
		t.Errorf("%s: incremental failed: got %08x", which, h32)
	}

	h.Reset()
	h.Write([]byte("hellohellohellohello"))
	h32 = h.Sum32()
	if h32 != result {
		t.Errorf("%s: failed: got %08x", which, h32)
	}
}
package jenkins

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"hash"
)

var _ = Describe("Jenkins", func() {
	var jhash hash.Hash32
	var key []byte

	BeforeEach(func() {
		jhash = New()
		key = []byte("Apple")
	})

	Describe("New", func() {
		It("returns jenkhash", func() {
			var h *jenkhash
			Expect(jhash).To(BeAssignableToTypeOf(h))
		})

		It("initializes offset to 0", func() {
			Expect(jhash.Sum32()).To(Equal(uint32(0)))
		})
	})

	Describe("Write", func() {
func doHash(in string) string {
	var h hash.Hash32 = murmur3.New32()
	h.Write([]byte(in))
	return hex.EncodeToString(h.Sum(nil))
}
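A short usage sketch for doHash; the input string is arbitrary, and the call assumes the "fmt" import alongside the "encoding/hex", "hash", and murmur3 imports used by the original file:

	// doHash is deterministic: the same input always yields the same
	// 8-hex-character string (the 32-bit murmur3 sum of the input).
	fmt.Println(doHash("hello"))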