// nextAtom reads the next atom header from sr and returns the atom type
// together with a SectionReader covering the atom's payload. sr is left
// positioned at the start of the following atom.
func nextAtom(sr *io.SectionReader) (string, *io.SectionReader, error) {
	var asz uint32
	var sz int64
	atyp := make([]byte, 4)
	if err := binary.Read(sr, binary.BigEndian, &asz); err != nil {
		return "", nil, err
	}
	if asz == 0 {
		// Size 0 means the atom extends to the end of the section.
		sz = sr.Size()
	} else if asz == 1 {
		// Size 1 signals a 64-bit extended size, which is not handled here.
		return "", nil, ErrNotImplemented
	} else {
		sz = int64(asz)
	}
	if _, err := io.ReadFull(sr, atyp); err != nil {
		return "", nil, err
	}
	sz -= 8 // 4 bytes for size, 4 bytes for type
	// Get the current offset within sr.
	cur, err := seekCur(sr)
	if err != nil {
		return "", nil, err
	}
	// Skip the payload so sr is positioned at the next atom.
	if _, err := io.CopyN(ioutil.Discard, sr, sz); err != nil {
		return "", nil, err
	}
	return string(atyp), io.NewSectionReader(sr, cur, sz), nil
}
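// A minimal usage sketch (not part of the original source): walk the top-level
// atoms of a file by calling nextAtom until the section is exhausted. The
// listAtoms name is illustrative; it assumes the standard fmt, io, and os packages.
func listAtoms(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	fi, err := f.Stat()
	if err != nil {
		return err
	}
	sr := io.NewSectionReader(f, 0, fi.Size())
	for {
		typ, payload, err := nextAtom(sr)
		if err == io.EOF {
			return nil // no more atoms
		}
		if err != nil {
			return err
		}
		fmt.Printf("atom %q, payload %d bytes\n", typ, payload.Size())
	}
}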
// readCompressed inflates the zlib stream starting at offset within r and
// reads exactly len(s) decompressed bytes into s.
func readCompressed(r *io.SectionReader, offset int64, s []byte) (int, error) {
	zr, err := zlib.NewReader(io.NewSectionReader(r, offset, r.Size()-offset))
	if err != nil {
		return 0, err
	}
	defer zr.Close()
	return io.ReadFull(zr, s)
}
// readVarint decodes an unsigned varint (binary.Uvarint encoding) at offset
// within r, returning the value and the number of bytes it occupies.
func readVarint(r *io.SectionReader, offset int64) (v int64, n int, err error) {
	var buf [16]byte // 109 bits should be enough for everybody.
	// A short read near the end of the section is fine; the unread bytes stay zero.
	_, err = r.ReadAt(buf[:], offset)
	if err == io.EOF || err == io.ErrUnexpectedEOF {
		err = nil
	}
	if err != nil {
		return
	}
	u, n := binary.Uvarint(buf[:])
	v = int64(u)
	return
}
// readStringPool parses a ResStringPool chunk: the header, the string and
// style offset tables, and then each string as UTF-8 or UTF-16 depending on
// the header flags.
func readStringPool(sr *io.SectionReader) (*ResStringPool, error) {
	sp := new(ResStringPool)
	if err := binary.Read(sr, binary.LittleEndian, &sp.Header); err != nil {
		return nil, err
	}
	stringStarts := make([]uint32, sp.Header.StringCount)
	if err := binary.Read(sr, binary.LittleEndian, stringStarts); err != nil {
		return nil, err
	}
	styleStarts := make([]uint32, sp.Header.StyleCount)
	if err := binary.Read(sr, binary.LittleEndian, styleStarts); err != nil {
		return nil, err
	}
	sp.Strings = make([]string, sp.Header.StringCount)
	for i, start := range stringStarts {
		var str string
		var err error
		sr.Seek(int64(sp.Header.StringStart+start), io.SeekStart)
		if (sp.Header.Flags & UTF8_FLAG) == 0 {
			str, err = readUTF16(sr)
		} else {
			str, err = readUTF8(sr)
		}
		if err != nil {
			return nil, err
		}
		sp.Strings[i] = str
	}
	sp.Styles = make([]string, sp.Header.StyleCount)
	for i, start := range styleStarts {
		var str string
		var err error
		sr.Seek(int64(sp.Header.StylesStart+start), io.SeekStart)
		if (sp.Header.Flags & UTF8_FLAG) == 0 {
			str, err = readUTF16(sr)
		} else {
			str, err = readUTF8(sr)
		}
		if err != nil {
			return nil, err
		}
		sp.Styles[i] = str
	}
	return sp, nil
}
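// The readUTF16 and readUTF8 helpers called above are not shown in this
// section. Below is a minimal sketch (an assumption, not the library's actual
// implementation) of a UTF-16 reader for the Android string-pool layout: a
// little-endian uint16 length in code units, with the high bit flagging a
// two-word length for long strings, followed by the code units. It assumes
// encoding/binary and unicode/utf16 are imported; sampleThreshold-style
// details of the real helper may differ.
func readUTF16Sketch(sr *io.SectionReader) (string, error) {
	var hi uint16
	if err := binary.Read(sr, binary.LittleEndian, &hi); err != nil {
		return "", err
	}
	size := int(hi)
	if hi&0x8000 != 0 {
		// Long string: the length is split across two uint16 words.
		var lo uint16
		if err := binary.Read(sr, binary.LittleEndian, &lo); err != nil {
			return "", err
		}
		size = int(hi&0x7FFF)<<16 | int(lo)
	}
	units := make([]uint16, size)
	if err := binary.Read(sr, binary.LittleEndian, units); err != nil {
		return "", err
	}
	return string(utf16.Decode(units)), nil
}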
// checkIdxMagic verifies the pack index magic bytes and loads the fanout
// table that follows the 8-byte header.
func (pk *PackReader) checkIdxMagic(idx *io.SectionReader) (err error) {
	var buf [idxHeaderSize]byte
	_, err = idx.ReadAt(buf[:], 0)
	if err != nil {
		return
	}
	magic := [4]byte{buf[0], buf[1], buf[2], buf[3]}
	if magic != ([4]byte{'\xff', 't', 'O', 'c'}) {
		return errBadIdxMagic
	}
	for i := range pk.idxFanout {
		pk.idxFanout[i] = binary.BigEndian.Uint32(buf[8+4*i:])
	}
	return nil
}
// checkPackMagic verifies the packfile magic and version and returns the
// object count from the 12-byte header.
func checkPackMagic(pack *io.SectionReader) (version, count uint32, err error) {
	var buf [12]byte
	_, err = pack.ReadAt(buf[:], 0)
	if err != nil {
		return
	}
	magic := [4]byte{buf[0], buf[1], buf[2], buf[3]}
	if magic != ([4]byte{'P', 'A', 'C', 'K'}) {
		err = errBadPackMagic
		return
	}
	version = binary.BigEndian.Uint32(buf[4:8])
	if version != 2 {
		err = errUnsupportedPackVersion
		return
	}
	// The object count occupies the last four bytes of the header.
	count = binary.BigEndian.Uint32(buf[8:12])
	return
}
/*
ParsePackHeader parses the pack header. A header appears at the beginning and
consists of the following:

	4-byte signature:
	    The signature is: {'P', 'A', 'C', 'K'}

	4-byte version number (network byte order):
	    Git currently accepts version number 2 or 3 but generates version 2 only.

	4-byte number of objects contained in the pack (network byte order)
	    Observation: we cannot have more than 4G versions ;-) and
	    more than 4G objects in a pack.
*/
func ParsePackHeader(pack *io.SectionReader) (version, objectCount uint32, err error) {
	buf := make([]byte, 12)
	_, err = io.ReadFull(pack, buf)
	if err != nil {
		return
	}
	if signature := string(buf[:4]); signature != packSignature {
		err = errors.New("pack header has wrong signature: " + signature)
		return
	}
	version = binary.BigEndian.Uint32(buf[4:8])
	if version != 2 {
		err = fmt.Errorf("unsupported pack version: %d", version)
		return
	}
	objectCount = binary.BigEndian.Uint32(buf[8:])
	return
}
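// A small, self-contained sketch (not from the original source) that builds
// the 12-byte header of a version-2 pack containing three objects in memory
// and feeds it to ParsePackHeader. It assumes packSignature is the string
// "PACK" and uses the standard bytes, encoding/binary, fmt, and io packages.
func examplePackHeader() {
	hdr := make([]byte, 12)
	copy(hdr[:4], "PACK")                    // 4-byte signature
	binary.BigEndian.PutUint32(hdr[4:8], 2)  // version, network byte order
	binary.BigEndian.PutUint32(hdr[8:12], 3) // object count, network byte order
	sr := io.NewSectionReader(bytes.NewReader(hdr), 0, int64(len(hdr)))
	version, count, err := ParsePackHeader(sr)
	fmt.Println(version, count, err) // 2 3 <nil>
}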
// readVaroffset reads the pseudo-varint used to encode offsets for delta bases.
// It is a big-endian form: 1|a0, ..., 1|a_{n-1}, 0|a_n,
// representing:
//
//	(a0+1)<<(7*n) + ... + (a_{n-1}+1)<<7 + a_n
func readVaroffset(r *io.SectionReader, offset int64) (v int64, n int, err error) {
	var buf [16]byte // 109 bits should be enough for everybody.
	n, err = r.ReadAt(buf[:], offset)
	if err == io.EOF || err == io.ErrUnexpectedEOF {
		err = nil
	}
	if err != nil {
		return
	}
	u := uint64(0)
	for i, b := range buf[:n] {
		if i > 0 {
			u++
		}
		u <<= 7
		u |= uint64(b &^ 0x80)
		if b&0x80 == 0 {
			return int64(u), i + 1, nil
		}
	}
	return int64(u), len(buf), io.ErrUnexpectedEOF
}
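// A small sketch (not from the original source) decoding a concrete
// pseudo-varint with readVaroffset: the bytes {0x81, 0x10} carry a0 = 1 and
// a1 = 0x10, so the value is (a0+1)<<7 + a1 = 272, consuming 2 bytes. It
// assumes the standard bytes, fmt, and io packages.
func exampleVaroffset() {
	raw := []byte{0x81, 0x10}
	sr := io.NewSectionReader(bytes.NewReader(raw), 0, int64(len(raw)))
	v, n, err := readVaroffset(sr, 0)
	fmt.Println(v, n, err) // 272 2 <nil>
}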
// InflateZlib inflates a zlib stream with a known decompressed size starting
// at the current position of r. It reads through a bufio.Reader and afterwards
// rewinds r by the bytes that were buffered but never consumed, so r ends up
// just past the compressed data.
func InflateZlib(r *io.SectionReader, size int) (bs []byte, err error) {
	var out bytes.Buffer
	br := bufio.NewReader(r)
	zr, err := zlib.NewReader(br)
	if err != nil {
		return
	}
	defer zr.Close()
	_, err = io.Copy(&out, zr)
	if err != nil {
		return
	}
	if out.Len() != size {
		return nil, fmt.Errorf("inflated size mismatch, expected %d, got %d", size, out.Len())
	}
	bs = out.Bytes()
	// Rewind by whatever br read ahead but did not hand to the zlib reader.
	_, err = r.Seek(-int64(br.Buffered()), io.SeekCurrent)
	if err != nil {
		return
	}
	return
}
// newDecryptionReader returns an authenticated, decrypting reader.
func newDecryptionReader(r *io.SectionReader, f *File) (io.Reader, error) {
	keyLen := aesKeyLen(f.aesStrength)
	saltLen := keyLen / 2 // salt is half of key len
	if saltLen == 0 {
		return nil, ErrDecryption
	}
	// Grab the salt and the 2-byte password verification value.
	saltpwvv := make([]byte, saltLen+2)
	if _, err := io.ReadFull(r, saltpwvv); err != nil {
		return nil, err
	}
	salt := saltpwvv[:saltLen]
	pwvv := saltpwvv[saltLen : saltLen+2]
	// Generate keys only if we have a password.
	if f.password == nil {
		return nil, ErrPassword
	}
	decKey, authKey, pwv := generateKeys(f.password(), salt, keyLen)
	if !checkPasswordVerification(pwv, pwvv) {
		return nil, ErrPassword
	}
	// The payload layout is: salt | pwvv (2 bytes) | encrypted data | authcode (10 bytes).
	dataOff := int64(saltLen + 2)
	dataLen := int64(f.CompressedSize64 - uint64(saltLen) - 2 - 10)
	// TODO(alex): Should the compressed sizes be fixed?
	// Not the ideal place to do this.
	// f.CompressedSize64 = uint64(dataLen)
	// f.CompressedSize = uint32(dataLen)
	data := io.NewSectionReader(r, dataOff, dataLen)
	authOff := dataOff + dataLen
	authcode := io.NewSectionReader(r, authOff, 10)
	ar := newAuthReader(authKey, data, authcode, f.DeferAuth)
	dr := decryptStream(decKey, ar)
	if dr == nil {
		return nil, ErrDecryption
	}
	return dr, nil
}
// cmapParser parses the 'cmap' table: the header, the encoding records, and a
// SectionReader-backed subtable for each record.
func cmapParser(_ SFNT, r *io.SectionReader) Table {
	t := new(Cmap)
	header := CmapHeader{}
	if err := binary.Read(r, binary.BigEndian, &header); err != nil {
		return nil
	}
	t.Version = header.Version
	numTables := header.NumTables
	encodingRecord := make([]EncodingRecord, numTables)
	if err := binary.Read(r, binary.BigEndian, encodingRecord); err != nil {
		return nil
	}
	t.Subtable = make([]CmapSubtable, numTables)
	for i, v := range encodingRecord {
		pid, eid, offset := v.PlatformID, v.EncodingID, int64(v.Offset)
		// Peek at the subtable header; where the length lives depends on the format.
		format := make([]byte, 8)
		if _, err := r.ReadAt(format, offset); err != nil {
			return nil
		}
		var length int64
		switch binary.BigEndian.Uint16(format[:2]) {
		case 0, 2, 4, 6:
			length = int64(binary.BigEndian.Uint16(format[2:4]))
		case 8, 10, 12, 13:
			length = int64(binary.BigEndian.Uint32(format[4:8]))
		case 14:
			length = int64(binary.BigEndian.Uint32(format[2:6]))
		default:
			return nil
		}
		sr := io.NewSectionReader(r, offset, length)
		t.Subtable[i] = CmapSubtable{pid, eid, &SubtableReader{sr}}
	}
	return Table(t)
}
// hashCore hashes a SectionReader using the ImoHash parameters.
func (imo *ImoHash) hashCore(f *io.SectionReader) [Size]byte {
	var result [Size]byte
	imo.hasher.Reset()
	if f.Size() < int64(imo.sampleThreshold) || imo.sampleSize < 1 {
		// Small input (or sampling disabled): hash the whole thing.
		buffer := make([]byte, f.Size())
		f.Read(buffer)
		imo.hasher.Write(buffer)
	} else {
		// Large input: hash fixed-size samples from the start, middle, and end.
		buffer := make([]byte, imo.sampleSize)
		f.Read(buffer)
		imo.hasher.Write(buffer)
		f.Seek(f.Size()/2, io.SeekStart)
		f.Read(buffer)
		imo.hasher.Write(buffer)
		f.Seek(int64(-imo.sampleSize), io.SeekEnd)
		f.Read(buffer)
		imo.hasher.Write(buffer)
	}
	hash := imo.hasher.Sum(nil)
	// Overwrite the leading bytes of the digest with the total size, encoded as a uvarint.
	binary.PutUvarint(hash, uint64(f.Size()))
	copy(result[:], hash)
	return result
}
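// A self-contained sketch (not from the original source) of the sampling
// scheme hashCore implements, written against a byte slice instead of the
// ImoHash struct. It assumes an MD5 hasher (crypto/md5), explicit sampleSize
// and sampleThreshold arguments, and sampleThreshold of at least twice
// sampleSize so the middle and end samples fit.
func sampleHash(data []byte, sampleSize, sampleThreshold int64) [md5.Size]byte {
	var result [md5.Size]byte
	sr := io.NewSectionReader(bytes.NewReader(data), 0, int64(len(data)))
	h := md5.New()
	if sr.Size() < sampleThreshold || sampleSize < 1 {
		io.Copy(h, sr) // small input: hash everything
	} else {
		// Large input: hash fixed-size samples from the start, middle, and end.
		buf := make([]byte, sampleSize)
		sr.ReadAt(buf, 0)
		h.Write(buf)
		sr.ReadAt(buf, sr.Size()/2)
		h.Write(buf)
		sr.ReadAt(buf, sr.Size()-sampleSize)
		h.Write(buf)
	}
	sum := h.Sum(nil)
	// Overwrite the leading bytes of the digest with the total size as a uvarint.
	binary.PutUvarint(sum, uint64(sr.Size()))
	copy(result[:], sum)
	return result
}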