// ti_read_string reads the little-endian int16 stored at str_off, treats it as
// an offset into the string table starting at table, and returns the
// NUL-terminated string found there.
func ti_read_string(rd *bytes.Reader, str_off, table int16) (string, error) {
	var off int16

	_, err := rd.Seek(int64(str_off), 0)
	if err != nil {
		return "", err
	}

	err = binary.Read(rd, binary.LittleEndian, &off)
	if err != nil {
		return "", err
	}

	_, err = rd.Seek(int64(table+off), 0)
	if err != nil {
		return "", err
	}

	var bs []byte
	for {
		b, err := rd.ReadByte()
		if err != nil {
			return "", err
		}
		if b == byte(0x00) {
			break
		}
		bs = append(bs, b)
	}
	return string(bs), nil
}
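
A minimal usage sketch (not from the original source): it builds a buffer with a little-endian int16 offset slot at position 0 and a NUL-terminated string table starting at position 2, then reads the entry the offset points at. The layout and the offsets 0 and 2 are illustrative only; imports of `bytes` and `fmt` are assumed.

func exampleTiReadString() {
	buf := []byte{
		0x03, 0x00, // str_off 0: little-endian int16 offset 3 into the table
		'x', 'y', 0x00, // table entry at offset 0 (unused here)
		'c', 'l', 'e', 'a', 'r', 0x00, // table entry at offset 3
	}
	rd := bytes.NewReader(buf)
	s, err := ti_read_string(rd, 0, 2) // offset slot at 0, table starts at 2
	fmt.Println(s, err)                // "clear" <nil>
}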
// Removes the oldest entries to limit the log's length to `maxLength`.
// This is the same as ChangeLog.Truncate except it works directly on the encoded form, which is
// much faster than decoding+truncating+encoding.
func TruncateEncodedChangeLog(r *bytes.Reader, maxLength, minLength int, w io.Writer) (removed int, newLength int) {
	since := readSequence(r)

	// Find the starting position and sequence of each entry:
	entryPos := make([]int64, 0, 1000)
	entrySeq := make([]uint64, 0, 1000)
	for {
		pos, err := r.Seek(0, 1)
		if err != nil {
			panic("Seek??")
		}
		flags, err := r.ReadByte()
		if err != nil {
			if err == io.EOF {
				break // eof
			}
			panic("ReadByte failed")
		}
		seq := readSequence(r)
		skipString(r)
		skipString(r)
		skipString(r)
		if flags > kMaxFlag {
			panic(fmt.Sprintf("TruncateEncodedChangeLog: bad flags 0x%x, entry %d, offset %d", flags, len(entryPos), pos))
		}
		entryPos = append(entryPos, pos)
		entrySeq = append(entrySeq, seq)
	}

	// How many entries to remove?
	// * Leave no more than maxLength entries
	// * Every sequence value removed should be less than every sequence remaining.
	// * The new 'since' value should be the maximum sequence removed.
	oldLength := len(entryPos)
	removed = oldLength - maxLength
	if removed <= 0 {
		removed = 0
	} else {
		pivot, newSince := findPivot(entrySeq, removed-1)
		removed = pivot + 1
		if oldLength-removed >= minLength {
			since = newSince
		} else {
			removed = 0
			base.Warn("TruncateEncodedChangeLog: Couldn't find a safe place to truncate")
			//TODO: Possibly find a pivot earlier than desired?
		}
	}

	// Write the updated Since and the remaining entries:
	writeSequence(since, w)
	if _, err := r.Seek(entryPos[removed], 0); err != nil {
		panic("Seek back???")
	}
	if _, err := io.Copy(w, r); err != nil {
		panic("Copy???")
	}
	return removed, oldLength - removed
}
// Removes the oldest entries to limit the log's length to `maxLength`.
// This is the same as ChangeLog.Truncate except it works directly on the encoded form, which is
// much faster than decoding+truncating+encoding.
func TruncateEncodedChangeLog(r *bytes.Reader, maxLength int, w io.Writer) int {
	since := readSequence(r)

	// Find the starting position of each entry:
	entryPos := make([]int64, 0, 1000)
	for {
		pos, _ := r.Seek(0, 1)
		flags, err := r.ReadByte()
		if err != nil {
			break // eof
		}
		entryPos = append(entryPos, pos)
		readSequence(r)
		skipString(r)
		skipString(r)
		skipString(r)
		if flags > 7 {
			panic(fmt.Sprintf("bad flags %x, entry %d, offset %d", flags, len(entryPos)-1, pos))
		}
	}

	// How many entries to remove?
	remove := len(entryPos) - maxLength
	if remove <= 0 {
		return 0
	}

	// Update the log's Since to the sequence of the last entry being removed:
	r.Seek(entryPos[remove-1]+1, 0)
	since = readSequence(r)

	// Write the updated Since and the remaining entries:
	writeSequence(since, w)
	r.Seek(entryPos[remove], 0)
	io.Copy(w, r)
	return remove
}
// decode parses a single RADIUS attribute from b in type/length/value form.
// The length byte counts the two header bytes, so the value is length-2 bytes
// long and may be empty.
func (ra *radiusAttribute) decode(b *bytes.Reader) error {
	raType, err := b.ReadByte()
	if err != nil {
		return err
	}
	ra.raType = radiusAttributeType(raType)

	length, err := b.ReadByte()
	if err != nil {
		return err
	}
	if length < 2 {
		return fmt.Errorf("invalid attribute length: %d", length)
	}
	ra.length = length
	if length == 2 {
		return nil
	}

	ra.value = make([]byte, length-2)
	n, err := b.Read(ra.value)
	if err != nil {
		return err
	}
	if n != int(length)-2 {
		return fmt.Errorf("attribute value short read: %d", n)
	}
	return nil
}
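
A hypothetical usage sketch (not from the original source), assuming the `radiusAttribute` and `radiusAttributeType` definitions this method belongs to: it decodes a User-Name style attribute (type 1, value "bob") from its type/length/value wire form.

func exampleDecodeAttribute() {
	wire := []byte{0x01, 0x05, 'b', 'o', 'b'} // type=1, length=5 (2 header bytes + 3 value bytes)
	var ra radiusAttribute
	if err := ra.decode(bytes.NewReader(wire)); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(ra.raType, ra.length, string(ra.value)) // prints the decoded type, length 5, and "bob"
}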
// readTracks decodes consecutive track records from reader until encodedDataSize
// bytes have been consumed. Each record is a little-endian int32 id, a
// length-prefixed channel name, and 16 pattern bytes read as four uint32s.
func readTracks(reader *bytes.Reader, encodedDataSize int) ([]Track, error) {
	var tracks []Track
	position := encodedDataSize - reader.Len()
	for position < encodedDataSize {
		var id int32
		binary.Read(reader, binary.LittleEndian, &id)

		channelNameSize, _ := reader.ReadByte()
		channelBytes := make([]byte, channelNameSize)
		_, err := reader.Read(channelBytes)
		if err != nil {
			return []Track{}, fmt.Errorf("Could not read Track name with id %d", id)
		}

		pattern := make([]uint32, 4)
		if err := binary.Read(reader, binary.LittleEndian, &pattern); err != nil {
			return []Track{}, fmt.Errorf("Could not read Track step with id %d", id)
		}

		tracks = append(tracks, Track{id, string(channelBytes), pattern})
		position += 21 + int(channelNameSize)
	}
	return tracks, nil
}
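
A hypothetical round-trip sketch (not from the original source): it encodes one record in the layout readTracks expects (little-endian int32 id, one-byte name length, name, 16 pattern bytes) and decodes it back. The Track type is assumed to be the one constructed above; imports of `bytes`, `encoding/binary`, and `fmt` are assumed.

func exampleReadTracks() {
	var buf bytes.Buffer
	binary.Write(&buf, binary.LittleEndian, int32(40)) // track id
	buf.WriteByte(4)                                   // name length
	buf.WriteString("kick")                            // channel name
	buf.Write(make([]byte, 16))                        // 16 step bytes (four zero uint32s)

	tracks, err := readTracks(bytes.NewReader(buf.Bytes()), buf.Len())
	fmt.Println(tracks, err)
}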
// UnpackLabels reads a possibly-compressed DNS name from buf and returns its
// labels, lower-cased. Compression pointers are followed by re-reading from
// the indicated offset in the full message p.
func UnpackLabels(buf *bytes.Reader, p []byte) ([]string, error) {
	labels := make([]string, 0, 5)
	for {
		n, e := buf.ReadByte() // label length
		if e != nil {
			return nil, e
		}
		if n == 0 {
			break
		}
		if isRedirect(n) {
			b, e := buf.ReadByte()
			if e != nil {
				return nil, e
			}
			off := offset(n, b)
			if off >= len(p) {
				return nil, errors.New("offset out of range")
			}
			buf = bytes.NewReader(p[off:])
			continue
		}
		if n > 63 {
			return nil, errors.New("label too long")
		}
		labelBuf := make([]byte, n)
		if _, e := buf.Read(labelBuf); e != nil {
			return nil, e
		}
		label := strings.ToLower(string(labelBuf))
		labels = append(labels, label)
	}
	return labels, nil
}
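
A hypothetical usage sketch (not from the original source). It assumes isRedirect and offset implement the standard DNS compression-pointer encoding (top two bits set, 14-bit offset), so the bytes 0xC0 0x0C redirect to offset 12 of the full message p.

func exampleUnpackLabels() {
	// Full message p: 12 header bytes, then "example.com" encoded at offset 12.
	p := append(make([]byte, 12), []byte("\x07example\x03com\x00")...)
	// Name being decoded: a "www" label followed by a pointer to offset 12.
	name := []byte{3, 'w', 'w', 'w', 0xC0, 0x0C}

	labels, err := UnpackLabels(bytes.NewReader(name), p)
	fmt.Println(labels, err) // [www example com] <nil>
}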
// vint decodes a variable-length signed integer. Values 0x00-0x7F are small
// positives, 0x90-0xFF are small negatives, and 0x80-0x8F introduce a
// big-endian multi-byte value that is bitwise-complemented when negative.
func vint(r *bytes.Reader) (int, error) {
	first, err := r.ReadByte()
	if err != nil {
		return -1, err
	}
	if first < 0x80 {
		return int(first), nil
	}
	if first >= 0x90 {
		return int(first) - 0x100, nil
	}

	count := 0
	neg := false
	if first < 0x88 {
		neg = true
		count = int(0x88 - first)
	} else {
		count = int(0x90 - first)
	}

	ret := 0
	for i := 0; i < count; i++ {
		b, err := r.ReadByte()
		if err != nil {
			return -1, err
		}
		ret = (ret << 8) | int(b)
	}
	if neg {
		ret = ret ^ -1
	}
	return ret, nil
}
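
A small decoding sketch (not from the original source) that exercises vint as written above; the byte sequences are hand-built to match the decoder's three cases, not taken from any particular file format.

func exampleVint() {
	for _, enc := range [][]byte{
		{0x7F},             // single byte below 0x80: 127
		{0xFF},             // single byte at or above 0x90: 0xFF - 0x100 = -1
		{0x8E, 0x01, 0x00}, // two payload bytes, positive: 0x0100 = 256
		{0x86, 0x00, 0xFF}, // two payload bytes, negative: ^0x00FF = -256
	} {
		v, err := vint(bytes.NewReader(enc))
		fmt.Println(v, err)
	}
}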
func pad(data *bytes.Reader, align int64) {
	for pos, _ := data.Seek(0, 1); pos%align != 0; pos++ {
		data.ReadByte()
	}
}
func (xs *XSettings) readSetting(data *bytes.Reader, endian binary.ByteOrder) (*XSetting, string, error) {
	var name string
	var err error

	setting := &XSetting{}

	var tmp byte
	if tmp, err = data.ReadByte(); err != nil {
		return nil, "", err
	}
	if tmp > 2 {
		return nil, "", fmt.Errorf("Invalid type identifier %d", tmp)
	}
	setting.Type = XSettingType(tmp)

	// Skip the padding byte that follows the type.
	if _, err = data.ReadByte(); err != nil {
		return nil, "", err
	}

	// Name: 16-bit length, the name itself, then padding to a 4-byte boundary.
	var l16 uint16
	if err = binary.Read(data, endian, &l16); err != nil {
		return nil, "", err
	}
	buff := make([]byte, int(l16))
	if _, err = io.ReadFull(data, buff); err != nil {
		return nil, "", err
	}
	name = string(buff)
	pad(data, 4)

	if err = binary.Read(data, endian, &setting.Serial); err != nil {
		return nil, "", err
	}

	switch setting.Type {
	case XSettingInteger:
		if err = binary.Read(data, endian, &setting.Integer); err != nil {
			return nil, "", err
		}
	case XSettingString:
		var l32 uint32
		if err = binary.Read(data, endian, &l32); err != nil {
			return nil, "", err
		}
		buff := make([]byte, int(l32))
		if _, err = io.ReadFull(data, buff); err != nil {
			return nil, "", err
		}
		setting.String = string(buff)
		pad(data, 4)
	case XSettingColour:
		var r, g, b, a uint16
		if err = binary.Read(data, endian, &r); err != nil {
			return nil, "", err
		}
		if err = binary.Read(data, endian, &g); err != nil {
			return nil, "", err
		}
		if err = binary.Read(data, endian, &b); err != nil {
			return nil, "", err
		}
		if err = binary.Read(data, endian, &a); err != nil {
			return nil, "", err
		}
		setting.Colour = XSColour{r, g, b, a}
	default:
		panic("This shouldn't be happening! D:")
	}

	return setting, name, nil
}
// parse reads the next chunk from r. It handles format and data chunks, skips
// unknown chunks, and recurses until the reader is exhausted.
func (wav *Wave) parse(r *bytes.Reader, formatWasRead bool) error {
	if r.Len() == 0 {
		return nil
	}

	var header chunkHeader
	if err := binary.Read(r, endiannes, &header); err != nil {
		return loadErr("unable to read chunk header", err)
	}

	if header.ChunkID == formatChunkID {
		if formatWasRead {
			return errors.New("load WAV: two format chunks detected")
		}
		var chunk formatChunkExtended
		if header.ChunkSize == 16 {
			if err := binary.Read(r, endiannes, &(chunk.formatChunkBase)); err != nil {
				return loadErr("reading format chunk", err)
			}
		} else if header.ChunkSize == 18 {
			err := binary.Read(r, endiannes, &(chunk.formatChunkWithExtension))
			if err != nil {
				return loadErr("reading format chunk", err)
			}
		} else if header.ChunkSize == 40 {
			if err := binary.Read(r, endiannes, &chunk); err != nil {
				return loadErr("reading format chunk", err)
			}
		} else {
			return fmt.Errorf("load WAV: illegal format chunk header size: %v", header.ChunkSize)
		}
		if chunk.FormatTag != pcmFormat {
			return fmt.Errorf(
				"load WAV: unsupported format: %v (only PCM is supported)", chunk.FormatTag)
		}
		wav.ChannelCount = int(chunk.Channels)
		wav.SamplesPerSecond = int(chunk.SamplesPerSec)
		wav.BitsPerSample = int(chunk.BitsPerSample)
		formatWasRead = true
	} else if header.ChunkID == dataChunkID {
		data := make([]byte, header.ChunkSize)
		if _, err := io.ReadFull(r, data); err != nil {
			return err
		}
		if len(wav.Data) > 0 {
			return errors.New("load WAV: multiple data chunks found")
		}
		if !formatWasRead {
			return errors.New("load WAV: found data chunk before format chunk")
		}
		wav.Data = data
		if header.ChunkSize%2 == 1 {
			// there is one byte padding if the chunk size is odd
			if _, err := r.ReadByte(); err != nil {
				return loadErr("reading data chunk padding", err)
			}
		}
	} else {
		// skip unknown chunks
		io.CopyN(ioutil.Discard, r, int64(header.ChunkSize))
	}

	if r.Len() == 0 {
		if !formatWasRead {
			return errors.New("load WAV: file does not contain format information")
		}
		return nil
	}
	return wav.parse(r, formatWasRead)
}
func loadChunk(x, z int, data *bytes.Reader, mask int32, sky, isNew bool) {
	var c *chunk
	if isNew {
		c = &chunk{
			chunkPosition: chunkPosition{
				X: x, Z: z,
			},
		}
	} else {
		c = chunkMap[chunkPosition{
			X: x, Z: z,
		}]
		if c == nil {
			return
		}
	}

	for i := 0; i < 16; i++ {
		if mask&(1<<uint(i)) == 0 {
			continue
		}
		if c.Sections[i] == nil {
			c.Sections[i] = newChunkSection(c, i)
		}
		cs := c.Sections[i]

		bitSize, err := data.ReadByte()
		if err != nil {
			panic(err)
		}
		blockMap := map[int]int{}
		if bitSize <= 8 {
			count, _ := protocol.ReadVarInt(data)
			for i := 0; i < int(count); i++ {
				bID, _ := protocol.ReadVarInt(data)
				blockMap[i] = int(bID)
			}
		}

		bitsLen, _ := protocol.ReadVarInt(data)
		bits := make([]uint64, bitsLen)
		binary.Read(data, binary.BigEndian, &bits)
		m := bit.NewMapFromRaw(bits, int(bitSize))
		for i := 0; i < 4096; i++ {
			val := m.Get(i)
			bID, ok := blockMap[val]
			if !ok {
				bID = val
			}
			block := GetBlockByCombinedID(uint16(bID))
			pos := Position{X: i & 0xF, Z: (i >> 4) & 0xF, Y: i >> 8}
			cs.setBlock(block, pos.X, pos.Y, pos.Z)
			if be := block.CreateBlockEntity(); be != nil {
				pos = pos.Shift(x<<4, cs.Y<<4, z<<4)
				be.SetPosition(pos)
				cs.BlockEntities[pos] = be
			}
		}

		data.Read(cs.BlockLight)
		if sky {
			data.Read(cs.SkyLight)
		} else {
			for i := range cs.SkyLight {
				cs.SkyLight[i] = 0x00
			}
		}
	}

	if isNew {
		data.Read(c.Biomes[:])
	}

	c.calcHeightmap()
	syncChan <- c.postLoad
}